skynet-scrapper avatar
skynet-scrapper
Try for free

No credit card required

View all Actors
skynet-scrapper

skynet-scrapper

tech_simphony/skynet-scrapper
Try for free

No credit card required

.actor/Dockerfile

# Specify the base Docker image. You can read more about
# the available images at https://docs.apify.com/sdk/js/docs/guides/docker-images
# You can also use any other image from Docker Hub.
FROM apify/actor-node:20

# Copy just package.json and package-lock.json
# to speed up the build using Docker layer cache.
COPY package*.json ./

# Install NPM packages, skip optional and development dependencies to
# keep the image small. Avoid logging too much and print the dependency
# tree for debugging
RUN npm --quiet set progress=false \
    && npm install --omit=dev --omit=optional \
    && echo "Installed NPM packages:" \
    && (npm list --omit=dev --all || true) \
    && echo "Node.js version:" \
    && node --version \
    && echo "NPM version:" \
    && npm --version \
    && rm -r ~/.npm

# Next, copy the remaining files and directories with the source code.
# Since we do this after NPM install, quick build will be really fast
# for most source file changes.
COPY . ./


# Run the image.
CMD npm run start

.actor/actor.json

1{
2    "actorSpecification": 1,
3    "name": "skynet-scraper",
4    "title": "skynet scraper",
5    "version": "1.0.0",
6    "input": "./input_schema.json",
7    "dockerfile": "./Dockerfile",
8    "storages": {
9        "dataset": "./dataset_schema.json"
10    }
11}

.actor/dataset_schema.json

1{
2    "actorSpecification": 1,
3    "fields": {},
4    "views": {
5        "overview": {
6            "title": "Overview",
7            "transformation": {},
8            "display": {
9                "component": "table"
10            }
11        }
12    }
13}

.actor/input_schema.json

1{
2    "title": "Scrape data from a web page",
3    "type": "object",
4    "schemaVersion": 1,
5    "properties": {
6        "queries": {
7            "title": "Search Query",
8            "type": "string",
9            "description": "Search query to use on Google",
10            "editor": "textfield",
11            "prefill": "[tow truck near me california]"
12        },
13        "maxRequestsPerCrawl": {
14            "title": "Max Requests per Crawl",
15            "type": "integer",
16            "description": "Maximum number of requests per crawl",
17            "editor": "number",
18            "prefill": 200
19        }
20    },
21    "required": ["queries"]
22}

.actor/output_schema.json

1{
2    "actorSpecification": 1,
3    "name": "skynet-scraper",
4    "title": "skynet Scraper",
5    "description": "",
6    "version": "1.0.0",
7    "properties": {
8        "url": {
9            "type": "string",
10            "title": "URL",
11            "description": "URL to scrape",
12            "required": true
13        },
14        "title": {
15            "type": "string",
16            "title": "title",
17            "description": "title to scrape",
18            "required": true
19        },
20        "phoneNumber": {
21            "type": "string",
22            "title": "phoneNumber",
23            "description": "phoneNumber to scrape",
24            "required": true
25        }
26    },
27    "fields": {},
28    "views": {
29        "overview": {
30            "title": "Overview",
31            "transformation": {},
32            "display": {}
33        }
34    }
35
36}

src/main.js

import { Actor, Dataset } from "apify";
import { CheerioCrawler } from "crawlee";
import fs from 'fs';

// Scrapes a Google search results page for listings that expose a phone
// number, and stores them in the Apify dataset plus a local output.json.
try {
    await Actor.init();

    // Tolerate a missing input object (e.g. a local run without INPUT.json).
    const input = (await Actor.getInput()) ?? {};
    let { queries, maxRequestsPerCrawl } = input;

    // The input prefill wraps the query in square brackets
    // ("[tow truck near me california]"). Strip them so they are not sent to
    // Google as literal characters. (Previously the brackets were stripped and
    // then immediately re-added, which polluted the search URL.)
    if (typeof queries === 'string') {
        queries = queries.replace(/^\[|\]$/g, '');
    }
    const searchQueries = Array.isArray(queries) ? queries : [queries];
    const searchQuery = searchQueries.join(" ");
    const searchUrl = `https://www.google.com/search?q=${encodeURIComponent(searchQuery)}`;
    console.log("URL ", searchUrl);

    // Matches US phone formats: 123-456-7890, 123.456.7890, (123) 456-7890,
    // or a bare 7-digit 456-7890.
    const phoneNumberRegex = /(\d{3}[-.\s]??\d{3}[-.\s]??\d{4}|\(\d{3}\)\s*\d{3}[-.\s]??\d{4}|\d{3}[-.\s]??\d{4})/;

    // Output envelope modeled on Apify's Google Search Results schema.
    const resultsData = {
        searchQuery: {
            term: queries,
            url: searchUrl,
            device: "MOBILE",
            page: 1,
            type: "SEARCH",
            domain: "google.com",
            countryCode: "US",
            languageCode: "en",
            locationUule: null,
            resultsPerPage: 10
        },
        resultsTotal: "N/A",
        relatedQueries: [],
        paidResults: [],
        paidProducts: [],
        organicResults: [],
        peopleAlsoAsk: []
    };

    const crawler = new CheerioCrawler({
        maxRequestsPerCrawl,
        // Crawlee 3.x option name; `handlePageFunction` is the deprecated
        // Apify SDK v2 alias.
        requestHandler: async ({ $ }) => {
            const searchResults = $("div.g");
            console.log("Número de resultados encontrados:", searchResults.length);

            if (searchResults.length === 0) {
                console.log("No se encontraron resultados de búsqueda.");
            }

            searchResults.each((index, element) => {
                const $result = $(element);
                const title = $result.find("h3").text().trim();
                const url = $result.find("a").attr("href") || '';
                const description = $result.find('span.VwiC3b').text().trim() || '';

                // Try the title first; run the regex once instead of twice.
                const titleMatch = title.match(phoneNumberRegex);
                let phoneNumber = titleMatch ? titleMatch[0] : null;

                // Fall back to the number shown in the result snippet.
                if (!phoneNumber) {
                    const $rgk4aeElements = $result.find('.OSrXXb');
                    $rgk4aeElements.each((_, rgk4aeElement) => {
                        const $phoneElement = $(rgk4aeElement).find('.xBevyb');
                        const phoneMatch = $phoneElement.text().match(phoneNumberRegex);
                        if (phoneMatch) {
                            phoneNumber = phoneMatch[0];
                        }
                    });
                }

                // Heuristic: ads carry a "sponsored" class or an "ad" label
                // somewhere in the result's spans.
                const isSponsored = $result.hasClass('sponsored') || $result.find('span').text().toLowerCase().includes('ad');

                if ((title || url || description) && phoneNumber) {
                    if (isSponsored) {
                        resultsData.paidResults.push({
                            title,
                            url,
                            phoneNumber,
                            displayedUrl: url,
                            description,
                            emphasizedKeywords: [],
                            siteLinks: [],
                            type: "paid",
                            adPosition: index + 1
                        });
                    } else {
                        resultsData.organicResults.push({
                            title,
                            url,
                            phoneNumber,
                            displayedUrl: url,
                            description,
                            emphasizedKeywords: [],
                            siteLinks: [],
                            productInfo: {},
                            type: "organic",
                            position: index + 1
                        });
                    }
                } else {
                    console.log(`Resultado ${index + 1} está vacío o incompleto.`);
                }
            });

            // People Also Ask
            const peopleAlsoAskElements = $('div.related-question-pair');
            if (peopleAlsoAskElements.length === 0) {
                console.log("No se encontraron 'People Also Ask'.");
            }

            peopleAlsoAskElements.each((index, element) => {
                const $question = $(element).find('.yuRUbf').text().trim();
                const $answer = $(element).find('.VwiC3b').text().trim();
                const $url = $(element).find('a').attr('href') || '';

                if ($question || $answer || $url) {
                    resultsData.peopleAlsoAsk.push({
                        question: $question,
                        answer: $answer,
                        url: $url,
                        title: $question,
                        date: ''
                    });
                } else {
                    console.log(`'People Also Ask' ${index + 1} está vacío o incompleto.`);
                }
            });

            // Related Queries
            const relatedQueriesElements = $('a[data-hveid="CAEQAw"]');
            if (relatedQueriesElements.length === 0) {
                console.log("No se encontraron 'Related Queries'.");
            }

            relatedQueriesElements.each((index, element) => {
                const $relatedQuery = $(element).text().trim();
                const $relatedUrl = $(element).attr('href') || '';

                if ($relatedQuery || $relatedUrl) {
                    resultsData.relatedQueries.push({
                        title: $relatedQuery,
                        // Google returns relative hrefs for related searches.
                        url: `https://www.google.com${$relatedUrl}`
                    });
                } else {
                    console.log(`'Related Query' ${index + 1} está vacío o incompleto.`);
                }
            });
        },
    });

    await crawler.run([searchUrl]);

    if (!resultsData.paidResults.length && !resultsData.organicResults.length) {
        console.log("No se encontraron resultados de búsqueda.");
    }

    // Save the results to the Apify key-value store and dataset.
    await Actor.setValue('OUTPUT', resultsData);
    await Dataset.pushData(resultsData);

    // Also write the results to a local JSON file.
    fs.writeFileSync('./output.json', JSON.stringify(resultsData, null, 2));
    console.log('Datos guardados en output.json');

    await Actor.exit();
} catch (error) {
    console.error("Se produjo un error durante la ejecución del actor:", error);
    process.exit(1);
}

.dockerignore

1# configurations
2.idea
3
4# crawlee and apify storage folders
5apify_storage
6crawlee_storage
7storage
8
9# installed files
10node_modules
11
12# git folder
13.git

.editorconfig

1root = true
2
3[*]
4indent_style = space
5indent_size = 4
6charset = utf-8
7trim_trailing_whitespace = true
8insert_final_newline = true
9end_of_line = lf

.eslintrc

1{
2    "extends": "@apify",
3    "root": true
4}

.gitignore

1# This file tells Git which files shouldn't be added to source control
2
3.DS_Store
4.idea
5dist
6node_modules
7apify_storage
8storage

package.json

1{
2	"name": "my-web-scrapper",
3	"version": "0.0.1",
4	"type": "module",
5	"description": "This is an example of an Apify actor.",
6	"engines": {
7		"node": ">=18.0.0"
8	},
9	"dependencies": {
10		"@crawlee/http": "^3.9.2",
11		"apify": "^3.1.10",
12		"apify-client": "^2.9.3",
13		"axios": "^1.5.0",
14		"cheerio": "^1.0.0-rc.12",
15		"crawlee": "^3.9.2",
16		"random-useragent": "^0.5.0"
17	},
18	"scripts": {
19		"start": "node ./src/main.js",
20		"test": "echo \"Error: oops, the actor has no tests yet, sad!\" && exit 1"
21	},
22	"author": "It's not you it's me",
23	"license": "ISC"
24}
Developer
Maintained by Community
Actor metrics
  • 2 monthly users
  • 1 star
  • 100.0% runs succeeded
  • Created in May 2024
  • Modified 15 days ago
Categories