skyBox-AmazonScrapper
Deprecated
Pricing
Pay per usage
Go to Store
skyBox-AmazonScrapper
Deprecated
It is an Amazon scraper that scrapes product rankings in a specific category and seller information such as the seller's name, address, and contact information.
0.0 (0)
Pricing
Pay per usage
1
Total users
10
Monthly users
6
Last modified
2 years ago
.actor/Dockerfile
# Specify the base Docker image. You can read more about
# the available images at https://crawlee.dev/docs/guides/docker-images
# You can also use any other image from Docker Hub.
FROM apify/actor-node-puppeteer-chrome:16

# Copy just package.json and package-lock.json
# to speed up the build using Docker layer cache.
COPY package*.json ./

# Install NPM packages, skip optional and development dependencies to
# keep the image small. Avoid logging too much and print the dependency
# tree for debugging
RUN npm --quiet set progress=false \
 && npm install --omit=dev --omit=optional \
 && echo "Installed NPM packages:" \
 && (npm list --omit=dev --all || true) \
 && echo "Node.js version:" \
 && node --version \
 && echo "NPM version:" \
 && npm --version \
 && rm -r ~/.npm

# Next, copy the remaining files and directories with the source code.
# Since we do this after NPM install, quick build will be really fast
# for most source file changes.
COPY . ./

# Run the image. If you know you won't need headful browsers,
# you can remove the XVFB start script for a micro perf gain.
CMD ./start_xvfb_and_run_cmd.sh && npm start --silent
.actor/actor.json
{ "actorSpecification": 1, "name": "my-actor-1", "title": "Project Puppeteer Crawler JavaScript", "description": "Crawlee and Puppeteer project in JavaScript.", "version": "0.0", "meta": { "templateId": "js-crawlee-puppeteer-chrome" }, "input": "./input_schema.json", "dockerfile": "./Dockerfile"}
.actor/input_schema.json
{
    "title": "PuppeteerCrawler Template",
    "type": "object",
    "schemaVersion": 1,
    "properties": {
        "startUrls": {
            "title": "Start URLs",
            "type": "array",
            "description": "URLs to start with.",
            "editor": "requestListSources",
            "prefill": [{ "url": "https://apify.com" }]
        },
        "ASIN": {
            "title": "ASIN Number",
            "type": "string",
            "description": "The ASIN number of the Amazon product.",
            "editor": "textfield"
        },
        "MaxPageRetries": {
            "title": "Max Page Retry",
            "type": "integer",
            "description": "The maximum number of retries after failure.",
            "minimum": 0,
            "default": 5
        },
        "MaxConcurrency": {
            "title": "Max Concurrency",
            "type": "integer",
            "description": "The maximum number of concurrent tasks running at a time.",
            "minimum": 0,
            "default": 5
        },
        "Proxy": {
            "sectionCaption": "Proxy Configuration",
            "title": "Proxy configuration",
            "type": "object",
            "description": "Select proxies to be used by your crawler.",
            "prefill": { "useApifyProxy": true },
            "editor": "proxy"
        }
    }
}
src/main.js
import { Actor } from 'apify';
import { PuppeteerCrawler } from 'crawlee';
import { router } from './routes.js';

// Initialize the Apify SDK (must run before any other Actor.* call).
await Actor.init();

// Read the actor input; fall back to an empty object so destructuring is safe,
// and apply the same defaults the input schema declares.
const input = (await Actor.getInput()) ?? {};
const { MaxPageRetries = 5, MaxConcurrency = 5, startUrls } = input;

if (!startUrls?.length) {
    throw new Error('Input must contain at least one entry in "startUrls".');
}

// NOTE(review): proxy group/country are hard-coded here even though the input
// schema exposes a "Proxy" configuration object -- confirm which is intended.
const proxyConfiguration = await Actor.createProxyConfiguration({
    groups: ['RESIDENTIAL'],
    countryCode: 'GB',
});

const crawler = new PuppeteerCrawler({
    proxyConfiguration,
    maxRequestRetries: MaxPageRetries,
    // BUG FIX: "MaxConcurrency" was declared in the input schema but never
    // applied; pass it through so it actually limits parallel page loads.
    maxConcurrency: MaxConcurrency,
    failedRequestHandler: async ({ request }) => {
        console.log(`Request ${request.url} failed too many times.`);
    },
    requestHandler: router,
});

// Only the first start URL is crawled; the router handles everything else.
await crawler.run([startUrls[0].url]);

await Actor.exit();
src/routes.js
import { Actor } from 'apify';
import cheerio from 'cheerio';
import getSellerInformation from './sellerInfo.js';
import { createPuppeteerRouter } from 'crawlee';

// NOTE(review): this module previously called Actor.init() and
// Actor.getInput() at import time. Because ESM imports execute before
// main.js's body, that initialized the SDK twice (once here, once in
// main.js). The duplicate init was removed and the input is now read
// inside the handler, after main.js has initialized the SDK.

export const router = createPuppeteerRouter();

// Default handler: runs for the start URL (an Amazon search-results page).
// Records where the configured ASIN ranks among the organic results and,
// when an ASIN is supplied, scrapes the seller's profile details and pushes
// one record into the 'SKYBOX' dataset.
router.addDefaultHandler(async ({ page, request }) => {
    const { ASIN } = (await Actor.getInput()) ?? {};

    const content = await page.content();
    let $ = cheerio.load(content);

    // Each organic search result carries data-asin; collect them in page order.
    const asinNumbers = [];
    $('div[data-asin][data-component-type="s-search-result"]').each((index, element) => {
        asinNumbers.push($(element).attr('data-asin'));
    });

    console.log(asinNumbers);

    // BUG FIX: indexOf() returns -1 for a missing ASIN, so the previous
    // `indexOf(...) + 1` silently reported "position 0" instead of "not found".
    const rankIndex = asinNumbers.indexOf(ASIN);
    const placement = rankIndex === -1 ? null : rankIndex + 1;
    if (placement === null) {
        console.log(`Product ASIN ${ASIN} was not found on the page`);
    } else {
        console.log(`Product ASIN ${ASIN} is placed at position: ${placement} on the page`);
    }

    // getting seller information from here...
    if (ASIN) {
        // Navigate to the product detail page and open the seller profile.
        await page.goto(`https://www.amazon.com/dp/${ASIN}`);
        await page.waitForSelector('#sellerProfileTriggerId');

        await page.click('#sellerProfileTriggerId');
        await page.waitForSelector('#effective-timeperiod-rating-year-description');

        const sellerPageContent = await page.content();
        $ = cheerio.load(sellerPageContent);

        // NOTE(review): positional nth-child selectors are brittle; verify
        // against the current Amazon seller-profile markup if these go empty.
        const sellerRating = $('#effective-timeperiod-rating-year-description').text().trim();
        const sellerName = $('#page-section-detail-seller-info > div > div > div > div:nth-child(2) > span:nth-child(2)').text().trim();
        const sellerAddress = $('#page-section-detail-seller-info > div > div > div > div:nth-child(4)').text().trim();

        console.log("Sellers Rating: ", sellerRating);
        console.log("Seller Name: ", sellerName);
        console.log("Seller Address: ", sellerAddress);

        // Persist one record per run into the named dataset.
        const dataset = await Actor.openDataset('SKYBOX');
        await dataset.pushData({ ASIN, placement, sellerRating, sellerName, sellerAddress });
    }
    // end of seller information.
});

// Placeholder: no requests are currently enqueued with the 'detail' label.
router.addHandler('detail', async ({ page, request }) => {
});
src/sellerInfo.js
import cheerio from 'cheerio';

/**
 * Scrapes seller information for a product from its Amazon detail page.
 *
 * Navigates to the product page, opens the seller profile, and extracts
 * the seller's rating, name, and address from the rendered HTML.
 *
 * @param {object} page - An open Puppeteer page (navigated/clicked in place).
 * @param {string} ASIN - Amazon product identifier used to build the URL.
 * @returns {Promise<{sellerRating: string, sellerName: string, sellerAddress: string}>}
 *   The scraped seller fields (also logged to the console).
 */
export default async function getSellerInformation(page, ASIN) {
    // BUG FIX: page.goto was not awaited, so waitForSelector could run
    // against the previous page and race with the navigation.
    await page.goto(`https://www.amazon.com/dp/${ASIN}`);
    await page.waitForSelector('#sellerProfileTriggerId');

    await page.click('#sellerProfileTriggerId');
    await page.waitForSelector('#effective-timeperiod-rating-year-description');

    const sellerPageContent = await page.content();
    const $ = cheerio.load(sellerPageContent);

    // NOTE(review): positional nth-child selectors are brittle; verify
    // against the current Amazon seller-profile markup if these go empty.
    const sellerRating = $('#effective-timeperiod-rating-year-description').text().trim();
    const sellerName = $('#page-section-detail-seller-info > div > div > div > div:nth-child(2) > span:nth-child(2)').text().trim();
    const sellerAddress = $('#page-section-detail-seller-info > div > div > div > div:nth-child(4)').text().trim();

    console.log("Sellers Rating: ", sellerRating);
    console.log("Seller Name: ", sellerName);
    console.log("Seller Address: ", sellerAddress);

    // Return the scraped fields so callers can persist them instead of
    // relying solely on console output (previously returned undefined).
    return { sellerRating, sellerName, sellerAddress };
}
.dockerignore
# configurations
.idea

# crawlee and apify storage folders
apify_storage
crawlee_storage
storage

# installed files
node_modules

# git folder
.git
.editorconfig
root = true
[*]
indent_style = space
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
end_of_line = lf
.eslintrc
{ "extends": "@apify", "root": true}
.gitignore
# This file tells Git which files shouldn't be added to source control
.DS_Store
.idea
dist
node_modules
apify_storage
storage
package.json
{
    "name": "crawlee-puppeteer-javascript",
    "version": "0.0.1",
    "type": "module",
    "description": "This is an example of an Apify actor.",
    "dependencies": {
        "apify": "^3.0.0",
        "cheerio": "^1.0.0-rc.12",
        "crawlee": "^3.0.0",
        "puppeteer": "*"
    },
    "devDependencies": {
        "@apify/eslint-config": "^0.3.1",
        "eslint": "^8.36.0"
    },
    "scripts": {
        "start": "node src/main.js",
        "test": "echo \"Error: oops, the actor has no tests yet, sad!\" && exit 1"
    },
    "author": "It's not you it's me",
    "license": "ISC"
}