Bzj Actor

Deprecated

Developed by Bazer · Maintained by Community

This is a test.

Rating: 0.0 (0)
Pricing: Pay per usage
Total users: 2
Monthly users: 2
Last modified: 4 months ago

.dockerignore

# configurations
.idea
.vscode
# crawlee and apify storage folders
apify_storage
crawlee_storage
storage
# installed files
node_modules
# git folder
.git

.editorconfig

root = true
[*]
indent_style = space
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
end_of_line = lf

.eslintrc

{
    "root": true,
    "env": {
        "browser": true,
        "es2020": true,
        "node": true
    },
    "extends": [
        "@apify/eslint-config-ts"
    ],
    "parserOptions": {
        "project": "./tsconfig.json",
        "ecmaVersion": 2020
    },
    "ignorePatterns": [
        "node_modules",
        "dist",
        "**/*.d.ts"
    ]
}

.gitignore

# This file tells Git which files shouldn't be added to source control
.DS_Store
.idea
.vscode
dist
node_modules
apify_storage
storage
# Added by Apify CLI
.venv

package.json

{
    "name": "bzj-actor",
    "version": "0.0.1",
    "type": "module",
    "description": "This is an example of an Apify actor.",
    "engines": {
        "node": ">=18.0.0"
    },
    "dependencies": {
        "apify": "^3.2.6",
        "crawlee": "^3.11.5",
        "puppeteer": "*",
        "scrapeless-sdk-node": "^0.0.3"
    },
    "devDependencies": {
        "@apify/eslint-config-ts": "^0.3.0",
        "@apify/tsconfig": "^0.1.0",
        "@typescript-eslint/eslint-plugin": "^7.18.0",
        "@typescript-eslint/parser": "^7.18.0",
        "eslint": "^8.50.0",
        "tsx": "^4.6.2",
        "typescript": "^5.3.3"
    },
    "scripts": {
        "start": "npm run start:dev",
        "start:prod": "node dist/main.js",
        "start:dev": "tsx src/main.ts",
        "build": "tsc",
        "lint": "eslint ./src --ext .ts",
        "lint:fix": "eslint ./src --ext .ts --fix",
        "test": "echo \"Error: oops, the actor has no tests yet, sad!\" && exit 1"
    },
    "author": "It's not you it's me",
    "license": "ISC"
}

tsconfig.json

{
    "extends": "@apify/tsconfig",
    "compilerOptions": {
        "module": "NodeNext",
        "moduleResolution": "NodeNext",
        "target": "ES2022",
        "outDir": "dist",
        "noUnusedLocals": false,
        "skipLibCheck": true,
        "esModuleInterop": true,
        "allowSyntheticDefaultImports": true,
        "lib": [
            "DOM"
        ]
    },
    "include": [
        "./src/**/*"
    ]
}

.actor/actor.json

{
    "actorSpecification": 1,
    "name": "bzj-actor",
    "title": "Project Puppeteer Crawler TypeScript",
    "description": "Crawlee and Puppeteer project in TypeScript.",
    "version": "0.0",
    "meta": {
        "templateId": "ts-crawlee-puppeteer-chrome"
    },
    "input": "./input_schema.json",
    "dockerfile": "./Dockerfile",
    "storages": {
        "dataset": "./dataset_schema.json"
    }
}

.actor/dataset_schema.json

{
    "actorSpecification": 1,
    "views": {
        "overview": {
            "title": "Overview",
            "transformation": {
                "fields": [
                    "count",
                    "data",
                    "code",
                    "message"
                ]
            },
            "display": {
                "component": "table",
                "properties": {
                    "count": {
                        "label": "count",
                        "format": "text"
                    },
                    "data": {
                        "label": "data",
                        "format": "object"
                    },
                    "code": {
                        "label": "code",
                        "format": "text"
                    },
                    "message": {
                        "label": "message",
                        "format": "text"
                    }
                }
            }
        }
    }
}
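
The Overview view simply renders fields from the records the Actor pushes to its dataset. A minimal TypeScript sketch of that record shape is below; the field names come from the transformation above, but the concrete types are assumptions about what the Scrapeless API returns, not something this schema enforces.

// Hypothetical shape of one dataset record, matching the Overview view.
// Field names are taken from dataset_schema.json; the types are assumed.
interface OverviewRecord {
    count?: number; // number of items in the scraped payload (assumed numeric)
    data?: Record<string, unknown>; // the scraped payload itself
    code?: string | number; // status code reported by the upstream API
    message?: string; // human-readable status message
}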

.actor/Dockerfile

# Specify the base Docker image. You can read more about
# the available images at https://crawlee.dev/docs/guides/docker-images
# You can also use any other image from Docker Hub.
FROM apify/actor-node-puppeteer-chrome:20 AS builder
# Check preinstalled packages
RUN npm ls crawlee apify puppeteer playwright
# Copy just package.json and package-lock.json
# to speed up the build using Docker layer cache.
COPY --chown=myuser package*.json ./
# Install all dependencies. Don't audit to speed up the installation.
RUN npm install --include=dev --audit=false
# Next, copy the source files using the user set
# in the base image.
COPY --chown=myuser . ./
# Build the project (dependencies were already installed above).
RUN npm run build
# Create final image
FROM apify/actor-node-puppeteer-chrome:20
# Check preinstalled packages
RUN npm ls crawlee apify puppeteer playwright
# Copy just package.json and package-lock.json
# to speed up the build using Docker layer cache.
COPY --chown=myuser package*.json ./
# Install NPM packages, skipping optional and development dependencies to
# keep the image small. Avoid logging too much and print the dependency
# tree for debugging.
RUN npm --quiet set progress=false \
&& npm install --omit=dev --omit=optional \
&& echo "Installed NPM packages:" \
&& (npm list --omit=dev --all || true) \
&& echo "Node.js version:" \
&& node --version \
&& echo "NPM version:" \
&& npm --version \
&& rm -r ~/.npm
# Copy built JS files from builder image
COPY --from=builder --chown=myuser /home/myuser/dist ./dist
# Next, copy the remaining files and directories with the source code.
# Since we do this after NPM install, quick builds will be really fast
# for most source file changes.
COPY --chown=myuser . ./
# Run the image. If you know you won't need headful browsers,
# you can remove the XVFB start script for a micro perf gain.
CMD ./start_xvfb_and_run_cmd.sh && npm run start:prod --silent

.actor/input_schema.json

{
    "title": "Actor BZJ",
    "type": "object",
    "schemaVersion": 1,
    "properties": {
        "url": {
            "title": "Shopee URL",
            "type": "string",
            "editor": "textfield",
            "description": "Shopee URL that needs to be crawled",
            "prefill": "https://shopee.sg/api/v4/search/search_items?keyword=hp%201020%20toner&limit=30&newest=0&order=desc&page_type=search&scenario=page_global_search&version=2"
        },
        "apiKey": {
            "title": "API Key",
            "type": "string",
            "editor": "textfield",
            "description": "Start using the API with your [API KEY](https://app.scrapeless.com/dashboard/account?tab=apiKey)"
        },
        "actor": {
            "title": "Shopee Actor",
            "type": "string",
            "enum": [
                "scraper.shopee",
                "scraper.shopee.retry",
                "scraper.shopee.stock",
                "scraper.shopee.mobile",
                "scraper.shopee.mobilev2",
                "scraper.shopee.mobilev3",
                "scraper.shopee.webv1"
            ],
            "description": "Shopee Actor to use for scraping",
            "prefill": "scraper.shopee"
        },
        "action": {
            "title": "Shopee Action",
            "type": "string",
            "enum": [
                "shopee.product",
                "shopee.search",
                "shopee.live",
                "shopee.rcmd"
            ],
            "description": "Shopee Action to use for scraping",
            "prefill": "shopee.search"
        },
        "webhook": {
            "title": "Webhook",
            "type": "string",
            "editor": "textfield",
            "description": "Webhook URL to send the data to",
            "default": ""
        }
    },
    "required": ["url", "actor", "apiKey", "action"]
}
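
For illustration, here is how an input satisfying this schema could be sent to the Actor from another Node.js project using the Apify client. This sketch is not part of the repository; the Actor ID and both tokens are placeholders you would replace with your own values.

import { ApifyClient } from 'apify-client';

// Placeholder token; supply your own Apify API token.
const client = new ApifyClient({ token: '<APIFY_TOKEN>' });

// Input object matching .actor/input_schema.json; the Actor ID is assumed.
const run = await client.actor('bazer/bzj-actor').call({
    url: 'https://shopee.sg/api/v4/search/search_items?keyword=hp%201020%20toner&limit=30&newest=0&order=desc&page_type=search&scenario=page_global_search&version=2',
    apiKey: '<SCRAPELESS_API_KEY>', // your Scrapeless API key
    actor: 'scraper.shopee',
    action: 'shopee.search',
    webhook: '',
});

// The Actor pushes the Scrapeless response to its default dataset.
const { items } = await client.dataset(run.defaultDatasetId).listItems();
console.log(items);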

src/main.ts

// Apify SDK - toolkit for building Apify Actors (Read more at https://docs.apify.com/sdk/js/).
// Web scraping and browser automation library (Read more at https://crawlee.dev)
// import { PuppeteerCrawler, Request } from 'crawlee';
// import { router } from './routes.js';
import { Actor } from 'apify';
import Scrapeless from 'scrapeless-sdk-node';

// The init() call configures the Actor for its environment. It's recommended to start every Actor with an init().
await Actor.init();

enum ShopeeActorEnum {
    // No retries allowed
    default = 'scraper.shopee',
    // Retry is allowed, and the task timeout is 55 seconds
    retry = 'scraper.shopee.retry',
    // Allows retry and returns inventory information; only supported in "shopee.product"
    stock = 'scraper.shopee.stock',
    // Only supported in "shopee.product"
    mobile = 'scraper.shopee.mobile',
    // Only supported in "shopee.product"
    mobilev2 = 'scraper.shopee.mobilev2',
    // Only supported in "shopee.product"
    mobilev3 = 'scraper.shopee.mobilev3',
    // Only supported in "shopee.product"
    webv1 = 'scraper.shopee.webv1',
}

enum ShopeeActionEnum {
    product = 'shopee.product',
    search = 'shopee.search',
    live = 'shopee.live',
    rcmd = 'shopee.rcmd',
    ratings = 'shopee.ratings',
}

interface Input {
    apiKey: string;
    actor: ShopeeActorEnum;
    action: ShopeeActionEnum;
    webhook: string;
    url: string;
}

const {
    apiKey,
    actor = ShopeeActorEnum.default,
    action = ShopeeActionEnum.search,
    webhook = '',
    url = 'https://shopee.tw/2312312.10228173.24803858474',
} = await Actor.getInput<Input>() ?? {};

// @ts-expect-error scrapeless-sdk-node ships without type declarations
const scrapeless = new Scrapeless({ apiKey });

// The search action expects a `type` field instead of `action`; every other action is passed through as-is.
function getScrapelessInput() {
    const baseInput = { url };
    if (action === ShopeeActionEnum.search) {
        return { ...baseInput, type: ShopeeActionEnum.search };
    }
    return { ...baseInput, action };
}

const response = await scrapeless.scraper({
    actor,
    webhook,
    input: getScrapelessInput(),
});

console.log('[response]===>', response);

await Actor.pushData(response as object);

// Define the URLs to start the crawler with - get them from the input of the Actor or use a default list.

// Create a proxy configuration that will rotate proxies from Apify Proxy.
// const proxyConfiguration = await Actor.createProxyConfiguration();

// // Create a PuppeteerCrawler that will use the proxy configuration and handle requests with the router from routes.ts.
// const crawler = new PuppeteerCrawler({
//     proxyConfiguration,
//     requestHandler: router,
//     launchContext: {
//         launchOptions: {
//             args: [
//                 '--disable-gpu', // Mitigates the "crashing GPU process" issue in Docker containers
//             ],
//         },
//     },
// });

// // Run the crawler with the start URLs and wait for it to finish.
// await crawler.run(startUrls);

// Gracefully exit the Actor process. It's recommended to quit all Actors with an exit().
await Actor.exit();
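
One thing worth noting: Actor.pushData is called with whatever the Scrapeless SDK returns, and the cast to object hides the fact that the response shape is unchecked. A stricter drop-in replacement for that call could look like the sketch below; this is a hypothetical hardening step, not the Actor's current behavior, and the error record's fields merely mirror the dataset schema (code, message, data).

// Hypothetical guard around pushData: only push plain objects, and record
// a schema-shaped error entry (code/message/data) for anything else.
function isRecord(value: unknown): value is Record<string, unknown> {
    return typeof value === 'object' && value !== null;
}

if (isRecord(response)) {
    await Actor.pushData(response);
} else {
    await Actor.pushData({ code: 'error', message: 'Unexpected non-object response from Scrapeless', data: null });
}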

src/routes.ts

import { Dataset, createPuppeteerRouter } from 'crawlee';

export const router = createPuppeteerRouter();

router.addDefaultHandler(async ({ enqueueLinks, log }) => {
    log.info(`enqueueing new URLs`);
    await enqueueLinks({
        globs: ['https://apify.com/*'],
        label: 'detail',
    });
});

router.addHandler('detail', async ({ request, page, log }) => {
    const title = await page.title();
    log.info(`${title}`, { url: request.loadedUrl });

    await Dataset.pushData({
        url: request.loadedUrl,
        title,
    });
});
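
Note that this router is currently dead code: main.ts imports it only inside a commented-out block. If you wanted to re-enable the template's crawler, the wiring would look roughly like the sketch below. It mirrors the commented-out code in main.ts but defines the startUrls variable that the template referenced without declaring; the default URL is an assumption chosen to match this router's globs.

import { Actor } from 'apify';
import { PuppeteerCrawler } from 'crawlee';
import { router } from './routes.js';

await Actor.init();

// The commented-out template used `startUrls` without defining it;
// a sensible default for this router's globs would be:
const startUrls = ['https://apify.com'];

const crawler = new PuppeteerCrawler({
    requestHandler: router,
    launchContext: {
        launchOptions: {
            args: [
                '--disable-gpu', // Mitigates the "crashing GPU process" issue in Docker containers
            ],
        },
    },
});

await crawler.run(startUrls);
await Actor.exit();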