Open Source Actors Scraper

lukaskrivka/open-source-actors-scraper
Get all open-source Actors from Apify Store.
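Under the hood, the Actor pages through the public Apify Store API and checks each Actor's detail page for a source-code link (see src/main.ts below). To run it from your own code and read the results, you can use the apify-client package; the snippet below is only a minimal sketch and assumes an Apify API token in the APIFY_TOKEN environment variable:

import { ApifyClient } from 'apify-client';

const client = new ApifyClient({ token: process.env.APIFY_TOKEN });

// Start the Actor on the Apify platform and wait for the run to finish.
const run = await client.actor('lukaskrivka/open-source-actors-scraper').call();

// Read the scraped open-source Actors from the run's default dataset.
const { items } = await client.dataset(run.defaultDatasetId).listItems();
console.log(`Found ${items.length} open-source Actors`);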

.dockerignore

# configurations
.idea

# crawlee and apify storage folders
apify_storage
crawlee_storage
storage

# installed files
node_modules

# git folder
.git

.editorconfig

root = true

[*]
indent_style = space
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
end_of_line = lf

.eslintrc

{
    "root": true,
    "env": {
        "browser": true,
        "es2020": true,
        "node": true
    },
    "extends": [
        "@apify/eslint-config-ts"
    ],
    "parserOptions": {
        "project": "./tsconfig.json",
        "ecmaVersion": 2020
    },
    "ignorePatterns": [
        "node_modules",
        "dist",
        "**/*.d.ts"
    ]
}

.gitignore

# This file tells Git which files shouldn't be added to source control

.DS_Store
.idea
dist
node_modules
apify_storage
storage

# Added by Apify CLI
.venv

package.json

{
    "name": "apify-store-open-source",
    "version": "0.0.1",
    "type": "module",
    "description": "This is a boilerplate of an Apify actor.",
    "engines": {
        "node": ">=18.0.0"
    },
    "dependencies": {
        "apify": "^3.1.10",
        "crawlee": "^3.5.4"
    },
    "devDependencies": {
        "@apify/eslint-config-ts": "^0.3.0",
        "@apify/tsconfig": "^0.1.0",
        "@typescript-eslint/eslint-plugin": "^6.7.2",
        "@typescript-eslint/parser": "^6.7.2",
        "eslint": "^8.50.0",
        "tsx": "^4.6.2",
        "typescript": "^5.3.3"
    },
    "scripts": {
        "start": "npm run start:dev",
        "start:prod": "node dist/main.js",
        "start:dev": "tsx src/main.ts",
        "build": "tsc",
        "lint": "eslint ./src --ext .ts",
        "lint:fix": "eslint ./src --ext .ts --fix",
        "test": "echo \"Error: oops, the actor has no tests yet, sad!\" && exit 1"
    },
    "author": "It's not you it's me",
    "license": "ISC"
}

tsconfig.json

{
    "extends": "@apify/tsconfig",
    "compilerOptions": {
        "module": "NodeNext",
        "moduleResolution": "NodeNext",
        "target": "ES2022",
        "outDir": "dist",
        "noUnusedLocals": false,
        "skipLibCheck": true,
        "lib": ["DOM"]
    },
    "include": [
        "./src/**/*"
    ]
}

.actor/Dockerfile

# Specify the base Docker image. You can read more about
# the available images at https://crawlee.dev/docs/guides/docker-images
# You can also use any other image from Docker Hub.
FROM apify/actor-node:20 AS builder

# Check preinstalled packages
RUN npm ls crawlee apify puppeteer playwright

# Copy just package.json and package-lock.json
# to speed up the build using Docker layer cache.
COPY package*.json ./

# Install all dependencies. Don't audit to speed up the installation.
RUN npm install --include=dev --audit=false

# Next, copy the source files using the user set
# in the base image.
COPY . ./

# Build the project (dependencies were already installed above).
RUN npm run build

# Create the final image.
FROM apify/actor-node:20

# Check preinstalled packages
RUN npm ls crawlee apify puppeteer playwright

# Copy just package.json and package-lock.json
# to speed up the build using Docker layer cache.
COPY package*.json ./

# Install NPM packages, skipping optional and development dependencies to
# keep the image small. Avoid logging too much and print the dependency
# tree for debugging.
RUN npm --quiet set progress=false \
    && npm install --omit=dev --omit=optional \
    && echo "Installed NPM packages:" \
    && (npm list --omit=dev --all || true) \
    && echo "Node.js version:" \
    && node --version \
    && echo "NPM version:" \
    && npm --version \
    && rm -r ~/.npm

# Copy the built JS files from the builder image.
COPY --from=builder /usr/src/app/dist ./dist

# Next, copy the remaining files and directories with the source code.
# Since we do this after NPM install, quick builds will be really fast
# for most source file changes.
COPY . ./

# Run the image.
CMD npm run start:prod --silent

.actor/actor.json

{
    "actorSpecification": 1,
    "name": "open-source-actors-scraper",
    "title": "Project Cheerio Crawler Typescript",
    "description": "Crawlee and Cheerio project in typescript.",
    "version": "0.0",
    "meta": {
        "templateId": "ts-crawlee-cheerio"
    },
    "input": "./input_schema.json",
    "dockerfile": "./Dockerfile"
}

.actor/input_schema.json

{
    "title": "CheerioCrawler Template",
    "type": "object",
    "schemaVersion": 1,
    "properties": {
        "startUrls": {
            "title": "Start URLs",
            "type": "array",
            "description": "URLs to start with.",
            "editor": "requestListSources",
            "prefill": [
                {
                    "url": "https://crawlee.dev"
                }
            ]
        },
        "maxRequestsPerCrawl": {
            "title": "Max Requests per Crawl",
            "type": "integer",
            "description": "Maximum number of requests that can be made by this crawler.",
            "default": 100
        }
    }
}
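This is the stock input schema from the Cheerio crawler template. Note that src/main.ts below hard-codes its start request against the Store API and does not currently read these fields; if you wanted an Actor to honor them, the usual pattern looks roughly like the sketch below (illustrative only, not part of this Actor's code; the Input interface and defaults simply mirror the schema above):

import { Actor } from 'apify';
import { CheerioCrawler } from 'crawlee';

await Actor.init();

interface Input {
    startUrls: { url: string }[];
    maxRequestsPerCrawl: number;
}

// Read the fields defined in input_schema.json, falling back to the schema defaults.
const { startUrls = [], maxRequestsPerCrawl = 100 } = await Actor.getInput<Input>() ?? ({} as Input);

const crawler = new CheerioCrawler({
    maxRequestsPerCrawl,
    requestHandler: async ({ request }) => {
        console.log(`Visited ${request.url}`);
    },
});

await crawler.run(startUrls);
await Actor.exit();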

src/main.ts

// Apify SDK - toolkit for building Apify Actors (Read more at https://docs.apify.com/sdk/js/)
import { Actor, log } from 'apify';
// Crawlee - web scraping and browser automation library (Read more at https://crawlee.dev)
import { CheerioCrawler } from 'crawlee';
// This is an ESM project, and as such, it requires you to specify extensions in your relative imports.
// Read more about this here: https://nodejs.org/docs/latest-v18.x/api/esm.html#mandatory-file-extensions
// Note that we need to use `.js` even when inside TS files.
// import { router } from './routes.js';

// The init() call configures the Actor for its environment. It's recommended to start every Actor with an init().
await Actor.init();

const proxyConfiguration = await Actor.createProxyConfiguration();

// First request: list Apify Store via its public API, 1,000 items per page.
const firstReq = {
    url: 'https://api.apify.com/v2/store?limit=1000',
    label: 'PAGINATION',
    userData: { offset: 0 },
};

interface ActorStore {
    name: string;
    url: string;
}

const crawler = new CheerioCrawler({
    proxyConfiguration,
    requestHandler: async ({ request, $, json }) => {
        if (request.label === 'PAGINATION') {
            const { items } = json.data;

            log.info(`Loaded ${items.length} Actors from page ${request.url}`);

            // Enqueue a DETAIL request for each Actor's Store page.
            const toEnqueue = items.map((item: ActorStore) => {
                return {
                    url: item.url,
                    label: 'DETAIL',
                    userData: { item },
                };
            });
            await crawler.addRequests(toEnqueue);

            // A full page means there may be more Actors, so enqueue the next page.
            if (items.length >= 1000) {
                const offset = request.userData.offset + 1000;
                await crawler.addRequests([{
                    url: `https://api.apify.com/v2/store?limit=1000&offset=${offset}`,
                    label: 'PAGINATION',
                    userData: { offset },
                }]);
            }
        } else if (request.label === 'DETAIL') {
            const { item } = request.userData;
            // An Actor is considered open source when its Store page links to its source code.
            const isOpenSource = $('[data-test-url="source-code"]').length > 0;

            if (isOpenSource) {
                await Actor.pushData({
                    ...item,
                    isOpenSource,
                });
            }

            log.info(`${item.name} is open source: ${isOpenSource}`);
        }
    },
});

await crawler.run([firstReq]);

// Gracefully exit the Actor process. It's recommended to quit all Actors with an exit().
await Actor.exit();
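For reference, the PAGINATION branch above expects the Store API response to expose the listed Actors under json.data, and each record pushed by Actor.pushData() is simply the Store item plus an isOpenSource flag. A rough sketch of the shapes as consumed by this code (the real API returns more fields per item than shown here):

// Shape of https://api.apify.com/v2/store responses as used by the PAGINATION handler.
interface StoreListResponse {
    data: {
        items: {
            name: string; // Actor name, used only for logging
            url: string;  // link to the Actor's Store detail page, enqueued as a DETAIL request
        }[];
    };
}

// Shape of each record written to the default dataset by the DETAIL handler.
interface OpenSourceActorRecord {
    name: string;
    url: string;
    isOpenSource: true; // records are pushed only when the source-code link is found
}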
Maintained by Community
Actor metrics
  • 1 monthly user
  • 0 stars
  • 100.0% runs succeeded
  • Created in Sep 2024
  • Modified 15 days ago