Open Source Actors Scraper

Developed by Lukáš Křivka
Maintained by Community
Pricing: Pay per usage

Get all open-source Actors from Apify Store.
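To run the Actor programmatically and read its results, you can use apify-client. A minimal sketch, assuming the Actor is published as lukaskrivka/open-source-actors-scraper (the technical name comes from .actor/actor.json below; the account handle is an assumption):

import { ApifyClient } from 'apify-client';

// An API token from https://console.apify.com/account/integrations
const client = new ApifyClient({ token: process.env.APIFY_TOKEN });

// Start the Actor and wait for the run to finish.
const run = await client.actor('lukaskrivka/open-source-actors-scraper').call();

// Read the scraped open-source Actors from the run's default dataset.
const { items } = await client.dataset(run.defaultDatasetId).listItems();
console.log(`Found ${items.length} open-source Actors`);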

.dockerignore

# configurations
.idea
# crawlee and apify storage folders
apify_storage
crawlee_storage
storage
# installed files
node_modules
# git folder
.git

.editorconfig

root = true
[*]
indent_style = space
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
end_of_line = lf

.eslintrc

{
    "root": true,
    "env": {
        "browser": true,
        "es2020": true,
        "node": true
    },
    "extends": [
        "@apify/eslint-config-ts"
    ],
    "parserOptions": {
        "project": "./tsconfig.json",
        "ecmaVersion": 2020
    },
    "ignorePatterns": [
        "node_modules",
        "dist",
        "**/*.d.ts"
    ]
}

.gitignore

# This file tells Git which files shouldn't be added to source control
.DS_Store
.idea
dist
node_modules
apify_storage
storage
# Added by Apify CLI
.venv

package.json

{
    "name": "apify-store-open-source",
    "version": "0.0.1",
    "type": "module",
    "description": "This is a boilerplate of an Apify actor.",
    "engines": {
        "node": ">=18.0.0"
    },
    "dependencies": {
        "apify": "^3.1.10",
        "crawlee": "^3.5.4"
    },
    "devDependencies": {
        "@apify/eslint-config-ts": "^0.3.0",
        "@apify/tsconfig": "^0.1.0",
        "@typescript-eslint/eslint-plugin": "^6.7.2",
        "@typescript-eslint/parser": "^6.7.2",
        "eslint": "^8.50.0",
        "tsx": "^4.6.2",
        "typescript": "^5.3.3"
    },
    "scripts": {
        "start": "npm run start:dev",
        "start:prod": "node dist/main.js",
        "start:dev": "tsx src/main.ts",
        "build": "tsc",
        "lint": "eslint ./src --ext .ts",
        "lint:fix": "eslint ./src --ext .ts --fix",
        "test": "echo \"Error: oops, the actor has no tests yet, sad!\" && exit 1"
    },
    "author": "It's not you it's me",
    "license": "ISC"
}

tsconfig.json

{
    "extends": "@apify/tsconfig",
    "compilerOptions": {
        "module": "NodeNext",
        "moduleResolution": "NodeNext",
        "target": "ES2022",
        "outDir": "dist",
        "noUnusedLocals": false,
        "skipLibCheck": true,
        "lib": ["DOM"]
    },
    "include": [
        "./src/**/*"
    ]
}

.actor/Dockerfile

# Specify the base Docker image. You can read more about
# the available images at https://crawlee.dev/docs/guides/docker-images
# You can also use any other image from Docker Hub.
FROM apify/actor-node:20 AS builder
# Check preinstalled packages
RUN npm ls crawlee apify puppeteer playwright
# Copy just package.json and package-lock.json
# to speed up the build using Docker layer cache.
COPY package*.json ./
# Install all dependencies. Don't audit to speed up the installation.
RUN npm install --include=dev --audit=false
# Next, copy the source files using the user set
# in the base image.
COPY . ./
# Build the project.
RUN npm run build
# Create final image
FROM apify/actor-node:20
# Check preinstalled packages
RUN npm ls crawlee apify puppeteer playwright
# Copy just package.json and package-lock.json
# to speed up the build using Docker layer cache.
COPY package*.json ./
# Install NPM packages, skip optional and development dependencies to
# keep the image small. Avoid logging too much and print the dependency
# tree for debugging
RUN npm --quiet set progress=false \
&& npm install --omit=dev --omit=optional \
&& echo "Installed NPM packages:" \
&& (npm list --omit=dev --all || true) \
&& echo "Node.js version:" \
&& node --version \
&& echo "NPM version:" \
&& npm --version \
&& rm -r ~/.npm
# Copy built JS files from builder image
COPY --from=builder /usr/src/app/dist ./dist
# Next, copy the remaining files and directories with the source code.
# Since we do this after NPM install, quick build will be really fast
# for most source file changes.
COPY . ./
# Run the image.
CMD npm run start:prod --silent

.actor/actor.json

{
    "actorSpecification": 1,
    "name": "open-source-actors-scraper",
    "title": "Project Cheerio Crawler Typescript",
    "description": "Crawlee and Cheerio project in typescript.",
    "version": "0.0",
    "meta": {
        "templateId": "ts-crawlee-cheerio"
    },
    "input": "./input_schema.json",
    "dockerfile": "./Dockerfile"
}

.actor/input_schema.json

{
    "title": "CheerioCrawler Template",
    "type": "object",
    "schemaVersion": 1,
    "properties": {
        "startUrls": {
            "title": "Start URLs",
            "type": "array",
            "description": "URLs to start with.",
            "editor": "requestListSources",
            "prefill": [
                {
                    "url": "https://crawlee.dev"
                }
            ]
        },
        "maxRequestsPerCrawl": {
            "title": "Max Requests per Crawl",
            "type": "integer",
            "description": "Maximum number of requests that can be made by this crawler.",
            "default": 100
        }
    }
}
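The schema above is unchanged from the CheerioCrawler template, and src/main.ts below never reads these fields. If you wanted to honor them, a minimal sketch (the Input interface name is illustrative, not part of this repo):

import { Actor } from 'apify';
import { CheerioCrawler } from 'crawlee';

// Hypothetical shape matching .actor/input_schema.json.
interface Input {
    startUrls: { url: string }[];
    maxRequestsPerCrawl: number;
}

await Actor.init();
const input = await Actor.getInput<Input>();

// Cap the crawl, falling back to the schema's default of 100.
const crawler = new CheerioCrawler({
    maxRequestsPerCrawl: input?.maxRequestsPerCrawl ?? 100,
    requestHandler: async ({ request }) => { /* ... */ },
});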

src/main.ts

// Apify SDK - toolkit for building Apify Actors (Read more at https://docs.apify.com/sdk/js/)
import { Actor, log } from 'apify';
// Crawlee - web scraping and browser automation library (Read more at https://crawlee.dev)
import { CheerioCrawler } from 'crawlee';
// This is an ESM project, and as such, it requires you to specify extensions in your relative imports.
// Read more about this here: https://nodejs.org/docs/latest-v18.x/api/esm.html#mandatory-file-extensions
// Note that we need to use `.js` even when inside TS files.
// import { router } from './routes.js';

// The init() call configures the Actor for its environment. It's recommended to start every Actor with an init().
await Actor.init();

const proxyConfiguration = await Actor.createProxyConfiguration();

// First page of the public Apify Store API; later pages are enqueued with a growing offset.
const firstReq = {
    url: 'https://api.apify.com/v2/store?limit=1000',
    label: 'PAGINATION',
    userData: { offset: 0 },
};

interface ActorStore {
    name: string;
    url: string;
}

const crawler = new CheerioCrawler({
    proxyConfiguration,
    requestHandler: async ({ request, $, json }) => {
        if (request.label === 'PAGINATION') {
            // Store API responses are JSON; each page holds up to 1,000 Actors.
            const { items } = json.data;

            log.info(`Loaded ${items.length} Actors from page ${request.url}`);

            // Enqueue every Actor's public Store page for the open-source check.
            const toEnqueue = items.map((item: ActorStore) => {
                return {
                    url: item.url,
                    label: 'DETAIL',
                    userData: { item },
                };
            });
            await crawler.addRequests(toEnqueue);

            // A full page means there may be more; enqueue the next offset.
            if (items.length >= 1000) {
                const offset = request.userData.offset + 1000;
                await crawler.addRequests([{
                    url: `https://api.apify.com/v2/store?limit=1000&offset=${offset}`,
                    label: 'PAGINATION',
                    userData: { offset },
                }]);
            }
        } else if (request.label === 'DETAIL') {
            const { item } = request.userData;
            // The Store page renders a source-code element only for open-source Actors.
            const isOpenSource = $('[data-test-url="source-code"]').length > 0;

            if (isOpenSource) {
                await Actor.pushData({
                    ...item,
                    isOpenSource,
                });
            }

            log.info(`${item.name} is open source: ${isOpenSource}`);
        }
    },
});

await crawler.run([firstReq]);

// Gracefully exit the Actor process. It's recommended to quit all Actors with an exit().
await Actor.exit();
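The PAGINATION branch above walks the public Store API in pages of 1,000 items and stops when a page comes back short. The same traversal can be sketched without Crawlee (a sketch assuming the data.items response shape used above and Node 18+ for the global fetch):

// Collect every Actor listed in the Apify Store via offset pagination.
const all: { name: string; url: string }[] = [];
for (let offset = 0; ; offset += 1000) {
    const res = await fetch(`https://api.apify.com/v2/store?limit=1000&offset=${offset}`);
    const { data } = await res.json();
    all.push(...data.items);
    if (data.items.length < 1000) break; // a short page means the listing is exhausted
}
console.log(`Fetched ${all.length} Actors from the Store API`);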