
Arab chat عربي شات
Private Arab chat
Rating: 0.0 (0)
Pricing: Pay per usage
Total users: 1 · Monthly users: 1 · Runs succeeded: >99% · Last modified: 9 months ago
# Specify the base Docker image. You can read more about
# the available images at https://crawlee.dev/docs/guides/docker-images
# You can also use any other image from Docker Hub.
FROM apify/actor-node-puppeteer-chrome:20 AS builder

# Copy just package.json and package-lock.json
# to speed up the build using Docker layer cache.
COPY package*.json ./

# Install all dependencies. Don't audit to speed up the installation.
RUN npm install --include=dev --audit=false

# Next, copy the source files using the user set
# in the base image.
COPY . ./
# Build the project. Dependencies were already installed above,
# so this only runs the TypeScript compiler.
RUN npm run build
# Create final image
FROM apify/actor-node-puppeteer-chrome:20

# Copy just package.json and package-lock.json
# to speed up the build using Docker layer cache.
COPY package*.json ./

# Install NPM packages, skip optional and development dependencies to
# keep the image small. Avoid logging too much and print the dependency
# tree for debugging.
RUN npm --quiet set progress=false \
    && npm install --omit=dev --omit=optional \
    && echo "Installed NPM packages:" \
    && (npm list --omit=dev --all || true) \
    && echo "Node.js version:" \
    && node --version \
    && echo "NPM version:" \
    && npm --version \
    && rm -r ~/.npm
# Copy built JS files from the builder image.
COPY --from=builder /home/myuser/dist ./dist
# Next, copy the remaining files and directories with the source code.
# Since we do this after NPM install, quick build will be really fast
# for most source file changes.
COPY . ./

# Run the image. If you know you won't need headful browsers,
# you can remove the XVFB start script for a micro perf gain.
CMD ./start_xvfb_and_run_cmd.sh && npm run start:prod --silent
{ "actorSpecification": 1, "name": "my-actor", "title": "Project Puppeteer Crawler Typescript", "description": "Crawlee and Puppeteer project in typescript.", "version": "0.0", "meta": { "templateId": "ts-crawlee-puppeteer-chrome" }, "input": "./input_schema.json", "dockerfile": "./Dockerfile"}
{ "title": "PlaywrightCrawler Template", "type": "object", "schemaVersion": 1, "properties": { "startUrls": { "title": "Start URLs", "type": "array", "description": "URLs to start with.", "editor": "requestListSources", "prefill": [ { "url": "https://apify.com" } ] } }}
// Apify SDK - toolkit for building Apify Actors (Read more at https://docs.apify.com/sdk/js/).
import { Actor } from 'apify';
// Web scraping and browser automation library (Read more at https://crawlee.dev)
import { PuppeteerCrawler, Request } from 'crawlee';
import { router } from './routes.js';

// The init() call configures the Actor for its environment. It's recommended to start every Actor with an init().
await Actor.init();

interface Input {
    startUrls: Request[];
}

// Define the URLs to start the crawler with - get them from the input of the Actor or use a default list.
const {
    startUrls = ['https://crawlee.dev'],
} = await Actor.getInput<Input>() ?? {};

// Create a proxy configuration that will rotate proxies from Apify Proxy.
const proxyConfiguration = await Actor.createProxyConfiguration();

// Create a PuppeteerCrawler that will use the proxy configuration and handle requests with the router from the routes.js file.
const crawler = new PuppeteerCrawler({
    proxyConfiguration,
    requestHandler: router,
});

// Run the crawler with the start URLs and wait for it to finish.
await crawler.run(startUrls);

// Gracefully exit the Actor process. It's recommended to quit all Actors with an exit().
await Actor.exit();
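The crawler above relies on Crawlee's defaults for everything except the proxy and the router. If a run needs bounding or tuning, PuppeteerCrawler accepts further options; the sketch below shows a few commonly used ones with illustrative values that are not part of this Actor:

// A sketch of optional PuppeteerCrawler settings; the values are examples only.
const tunedCrawler = new PuppeteerCrawler({
    proxyConfiguration,
    requestHandler: router,
    maxRequestsPerCrawl: 100, // stop after 100 processed requests to cap run cost
    maxConcurrency: 10, // upper bound on parallel browser pages
    navigationTimeoutSecs: 60, // fail a request whose navigation exceeds one minute
});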
import { Dataset, createPuppeteerRouter } from 'crawlee';

export const router = createPuppeteerRouter();

router.addDefaultHandler(async ({ enqueueLinks, log }) => {
    log.info(`enqueueing new URLs`);
    await enqueueLinks({
        globs: ['https://apify.com/*'],
        label: 'detail',
    });
});

router.addHandler('detail', async ({ request, page, log }) => {
    const title = await page.title();
    log.info(`${title}`, { url: request.loadedUrl });

    await Dataset.pushData({
        url: request.loadedUrl,
        title,
    });
});
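Routing scales by label: every enqueueLinks call tags the requests it adds, and addHandler picks them up by that tag. As a sketch of how this file could grow, the hypothetical handler below also pulls a meta description; the 'detail-extended' label and the selector are assumptions for illustration, not something this template enqueues:

// Hypothetical extra handler; nothing in this template enqueues the 'detail-extended' label.
router.addHandler('detail-extended', async ({ request, page, log }) => {
    const title = await page.title();
    // The selector is an assumption; adjust it to the pages you actually crawl.
    const description = await page
        .$eval('meta[name="description"]', (el) => el.getAttribute('content'))
        .catch(() => null);
    log.info(`Scraped ${title}`, { url: request.loadedUrl });
    await Dataset.pushData({
        url: request.loadedUrl,
        title,
        description,
    });
});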
# configurations
.idea

# crawlee and apify storage folders
apify_storage
crawlee_storage
storage

# installed files
node_modules

# git folder
.git
root = true
[*]
indent_style = space
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
end_of_line = lf
{ "root": true, "env": { "browser": true, "es2020": true, "node": true }, "extends": [ "@apify/eslint-config-ts" ], "parserOptions": { "project": "./tsconfig.json", "ecmaVersion": 2020 }, "ignorePatterns": [ "node_modules", "dist", "**/*.d.ts" ]}
# This file tells Git which files shouldn't be added to source control
.DS_Store
.idea
dist
node_modules
apify_storage
storage
{ "name": "crawlee-puppeteer-typescript", "version": "0.0.1", "type": "module", "description": "This is an example of an Apify actor.", "engines": { "node": ">=18.0.0" }, "dependencies": { "apify": "^3.1.10", "crawlee": "^3.5.4", "puppeteer": "*" }, "devDependencies": { "@apify/eslint-config-ts": "^0.3.0", "@apify/tsconfig": "^0.1.0", "@typescript-eslint/eslint-plugin": "^6.7.2", "@typescript-eslint/parser": "^6.7.2", "eslint": "^8.50.0", "tsx": "^4.6.2", "typescript": "^5.3.3" }, "scripts": { "start": "npm run start:dev", "start:prod": "node dist/main.js", "start:dev": "tsx src/main.ts", "build": "tsc", "lint": "eslint ./src --ext .ts", "lint:fix": "eslint ./src --ext .ts --fix", "test": "echo \"Error: oops, the actor has no tests yet, sad!\" && exit 1" }, "author": "It's not you it's me", "license": "ISC"}
{ "extends": "@apify/tsconfig", "compilerOptions": { "module": "NodeNext", "moduleResolution": "NodeNext", "target": "ES2022", "outDir": "dist", "noUnusedLocals": false, "skipLibCheck": true, "lib": ["DOM"] }, "include": [ "./src/**/*" ]}