Meta Threads Scraper
Deprecated
Pricing
Pay per usage
Go to Store
Meta Threads Scraper
Deprecated
Scrape a specific thread by providing a thread URL, such as "https://www.threads.net/t/CuZsgfWLyiI".
0.0 (0)
Pricing
Pay per usage
1
Total users
24
Monthly users
1
Last modified
2 years ago
.actor/Dockerfile
# Base image: Node.js 16 with Playwright and Chrome preinstalled.
# Other images are listed at https://crawlee.dev/docs/guides/docker-images,
# and any Docker Hub image may be used instead.
FROM apify/actor-node-playwright-chrome:16

# Copy only the package manifests first so the dependency-install layer
# below is cached by Docker until package*.json changes.
COPY package*.json ./

# Install production dependencies only (no dev/optional packages) to keep
# the image small; print the dependency tree and tool versions to aid
# debugging, then drop the npm cache.
RUN npm --quiet set progress=false \
 && npm install --omit=dev --omit=optional \
 && echo "Installed NPM packages:" \
 && (npm list --omit=dev --all || true) \
 && echo "Node.js version:" \
 && node --version \
 && echo "NPM version:" \
 && npm --version \
 && rm -r ~/.npm

# Copy the remaining sources last: most edits touch source files, so the
# expensive install layer above stays cached.
COPY . ./

# Start Xvfb (needed for headful browsers) and launch the actor.
# If only headless browsers are used, the Xvfb script can be dropped.
CMD ./start_xvfb_and_run_cmd.sh && npm start --silent
.actor/actor.json
{ "actorSpecification": 1, "name": "my-actor", "title": "Project Playwright Crawler JavaScript", "description": "Crawlee and Playwright project in JavaScript.", "version": "0.0", "meta": { "templateId": "js-crawlee-playwright-chrome" }, "input": "./input_schema.json", "dockerfile": "./Dockerfile", "storages": { "dataset": { "actorSpecification": 1, "views": { "overview": { "title": "Result", "transformation": { "fields": [ "containing_thread", "reply_threads" ] }, "display": { "component": "table", "properties": { "containing_thread": { "label": "containing_thread", "format": "array" }, "reply_threads": { "label": "reply_threads", "format": "array" } } } } } } }}
.actor/input_schema.json
{ "title": "PlaywrightCrawler Template", "type": "object", "schemaVersion": 1, "properties": { "target": { "title": "Target Threads URL", "type": "string", "description": "URL of the thread to crawl", "editor": "textfield", "default": "https://www.threads.net/t/CuZsgfWLyiI" } }}
src/main.js
/**
 * Meta Threads scraper entry point.
 *
 * Enqueues a single thread URL from the actor input, opens it with
 * PlaywrightCrawler, and captures the JSON payload of the page's
 * GraphQL response, pushing it to the default dataset.
 */

// For more information, see https://docs.apify.com/sdk/js
import { Actor } from 'apify';
// For more information, see https://crawlee.dev
import { PlaywrightCrawler, RequestQueue } from 'crawlee';

// Initialize the Apify SDK
await Actor.init();

const input = await Actor.getInput();
// Fail fast instead of enqueueing an empty URL when input is missing.
const targetUrl = input?.target;
if (!targetUrl) {
    throw new Error('Missing "target" URL in actor input.');
}

const requestQueue = await RequestQueue.open();
await requestQueue.addRequest({ url: targetUrl });

const proxyConfiguration = await Actor.createProxyConfiguration();

const crawler = new PlaywrightCrawler({
    proxyConfiguration,
    requestQueue,
    async requestHandler({ request, page, log }) {
        log.info(`Processing ${request.url}...`);

        // Register the listener before waiting, so the GraphQL response
        // cannot be missed. The listener is async: wrap it in try/catch so
        // a parse failure surfaces in the log instead of becoming an
        // unhandled rejection.
        page.on('response', async (response) => {
            if (response.url() !== 'https://www.threads.net/api/graphql') return;
            try {
                const body = await response.json();
                log.info(JSON.stringify(body));
                // NOTE(review): original pushed `body.data.data`; confirm
                // this matches the GraphQL payload shape.
                await Actor.pushData(body.data.data);
                log.info('Thread data captured.');
            } catch (err) {
                log.error(`Failed to handle GraphQL response: ${err.message}`);
            }
        });

        const title = await page.title();
        log.info(`${title}`, { url: request.loadedUrl });

        // Give the page time to fire its GraphQL request before the
        // handler returns and the page is closed.
        await page.waitForTimeout(6000);
    },
});

await crawler.run();

// Exit successfully — once, after the crawler has finished. (The original
// called Actor.exit() inside the response listener, racing the crawler.)
await Actor.exit();
src/routes.js
import { Dataset, createPlaywrightRouter } from 'crawlee';

// Router shared by the crawler: one default handler for discovery,
// one labeled handler for detail pages.
export const router = createPlaywrightRouter();

// Default handler: enqueue all apify.com links as 'detail' requests.
router.addDefaultHandler(async ({ enqueueLinks, log }) => {
    log.info(`enqueueing new URLs`);
    await enqueueLinks({
        globs: ['https://apify.com/*'],
        label: 'detail',
    });
});

// Detail handler: log the page title and store it with the loaded URL.
router.addHandler('detail', async ({ request, page, log }) => {
    const title = await page.title();
    log.info(`${title}`, { url: request.loadedUrl });

    await Dataset.pushData({
        url: request.loadedUrl,
        title,
    });
});
.dockerignore
# configurations.idea
# crawlee and apify storage foldersapify_storagecrawlee_storagestorage
# installed filesnode_modules
# git folder.git
.editorconfig
root = true
[*]indent_style = spaceindent_size = 4charset = utf-8trim_trailing_whitespace = trueinsert_final_newline = trueend_of_line = lf
.eslintrc
{ "extends": "@apify", "root": true}
.gitignore
# This file tells Git which files shouldn't be added to source control
.DS_Store.ideadistnode_modulesapify_storagestorage
package.json
{ "name": "crawlee-playwright-javascript", "version": "0.0.1", "type": "module", "description": "This is an example of an Apify actor.", "dependencies": { "apify": "^3.0.0", "crawlee": "^3.0.0", "playwright": "*" }, "devDependencies": { "@apify/eslint-config": "^0.3.1", "eslint": "^8.36.0" }, "scripts": { "start": "node src/main.js", "lint": "eslint ./src --ext .js,.jsx", "lint:fix": "eslint ./src --ext .js,.jsx --fix", "test": "echo \"Error: oops, the actor has no tests yet, sad!\" && exit 1" }, "author": "It's not you it's me", "license": "ISC"}