Xiaohongshu note detail

vivianwei/xiaohongshu-note-detail
This Actor downloads the title and description of a single Xiaohongshu note.
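
As a rough usage sketch (not part of the Actor's source), you could call this Actor from Node.js with the apify-client package; the API token and note URL below are placeholders:

import { ApifyClient } from 'apify-client';

// Placeholder token - replace with your own Apify API token.
const client = new ApifyClient({ token: '<YOUR_APIFY_TOKEN>' });

// Start the Actor run with a placeholder note URL and wait for it to finish.
const run = await client.actor('vivianwei/xiaohongshu-note-detail').call({
    startUrls: [{ url: 'https://www.xiaohongshu.com/explore/<note-id>' }],
});

// The scraped title and description end up in the run's default dataset.
const { items } = await client.dataset(run.defaultDatasetId).listItems();
console.log(items);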

.actor/Dockerfile

# Specify the base Docker image. You can read more about
# the available images at https://docs.apify.com/sdk/js/docs/guides/docker-images
# You can also use any other image from Docker Hub.
FROM apify/actor-node:18

# Copy just package.json and package-lock.json
# to speed up the build using Docker layer cache.
COPY package*.json ./

# Install NPM packages, skip optional and development dependencies to
# keep the image small. Avoid logging too much and print the dependency
# tree for debugging.
RUN npm --quiet set progress=false \
    && npm install --omit=dev --omit=optional \
    && echo "Installed NPM packages:" \
    && (npm list --omit=dev --all || true) \
    && echo "Node.js version:" \
    && node --version \
    && echo "NPM version:" \
    && npm --version \
    && rm -r ~/.npm

# Next, copy the remaining files and directories with the source code.
# Since we do this after NPM install, quick build will be really fast
# for most source file changes.
COPY . ./

# Run the image.
CMD npm start --silent

.actor/actor.json

{
    "actorSpecification": 1,
    "name": "my-actor-1",
    "title": "Scrape single page in JavaScript",
    "description": "Scrape data from single page with provided URL.",
    "version": "0.0",
    "meta": {
        "templateId": "js-start"
    },
    "input": "./input_schema.json",
    "dockerfile": "./Dockerfile"
}

.actor/input_schema.json

{
    "title": "Scrape data from a web page",
    "type": "object",
    "schemaVersion": 1,
    "properties": {
        "startUrls": {
            "title": "Start URLs",
            "type": "array",
            "description": "URLs to start with.",
            "editor": "requestListSources",
            "prefill": [
                {
                    "url": "https://apify.com"
                }
            ]
        }
    },
    "required": ["startUrls"]
}
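
For reference, an input that satisfies this schema could look like the following (the note URL is a placeholder):

{
    "startUrls": [
        { "url": "https://www.xiaohongshu.com/explore/<note-id>" }
    ]
}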

src/main.js

// Axios - Promise based HTTP client for the browser and node.js (Read more at https://axios-http.com/docs/intro).
import axios from 'axios';
// Cheerio - The fast, flexible & elegant library for parsing and manipulating HTML and XML (Read more at https://cheerio.js.org/).
import * as cheerio from 'cheerio';
// Apify SDK - toolkit for building Apify Actors (Read more at https://docs.apify.com/sdk/js/).
import { Actor } from 'apify';
// This is an ESM project, and as such, it requires you to specify extensions in your relative imports.
// Read more about this here: https://nodejs.org/docs/latest-v18.x/api/esm.html#mandatory-file-extensions
// import { router } from './routes.js';

// The init() call configures the Actor for its environment. It's recommended to start every Actor with an init().
await Actor.init();

// The structure of the input is defined in input_schema.json.
const input = await Actor.getInput();
// const { url } = input;
const startUrls = input?.startUrls || [{ url: 'https://apify.com' }];

const results = [];
for (const startUrl of startUrls) {
    // Fetch the HTML content of the page.
    const response = await axios.get(startUrl.url);
    // Parse the downloaded HTML with Cheerio to enable data extraction.
    const $ = cheerio.load(response.data);

    // Extract the note title and description from the note detail page.
    const title = $('#detail-title').text();
    const description = $('#detail-desc').text();

    // const hashTags = [];
    // // Use the each() method to iterate over every element with the same ID and push its text value into the array.
    // $('a').each((index, element) => {
    //     const hrefValue = $(element).attr('href');
    //     hashTags.push(hrefValue);
    //     // const hashTagText = $(element).text();
    //     // console.log("hashTagText", hashTagText);
    //     // hashTags.push(hashTagText);
    // });

    const result = {
        title,
        description,
        // tags: hashTags,
    };

    results.push(result);
}

// Save the extracted results to the Dataset - a table-like storage.
await Actor.pushData(results);

// Gracefully exit the Actor process. It's recommended to quit all Actors with an exit().
await Actor.exit();
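
Based on the code above, each run pushes items of the following shape to the dataset (values are illustrative placeholders):

[
    {
        "title": "<note title>",
        "description": "<note description>"
    }
]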

.dockerignore

# configurations
.idea

# crawlee and apify storage folders
apify_storage
crawlee_storage
storage

# installed files
node_modules

# git folder
.git

.gitignore

# This file tells Git which files shouldn't be added to source control
.DS_Store
.idea
dist
node_modules
apify_storage
storage/*
!storage/key_value_stores
storage/key_value_stores/*
!storage/key_value_stores/default
storage/key_value_stores/default/*
!storage/key_value_stores/default/INPUT.json

package.json

{
    "name": "js-scrape-single-page",
    "version": "0.0.1",
    "type": "module",
    "description": "This is an example of an Apify actor.",
    "engines": {
        "node": ">=18.0.0"
    },
    "dependencies": {
        "apify": "^3.1.10",
        "axios": "^1.5.0",
        "cheerio": "^1.0.0-rc.12"
    },
    "scripts": {
        "start": "node ./src/main.js",
        "test": "echo \"Error: oops, the actor has no tests yet, sad!\" && exit 1"
    },
    "author": "It's not you it's me",
    "license": "ISC"
}
Developer
Maintained by Community
Actor metrics
  • 4 monthly users
  • 2 stars
  • 100.0% runs succeeded
  • Created in Feb 2024
  • Modified 5 months ago