LD+JSON Schema scraper
pocesar/json-ld-schema
Extract all LD+JSON tags from the given URLs.
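In practice the actor looks for <script type="application/ld+json"> tags and parses their contents. A minimal local sketch of that extraction step, using cheerio with a made-up HTML snippet (the page content and values below are illustrative only):

const cheerio = require('cheerio');

// Illustrative page with a single LD+JSON tag (not a real page)
const html = `
<html><head>
<script type="application/ld+json">
{ "@context": "https://schema.org", "@type": "Article", "headline": "Example headline" }
</script>
</head></html>`;

const $ = cheerio.load(html);

// Same selector and parsing approach as the page function in main.js below
const items = $('script[type="application/ld+json"]')
    .map((_, el) => $(el).html().trim())
    .get()
    .map((text) => JSON.parse(text));

console.log(items);
// => [ { '@context': 'https://schema.org', '@type': 'Article', headline: 'Example headline' } ]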
.editorconfig
root = true

[*]
indent_style = space
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
end_of_line = lf
.eslintrc
{
    "extends": "@apify"
}
.gitignore
# This file tells Git which files shouldn't be added to source control

.idea
node_modules
Dockerfile
# First, specify the base Docker image. You can read more about
# the available images at https://sdk.apify.com/docs/guides/docker-images
# You can also use any other image from Docker Hub.
FROM apify/actor-node:16

# Second, copy just package.json and package-lock.json since they are
# the only files that affect "npm install" in the next step, to speed up the build.
COPY package*.json ./

# Install NPM packages, skip optional and development dependencies to
# keep the image small. Avoid logging too much and print the dependency
# tree for debugging.
RUN npm --quiet set progress=false \
    && npm install --only=prod --no-optional \
    && echo "Installed NPM packages:" \
    && (npm list --only=prod --no-optional --all || true) \
    && echo "Node.js version:" \
    && node --version \
    && echo "NPM version:" \
    && npm --version

# Next, copy the remaining files and directories with the source code.
# Since we do this after NPM install, quick builds will be really fast
# for most source file changes.
COPY . ./

# Optionally, specify how to launch the source code of your actor.
# By default, Apify's base Docker images define the CMD instruction
# that runs the Node.js source code using the command specified
# in the "scripts.start" section of the package.json file.
# In short, the instruction looks something like this:
#
# CMD npm start
INPUT_SCHEMA.json
{
    "title": "LD+JSON Schema scraper",
    "type": "object",
    "schemaVersion": 1,
    "properties": {
        "startUrls": {
            "title": "Start URLs",
            "type": "array",
            "description": "The URLs to extract all LD+JSON data from",
            "default": [],
            "prefill": [{
                "url": "https://blog.apify.com/"
            }],
            "editor": "requestListSources"
        },
        "proxyConfiguration": {
            "title": "Proxy configuration",
            "description": "A proxy is required for scraping",
            "type": "object",
            "default": { "useApifyProxy": true },
            "prefill": { "useApifyProxy": true },
            "editor": "proxy"
        },
        "customData": {
            "title": "Custom data",
            "description": "Custom data to include in every output item",
            "type": "object",
            "default": {},
            "prefill": {},
            "editor": "json"
        }
    },
    "required": [
        "startUrls",
        "proxyConfiguration"
    ]
}
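Based on this schema, a complete run input might look like the following (the URL matches the prefill above; the customData value is just an example):

{
    "startUrls": [
        { "url": "https://blog.apify.com/" }
    ],
    "proxyConfiguration": { "useApifyProxy": true },
    "customData": { "label": "example-run" }
}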
apify.json
{
    "env": { "npm_config_loglevel": "silent" }
}
main.js
const Apify = require('apify');

// This page function is not executed here; it is serialized and passed to
// apify/cheerio-scraper via Apify.metamorph() below.
const pageFunction = async (context) => {
    const { request, $, log, customData } = context;

    const { url } = request;

    // Select all LD+JSON script tags on the page
    const lds = $('script[type="application/ld+json"]');

    if (!lds.length) {
        log.warning('No LD+JSON found on page', { url });
        return {
            data: {},
            url,
            customData,
        };
    }

    // Parse each tag, skip invalid JSON, and output one item per tag
    return lds
        .map((_, el) => $(el).html().trim())
        .get()
        .map((html) => {
            try {
                return JSON.parse(html);
            } catch (e) {
                log.exception(e, 'Invalid JSON', { url });
            }
        })
        .filter(Boolean)
        .map((data) => {
            return {
                data,
                url,
                customData,
            };
        });
};

Apify.main(async () => {
    const { proxyConfiguration, startUrls, customData } = await Apify.getInput();

    if (!proxyConfiguration) {
        throw new Error('This actor requires a proxy to run');
    }

    // Validate the proxy configuration before handing the run off
    const proxy = await Apify.createProxyConfiguration(proxyConfiguration);

    if (!proxy) {
        throw new Error('Invalid proxy configuration');
    }

    if (!startUrls?.length) {
        throw new Error('Provide a RequestList sources array on the "startUrls" input');
    }

    // Hand the whole run over to apify/cheerio-scraper with the page function above
    await Apify.metamorph('apify/cheerio-scraper', {
        startUrls,
        pageFunction: pageFunction.toString(),
        proxyConfiguration,
        customData,
        ignoreSslErrors: true,
    });
});
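Because Apify.metamorph() hands the run over to apify/cheerio-scraper, the page function above is what actually executes on each page, and every parsed LD+JSON tag should end up as a separate item in the default dataset. An illustrative item (values are made up and depend on the crawled page):

{
    "data": {
        "@context": "https://schema.org",
        "@type": "Article",
        "headline": "Example headline"
    },
    "url": "https://blog.apify.com/",
    "customData": { "label": "example-run" }
}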
package.json
{
    "name": "project-empty",
    "version": "0.0.1",
    "description": "Extract all LD+JSON tags from the given URLs.",
    "dependencies": {
        "apify": "^2.2.1"
    },
    "scripts": {
        "start": "node main.js",
        "lint": "./node_modules/.bin/eslint . --ext .js,.jsx",
        "lint:fix": "./node_modules/.bin/eslint . --ext .js,.jsx --fix",
        "test": "echo \"Error: oops, the actor has no tests yet, sad!\" && exit 1"
    },
    "author": "It's not you it's me",
    "license": "ISC"
}
Maintained by Community

Actor metrics: 13 monthly users · 5 stars · >99% of runs succeeded · Created in Feb 2022 · Modified 3 years ago