import { ApifyClient } from 'apify-client';

// API token used to authenticate against the Apify platform.
// Replace the placeholder with a real token from the Apify console.
const APIFY_TOKEN = '<YOUR_API_TOKEN>';

// Shared Apify API client used for the actor call and dataset read below.
const client = new ApifyClient({ token: APIFY_TOKEN });
8
9
// Input for the vanilla-js-scraper actor run.
// `pageFunction` runs inside the actor's page context; the hook fields are
// source-code strings that the actor evaluates at runtime.
const input = {
    "requests": [
        {
            "url": "https://apify.com"
        }
    ],
    "pseudoUrls": [
        {
            "purl": "https://apify.com[(/[\\w-]+)?]"
        }
    ],
    "linkSelector": "a[href]",
    "pageFunction": async function pageFunction(context) {
        const { window, document, crawler, enqueueRequest, request, response, userData, json, body, kvStore, customData } = context;

        // Optional chaining: pages without a <title> element yield `undefined`
        // instead of throwing a TypeError on `.textContent`.
        const title = document.querySelector('title')?.textContent;

        const responseHeaders = response.headers;

        return {
            title,
            responseHeaders
        };
    },
    // BUG FIX: the backticks around `requestAsBrowser()` below were previously
    // unescaped, which terminated this template literal early and made the
    // whole file a syntax error. They are now escaped with a backslash.
    "preNavigationHooks": `// We need to return array of (possibly async) functions here.
    // The functions accept two arguments: the "crawlingContext" object
    // and "requestAsBrowserOptions" which are passed to the \`requestAsBrowser()\`
    // function the crawler calls to navigate..
    [
        async (crawlingContext, requestAsBrowserOptions) => {
            // ...
        }
    ]`,
    "postNavigationHooks": `// We need to return array of (possibly async) functions here.
    // The functions accept a single argument: the "crawlingContext" object.
    [
        async (crawlingContext) => {
            // ...
        },
    ]`,
    "proxy": {
        "useApifyProxy": true
    },
    "additionalMimeTypes": [],
    "customData": {}
};
56
57
// Start the scraper actor and wait for its run to finish.
const actorClient = client.actor("mstephen190/vanilla-js-scraper");
const run = await actorClient.call(input);
59
60
// Fetch the run's default dataset and print every scraped item.
console.log('Results from dataset');
console.log(`💾 Check your data here: https://console.apify.com/storage/datasets/${run.defaultDatasetId}`);
const { items } = await client.dataset(run.defaultDatasetId).listItems();
for (const item of items) {
    console.dir(item);
}
67
68