from apify_client import ApifyClient
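# Initialize the ApifyClient with your API token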
client = ApifyClient("<YOUR_API_TOKEN>")
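# Prepare the Actor input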
run_input = { "code": """// The function accepts a single argument: the \"context\" object.
// For a complete list of its properties and functions,
// see https://apify.com/apify/web-scraper#page-function
async function pageFunction(context) {
    // This statement works as a breakpoint when you're trying to debug your code. Works only with Run mode: DEVELOPMENT!
    // debugger;

    // jQuery is handy for finding DOM elements and extracting data from them.
    // To use it, make sure to enable the \"Inject jQuery\" option.
    const $ = context.jQuery;
    const pageTitle = $('title').first().text();

    // Print some information to actor log
    context.log.info(`URL: ${context.request.url}, TITLE: ${pageTitle}`);

    // Manually add a new page to the queue for scraping.
    await context.enqueueRequest({ url: 'http://www.example.com' });

    // Return an object with the data extracted from the page.
    // It will be stored to the resulting dataset.
    return {
        url: context.request.url,
        pageTitle
    };
}""" }
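# Run the Actor and wait for it to finish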
run = client.actor("drobnikj/js-code-2-flowchart").call(run_input=run_input)
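# Fetch and print Actor results from the run's dataset (if there are any)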
print("💾 Check your data here: https://console.apify.com/storage/datasets/" + run["defaultDatasetId"])
for item in client.dataset(run["defaultDatasetId"]).iterate_items():
    print(item)