from apify_client import ApifyClient
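# Initialize the ApifyClient with your Apify API token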
client = ApifyClient("<YOUR_API_TOKEN>")
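# Prepare the Actor input: crawl the target page with the adaptive Playwright
# crawler and strip boilerplate elements (nav, footer, dialogs, banners, ...)
# from the extracted content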
run_input = {
9 "startUrls": [{ "url": "https://docs.apify.com/academy/scraping-basics-javascript" }],
10 "crawlerType": "playwright:adaptive",
11 "includeUrlGlobs": [],
12 "excludeUrlGlobs": [],
13 "useSitemaps": False,
14 "useLlmsTxt": False,
15 "respectRobotsTxtFile": True,
16 "proxyConfiguration": { "useApifyProxy": True },
17 "initialCookies": [],
18 "customHttpHeaders": {},
19 "signHttpRequests": False,
20 "blockMedia": True,
21 "clickElementsCssSelector": "[aria-expanded=\"false\"]",
22 "keepElementsCssSelector": "",
23 "removeElementsCssSelector": """nav, footer, script, style, noscript, svg, img[src^='data:'],
24[role=\"alert\"],
25[role=\"banner\"],
26[role=\"dialog\"],
27[role=\"alertdialog\"],
28[role=\"region\"][aria-label*=\"skip\" i],
29[aria-modal=\"true\"]""",
30 "storeSkippedUrls": False,
31}
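# Run the Actor and wait for it to finish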
run = client.actor("apify/website-content-crawler").call(run_input=run_input)
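# Fetch and print Actor results from the run's dataset (if there are any)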
37print("๐พ Check your data here: https://console.apify.com/storage/datasets/" + run["defaultDatasetId"])
for item in client.dataset(run["defaultDatasetId"]).iterate_items():
    print(item)