"""Run the Apify `website-content-crawler` Actor and print the scraped items.

Starts a crawl of the Apify Academy beginner tutorial, waits for the Actor
run to finish, then streams every dataset item to stdout.

Requires a valid Apify API token in place of "<YOUR_API_TOKEN>".
"""
from apify_client import ApifyClient

# Client for the Apify platform API, authenticated with your personal token.
client = ApifyClient("<YOUR_API_TOKEN>")

# Input configuration for the apify/website-content-crawler Actor.
run_input = {
    "startUrls": [{ "url": "https://docs.apify.com/academy/web-scraping-for-beginners" }],
    "useSitemaps": False,
    "respectRobotsTxtFile": True,
    # Adaptive mode lets the crawler pick between browser and plain-HTTP crawling.
    "crawlerType": "playwright:adaptive",
    "includeUrlGlobs": [],
    "excludeUrlGlobs": [],
    "initialCookies": [],
    "customHttpHeaders": {},
    "signHttpRequests": False,
    "proxyConfiguration": { "useApifyProxy": True },
    "keepElementsCssSelector": "",
    # Strip navigation, scripts, and ARIA boilerplate so only the main page
    # content ends up in the dataset.
    "removeElementsCssSelector": """nav, footer, script, style, noscript, svg, img[src^='data:'],
[role=\"alert\"],
[role=\"banner\"],
[role=\"dialog\"],
[role=\"alertdialog\"],
[role=\"region\"][aria-label*=\"skip\" i],
[aria-modal=\"true\"]""",
    "blockMedia": True,
    # Expand collapsed sections (accordions, menus) before extraction.
    "clickElementsCssSelector": "[aria-expanded=\"false\"]",
    "storeSkippedUrls": False,
}

# Start the Actor run and block until it finishes. `.call()` returns None if
# the wait for the run to finish times out, so fail loudly instead of
# crashing later with an opaque TypeError on `run["defaultDatasetId"]`.
run = client.actor("apify/website-content-crawler").call(run_input=run_input)
if run is None:
    raise RuntimeError("Actor run did not finish: call() returned None (wait timed out).")

# NOTE(review): fixed mojibake — the original printed a mis-encoded character
# ("๐พ") where the 💾 emoji was intended.
print(f"💾 Check your data here: https://console.apify.com/storage/datasets/{run['defaultDatasetId']}")
for item in client.dataset(run["defaultDatasetId"]).iterate_items():
    print(item)