"""Run the Apify "camoufox-scraper" actor against crawlee.dev and print results.

Requires a valid Apify API token in place of the ``<YOUR_API_TOKEN>``
placeholder below. The script starts the actor, waits for the run to
finish, and streams the items from the run's default dataset to stdout.
"""
from apify_client import ApifyClient

# Client for the Apify API; replace the placeholder with a real token.
client = ApifyClient("<YOUR_API_TOKEN>")

# Actor input: crawl https://crawlee.dev one level deep, limited to a
# single request, following only links that match the crawlee.dev pattern.
run_input = {
    "startUrls": [{"url": "https://crawlee.dev"}],
    "maxCrawlingDepth": 1,
    "maxRequestsPerCrawl": 1,
    "requestTimeout": 30,
    "linkSelector": "a[href]",
    "linkPatterns": [".*crawlee\\.dev.*"],
    # Python source executed by the actor for every crawled page; it
    # extracts the page URL and <title> text.
    "pageFunction": """from typing import Any
from crawlee.crawlers import PlaywrightCrawlingContext

async def page_function(context: PlaywrightCrawlingContext) -> Any:
    url = context.request.url
    title = await context.page.locator("title").first.inner_text()
    return {'url': url, 'title': title}
""",
    "proxyConfiguration": {"useApifyProxy": True},
}

# Start the actor and block until the run finishes.
run = client.actor("josef.prochazka/camoufox-scraper").call(run_input=run_input)
# ActorClient.call() returns None when the run could not be started or
# did not finish — fail loudly instead of raising an opaque TypeError below.
if run is None:
    raise RuntimeError("Actor run failed or did not finish.")

# Print a link to the dataset in the Apify console, then stream the items.
print("💾 Check your data here: https://console.apify.com/storage/datasets/" + run["defaultDatasetId"])
for item in client.dataset(run["defaultDatasetId"]).iterate_items():
    print(item)