1from apify_client import ApifyClient
2
3
4
def main() -> None:
    """Scrape BBC News articles via the Apify universal-article-scraper actor.

    Starts an actor run, waits for it to finish, then prints every item
    from the run's default dataset.
    """
    # NOTE: replace the placeholder with a real Apify API token before running.
    client = ApifyClient("<YOUR_API_TOKEN>")

    run_input = {
        "websites": [{
            "topic": "bbc-news",
            "urls": ["https://www.bbc.com/news"],
            # Only crawl article-style URLs under /news/ ...
            "patterns": ["**/news/**"],
            # ... but skip live-coverage and weather pages.
            "ignoreUrls": [
                "**/live/**",
                "**/weather/**",
            ],
        }],
        "maxRequestsPerCrawl": 100,  # safety cap on crawl size
    }

    # .call() starts the actor run and blocks until it completes.
    run = client.actor("universal_scraping/universal-article-scraper").call(run_input=run_input)

    # Hoist the dataset id: it is needed both for the console link and iteration.
    dataset_id = run["defaultDatasetId"]
    print(f"💾 Check your data here: https://console.apify.com/storage/datasets/{dataset_id}")
    for item in client.dataset(dataset_id).iterate_items():
        print(item)


if __name__ == "__main__":
    main()
28
29