"""Run the epctex/newsweek-scraper Apify actor and print the scraped items.

Requires the `apify-client` package and a valid Apify API token
(replace the placeholder below before running).
"""
from apify_client import ApifyClient

# Initialize the Apify API client with your personal API token.
client = ApifyClient("<YOUR_API_TOKEN>")

# Actor input: pages/sections to scrape, a search query, and result limits.
# The two *Function entries are JavaScript snippets evaluated by the actor,
# so they are passed as strings.
run_input = {
    "startUrls": [
        "https://www.newsweek.com/ron-desantis-abortion-six-week-bill-2024-1794367",
        "https://www.newsweek.com/topic/climate-change",
        "https://www.newsweek.com/authors/jon-jackson",
        "https://www.newsweek.com/education",
    ],
    "search": "donald trump",
    "maxItems": 20,
    "endPage": 1,
    "extendOutputFunction": "($) => { return {} }",
    "customMapFunction": "(object) => { return {...object} }",
    "proxy": {"useApifyProxy": True},
}

# Start the actor run and block until it finishes; `run` holds run metadata,
# including the id of the dataset the actor wrote its results to.
run = client.actor("epctex/newsweek-scraper").call(run_input=run_input)

# Point the user at the dataset in the Apify console, then stream and print
# every scraped item from that dataset.
print("💾 Check your data here: https://console.apify.com/storage/datasets/" + run["defaultDatasetId"])
for item in client.dataset(run["defaultDatasetId"]).iterate_items():
    print(item)