from apify_client import ApifyClient

# Initialize the ApifyClient with your Apify API token
client = ApifyClient("<YOUR_API_TOKEN>")

# Prepare the Actor input: start URLs (profiles, searches, single posts),
# plus search terms, handles, conversation IDs, and filters for the tweets to collect
run_input = {
    "startUrls": [
        "https://x.com/KMbappe",
        "https://x.com/search?q=real%20madrid&src=typed_query",
        "https://x.com/realmadrid/status/1877491794000412879",
    ],
    "searchTerms": [
        "web scraping",
        "scraping from:apify",
    ],
    "twitterHandles": [
        "elonmusk",
        "taylorswift13",
    ],
    "conversationIds": [
        "1754067365707563045",
        "1732037140111102460",
    ],
    "maxItems": 1000,
    "sort": "Latest",
    "tweetLanguage": "en",
    "author": "apify",
    "inReplyTo": "webexpo",
    "mentioning": "elonmusk",
    "geotaggedNear": "Los Angeles",
    "withinRadius": "15km",
    "geocode": "37.7764685,-122.4172004,10km",
    "placeObjectId": "96683cc9126741d1",
    "minimumRetweets": 5,
    "minimumFavorites": 5,
    "minimumReplies": 5,
    "start": "2021-07-01",
    "end": "2021-07-02",
    "customMapFunction": "(object) => { return {...object} }",
}

# Run the Actor and wait for it to finish
run = client.actor("epctex/twitter-scraper").call(run_input=run_input)

# Fetch and print the Actor's results from the run's default dataset
print("💾 Check your data here: https://console.apify.com/storage/datasets/" + run["defaultDatasetId"])
for item in client.dataset(run["defaultDatasetId"]).iterate_items():
    print(item)
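
# Optional (not part of the original example): a minimal sketch of persisting
# the scraped items locally with the standard-library json module.
# The filename "tweets.json" is an arbitrary choice used here for illustration.
import json

items = list(client.dataset(run["defaultDatasetId"]).iterate_items())
with open("tweets.json", "w", encoding="utf-8") as f:
    json.dump(items, f, ensure_ascii=False, indent=2)
print(f"Saved {len(items)} items to tweets.json")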