import os

from apify_client import ApifyClient
2
3
4
5client = ApifyClient("<YOUR_API_TOKEN>")
6
7
8run_input = {
9 "startUrls": [
10 "https://twitter.com/apify",
11 "https://twitter.com/search?q=apify%20&src=typed_query",
12 "https://twitter.com/i/lists/78783491",
13 "https://twitter.com/elonmusk/with_replies",
14 ],
15 "searchTerms": [
16 "web scraping",
17 "scraping from:apify",
18 ],
19 "twitterHandles": [
20 "elonmusk",
21 "taylorswift13",
22 ],
23 "conversationIds": [
24 "1754067365707563045",
25 "1732037140111102460",
26 ],
27 "maxItems": 1000,
28 "sort": "Latest",
29 "tweetLanguage": "en",
30 "author": "apify",
31 "inReplyTo": "webexpo",
32 "mentioning": "elonmusk",
33 "geotaggedNear": "Los Angeles",
34 "withinRadius": "15km",
35 "geocode": "37.7764685,-122.4172004,10km",
36 "placeObjectId": "96683cc9126741d1",
37 "minimumRetweets": 5,
38 "minimumFavorites": 5,
39 "minimumReplies": 5,
40 "start": "2021-07-01",
41 "end": "2021-07-02",
42 "customMapFunction": "(object) => { return {...object} }",
43}
44
45
46run = client.actor("apidojo/tweet-scraper").call(run_input=run_input)
47
48
49print("💾 Check your data here: https://console.apify.com/storage/datasets/" + run["defaultDatasetId"])
50for item in client.dataset(run["defaultDatasetId"]).iterate_items():
51 print(item)
52
53