Ultimate Reddit Profile Scraper avatar

Ultimate Reddit Profile Scraper

Try for free

1 day trial then $25.00/month - No credit card required now

Go to Store
Ultimate Reddit Profile Scraper

Ultimate Reddit Profile Scraper

potatopeeler/reddit-scraper
Try for free

1 day trial then $25.00/month - No credit card required now

Seamlessly download full Reddit user accounts, capturing posts, images, activity, and historical data, including URLs and media comments. Export detailed insights to CSV, JSON, XML, or Excel formats, or effortlessly import them into your email for comprehensive analysis and easy access.

You can access the Ultimate Reddit Profile Scraper programmatically from your own applications by using the Apify API. You can choose the language preference from below. To use the Apify API, you’ll need an Apify account and your API token, found in Integrations settings in Apify Console.

1{
2  "openapi": "3.0.1",
3  "info": {
4    "version": "1.4",
5    "x-build-id": "seHTzOHRjjU0pQMIW"
6  },
7  "servers": [
8    {
9      "url": "https://api.apify.com/v2"
10    }
11  ],
12  "paths": {
13    "/acts/potatopeeler~reddit-scraper/run-sync-get-dataset-items": {
14      "post": {
15        "operationId": "run-sync-get-dataset-items-potatopeeler-reddit-scraper",
16        "x-openai-isConsequential": false,
17        "summary": "Executes an Actor, waits for its completion, and returns Actor's dataset items in response.",
18        "tags": [
19          "Run Actor"
20        ],
21        "requestBody": {
22          "required": true,
23          "content": {
24            "application/json": {
25              "schema": {
26                "$ref": "#/components/schemas/inputSchema"
27              }
28            }
29          }
30        },
31        "parameters": [
32          {
33            "name": "token",
34            "in": "query",
35            "required": true,
36            "schema": {
37              "type": "string"
38            },
39            "description": "Enter your Apify token here"
40          }
41        ],
42        "responses": {
43          "200": {
44            "description": "OK"
45          }
46        }
47      }
48    },
49    "/acts/potatopeeler~reddit-scraper/runs": {
50      "post": {
51        "operationId": "runs-sync-potatopeeler-reddit-scraper",
52        "x-openai-isConsequential": false,
53        "summary": "Executes an Actor and returns information about the initiated run in response.",
54        "tags": [
55          "Run Actor"
56        ],
57        "requestBody": {
58          "required": true,
59          "content": {
60            "application/json": {
61              "schema": {
62                "$ref": "#/components/schemas/inputSchema"
63              }
64            }
65          }
66        },
67        "parameters": [
68          {
69            "name": "token",
70            "in": "query",
71            "required": true,
72            "schema": {
73              "type": "string"
74            },
75            "description": "Enter your Apify token here"
76          }
77        ],
78        "responses": {
79          "200": {
80            "description": "OK",
81            "content": {
82              "application/json": {
83                "schema": {
84                  "$ref": "#/components/schemas/runsResponseSchema"
85                }
86              }
87            }
88          }
89        }
90      }
91    },
92    "/acts/potatopeeler~reddit-scraper/run-sync": {
93      "post": {
94        "operationId": "run-sync-potatopeeler-reddit-scraper",
95        "x-openai-isConsequential": false,
96        "summary": "Executes an Actor, waits for completion, and returns the OUTPUT from Key-value store in response.",
97        "tags": [
98          "Run Actor"
99        ],
100        "requestBody": {
101          "required": true,
102          "content": {
103            "application/json": {
104              "schema": {
105                "$ref": "#/components/schemas/inputSchema"
106              }
107            }
108          }
109        },
110        "parameters": [
111          {
112            "name": "token",
113            "in": "query",
114            "required": true,
115            "schema": {
116              "type": "string"
117            },
118            "description": "Enter your Apify token here"
119          }
120        ],
121        "responses": {
122          "200": {
123            "description": "OK"
124          }
125        }
126      }
127    }
128  },
129  "components": {
130    "schemas": {
131      "inputSchema": {
132        "type": "object",
133        "required": [
134          "user"
135        ],
136        "properties": {
137          "user": {
138            "title": "Reddit user",
139            "type": "string",
140            "description": "The Reddit user to scrape data from; this input accepts either the username or the user profile URL.",
141            "default": "https://www.reddit.com/user/Mark_Ruffalo"
142          },
143          "scrapeType": {
144            "title": "Download user content type",
145            "enum": [
146              "posts",
147              "comments",
148              "profile"
149            ],
150            "type": "string",
151            "description": "Select the type of data to scrape/download, either posts, comments, or profile information.",
152            "default": "posts"
153          },
154          "filterOptions": {
155            "title": "Filter options",
156            "type": "string",
157            "description": "Advanced settings for the crawler, such as timeout, retry, and other settings."
158          },
159          "limit": {
160            "title": "Max number of posts to retrieve",
161            "enum": [
162              "25",
163              "50",
164              "100",
165              "200",
166              "500",
167              "1000",
168              "2000",
169              "5000",
170              "10000",
171              "all"
172            ],
173            "type": "string",
174            "description": "How many items to retrieve; the crawler will stop after reaching this number, or when there are no more items to retrieve, whichever comes first.",
175            "default": "100"
176          },
177          "sort": {
178            "title": "Sort Options",
179            "enum": [
180              "new",
181              "hot",
182              "top"
183            ],
184            "type": "string",
185            "description": "Select the order of the posts to be returned. The 'new' sort option returns the newest posts, 'hot' returns the posts with the most upvotes, and 'top' returns the posts with the highest score.",
186            "default": "new"
187          },
188          "time": {
189            "title": "Time Period",
190            "enum": [
191              "hour",
192              "day",
193              "week",
194              "month",
195              "year",
196              "all"
197            ],
198            "type": "string",
199            "description": "Only applicable for the 'top' sort option. Select the time period for the top sort option, will be ignored for other sort options, returns the top posts from the selected time period.",
200            "default": "day"
201          },
202          "proxy": {
203            "title": "Proxy Settings",
204            "type": "object",
205            "description": "For best results, it's recommended to use a residential proxy; the next best is a datacenter proxy, and the last option is to use a direct connection or your own proxy.",
206            "default": {
207              "useApifyProxy": true,
208              "apifyProxyGroups": [
209                "RESIDENTIAL"
210              ]
211            }
212          },
213          "advanced": {
214            "title": "Advanced Settings",
215            "type": "string",
216            "description": "Advanced settings for the crawler, such as timeout, retry, and other settings."
217          },
218          "maxRetries": {
219            "title": "Max retries",
220            "type": "integer",
221            "description": "How many retries until the scraper should give up.",
222            "default": 3
223          },
224          "timeout": {
225            "title": "Timeout",
226            "type": "integer",
227            "description": "How long, in seconds, the scraper should wait for a response before giving up.",
228            "default": 10
229          }
230        }
231      },
232      "runsResponseSchema": {
233        "type": "object",
234        "properties": {
235          "data": {
236            "type": "object",
237            "properties": {
238              "id": {
239                "type": "string"
240              },
241              "actId": {
242                "type": "string"
243              },
244              "userId": {
245                "type": "string"
246              },
247              "startedAt": {
248                "type": "string",
249                "format": "date-time",
250                "example": "2025-01-08T00:00:00.000Z"
251              },
252              "finishedAt": {
253                "type": "string",
254                "format": "date-time",
255                "example": "2025-01-08T00:00:00.000Z"
256              },
257              "status": {
258                "type": "string",
259                "example": "READY"
260              },
261              "meta": {
262                "type": "object",
263                "properties": {
264                  "origin": {
265                    "type": "string",
266                    "example": "API"
267                  },
268                  "userAgent": {
269                    "type": "string"
270                  }
271                }
272              },
273              "stats": {
274                "type": "object",
275                "properties": {
276                  "inputBodyLen": {
277                    "type": "integer",
278                    "example": 2000
279                  },
280                  "rebootCount": {
281                    "type": "integer",
282                    "example": 0
283                  },
284                  "restartCount": {
285                    "type": "integer",
286                    "example": 0
287                  },
288                  "resurrectCount": {
289                    "type": "integer",
290                    "example": 0
291                  },
292                  "computeUnits": {
293                    "type": "integer",
294                    "example": 0
295                  }
296                }
297              },
298              "options": {
299                "type": "object",
300                "properties": {
301                  "build": {
302                    "type": "string",
303                    "example": "latest"
304                  },
305                  "timeoutSecs": {
306                    "type": "integer",
307                    "example": 300
308                  },
309                  "memoryMbytes": {
310                    "type": "integer",
311                    "example": 1024
312                  },
313                  "diskMbytes": {
314                    "type": "integer",
315                    "example": 2048
316                  }
317                }
318              },
319              "buildId": {
320                "type": "string"
321              },
322              "defaultKeyValueStoreId": {
323                "type": "string"
324              },
325              "defaultDatasetId": {
326                "type": "string"
327              },
328              "defaultRequestQueueId": {
329                "type": "string"
330              },
331              "buildNumber": {
332                "type": "string",
333                "example": "1.0.0"
334              },
335              "containerUrl": {
336                "type": "string"
337              },
338              "usage": {
339                "type": "object",
340                "properties": {
341                  "ACTOR_COMPUTE_UNITS": {
342                    "type": "integer",
343                    "example": 0
344                  },
345                  "DATASET_READS": {
346                    "type": "integer",
347                    "example": 0
348                  },
349                  "DATASET_WRITES": {
350                    "type": "integer",
351                    "example": 0
352                  },
353                  "KEY_VALUE_STORE_READS": {
354                    "type": "integer",
355                    "example": 0
356                  },
357                  "KEY_VALUE_STORE_WRITES": {
358                    "type": "integer",
359                    "example": 1
360                  },
361                  "KEY_VALUE_STORE_LISTS": {
362                    "type": "integer",
363                    "example": 0
364                  },
365                  "REQUEST_QUEUE_READS": {
366                    "type": "integer",
367                    "example": 0
368                  },
369                  "REQUEST_QUEUE_WRITES": {
370                    "type": "integer",
371                    "example": 0
372                  },
373                  "DATA_TRANSFER_INTERNAL_GBYTES": {
374                    "type": "integer",
375                    "example": 0
376                  },
377                  "DATA_TRANSFER_EXTERNAL_GBYTES": {
378                    "type": "integer",
379                    "example": 0
380                  },
381                  "PROXY_RESIDENTIAL_TRANSFER_GBYTES": {
382                    "type": "integer",
383                    "example": 0
384                  },
385                  "PROXY_SERPS": {
386                    "type": "integer",
387                    "example": 0
388                  }
389                }
390              },
391              "usageTotalUsd": {
392                "type": "number",
393                "example": 0.00005
394              },
395              "usageUsd": {
396                "type": "object",
397                "properties": {
398                  "ACTOR_COMPUTE_UNITS": {
399                    "type": "integer",
400                    "example": 0
401                  },
402                  "DATASET_READS": {
403                    "type": "integer",
404                    "example": 0
405                  },
406                  "DATASET_WRITES": {
407                    "type": "integer",
408                    "example": 0
409                  },
410                  "KEY_VALUE_STORE_READS": {
411                    "type": "integer",
412                    "example": 0
413                  },
414                  "KEY_VALUE_STORE_WRITES": {
415                    "type": "number",
416                    "example": 0.00005
417                  },
418                  "KEY_VALUE_STORE_LISTS": {
419                    "type": "integer",
420                    "example": 0
421                  },
422                  "REQUEST_QUEUE_READS": {
423                    "type": "integer",
424                    "example": 0
425                  },
426                  "REQUEST_QUEUE_WRITES": {
427                    "type": "integer",
428                    "example": 0
429                  },
430                  "DATA_TRANSFER_INTERNAL_GBYTES": {
431                    "type": "integer",
432                    "example": 0
433                  },
434                  "DATA_TRANSFER_EXTERNAL_GBYTES": {
435                    "type": "integer",
436                    "example": 0
437                  },
438                  "PROXY_RESIDENTIAL_TRANSFER_GBYTES": {
439                    "type": "integer",
440                    "example": 0
441                  },
442                  "PROXY_SERPS": {
443                    "type": "integer",
444                    "example": 0
445                  }
446                }
447              }
448            }
449          }
450        }
451      }
452    }
453  }
454}

Ultimate Reddit Profile Scraper OpenAPI definition

OpenAPI is a standard for designing and describing RESTful APIs, allowing developers to define API structure, endpoints, and data formats in a machine-readable way. It simplifies API development, integration, and documentation.

OpenAPI is effective when used with AI agents and GPTs by standardizing how these systems interact with various APIs, for reliable integrations and efficient communication.

By defining machine-readable API specifications, OpenAPI allows AI models like GPTs to understand and use varied data sources, improving accuracy. This accelerates development, reduces errors, and provides context-aware responses, making OpenAPI a core component for AI applications.

You can download the OpenAPI definitions for Ultimate Reddit Profile Scraper from the options below:

If you’d like to learn more about how OpenAPI powers GPTs, read our blog post.

You can also check out our other API clients:

Developer
Maintained by Community

Actor Metrics

  • 17 monthly users

  • 4 stars

  • >99% runs succeeded

  • 10 hours response time

  • Created in Jan 2024

  • Modified 5 months ago

Categories