
S3 Uploader
Upload data from an Apify dataset to an Amazon S3 bucket. Providing various filters and transformation options, this Actor allows precise control over data structure, formatting, and upload settings to ensure seamless integration into your data pipeline.
Rating: 0.0 (0)
Pricing: Pay per usage
Monthly users: 0
Last modified: 6 days ago
You can access the S3 Uploader programmatically from your own applications by using the Apify API. To use the Apify API, you'll need an Apify account and your API token, which you can find under Integrations settings in Apify Console.
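For example, here is a minimal sketch of starting an S3 Uploader run through the /acts/apify~s3-uploader/runs endpoint described below, written in Python with the requests library. The token, AWS credentials, bucket, dataset ID, and object key are placeholder values you must replace with your own.

import requests

APIFY_TOKEN = "<YOUR_APIFY_TOKEN>"  # placeholder: your API token from Apify Console -> Integrations

# Input matching the required fields of the Actor's input schema shown below.
# All values here are placeholders for illustration only.
run_input = {
    "accessKeyId": "<AWS_ACCESS_KEY_ID>",
    "secretAccessKey": "<AWS_SECRET_ACCESS_KEY>",
    "region": "us-east-1",
    "bucket": "my-example-bucket",
    "datasetId": "<APIFY_DATASET_ID>",
    "key": "exports/dataset.json",
    "format": "json",  # optional, defaults to "json"
    "clean": True,     # optional, defaults to true
}

# Start an S3 Uploader run; the response describes the initiated run.
response = requests.post(
    "https://api.apify.com/v2/acts/apify~s3-uploader/runs",
    params={"token": APIFY_TOKEN},
    json=run_input,
    timeout=60,
)
response.raise_for_status()
run = response.json()["data"]
print(run["id"], run["status"])

The same input body works with the synchronous endpoints (run-sync and run-sync-get-dataset-items) if you prefer to wait for the run to finish within a single request.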
{
  "openapi": "3.0.1",
  "info": {
    "version": "0.0",
    "x-build-id": "UkWwyMJI88NAoH8Kx"
  },
  "servers": [
    {
      "url": "https://api.apify.com/v2"
    }
  ],
  "paths": {
    "/acts/apify~s3-uploader/run-sync-get-dataset-items": {
      "post": {
        "operationId": "run-sync-get-dataset-items-apify-s3-uploader",
        "x-openai-isConsequential": false,
        "summary": "Executes an Actor, waits for its completion, and returns Actor's dataset items in response.",
        "tags": [
          "Run Actor"
        ],
        "requestBody": {
          "required": true,
          "content": {
            "application/json": {
              "schema": {
                "$ref": "#/components/schemas/inputSchema"
              }
            }
          }
        },
        "parameters": [
          {
            "name": "token",
            "in": "query",
            "required": true,
            "schema": {
              "type": "string"
            },
            "description": "Enter your Apify token here"
          }
        ],
        "responses": {
          "200": {
            "description": "OK"
          }
        }
      }
    },
    "/acts/apify~s3-uploader/runs": {
      "post": {
        "operationId": "runs-sync-apify-s3-uploader",
        "x-openai-isConsequential": false,
        "summary": "Executes an Actor and returns information about the initiated run in response.",
        "tags": [
          "Run Actor"
        ],
        "requestBody": {
          "required": true,
          "content": {
            "application/json": {
              "schema": {
                "$ref": "#/components/schemas/inputSchema"
              }
            }
          }
        },
        "parameters": [
          {
            "name": "token",
            "in": "query",
            "required": true,
            "schema": {
              "type": "string"
            },
            "description": "Enter your Apify token here"
          }
        ],
        "responses": {
          "200": {
            "description": "OK",
            "content": {
              "application/json": {
                "schema": {
                  "$ref": "#/components/schemas/runsResponseSchema"
                }
              }
            }
          }
        }
      }
    },
    "/acts/apify~s3-uploader/run-sync": {
      "post": {
        "operationId": "run-sync-apify-s3-uploader",
        "x-openai-isConsequential": false,
        "summary": "Executes an Actor, waits for completion, and returns the OUTPUT from Key-value store in response.",
        "tags": [
          "Run Actor"
        ],
        "requestBody": {
          "required": true,
          "content": {
            "application/json": {
              "schema": {
                "$ref": "#/components/schemas/inputSchema"
              }
            }
          }
        },
        "parameters": [
          {
            "name": "token",
            "in": "query",
            "required": true,
            "schema": {
              "type": "string"
            },
            "description": "Enter your Apify token here"
          }
        ],
        "responses": {
          "200": {
            "description": "OK"
          }
        }
      }
    }
  },
  "components": {
    "schemas": {
      "inputSchema": {
        "type": "object",
        "required": [
          "accessKeyId",
          "secretAccessKey",
          "region",
          "bucket",
          "datasetId",
          "key"
        ],
        "properties": {
          "accessKeyId": {
            "title": "Access key ID",
            "type": "string",
            "description": "Your AWS access key ID used for authorization of the upload. You can get it from *AWS Console* -> *IAM* -> *Users* -> *Create user / Select existing user* -> *Security credentials* -> *Access keys*."
          },
          "secretAccessKey": {
            "title": "Secret access key",
            "type": "string",
            "description": "Your AWS secret access key used for authorization of the upload. You can get it from *AWS Console* -> *IAM* -> *Users* -> *Create user / Select existing user* -> *Security credentials* -> *Access keys* -> *Create access key*. The secret access key will be displayed only once, upon creation of the access key."
          },
          "region": {
            "title": "Region",
            "enum": [
              "af-south-1",
              "ap-east-1",
              "ap-northeast-1",
              "ap-northeast-2",
              "ap-northeast-3",
              "ap-south-1",
              "ap-south-2",
              "ap-southeast-1",
              "ap-southeast-2",
              "ap-southeast-3",
              "ap-southeast-4",
              "ap-southeast-5",
              "ap-southeast-7",
              "ca-central-1",
              "ca-west-1",
              "cn-north-1",
              "cn-northwest-1",
              "eu-central-1",
              "eu-central-2",
              "eu-north-1",
              "eu-south-1",
              "eu-south-2",
              "eu-west-1",
              "eu-west-2",
              "eu-west-3",
              "il-central-1",
              "me-central-1",
              "me-south-1",
              "mx-central-1",
              "sa-east-1",
              "us-east-1",
              "us-east-2",
              "us-gov-east-1",
              "us-gov-west-1",
              "us-west-1",
              "us-west-2"
            ],
            "type": "string",
            "description": "The AWS region where the target S3 bucket is located. You can get it from *AWS Console* -> *S3* -> *Create bucket / Select existing bucket* -> *Properties*."
          },
          "bucket": {
            "title": "Bucket",
            "type": "string",
            "description": "The name of the target S3 bucket."
          },
          "key": {
            "title": "Key",
            "type": "string",
            "description": "The object key, which serves as an identifier for the uploaded data in the S3 bucket. It can include an optional prefix. If an object with the same key already exists, it will be overwritten with the uploaded data."
          },
          "datasetId": {
            "title": "Dataset ID",
            "type": "string",
            "description": "The Apify dataset ID from which data will be retrieved for the upload."
          },
          "format": {
            "title": "Format",
            "enum": [
              "json",
              "jsonl",
              "html",
              "csv",
              "xml",
              "xlsx",
              "rss"
            ],
            "type": "string",
            "description": "The format of the uploaded data.",
            "default": "json"
          },
          "fields": {
            "title": "Select",
            "type": "array",
            "description": "Fields to include in the output. If not specified, all fields will be included."
          },
          "omit": {
            "title": "Omit",
            "type": "array",
            "description": "Fields to exclude from the output."
          },
          "unwind": {
            "title": "Unwind fields",
            "type": "array",
            "description": "Fields to unwind. If the field is an array, every element will become a separate record and merged with the parent object. If the unwound field is an object, it is merged with the parent object. If the unwound field is missing or its value is neither an array nor an object, it cannot be merged with a parent object, and the item gets preserved as is. If you specify multiple fields, they are unwound in the order you specify."
          },
          "flatten": {
            "title": "Flatten fields",
            "type": "array",
            "description": "Fields to transform from nested objects into a flat structure."
          },
          "offset": {
            "title": "Offset",
            "minimum": 0,
            "type": "integer",
            "description": "Number of items to skip from the beginning of the dataset."
          },
          "limit": {
            "title": "Limit",
            "minimum": 1,
            "type": "integer",
            "description": "Maximum number of items to upload."
          },
          "clean": {
            "title": "Clean only",
            "type": "boolean",
            "description": "If enabled, only clean dataset items and their non-hidden fields will be uploaded.",
            "default": true
          }
        }
      },
      "runsResponseSchema": {
        "type": "object",
        "properties": {
          "data": {
            "type": "object",
            "properties": {
              "id": {
                "type": "string"
              },
              "actId": {
                "type": "string"
              },
              "userId": {
                "type": "string"
              },
              "startedAt": {
                "type": "string",
                "format": "date-time",
                "example": "2025-01-08T00:00:00.000Z"
              },
              "finishedAt": {
                "type": "string",
                "format": "date-time",
                "example": "2025-01-08T00:00:00.000Z"
              },
              "status": {
                "type": "string",
                "example": "READY"
              },
              "meta": {
                "type": "object",
                "properties": {
                  "origin": {
                    "type": "string",
                    "example": "API"
                  },
                  "userAgent": {
                    "type": "string"
                  }
                }
              },
              "stats": {
                "type": "object",
                "properties": {
                  "inputBodyLen": {
                    "type": "integer",
                    "example": 2000
                  },
                  "rebootCount": {
                    "type": "integer",
                    "example": 0
                  },
                  "restartCount": {
                    "type": "integer",
                    "example": 0
                  },
                  "resurrectCount": {
                    "type": "integer",
                    "example": 0
                  },
                  "computeUnits": {
                    "type": "integer",
                    "example": 0
                  }
                }
              },
              "options": {
                "type": "object",
                "properties": {
                  "build": {
                    "type": "string",
                    "example": "latest"
                  },
                  "timeoutSecs": {
                    "type": "integer",
                    "example": 300
                  },
                  "memoryMbytes": {
                    "type": "integer",
                    "example": 1024
                  },
                  "diskMbytes": {
                    "type": "integer",
                    "example": 2048
                  }
                }
              },
              "buildId": {
                "type": "string"
              },
              "defaultKeyValueStoreId": {
                "type": "string"
              },
              "defaultDatasetId": {
                "type": "string"
              },
              "defaultRequestQueueId": {
                "type": "string"
              },
              "buildNumber": {
                "type": "string",
                "example": "1.0.0"
              },
              "containerUrl": {
                "type": "string"
              },
              "usage": {
                "type": "object",
                "properties": {
                  "ACTOR_COMPUTE_UNITS": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATASET_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATASET_WRITES": {
                    "type": "integer",
                    "example": 0
                  },
                  "KEY_VALUE_STORE_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "KEY_VALUE_STORE_WRITES": {
                    "type": "integer",
                    "example": 1
                  },
                  "KEY_VALUE_STORE_LISTS": {
                    "type": "integer",
                    "example": 0
                  },
                  "REQUEST_QUEUE_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "REQUEST_QUEUE_WRITES": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATA_TRANSFER_INTERNAL_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATA_TRANSFER_EXTERNAL_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "PROXY_RESIDENTIAL_TRANSFER_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "PROXY_SERPS": {
                    "type": "integer",
                    "example": 0
                  }
                }
              },
              "usageTotalUsd": {
                "type": "number",
                "example": 0.00005
              },
              "usageUsd": {
                "type": "object",
                "properties": {
                  "ACTOR_COMPUTE_UNITS": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATASET_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATASET_WRITES": {
                    "type": "integer",
                    "example": 0
                  },
                  "KEY_VALUE_STORE_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "KEY_VALUE_STORE_WRITES": {
                    "type": "number",
                    "example": 0.00005
                  },
                  "KEY_VALUE_STORE_LISTS": {
                    "type": "integer",
                    "example": 0
                  },
                  "REQUEST_QUEUE_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "REQUEST_QUEUE_WRITES": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATA_TRANSFER_INTERNAL_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATA_TRANSFER_EXTERNAL_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "PROXY_RESIDENTIAL_TRANSFER_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "PROXY_SERPS": {
                    "type": "integer",
                    "example": 0
                  }
                }
              }
            }
          }
        }
      }
    }
  }
}
S3 Uploader OpenAPI definition
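The fields, omit, unwind, and flatten options in the input schema control how dataset items are reshaped before the upload. The following Python sketch only illustrates the unwind semantics described in the schema on a hypothetical item; it is not the Actor's own implementation, and it assumes array elements are objects and drops the unwound field after merging.

# Hypothetical dataset item with an array field "reviews".
item = {"url": "https://example.com", "reviews": [{"stars": 5}, {"stars": 3}]}

def unwind(record, field):
    # Every array element becomes a separate record merged with the parent object;
    # an object value is merged with the parent; anything else is preserved as is.
    value = record.get(field)
    parent = {k: v for k, v in record.items() if k != field}
    if isinstance(value, list):
        return [{**parent, **element} for element in value]
    if isinstance(value, dict):
        return [{**parent, **value}]
    return [record]

print(unwind(item, "reviews"))
# [{'url': 'https://example.com', 'stars': 5}, {'url': 'https://example.com', 'stars': 3}]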
OpenAPI is a standard for designing and describing RESTful APIs, allowing developers to define API structure, endpoints, and data formats in a machine-readable way. It simplifies API development, integration, and documentation.
OpenAPI works well with AI agents and GPTs because it standardizes how these systems interact with APIs, enabling reliable integrations and efficient communication.
By defining machine-readable API specifications, OpenAPI allows AI models like GPTs to understand and use varied data sources, improving accuracy. This accelerates development, reduces errors, and provides context-aware responses, making OpenAPI a core component for AI applications.
You can download the OpenAPI definitions for S3 Uploader from the options below.
If you’d like to learn more about how OpenAPI powers GPTs, read our blog post.
Pricing
Pricing model: Pay per usage
This Actor is paid per platform usage. The Actor is free to use, and you only pay for the Apify platform usage.