Twitter/X Video Transcriber & Translator

Developed by ius iyb
Maintained by Community

Effortlessly convert any public Twitter/X video into accurate text, subtitles, or translations with this powerful OpenAI Whisper API actor.

Rating: 0.0 (0 reviews)

Pricing: $3.00/month + usage

Total users: 3

Monthly users: 3

Runs succeeded: 0%

Last modified: 17 days ago

You can access the Twitter/X Video Transcriber & Translator programmatically from your own applications by using the Apify API. To use the Apify API, you'll need an Apify account and your API token, which you can find under Integrations settings in Apify Console.
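For example, here is a minimal Python sketch (using the requests library) that calls the synchronous run-sync-get-dataset-items endpoint documented in the OpenAPI definition below. The Apify token, tweet URL, and OpenAI API key are placeholders you need to replace, and the optional fields shown are just one possible configuration.

import requests

# A minimal sketch of calling the Actor through the Apify API, using the
# run-sync-get-dataset-items endpoint from the OpenAPI definition below.
# APIFY_TOKEN, the tweet URL, and the OpenAI key are placeholders.
APIFY_TOKEN = "<YOUR_APIFY_TOKEN>"

url = (
    "https://api.apify.com/v2/acts/"
    "linen_snack~twitter-x-video-transcriber-translator/"
    "run-sync-get-dataset-items"
)

# Input follows the inputSchema from the definition below; only twitterUrl
# and openaiApiKey are required, the rest are optional.
run_input = {
    "twitterUrl": "https://x.com/<user>/status/<tweet_id>",
    "openaiApiKey": "<YOUR_OPENAI_API_KEY>",
    "task": "transcription",
    "model": "whisper-1",
    "response_format": "srt",
}

response = requests.post(
    url, params={"token": APIFY_TOKEN}, json=run_input, timeout=600
)
response.raise_for_status()

# The endpoint waits for the run to finish and returns the dataset items as JSON.
for item in response.json():
    print(item)

The full OpenAPI definition for the Actor follows: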

{
  "openapi": "3.0.1",
  "info": {
    "version": "0.0",
    "x-build-id": "6QHLZqjujg3kNmv4M"
  },
  "servers": [
    {
      "url": "https://api.apify.com/v2"
    }
  ],
  "paths": {
    "/acts/linen_snack~twitter-x-video-transcriber-translator/run-sync-get-dataset-items": {
      "post": {
        "operationId": "run-sync-get-dataset-items-linen_snack-twitter-x-video-transcriber-translator",
        "x-openai-isConsequential": false,
        "summary": "Executes an Actor, waits for its completion, and returns Actor's dataset items in response.",
        "tags": [
          "Run Actor"
        ],
        "requestBody": {
          "required": true,
          "content": {
            "application/json": {
              "schema": {
                "$ref": "#/components/schemas/inputSchema"
              }
            }
          }
        },
        "parameters": [
          {
            "name": "token",
            "in": "query",
            "required": true,
            "schema": {
              "type": "string"
            },
            "description": "Enter your Apify token here"
          }
        ],
        "responses": {
          "200": {
            "description": "OK"
          }
        }
      }
    },
    "/acts/linen_snack~twitter-x-video-transcriber-translator/runs": {
      "post": {
        "operationId": "runs-sync-linen_snack-twitter-x-video-transcriber-translator",
        "x-openai-isConsequential": false,
        "summary": "Executes an Actor and returns information about the initiated run in response.",
        "tags": [
          "Run Actor"
        ],
        "requestBody": {
          "required": true,
          "content": {
            "application/json": {
              "schema": {
                "$ref": "#/components/schemas/inputSchema"
              }
            }
          }
        },
        "parameters": [
          {
            "name": "token",
            "in": "query",
            "required": true,
            "schema": {
              "type": "string"
            },
            "description": "Enter your Apify token here"
          }
        ],
        "responses": {
          "200": {
            "description": "OK",
            "content": {
              "application/json": {
                "schema": {
                  "$ref": "#/components/schemas/runsResponseSchema"
                }
              }
            }
          }
        }
      }
    },
    "/acts/linen_snack~twitter-x-video-transcriber-translator/run-sync": {
      "post": {
        "operationId": "run-sync-linen_snack-twitter-x-video-transcriber-translator",
        "x-openai-isConsequential": false,
        "summary": "Executes an Actor, waits for completion, and returns the OUTPUT from Key-value store in response.",
        "tags": [
          "Run Actor"
        ],
        "requestBody": {
          "required": true,
          "content": {
            "application/json": {
              "schema": {
                "$ref": "#/components/schemas/inputSchema"
              }
            }
          }
        },
        "parameters": [
          {
            "name": "token",
            "in": "query",
            "required": true,
            "schema": {
              "type": "string"
            },
            "description": "Enter your Apify token here"
          }
        ],
        "responses": {
          "200": {
            "description": "OK"
          }
        }
      }
    }
  },
  "components": {
    "schemas": {
      "inputSchema": {
        "type": "object",
        "required": [
          "twitterUrl",
          "openaiApiKey"
        ],
        "properties": {
          "twitterUrl": {
            "title": "Twitter/x Video URL",
            "type": "string",
            "description": "The URL of the Twitter/x video you want to process."
          },
          "openaiApiKey": {
            "title": "OpenAI API Key",
            "type": "string",
            "description": "Your OpenAI API key. Best practice is to set this as a secret environment variable."
          },
          "task": {
            "title": "Task Type",
            "enum": [
              "transcription",
              "translation"
            ],
            "type": "string",
            "description": "Choose whether to transcribe the audio into its original language or translate it into English."
          },
          "model": {
            "title": "Transcription Model",
            "enum": [
              "whisper-1",
              "gpt-4o-transcribe",
              "gpt-4o-mini-transcribe"
            ],
            "type": "string",
            "description": "Select the model. Note: 'translation' and 'timestamp_granularities' tasks require 'whisper-1'."
          },
          "language": {
            "title": "Language (Optional)",
            "type": "string",
            "description": "The language of the audio in ISO-639-1 format (e.g., 'en', 'es'). Improves accuracy. Required by some models."
          },
          "prompt": {
            "title": "Prompt (Optional)",
            "type": "string",
            "description": "A prompt to guide the model's style or to correct specific words and acronyms."
          },
          "response_format": {
            "title": "Response Format",
            "enum": [
              "json",
              "text",
              "srt",
              "verbose_json",
              "vtt"
            ],
            "type": "string",
            "description": "The output format. Note: 'gpt-4o-*' models only support 'json' or 'text'. Timestamps require 'verbose_json'."
          },
          "temperature": {
            "title": "Temperature (Optional)",
            "type": "string",
            "description": "A value between 0 and 1. Higher values (e.g., 0.8) are more random; lower values (e.g., 0.2) are more focused."
          },
          "timestamp_granularities": {
            "title": "Timestamp Granularities (Optional)",
            "type": "array",
            "description": "Request word or segment-level timestamps. Requires 'whisper-1' model and 'verbose_json' response format.",
            "items": {
              "type": "string",
              "enum": [
                "word",
                "segment"
              ]
            }
          }
        }
      },
      "runsResponseSchema": {
        "type": "object",
        "properties": {
          "data": {
            "type": "object",
            "properties": {
              "id": {
                "type": "string"
              },
              "actId": {
                "type": "string"
              },
              "userId": {
                "type": "string"
              },
              "startedAt": {
                "type": "string",
                "format": "date-time",
                "example": "2025-01-08T00:00:00.000Z"
              },
              "finishedAt": {
                "type": "string",
                "format": "date-time",
                "example": "2025-01-08T00:00:00.000Z"
              },
              "status": {
                "type": "string",
                "example": "READY"
              },
              "meta": {
                "type": "object",
                "properties": {
                  "origin": {
                    "type": "string",
                    "example": "API"
                  },
                  "userAgent": {
                    "type": "string"
                  }
                }
              },
              "stats": {
                "type": "object",
                "properties": {
                  "inputBodyLen": {
                    "type": "integer",
                    "example": 2000
                  },
                  "rebootCount": {
                    "type": "integer",
                    "example": 0
                  },
                  "restartCount": {
                    "type": "integer",
                    "example": 0
                  },
                  "resurrectCount": {
                    "type": "integer",
                    "example": 0
                  },
                  "computeUnits": {
                    "type": "integer",
                    "example": 0
                  }
                }
              },
              "options": {
                "type": "object",
                "properties": {
                  "build": {
                    "type": "string",
                    "example": "latest"
                  },
                  "timeoutSecs": {
                    "type": "integer",
                    "example": 300
                  },
                  "memoryMbytes": {
                    "type": "integer",
                    "example": 1024
                  },
                  "diskMbytes": {
                    "type": "integer",
                    "example": 2048
                  }
                }
              },
              "buildId": {
                "type": "string"
              },
              "defaultKeyValueStoreId": {
                "type": "string"
              },
              "defaultDatasetId": {
                "type": "string"
              },
              "defaultRequestQueueId": {
                "type": "string"
              },
              "buildNumber": {
                "type": "string",
                "example": "1.0.0"
              },
              "containerUrl": {
                "type": "string"
              },
              "usage": {
                "type": "object",
                "properties": {
                  "ACTOR_COMPUTE_UNITS": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATASET_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATASET_WRITES": {
                    "type": "integer",
                    "example": 0
                  },
                  "KEY_VALUE_STORE_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "KEY_VALUE_STORE_WRITES": {
                    "type": "integer",
                    "example": 1
                  },
                  "KEY_VALUE_STORE_LISTS": {
                    "type": "integer",
                    "example": 0
                  },
                  "REQUEST_QUEUE_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "REQUEST_QUEUE_WRITES": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATA_TRANSFER_INTERNAL_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATA_TRANSFER_EXTERNAL_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "PROXY_RESIDENTIAL_TRANSFER_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "PROXY_SERPS": {
                    "type": "integer",
                    "example": 0
                  }
                }
              },
              "usageTotalUsd": {
                "type": "number",
                "example": 0.00005
              },
              "usageUsd": {
                "type": "object",
                "properties": {
                  "ACTOR_COMPUTE_UNITS": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATASET_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATASET_WRITES": {
                    "type": "integer",
                    "example": 0
                  },
                  "KEY_VALUE_STORE_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "KEY_VALUE_STORE_WRITES": {
                    "type": "number",
                    "example": 0.00005
                  },
                  "KEY_VALUE_STORE_LISTS": {
                    "type": "integer",
                    "example": 0
                  },
                  "REQUEST_QUEUE_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "REQUEST_QUEUE_WRITES": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATA_TRANSFER_INTERNAL_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATA_TRANSFER_EXTERNAL_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "PROXY_RESIDENTIAL_TRANSFER_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "PROXY_SERPS": {
                    "type": "integer",
                    "example": 0
                  }
                }
              }
            }
          }
        }
      }
    }
  }
}

Twitter/X video transcript using OpenAI API: OpenAPI definition
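For longer videos, where a synchronous call might hit the HTTP timeout, you can instead start a run with the /runs endpoint above and poll it. The sketch below is one way to do that; it assumes the standard Apify run-status and dataset-items endpoints (which belong to the general Apify API rather than the definition shown here), and all credential and URL values are placeholders.

import time
import requests

# A sketch of the asynchronous flow: start a run with the /runs endpoint above,
# poll its status, then read results from the run's default dataset. The
# run-status and dataset-items endpoints are standard Apify API calls that are
# not part of the OpenAPI definition shown on this page.
APIFY_TOKEN = "<YOUR_APIFY_TOKEN>"
BASE = "https://api.apify.com/v2"
ACTOR = "linen_snack~twitter-x-video-transcriber-translator"

run_input = {
    "twitterUrl": "https://x.com/<user>/status/<tweet_id>",
    "openaiApiKey": "<YOUR_OPENAI_API_KEY>",
    "task": "translation",   # translate the audio into English
    "model": "whisper-1",    # the translation task requires whisper-1
}

# Start the run; the response body matches runsResponseSchema above.
started = requests.post(
    f"{BASE}/acts/{ACTOR}/runs",
    params={"token": APIFY_TOKEN},
    json=run_input,
    timeout=60,
).json()["data"]

run_id = started["id"]

# Poll until the run reaches a terminal status.
while True:
    run = requests.get(
        f"{BASE}/actor-runs/{run_id}",
        params={"token": APIFY_TOKEN},
        timeout=60,
    ).json()["data"]
    if run["status"] in ("SUCCEEDED", "FAILED", "ABORTED", "TIMED-OUT"):
        break
    time.sleep(10)

# On success, fetch the transcription items from the run's default dataset.
if run["status"] == "SUCCEEDED":
    items = requests.get(
        f"{BASE}/datasets/{run['defaultDatasetId']}/items",
        params={"token": APIFY_TOKEN},
        timeout=60,
    ).json()
    for item in items:
        print(item)
else:
    print(f"Run finished with status {run['status']}")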

OpenAPI is a standard for designing and describing RESTful APIs, allowing developers to define API structure, endpoints, and data formats in a machine-readable way. It simplifies API development, integration, and documentation.

OpenAPI is particularly effective when used with AI agents and GPTs because it standardizes how these systems interact with APIs, enabling reliable integrations and efficient communication.

By defining machine-readable API specifications, OpenAPI allows AI models like GPTs to understand and use varied data sources, improving accuracy. This accelerates development, reduces errors, and provides context-aware responses, making OpenAPI a core component for AI applications.
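Because the definition above is machine-readable, it can be consumed directly by tooling. As a small illustration, the Python sketch below assumes the definition has been saved locally as openapi.json (a hypothetical filename) and lists the Actor's endpoints and input fields exactly as an AI agent or code generator would discover them.

import json

# Load a locally saved copy of the OpenAPI definition shown above.
# The filename "openapi.json" is an assumption; use whatever name you saved it under.
with open("openapi.json", "r", encoding="utf-8") as f:
    spec = json.load(f)

print(f"OpenAPI version: {spec['openapi']}")
print(f"Base URL: {spec['servers'][0]['url']}")

# Enumerate every endpoint and HTTP method, the same information an AI agent
# or code generator would read from the spec.
for path, methods in spec["paths"].items():
    for method, operation in methods.items():
        print(f"{method.upper()} {path}")
        print(f"  operationId: {operation['operationId']}")
        print(f"  summary: {operation['summary']}")

# List the input fields the Actor accepts, along with which ones are required.
input_schema = spec["components"]["schemas"]["inputSchema"]
required = set(input_schema.get("required", []))
for name, prop in input_schema["properties"].items():
    marker = "required" if name in required else "optional"
    print(f"{name} ({marker}): {prop['description']}")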

You can download the OpenAPI definitions for Twitter/X Video Transcriber & Translator from the options below:

If you’d like to learn more about how OpenAPI powers GPTs, read our blog post.

You can also check out our other API clients: