
Chroma Integration
This integration transfers data from Apify Actors to a Chroma vector database and is a good starting point for question-answering, search, or RAG use cases.
You can access the Chroma Integration programmatically from your own applications through the Apify API. To use the Apify API, you need an Apify account and your API token, which you can find under Integrations settings in Apify Console.
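As a minimal sketch of such a call, the following Python snippet starts a run of this Actor through the /acts/apify~chroma-integration/runs endpoint described in the OpenAPI definition below, using the requests library. The host, keys, and dataset ID are placeholder assumptions that you would replace with your own values.

# Minimal sketch: start a run of the Chroma Integration Actor via the Apify API.
# All values below (host, keys, dataset ID) are placeholders, not real credentials.
import requests

APIFY_TOKEN = "<YOUR_APIFY_TOKEN>"  # from Integrations settings in Apify Console

run_input = {
    # Required fields from the input schema below
    "chromaClientHost": "chroma.example.com",      # assumed host of your Chroma server
    "embeddingsProvider": "OpenAI",
    "embeddingsApiKey": "<YOUR_OPENAI_API_KEY>",
    "datasetFields": ["text"],
    # Optional: dataset to read from when running standalone (without integration)
    "datasetId": "<DATASET_ID>",
}

response = requests.post(
    "https://api.apify.com/v2/acts/apify~chroma-integration/runs",
    params={"token": APIFY_TOKEN},
    json=run_input,
    timeout=60,
)
response.raise_for_status()
# The response follows runsResponseSchema; data.status is e.g. "READY" while the run starts.
print(response.json()["data"]["status"])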
{
  "openapi": "3.0.1",
  "info": {
    "version": "0.0",
    "x-build-id": "Pghqfo4R51eB6OUdV"
  },
  "servers": [
    {
      "url": "https://api.apify.com/v2"
    }
  ],
  "paths": {
    "/acts/apify~chroma-integration/run-sync-get-dataset-items": {
      "post": {
        "operationId": "run-sync-get-dataset-items-apify-chroma-integration",
        "x-openai-isConsequential": false,
        "summary": "Executes an Actor, waits for its completion, and returns Actor's dataset items in response.",
        "tags": [
          "Run Actor"
        ],
        "requestBody": {
          "required": true,
          "content": {
            "application/json": {
              "schema": {
                "$ref": "#/components/schemas/inputSchema"
              }
            }
          }
        },
        "parameters": [
          {
            "name": "token",
            "in": "query",
            "required": true,
            "schema": {
              "type": "string"
            },
            "description": "Enter your Apify token here"
          }
        ],
        "responses": {
          "200": {
            "description": "OK"
          }
        }
      }
    },
    "/acts/apify~chroma-integration/runs": {
      "post": {
        "operationId": "runs-sync-apify-chroma-integration",
        "x-openai-isConsequential": false,
        "summary": "Executes an Actor and returns information about the initiated run in response.",
        "tags": [
          "Run Actor"
        ],
        "requestBody": {
          "required": true,
          "content": {
            "application/json": {
              "schema": {
                "$ref": "#/components/schemas/inputSchema"
              }
            }
          }
        },
        "parameters": [
          {
            "name": "token",
            "in": "query",
            "required": true,
            "schema": {
              "type": "string"
            },
            "description": "Enter your Apify token here"
          }
        ],
        "responses": {
          "200": {
            "description": "OK",
            "content": {
              "application/json": {
                "schema": {
                  "$ref": "#/components/schemas/runsResponseSchema"
                }
              }
            }
          }
        }
      }
    },
    "/acts/apify~chroma-integration/run-sync": {
      "post": {
        "operationId": "run-sync-apify-chroma-integration",
        "x-openai-isConsequential": false,
        "summary": "Executes an Actor, waits for completion, and returns the OUTPUT from Key-value store in response.",
        "tags": [
          "Run Actor"
        ],
        "requestBody": {
          "required": true,
          "content": {
            "application/json": {
              "schema": {
                "$ref": "#/components/schemas/inputSchema"
              }
            }
          }
        },
        "parameters": [
          {
            "name": "token",
            "in": "query",
            "required": true,
            "schema": {
              "type": "string"
            },
            "description": "Enter your Apify token here"
          }
        ],
        "responses": {
          "200": {
            "description": "OK"
          }
        }
      }
    }
  },
  "components": {
    "schemas": {
      "inputSchema": {
        "type": "object",
        "required": [
          "chromaClientHost",
          "embeddingsProvider",
          "embeddingsApiKey",
          "datasetFields"
        ],
        "properties": {
          "chromaCollectionName": {
            "title": "Chroma collection name",
            "type": "string",
            "description": "Name of the chroma collection where the data will be stored",
            "default": "chroma"
          },
          "chromaClientHost": {
            "title": "Chroma host",
            "type": "string",
            "description": "Host argument for Chroma HTTP Client"
          },
          "chromaClientPort": {
            "title": "Chroma port",
            "type": "integer",
            "description": "Port argument for Chroma HTTP Client",
            "default": 8000
          },
          "chromaClientSsl": {
            "title": "Chroma SSL enabled",
            "type": "boolean",
            "description": "Enable/Disable SSL",
            "default": false
          },
          "chromaServerAuthCredentials": {
            "title": "Chroma server Auth Static API token credentials",
            "type": "string",
            "description": "Chroma server Auth Static API token."
          },
          "chromaClientAuthProvider": {
            "title": "Chroma client auth provider",
            "type": "string",
            "description": "Chroma client auth provider",
            "default": "chromadb.auth.token_authn.TokenAuthClientProvider"
          },
          "embeddingsProvider": {
            "title": "Embeddings provider (as defined in the langchain API)",
            "enum": [
              "OpenAI",
              "Cohere"
            ],
            "type": "string",
            "description": "Choose the embeddings provider to use for generating embeddings",
            "default": "OpenAI"
          },
          "embeddingsConfig": {
            "title": "Configuration for embeddings provider",
            "type": "object",
            "description": "Configure the parameters for the LangChain embedding class. Key points to consider:\n\n1. Typically, you only need to specify the model name. For example, for OpenAI, set the model name as {\"model\": \"text-embedding-3-small\"}.\n\n2. It's crucial to ensure that the vector size of your embeddings matches the size of embeddings in the database.\n\n3. Here are some examples of embedding models:\n - [OpenAI](https://platform.openai.com/docs/guides/embeddings): `text-embedding-3-small`, `text-embedding-3-large`, etc.\n - [Cohere](https://docs.cohere.com/docs/cohere-embed): `embed-english-v3.0`, `embed-multilingual-light-v3.0`, etc.\n\n4. For more details about other parameters, refer to the [LangChain documentation](https://python.langchain.com/v0.2/docs/integrations/text_embedding/)."
          },
          "embeddingsApiKey": {
            "title": "Embeddings API KEY (whenever applicable, depends on provider)",
            "type": "string",
            "description": "Value of the API KEY for the embeddings provider (if required).\n\n For example for OpenAI it is OPENAI_API_KEY, for Cohere it is COHERE_API_KEY)"
          },
          "datasetFields": {
            "title": "Dataset fields to select from the dataset results and store in the database",
            "type": "array",
            "description": "This array specifies the dataset fields to be selected and stored in the vector store. Only the fields listed here will be included in the vector store.\n\nFor instance, when using the Website Content Crawler, you might choose to include fields such as `text`, `url`, and `metadata.title` in the vector store.",
            "default": [
              "text"
            ],
            "items": {
              "type": "string"
            }
          },
          "metadataDatasetFields": {
            "title": "Dataset fields to select from the dataset and store as metadata in the database",
            "type": "object",
            "description": "A list of dataset fields which should be selected from the dataset and stored as metadata in the vector stores.\n\nFor example, when using the Website Content Crawler, you might want to store `url` in metadata. In this case, use `metadataDatasetFields parameter as follows {\"url\": \"url\"}`"
          },
          "metadataObject": {
            "title": "Custom object to be stored as metadata in the vector store database",
            "type": "object",
            "description": "This object allows you to store custom metadata for every item in the vector store.\n\nFor example, if you want to store the `domain` as metadata, use the `metadataObject` like this: {\"domain\": \"apify.com\"}."
          },
          "datasetId": {
            "title": "Dataset ID",
            "type": "string",
            "description": "Dataset ID (when running standalone without integration)"
          },
          "enableDeltaUpdates": {
            "title": "Enable incremental updates for objects based on deltas",
            "type": "boolean",
            "description": "When set to true, this setting enables incremental updates for objects in the database by comparing the changes (deltas) between the crawled dataset items and the existing objects, uniquely identified by the `datasetKeysToItemId` field.\n\n The integration will only add new objects and update those that have changed, reducing unnecessary updates. The `datasetFields`, `metadataDatasetFields`, and `metadataObject` fields are used to determine the changes.",
            "default": true
          },
          "deltaUpdatesPrimaryDatasetFields": {
            "title": "Dataset fields to uniquely identify dataset items (only relevant when `enableDeltaUpdates` is enabled)",
            "type": "array",
            "description": "This array contains fields that are used to uniquely identify dataset items, which helps to handle content changes across different runs.\n\nFor instance, in a web content crawling scenario, the `url` field could serve as a unique identifier for each item.",
            "default": [
              "url"
            ],
            "items": {
              "type": "string"
            }
          },
          "deleteExpiredObjects": {
            "title": "Delete expired objects from the database",
            "type": "boolean",
            "description": "When set to true, delete objects from the database that have not been crawled for a specified period.",
            "default": true
          },
          "expiredObjectDeletionPeriodDays": {
            "title": "Delete expired objects from the database after a specified number of days",
            "minimum": 0,
            "type": "integer",
            "description": "This setting allows the integration to manage the deletion of objects from the database that have not been crawled for a specified period. It is typically used in subsequent runs after the initial crawl.\n\nWhen the value is greater than 0, the integration checks if objects have been seen within the last X days (determined by the expiration period). If the objects are expired, they are deleted from the database. The specific value for `deletedExpiredObjectsDays` depends on your use case and how frequently you crawl data.\n\nFor example, if you crawl data daily, you can set `deletedExpiredObjectsDays` to 7 days. If you crawl data weekly, you can set `deletedExpiredObjectsDays` to 30 days.",
            "default": 30
          },
          "performChunking": {
            "title": "Enable text chunking",
            "type": "boolean",
            "description": "When set to true, the text will be divided into smaller chunks based on the settings provided below. Proper chunking helps optimize retrieval and ensures accurate and efficient responses.",
            "default": true
          },
          "chunkSize": {
            "title": "Maximum chunk size",
            "minimum": 1,
            "type": "integer",
            "description": "Defines the maximum number of characters in each text chunk. Choosing the right size balances between detailed context and system performance. Optimal sizes ensure high relevancy and minimal response time.",
            "default": 2000
          },
          "chunkOverlap": {
            "title": "Chunk overlap",
            "minimum": 0,
            "type": "integer",
            "description": "Specifies the number of overlapping characters between consecutive text chunks. Adjusting this helps maintain context across chunks, which is crucial for accuracy in retrieval-augmented generation systems.",
            "default": 0
          }
        }
      },
      "runsResponseSchema": {
        "type": "object",
        "properties": {
          "data": {
            "type": "object",
            "properties": {
              "id": {
                "type": "string"
              },
              "actId": {
                "type": "string"
              },
              "userId": {
                "type": "string"
              },
              "startedAt": {
                "type": "string",
                "format": "date-time",
                "example": "2025-01-08T00:00:00.000Z"
              },
              "finishedAt": {
                "type": "string",
                "format": "date-time",
                "example": "2025-01-08T00:00:00.000Z"
              },
              "status": {
                "type": "string",
                "example": "READY"
              },
              "meta": {
                "type": "object",
                "properties": {
                  "origin": {
                    "type": "string",
                    "example": "API"
                  },
                  "userAgent": {
                    "type": "string"
                  }
                }
              },
              "stats": {
                "type": "object",
                "properties": {
                  "inputBodyLen": {
                    "type": "integer",
                    "example": 2000
                  },
                  "rebootCount": {
                    "type": "integer",
                    "example": 0
                  },
                  "restartCount": {
                    "type": "integer",
                    "example": 0
                  },
                  "resurrectCount": {
                    "type": "integer",
                    "example": 0
                  },
                  "computeUnits": {
                    "type": "integer",
                    "example": 0
                  }
                }
              },
              "options": {
                "type": "object",
                "properties": {
                  "build": {
                    "type": "string",
                    "example": "latest"
                  },
                  "timeoutSecs": {
                    "type": "integer",
                    "example": 300
                  },
                  "memoryMbytes": {
                    "type": "integer",
                    "example": 1024
                  },
                  "diskMbytes": {
                    "type": "integer",
                    "example": 2048
                  }
                }
              },
              "buildId": {
                "type": "string"
              },
              "defaultKeyValueStoreId": {
                "type": "string"
              },
              "defaultDatasetId": {
                "type": "string"
              },
              "defaultRequestQueueId": {
                "type": "string"
              },
              "buildNumber": {
                "type": "string",
                "example": "1.0.0"
              },
              "containerUrl": {
                "type": "string"
              },
              "usage": {
                "type": "object",
                "properties": {
                  "ACTOR_COMPUTE_UNITS": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATASET_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATASET_WRITES": {
                    "type": "integer",
                    "example": 0
                  },
                  "KEY_VALUE_STORE_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "KEY_VALUE_STORE_WRITES": {
                    "type": "integer",
                    "example": 1
                  },
                  "KEY_VALUE_STORE_LISTS": {
                    "type": "integer",
                    "example": 0
                  },
                  "REQUEST_QUEUE_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "REQUEST_QUEUE_WRITES": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATA_TRANSFER_INTERNAL_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATA_TRANSFER_EXTERNAL_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "PROXY_RESIDENTIAL_TRANSFER_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "PROXY_SERPS": {
                    "type": "integer",
                    "example": 0
                  }
                }
              },
              "usageTotalUsd": {
                "type": "number",
                "example": 0.00005
              },
              "usageUsd": {
                "type": "object",
                "properties": {
                  "ACTOR_COMPUTE_UNITS": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATASET_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATASET_WRITES": {
                    "type": "integer",
                    "example": 0
                  },
                  "KEY_VALUE_STORE_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "KEY_VALUE_STORE_WRITES": {
                    "type": "number",
                    "example": 0.00005
                  },
                  "KEY_VALUE_STORE_LISTS": {
                    "type": "integer",
                    "example": 0
                  },
                  "REQUEST_QUEUE_READS": {
                    "type": "integer",
                    "example": 0
                  },
                  "REQUEST_QUEUE_WRITES": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATA_TRANSFER_INTERNAL_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "DATA_TRANSFER_EXTERNAL_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "PROXY_RESIDENTIAL_TRANSFER_GBYTES": {
                    "type": "integer",
                    "example": 0
                  },
                  "PROXY_SERPS": {
                    "type": "integer",
                    "example": 0
                  }
                }
              }
            }
          }
        }
      }
    }
  }
}
Chroma Integration OpenAPI definition
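To illustrate the input schema above, here is a hypothetical, more complete input object that enables text chunking, incremental (delta) updates, and custom metadata. The host, collection name, API key, and field names are assumptions modeled on the Website Content Crawler example mentioned in the field descriptions, not values required by the integration.

# Hypothetical full input for the Chroma Integration, based on the schema above.
# Host, collection name, keys, and field names are illustrative assumptions only.
run_input = {
    "chromaClientHost": "chroma.example.com",
    "chromaClientPort": 8000,
    "chromaClientSsl": False,
    "chromaCollectionName": "chroma",
    "embeddingsProvider": "OpenAI",
    "embeddingsApiKey": "<YOUR_OPENAI_API_KEY>",
    "embeddingsConfig": {"model": "text-embedding-3-small"},
    # Fields taken from the crawled dataset (Website Content Crawler example)
    "datasetFields": ["text"],
    "metadataDatasetFields": {"url": "url"},
    "metadataObject": {"domain": "apify.com"},
    # Incremental updates keyed on each item's URL; expired items removed after 30 days
    "enableDeltaUpdates": True,
    "deltaUpdatesPrimaryDatasetFields": ["url"],
    "deleteExpiredObjects": True,
    "expiredObjectDeletionPeriodDays": 30,
    # Text chunking settings
    "performChunking": True,
    "chunkSize": 2000,
    "chunkOverlap": 0,
}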
OpenAPI is a standard for designing and describing RESTful APIs, allowing developers to define API structure, endpoints, and data formats in a machine-readable way. It simplifies API development, integration, and documentation.
OpenAPI works well with AI agents and GPTs because it standardizes how these systems interact with APIs, enabling reliable integrations and efficient communication.
By defining machine-readable API specifications, OpenAPI allows AI models like GPTs to understand and use varied data sources, improving accuracy. This accelerates development, reduces errors, and provides context-aware responses, making OpenAPI a core component for AI applications.
You can download the OpenAPI definitions for Chroma Integration from the options below:
If you’d like to learn more about how OpenAPI powers GPTs, read our blog post.
You can also check out our other API clients.