diff --git a/openapi.yaml b/openapi.yaml index 4446e164..66bad64f 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -2143,103 +2143,101 @@ paths: Custom-Host: [] x-code-samples: - name: List assistants - group: assistants - beta: true - returns: A list of [assistant](https://platform.openai.com/docs/api-reference/assistants/object) objects. - examples: - request: - curl: | - curl "https://api.portkey.ai/v1/assistants?order=desc&limit=20" \ + - lang: curl + source: | + curl "https://api.portkey.ai/v1/assistants?order=desc&limit=20" \ -H "Content-Type: application/json" \ -H "x-portkey-api-key: $PORTKEY_API_KEY" \ -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - my_assistants = client.beta.assistants.list( - order="desc", - limit="20", - ) - print(my_assistants.data) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const myAssistants = await client.beta.assistants.list({ - order: "desc", - limit: "20", - }); - - console.log(myAssistants.data); - } + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: &list_assistants_example | - { - "object": "list", - "data": [ - { - "id": "asst_abc123", - "object": "assistant", - "created_at": 1698982736, - "name": "Coding Tutor", - "description": null, - "model": "gpt-4-turbo", - "instructions": "You are a helpful assistant designed to make me better at coding!", - "tools": [], - "tool_resources": {}, - "metadata": {}, - "top_p": 1.0, - "temperature": 1.0, - "response_format": "auto" - }, - { - "id": "asst_abc456", - "object": "assistant", - "created_at": 1698982718, - "name": "My Assistant", - "description": null, - "model": "gpt-4-turbo", - "instructions": "You are a helpful 
assistant designed to make me better at coding!", - "tools": [], - "tool_resources": {}, - "metadata": {}, - "top_p": 1.0, - "temperature": 1.0, - "response_format": "auto" - }, - { - "id": "asst_abc789", - "object": "assistant", - "created_at": 1698982643, - "name": null, - "description": null, - "model": "gpt-4-turbo", - "instructions": null, - "tools": [], - "tool_resources": {}, - "metadata": {}, - "top_p": 1.0, - "temperature": 1.0, - "response_format": "auto" - } - ], - "first_id": "asst_abc123", - "last_id": "asst_abc789", - "has_more": false - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + my_assistants = client.beta.assistants.list( + order="desc", + limit="20", + ) + print(my_assistants.data) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const myAssistants = await client.beta.assistants.list({ + order: "desc", + limit: "20", + }); + + console.log(myAssistants.data); + } + + main(); + response: &list_assistants_example | + { + "object": "list", + "data": [ + { + "id": "asst_abc123", + "object": "assistant", + "created_at": 1698982736, + "name": "Coding Tutor", + "description": null, + "model": "gpt-4-turbo", + "instructions": "You are a helpful assistant designed to make me better at coding!", + "tools": [], + "tool_resources": {}, + "metadata": {}, + "top_p": 1.0, + "temperature": 1.0, + "response_format": "auto" + }, + { + "id": "asst_abc456", + "object": "assistant", + "created_at": 1698982718, + "name": "My Assistant", + "description": null, + "model": "gpt-4-turbo", + "instructions": "You are a helpful assistant designed to make me better at coding!", + "tools": [], + "tool_resources": {}, + "metadata": {}, + "top_p": 1.0, + "temperature": 1.0, + "response_format": "auto" + }, + { + "id": "asst_abc789", + "object": "assistant", + "created_at": 
1698982643, + "name": null, + "description": null, + "model": "gpt-4-turbo", + "instructions": null, + "tools": [], + "tool_resources": {}, + "metadata": {}, + "top_p": 1.0, + "temperature": 1.0, + "response_format": "auto" + } + ], + "first_id": "asst_abc123", + "last_id": "asst_abc789", + "has_more": false + } + post: operationId: createAssistant tags: @@ -2273,162 +2271,76 @@ paths: Custom-Host: [] x-code-samples: - name: Create assistant - group: assistants - beta: true - returns: An [assistant](https://platform.openai.com/docs/api-reference/assistants/object) object. - examples: - - title: Code Interpreter - request: - curl: | - curl "https://api.portkey.ai/v1/assistants" \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "instructions": "You are a personal math tutor. When asked a question, write and run Python code to answer the question.", - "name": "Math Tutor", - "tools": [{"type": "code_interpreter"}], - "model": "gpt-4-turbo" - }' - - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - my_assistant = client.beta.assistants.create( - instructions="You are a personal math tutor. When asked a question, write and run Python code to answer the question.", - name="Math Tutor", - tools=[{"type": "code_interpreter"}], - model="gpt-4-turbo", - ) - print(my_assistant) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const myAssistant = await client.beta.assistants.create({ - instructions: - "You are a personal math tutor. 
When asked a question, write and run Python code to answer the question.", - name: "Math Tutor", - tools: [{ type: "code_interpreter" }], - model: "gpt-4-turbo", - }); - - console.log(myAssistant); - } + - lang: curl + source: | + curl "https://api.portkey.ai/v1/assistants" \ + -H "Content-Type: application/json" \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" \ + -d '{ + "instructions": "You are a personal math tutor. When asked a question, write and run Python code to answer the question.", + "name": "Math Tutor", + "tools": [{"type": "code_interpreter"}], + "model": "gpt-4-turbo" + }' + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: &create_assistants_example | - { - "id": "asst_abc123", - "object": "assistant", - "created_at": 1698984975, - "name": "Math Tutor", - "description": null, - "model": "gpt-4-turbo", - "instructions": "You are a personal math tutor. 
When asked a question, write and run Python code to answer the question.", - "tools": [ - { - "type": "code_interpreter" - } - ], - "metadata": {}, - "top_p": 1.0, - "temperature": 1.0, - "response_format": "auto" - } - - title: Files - request: - curl: | - curl https://api.portkey.ai/v1/assistants \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "instructions": "You are an HR bot, and you have access to files to answer employee questions about company policies.", - "tools": [{"type": "file_search"}], - "tool_resources": {"file_search": {"vector_store_ids": ["vs_123"]}}, - "model": "gpt-4-turbo" - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - my_assistant = client.beta.assistants.create( - instructions="You are an HR bot, and you have access to files to answer employee questions about company policies.", - name="HR Helper", - tools=[{"type": "file_search"}], - tool_resources={"file_search": {"vector_store_ids": ["vs_123"]}}, - model="gpt-4-turbo" - ) - print(my_assistant) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const myAssistant = await client.beta.assistants.create({ - instructions: - "You are an HR bot, and you have access to files to answer employee questions about company policies.", - name: "HR Helper", - tools: [{ type: "file_search" }], - tool_resources: { - file_search: { - vector_store_ids: ["vs_123"] - } - }, - model: "gpt-4-turbo" - }); + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) - console.log(myAssistant); - } + my_assistant = client.beta.assistants.create( + instructions="You are a personal math tutor. 
When asked a question, write and run Python code to answer the question.", + name="Math Tutor", + tools=[{"type": "code_interpreter"}], + model="gpt-4-turbo", + ) + print(my_assistant) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const myAssistant = await client.beta.assistants.create({ + instructions: + "You are a personal math tutor. When asked a question, write and run Python code to answer the question.", + name: "Math Tutor", + tools: [{ type: "code_interpreter" }], + model: "gpt-4-turbo", + }); + + console.log(myAssistant); + } - main(); - response: | + main(); + response: &create_assistants_example | + { + "id": "asst_abc123", + "object": "assistant", + "created_at": 1698984975, + "name": "Math Tutor", + "description": null, + "model": "gpt-4-turbo", + "instructions": "You are a personal math tutor. When asked a question, write and run Python code to answer the question.", + "tools": [ { - "id": "asst_abc123", - "object": "assistant", - "created_at": 1699009403, - "name": "HR Helper", - "description": null, - "model": "gpt-4-turbo", - "instructions": "You are an HR bot, and you have access to files to answer employee questions about company policies.", - "tools": [ - { - "type": "file_search" - } - ], - "tool_resources": { - "file_search": { - "vector_store_ids": ["vs_123"] - } - }, - "metadata": {}, - "top_p": 1.0, - "temperature": 1.0, - "response_format": "auto" + "type": "code_interpreter" } - + ], + "metadata": {}, + "top_p": 1.0, + "temperature": 1.0, + "response_format": "auto" + } /assistants/{assistant_id}: get: operationId: getAssistant @@ -2464,64 +2376,61 @@ paths: Custom-Host: [] x-code-samples: - name: Retrieve assistant - group: assistants - beta: true - returns: The [assistant](https://platform.openai.com/docs/api-reference/assistants/object) object matching the specified ID. 
- examples: - request: - curl: | - curl https://api.portkey.ai/v1/assistants/asst_abc123 \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - my_assistant = client.beta.assistants.retrieve("asst_abc123") - print(my_assistant) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const myAssistant = await client.beta.assistants.retrieve( - "asst_abc123" - ); - - console.log(myAssistant); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/assistants/asst_abc123 \ + -H "Content-Type: application/json" \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - "id": "asst_abc123", - "object": "assistant", - "created_at": 1699009709, - "name": "HR Helper", - "description": null, - "model": "gpt-4-turbo", - "instructions": "You are an HR bot, and you have access to files to answer employee questions about company policies.", - "tools": [ - { - "type": "file_search" - } - ], - "metadata": {}, - "top_p": 1.0, - "temperature": 1.0, - "response_format": "auto" - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + my_assistant = client.beta.assistants.retrieve("asst_abc123") + print(my_assistant) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const myAssistant = await 
client.beta.assistants.retrieve( + "asst_abc123" + ); + + console.log(myAssistant); + } + + main(); + response: | + { + "id": "asst_abc123", + "object": "assistant", + "created_at": 1699009709, + "name": "HR Helper", + "description": null, + "model": "gpt-4-turbo", + "instructions": "You are an HR bot, and you have access to files to answer employee questions about company policies.", + "tools": [ + { + "type": "file_search" + } + ], + "metadata": {}, + "top_p": 1.0, + "temperature": 1.0, + "response_format": "auto" + } post: operationId: modifyAssistant tags: @@ -2562,65 +2471,62 @@ paths: Custom-Host: [] x-code-samples: - name: Modify assistant - group: assistants - beta: true - returns: The modified [assistant](https://platform.openai.com/docs/api-reference/assistants/object) object. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/assistants/asst_abc123 \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "instructions": "You are an HR bot, and you have access to files to answer employee questions about company policies. Always response with info from either of the files.", - "tools": [{"type": "file_search"}], - "model": "gpt-4-turbo" - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - my_updated_assistant = client.beta.assistants.update( - "asst_abc123", - instructions="You are an HR bot, and you have access to files to answer employee questions about company policies. 
Always response with info from either of the files.", - name="HR Helper", - tools=[{"type": "file_search"}], - model="gpt-4-turbo" - ) - - print(my_updated_assistant) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const myUpdatedAssistant = await client.beta.assistants.update( - "asst_abc123", - { - instructions: - "You are an HR bot, and you have access to files to answer employee questions about company policies. Always response with info from either of the files.", - name: "HR Helper", - tools: [{ type: "file_search" }], - model: "gpt-4-turbo" - } - ); + - lang: curl + source: | + curl https://api.portkey.ai/v1/assistants/asst_abc123 \ + -H "Content-Type: application/json" \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" \ + -d '{ + "instructions": "You are an HR bot, and you have access to files to answer employee questions about company policies. Always response with info from either of the files.", + "tools": [{"type": "file_search"}], + "model": "gpt-4-turbo" + }' + - lang: python + source: | + from portkey_ai import Portkey - console.log(myUpdatedAssistant); - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + my_updated_assistant = client.beta.assistants.update( + "asst_abc123", + instructions="You are an HR bot, and you have access to files to answer employee questions about company policies. 
Always response with info from either of the files.", + name="HR Helper", + tools=[{"type": "file_search"}], + model="gpt-4-turbo" + ) + + print(my_updated_assistant) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); - main(); - response: | + async function main() { + const myUpdatedAssistant = await client.beta.assistants.update( + "asst_abc123", + { + instructions: + "You are an HR bot, and you have access to files to answer employee questions about company policies. Always response with info from either of the files.", + name: "HR Helper", + tools: [{ type: "file_search" }], + model: "gpt-4-turbo" + } + ); + + console.log(myUpdatedAssistant); + } + + main(); + response: | { "id": "asst_123", "object": "assistant", @@ -2678,59 +2584,56 @@ paths: Custom-Host: [] x-code-samples: - name: Delete assistant - group: assistants - beta: true - returns: Deletion status - examples: - request: - curl: | - curl https://api.portkey.ai/v1/assistants/asst_abc123 \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" \ - -X DELETE - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - response = client.beta.assistants.delete("asst_abc123") - print(response) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const response = await client.beta.assistants.del("asst_abc123"); - - console.log(response); - } - main(); - response: | - { - "id": "asst_abc123", - "object": "assistant.deleted", - "deleted": true - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/assistants/asst_abc123 \ + -H "Content-Type: 
application/json" \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" \ + -X DELETE + - lang: python + source: | + from portkey_ai import Portkey - /threads: - post: - operationId: createThread - tags: - - Assistants - summary: Create a thread. - requestBody: - content: - application/json: + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + response = client.beta.assistants.delete("asst_abc123") + print(response) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const response = await client.beta.assistants.del("asst_abc123"); + + console.log(response); + } + main(); + response: | + { + "id": "asst_abc123", + "object": "assistant.deleted", + "deleted": true + } + + /threads: + post: + operationId: createThread + tags: + - Assistants + summary: Create a thread. + requestBody: + content: + application/json: schema: $ref: "#/components/schemas/CreateThreadRequest" responses: @@ -2755,126 +2658,80 @@ paths: Custom-Host: [] x-code-samples: - name: Create thread - group: threads - beta: true - returns: A [thread](https://platform.openai.com/docs/api-reference/threads) object. 
- examples: - - title: Empty - request: - curl: | - curl https://api.portkey.ai/v1/threads \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - empty_thread = client.beta.threads.create() - print(empty_thread) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const emptyThread = await client.beta.threads.create(); - - console.log(emptyThread); - } - - main(); - response: | - { - "id": "thread_abc123", - "object": "thread", - "created_at": 1699012949, - "metadata": {}, - "tool_resources": {} - } - - title: Messages - request: - curl: | - curl https://api.portkey.ai/v1/threads \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "messages": [{ - "role": "user", - "content": "Hello, what is AI?" - }, { - "role": "user", - "content": "How does AI work? Explain it in simple terms." - }] - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - message_thread = client.beta.threads.create( - messages=[ - { - "role": "user", - "content": "Hello, what is AI?" - }, - { - "role": "user", - "content": "How does AI work? Explain it in simple terms." 
- }, - ] - ) + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads \ + -H "Content-Type: application/json" \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" \ + -d '{ + "messages": [{ + "role": "user", + "content": "Hello, what is AI?" + }, { + "role": "user", + "content": "How does AI work? Explain it in simple terms." + }] + }' + - lang: python + source: | + from portkey_ai import Portkey - print(message_thread) - node.js: |- - import Portkey from 'portkey-ai'; + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); + message_thread = client.beta.threads.create( + messages=[ + { + "role": "user", + "content": "Hello, what is AI?" + }, + { + "role": "user", + "content": "How does AI work? Explain it in simple terms." + }, + ] + ) - async function main() { - const messageThread = await client.beta.threads.create({ - messages: [ - { - role: "user", - content: "Hello, what is AI?" - }, - { - role: "user", - content: "How does AI work? Explain it in simple terms.", - }, - ], - }); + print(message_thread) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; - console.log(messageThread); - } + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); - main(); - response: | + async function main() { + const messageThread = await client.beta.threads.create({ + messages: [ { - "id": "thread_abc123", - "object": "thread", - "created_at": 1699014083, - "metadata": {}, - "tool_resources": {} - } + role: "user", + content: "Hello, what is AI?" + }, + { + role: "user", + content: "How does AI work? 
Explain it in simple terms.", + }, + ], + }); + + console.log(messageThread); + } + + main(); + response: | + { + "id": "thread_abc123", + "object": "thread", + "created_at": 1699014083, + "metadata": {}, + "tool_resources": {} + } /threads/{thread_id}: get: @@ -2911,57 +2768,54 @@ paths: Custom-Host: [] x-code-samples: - name: Retrieve thread - group: threads - beta: true - returns: The [thread](https://platform.openai.com/docs/api-reference/threads/object) object matching the specified ID. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123 \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - my_thread = client.beta.threads.retrieve("thread_abc123") - print(my_thread) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const myThread = await client.beta.threads.retrieve( - "thread_abc123" - ); - - console.log(myThread); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123 \ + -H "Content-Type: application/json" \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - "id": "thread_abc123", - "object": "thread", - "created_at": 1699014083, - "metadata": {}, - "tool_resources": { - "code_interpreter": { - "file_ids": [] - } - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + my_thread = client.beta.threads.retrieve("thread_abc123") + print(my_thread) + - lang: javascript + 
source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const myThread = await client.beta.threads.retrieve( + "thread_abc123" + ); + + console.log(myThread); + } + + main(); + response: | + { + "id": "thread_abc123", + "object": "thread", + "created_at": 1699014083, + "metadata": {}, + "tool_resources": { + "code_interpreter": { + "file_ids": [] } + } + } post: operationId: modifyThread tags: @@ -3002,71 +2856,68 @@ paths: Custom-Host: [] x-code-samples: - name: Modify thread - group: threads - beta: true - returns: The modified [thread](https://platform.openai.com/docs/api-reference/threads/object) object matching the specified ID. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123 \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "metadata": { - "modified": "true", - "user": "abc123" - } - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - my_updated_thread = client.beta.threads.update( - "thread_abc123", - metadata={ - "modified": "true", - "user": "abc123" - } - ) - print(my_updated_thread) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const updatedThread = await client.beta.threads.update( - "thread_abc123", - { - metadata: { modified: "true", user: "abc123" }, - } - ); - - console.log(updatedThread); - } - - main(); - response: | - { - "id": "thread_abc123", - "object": "thread", - "created_at": 1699014083, + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123 \ + -H "Content-Type: 
application/json" \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" \ + -d '{ "metadata": { "modified": "true", "user": "abc123" - }, - "tool_resources": {} + } + }' + - lang: python + source: | + from portkey_ai import Portkey + + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + my_updated_thread = client.beta.threads.update( + "thread_abc123", + metadata={ + "modified": "true", + "user": "abc123" + } + ) + print(my_updated_thread) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const updatedThread = await client.beta.threads.update( + "thread_abc123", + { + metadata: { modified: "true", user: "abc123" }, } + ); + + console.log(updatedThread); + } + + main(); + response: | + { + "id": "thread_abc123", + "object": "thread", + "created_at": 1699014083, + "metadata": { + "modified": "true", + "user": "abc123" + }, + "tool_resources": {} + } delete: operationId: deleteThread tags: @@ -3101,49 +2952,46 @@ paths: Custom-Host: [] x-code-samples: - name: Delete thread - group: threads - beta: true - returns: Deletion status - examples: - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123 \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" \ - -X DELETE - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - response = client.beta.threads.delete("thread_abc123") - print(response) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async 
function main() { - const response = await client.beta.threads.del("thread_abc123"); - - console.log(response); - } - main(); - response: | - { - "id": "thread_abc123", - "object": "thread.deleted", - "deleted": true - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123 \ + -H "Content-Type: application/json" \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" \ + -X DELETE + - lang: python + source: | + from portkey_ai import Portkey + + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + response = client.beta.threads.delete("thread_abc123") + print(response) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const response = await client.beta.threads.del("thread_abc123"); + + console.log(response); + } + main(); + response: | + { + "id": "thread_abc123", + "object": "thread.deleted", + "deleted": true + } /threads/{thread_id}/messages: get: @@ -3210,94 +3058,91 @@ paths: Custom-Host: [] x-code-samples: - name: List messages - group: threads - beta: true - returns: A list of [message](https://platform.openai.com/docs/api-reference/messages) objects. 
- examples: - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123/messages \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - thread_messages = client.beta.threads.messages.list("thread_abc123") - print(thread_messages.data) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const threadMessages = await client.beta.threads.messages.list( - "thread_abc123" - ); - - console.log(threadMessages.data); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123/messages \ + -H "Content-Type: application/json" \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - "object": "list", - "data": [ - { - "id": "msg_abc123", - "object": "thread.message", - "created_at": 1699016383, - "assistant_id": null, - "thread_id": "thread_abc123", - "run_id": null, - "role": "user", - "content": [ - { - "type": "text", - "text": { - "value": "How does AI work? 
Explain it in simple terms.", - "annotations": [] - } - } - ], - "attachments": [], - "metadata": {} - }, - { - "id": "msg_abc456", - "object": "thread.message", - "created_at": 1699016383, - "assistant_id": null, - "thread_id": "thread_abc123", - "run_id": null, - "role": "user", - "content": [ - { - "type": "text", - "text": { - "value": "Hello, what is AI?", - "annotations": [] - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + thread_messages = client.beta.threads.messages.list("thread_abc123") + print(thread_messages.data) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const threadMessages = await client.beta.threads.messages.list( + "thread_abc123" + ); + + console.log(threadMessages.data); + } + + main(); + response: | + { + "object": "list", + "data": [ + { + "id": "msg_abc123", + "object": "thread.message", + "created_at": 1699016383, + "assistant_id": null, + "thread_id": "thread_abc123", + "run_id": null, + "role": "user", + "content": [ + { + "type": "text", + "text": { + "value": "How does AI work? 
Explain it in simple terms.", + "annotations": [] } - ], - "attachments": [], - "metadata": {} - } - ], - "first_id": "msg_abc123", - "last_id": "msg_abc456", - "has_more": false - } + } + ], + "attachments": [], + "metadata": {} + }, + { + "id": "msg_abc456", + "object": "thread.message", + "created_at": 1699016383, + "assistant_id": null, + "thread_id": "thread_abc123", + "run_id": null, + "role": "user", + "content": [ + { + "type": "text", + "text": { + "value": "Hello, what is AI?", + "annotations": [] + } + } + ], + "attachments": [], + "metadata": {} + } + ], + "first_id": "msg_abc123", + "last_id": "msg_abc456", + "has_more": false + } post: operationId: createMessage tags: @@ -3338,75 +3183,72 @@ paths: Custom-Host: [] x-code-samples: - name: Create message - group: threads - beta: true - returns: A [message](https://platform.openai.com/docs/api-reference/messages/object) object. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123/messages \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "role": "user", - "content": "How does AI work? Explain it in simple terms." - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - thread_message = client.beta.threads.messages.create( - "thread_abc123", - role="user", - content="How does AI work? Explain it in simple terms.", - ) - print(thread_message) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const threadMessages = await client.beta.threads.messages.create( - "thread_abc123", - { role: "user", content: "How does AI work? Explain it in simple terms." 
} - ); - - console.log(threadMessages); - } - - main(); - response: | - { - "id": "msg_abc123", - "object": "thread.message", - "created_at": 1713226573, - "assistant_id": null, - "thread_id": "thread_abc123", - "run_id": null, + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123/messages \ + -H "Content-Type: application/json" \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" \ + -d '{ "role": "user", - "content": [ - { - "type": "text", - "text": { - "value": "How does AI work? Explain it in simple terms.", - "annotations": [] - } + "content": "How does AI work? Explain it in simple terms." + }' + - lang: python + source: | + from portkey_ai import Portkey + + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + thread_message = client.beta.threads.messages.create( + "thread_abc123", + role="user", + content="How does AI work? Explain it in simple terms.", + ) + print(thread_message) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const threadMessages = await client.beta.threads.messages.create( + "thread_abc123", + { role: "user", content: "How does AI work? Explain it in simple terms." } + ); + + console.log(threadMessages); + } + + main(); + response: | + { + "id": "msg_abc123", + "object": "thread.message", + "created_at": 1713226573, + "assistant_id": null, + "thread_id": "thread_abc123", + "run_id": null, + "role": "user", + "content": [ + { + "type": "text", + "text": { + "value": "How does AI work? 
Explain it in simple terms.", + "annotations": [] } - ], - "attachments": [], - "metadata": {} - } + } + ], + "attachments": [], + "metadata": {} + } /threads/{thread_id}/messages/{message_id}: get: @@ -3449,70 +3291,67 @@ paths: Custom-Host: [] x-code-samples: - name: Retrieve message - group: threads - beta: true - returns: The [message](https://platform.openai.com/docs/api-reference/threads/messages/object) object matching the specified ID. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123/messages/msg_abc123 \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - message = client.beta.threads.messages.retrieve( - message_id="msg_abc123", - thread_id="thread_abc123", - ) - print(message) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const message = await client.beta.threads.messages.retrieve( - "thread_abc123", - "msg_abc123" - ); - - console.log(message); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123/messages/msg_abc123 \ + -H "Content-Type: application/json" \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - "id": "msg_abc123", - "object": "thread.message", - "created_at": 1699017614, - "assistant_id": null, - "thread_id": "thread_abc123", - "run_id": null, - "role": "user", - "content": [ - { - "type": "text", - "text": { - "value": "How does AI work? 
Explain it in simple terms.", - "annotations": [] - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + message = client.beta.threads.messages.retrieve( + message_id="msg_abc123", + thread_id="thread_abc123", + ) + print(message) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const message = await client.beta.threads.messages.retrieve( + "thread_abc123", + "msg_abc123" + ); + + console.log(message); + } + + main(); + response: | + { + "id": "msg_abc123", + "object": "thread.message", + "created_at": 1699017614, + "assistant_id": null, + "thread_id": "thread_abc123", + "run_id": null, + "role": "user", + "content": [ + { + "type": "text", + "text": { + "value": "How does AI work? Explain it in simple terms.", + "annotations": [] } - ], - "attachments": [], - "metadata": {} - } + } + ], + "attachments": [], + "metadata": {} + } post: operationId: modifyMessage tags: @@ -3559,84 +3398,81 @@ paths: Custom-Host: [] x-code-samples: - name: Modify message - group: threads - beta: true - returns: The modified [message](https://platform.openai.com/docs/api-reference/threads/messages/object) object. 
- examples: - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123/messages/msg_abc123 \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "metadata": { - "modified": "true", - "user": "abc123" - } - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - message = client.beta.threads.messages.update( - message_id="msg_abc12", - thread_id="thread_abc123", - metadata={ - "modified": "true", - "user": "abc123", - }, - ) - print(message) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const message = await client.beta.threads.messages.update( - "thread_abc123", - "msg_abc123", - { - metadata: { - modified: "true", - user: "abc123", - }, - } - }' - response: | - { - "id": "msg_abc123", - "object": "thread.message", - "created_at": 1699017614, - "assistant_id": null, - "thread_id": "thread_abc123", - "run_id": null, - "role": "user", - "content": [ - { - "type": "text", - "text": { - "value": "How does AI work? 
Explain it in simple terms.", - "annotations": [] - } - } - ], - "file_ids": [], + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123/messages/msg_abc123 \ + -H "Content-Type: application/json" \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" \ + -d '{ "metadata": { "modified": "true", "user": "abc123" } + }' + - lang: python + source: | + from portkey_ai import Portkey + + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + message = client.beta.threads.messages.update( + message_id="msg_abc12", + thread_id="thread_abc123", + metadata={ + "modified": "true", + "user": "abc123", + }, + ) + print(message) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const message = await client.beta.threads.messages.update( + "thread_abc123", + "msg_abc123", + { + metadata: { + modified: "true", + user: "abc123", + }, + } + }' + response: | + { + "id": "msg_abc123", + "object": "thread.message", + "created_at": 1699017614, + "assistant_id": null, + "thread_id": "thread_abc123", + "run_id": null, + "role": "user", + "content": [ + { + "type": "text", + "text": { + "value": "How does AI work? 
Explain it in simple terms.", + "annotations": [] + } + } + ], + "file_ids": [], + "metadata": { + "modified": "true", + "user": "abc123" } + } delete: operationId: deleteMessage tags: @@ -3677,53 +3513,50 @@ paths: Custom-Host: [] x-code-samples: - name: Delete message - group: threads - beta: true - returns: Deletion status - examples: - request: - curl: | - curl -X DELETE https://api.portkey.ai/v1/threads/thread_abc123/messages/msg_abc123 \ - -H "Content-Type: application/json" \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - deleted_message = client.beta.threads.messages.delete( - message_id="msg_abc12", - thread_id="thread_abc123", - ) - print(deleted_message) - node.js: |- - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const deletedMessage = await client.beta.threads.messages.del( - "thread_abc123", - "msg_abc123" - ); - - console.log(deletedMessage); - } - response: | - { - "id": "msg_abc123", - "object": "thread.message.deleted", - "deleted": true - } + - lang: curl + source: | + curl -X DELETE https://api.portkey.ai/v1/threads/thread_abc123/messages/msg_abc123 \ + -H "Content-Type: application/json" \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey + + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + deleted_message = client.beta.threads.messages.delete( + message_id="msg_abc12", + thread_id="thread_abc123", + ) + print(deleted_message) + - lang: javascript + source: | + import Portkey from 
'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const deletedMessage = await client.beta.threads.messages.del( + "thread_abc123", + "msg_abc123" + ); + + console.log(deletedMessage); + } + response: | + { + "id": "msg_abc123", + "object": "thread.message.deleted", + "deleted": true + } /threads/runs: post: @@ -3759,382 +3592,97 @@ paths: Custom-Host: [] x-code-samples: - name: Create thread and run - group: threads - beta: true - returns: A [run](https://platform.openai.com/docs/api-reference/runs/object) object. - examples: - - title: Default - request: - curl: | - curl https://api.portkey.ai/v1/threads/runs \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "assistant_id": "asst_abc123", - "thread": { - "messages": [ - {"role": "user", "content": "Explain deep learning to a 5 year old."} - ] - } - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - run = client.beta.threads.create_and_run( - assistant_id="asst_abc123", - thread={ - "messages": [ - {"role": "user", "content": "Explain deep learning to a 5 year old."} - ] - } - ) - - print(run) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const run = await client.beta.threads.createAndRun({ - assistant_id: "asst_abc123", - thread: { - messages: [ - { role: "user", content: "Explain deep learning to a 5 year old." 
}, - ], - }, - }); + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/runs \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" \ + -d '{ + "assistant_id": "asst_abc123", + "thread": { + "messages": [ + {"role": "user", "content": "Explain deep learning to a 5 year old."} + ] + } + }' + - lang: python + source: | + from portkey_ai import Portkey - console.log(run); - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) - main(); - response: | - { - "id": "run_abc123", - "object": "thread.run", - "created_at": 1699076792, - "assistant_id": "asst_abc123", - "thread_id": "thread_abc123", - "status": "queued", - "started_at": null, - "expires_at": 1699077392, - "cancelled_at": null, - "failed_at": null, - "completed_at": null, - "required_action": null, - "last_error": null, - "model": "gpt-4-turbo", - "instructions": "You are a helpful assistant.", - "tools": [], - "tool_resources": {}, - "metadata": {}, - "temperature": 1.0, - "top_p": 1.0, - "max_completion_tokens": null, - "max_prompt_tokens": null, - "truncation_strategy": { - "type": "auto", - "last_messages": null - }, - "incomplete_details": null, - "usage": null, - "response_format": "auto", - "tool_choice": "auto", - "parallel_tool_calls": true - } - - - title: Streaming - request: - curl: | - curl https://api.portkey.ai/v1/threads/runs \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "assistant_id": "asst_123", - "thread": { - "messages": [ - {"role": "user", "content": "Hello"} - ] - }, - "stream": true - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - stream = 
client.beta.threads.create_and_run( - assistant_id="asst_123", - thread={ - "messages": [ - {"role": "user", "content": "Hello"} - ] - }, - stream=True - ) - - for event in stream: - print(event) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const stream = await client.beta.threads.createAndRun({ - assistant_id: "asst_123", - thread: { - messages: [ - { role: "user", content: "Hello" }, - ], - }, - stream: true - }); - - for await (const event of stream) { - console.log(event); - } - } - - main(); - response: | - event: thread.created - data: {"id":"thread_123","object":"thread","created_at":1710348075,"metadata":{}} - - event: thread.run.created - data: {"id":"run_123","object":"thread.run","created_at":1710348075,"assistant_id":"asst_123","thread_id":"thread_123","status":"queued","started_at":null,"expires_at":1710348675,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[],"tool_resources":{},"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true} - - event: thread.run.queued - data: 
{"id":"run_123","object":"thread.run","created_at":1710348075,"assistant_id":"asst_123","thread_id":"thread_123","status":"queued","started_at":null,"expires_at":1710348675,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[],"tool_resources":{},"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true} - - event: thread.run.in_progress - data: {"id":"run_123","object":"thread.run","created_at":1710348075,"assistant_id":"asst_123","thread_id":"thread_123","status":"in_progress","started_at":null,"expires_at":1710348675,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[],"tool_resources":{},"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true} - - event: thread.run.step.created - data: {"id":"step_001","object":"thread.run.step","created_at":1710348076,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"message_creation","status":"in_progress","cancelled_at":null,"completed_at":null,"expires_at":1710348675,"failed_at":null,"last_error":null,"step_details":{"type":"message_creation","message_creation":{"message_id":"msg_001"}},"usage":null} - - event: thread.run.step.in_progress - data: 
{"id":"step_001","object":"thread.run.step","created_at":1710348076,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"message_creation","status":"in_progress","cancelled_at":null,"completed_at":null,"expires_at":1710348675,"failed_at":null,"last_error":null,"step_details":{"type":"message_creation","message_creation":{"message_id":"msg_001"}},"usage":null} - - event: thread.message.created - data: {"id":"msg_001","object":"thread.message","created_at":1710348076,"assistant_id":"asst_123","thread_id":"thread_123","run_id":"run_123","status":"in_progress","incomplete_details":null,"incomplete_at":null,"completed_at":null,"role":"assistant","content":[], "metadata":{}} - - event: thread.message.in_progress - data: {"id":"msg_001","object":"thread.message","created_at":1710348076,"assistant_id":"asst_123","thread_id":"thread_123","run_id":"run_123","status":"in_progress","incomplete_details":null,"incomplete_at":null,"completed_at":null,"role":"assistant","content":[], "metadata":{}} - - event: thread.message.delta - data: {"id":"msg_001","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":"Hello","annotations":[]}}]}} - - ... - - event: thread.message.delta - data: {"id":"msg_001","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":" today"}}]}} - - event: thread.message.delta - data: {"id":"msg_001","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":"?"}}]}} - - event: thread.message.completed - data: {"id":"msg_001","object":"thread.message","created_at":1710348076,"assistant_id":"asst_123","thread_id":"thread_123","run_id":"run_123","status":"completed","incomplete_details":null,"incomplete_at":null,"completed_at":1710348077,"role":"assistant","content":[{"type":"text","text":{"value":"Hello! 
How can I assist you today?","annotations":[]}}], "metadata":{}} - - event: thread.run.step.completed - data: {"id":"step_001","object":"thread.run.step","created_at":1710348076,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"message_creation","status":"completed","cancelled_at":null,"completed_at":1710348077,"expires_at":1710348675,"failed_at":null,"last_error":null,"step_details":{"type":"message_creation","message_creation":{"message_id":"msg_001"}},"usage":{"prompt_tokens":20,"completion_tokens":11,"total_tokens":31}} - - event: thread.run.completed - {"id":"run_123","object":"thread.run","created_at":1710348076,"assistant_id":"asst_123","thread_id":"thread_123","status":"completed","started_at":1713226836,"expires_at":null,"cancelled_at":null,"failed_at":null,"completed_at":1713226837,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":{"prompt_tokens":345,"completion_tokens":11,"total_tokens":356},"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true} - - event: done - data: [DONE] - - - title: Streaming with Functions - request: - curl: | - curl https://api.portkey.ai/v1/threads/runs \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "assistant_id": "asst_abc123", - "thread": { - "messages": [ - {"role": "user", "content": "What is the weather like in San Francisco?"} - ] - }, - "tools": [ - { - "type": "function", - "function": { - "name": "get_current_weather", - "description": "Get the current weather in a given location", - "parameters": { - "type": "object", - "properties": { - "location": { - "type": "string", - "description": "The 
city and state, e.g. San Francisco, CA" - }, - "unit": { - "type": "string", - "enum": ["celsius", "fahrenheit"] - } - }, - "required": ["location"] - } - } - } - ], - "stream": true - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - tools = [ - { - "type": "function", - "function": { - "name": "get_current_weather", - "description": "Get the current weather in a given location", - "parameters": { - "type": "object", - "properties": { - "location": { - "type": "string", - "description": "The city and state, e.g. San Francisco, CA", - }, - "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, - }, - "required": ["location"], - }, - } - } - ] - - stream = client.beta.threads.create_and_run( - thread={ - "messages": [ - {"role": "user", "content": "What is the weather like in San Francisco?"} - ] - }, - assistant_id="asst_abc123", - tools=tools, - stream=True - ) - - for event in stream: - print(event) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - const tools = [ - { - "type": "function", - "function": { - "name": "get_current_weather", - "description": "Get the current weather in a given location", - "parameters": { - "type": "object", - "properties": { - "location": { - "type": "string", - "description": "The city and state, e.g. San Francisco, CA", - }, - "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, - }, - "required": ["location"], - }, - } - } - ]; - - async function main() { - const stream = await client.beta.threads.createAndRun({ - assistant_id: "asst_123", - thread: { - messages: [ - { role: "user", content: "What is the weather like in San Francisco?" 
}, - ], - }, - tools: tools, - stream: true - }); - - for await (const event of stream) { - console.log(event); - } - } - - main(); - response: | - event: thread.created - data: {"id":"thread_123","object":"thread","created_at":1710351818,"metadata":{}} - - event: thread.run.created - data: {"id":"run_123","object":"thread.run","created_at":1710351818,"assistant_id":"asst_123","thread_id":"thread_123","status":"queued","started_at":null,"expires_at":1710352418,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[{"type":"function","function":{"name":"get_current_weather","description":"Get the current weather in a given location","parameters":{"type":"object","properties":{"location":{"type":"string","description":"The city and state, e.g. San Francisco, CA"},"unit":{"type":"string","enum":["celsius","fahrenheit"]}},"required":["location"]}}}],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} - - event: thread.run.queued - data: {"id":"run_123","object":"thread.run","created_at":1710351818,"assistant_id":"asst_123","thread_id":"thread_123","status":"queued","started_at":null,"expires_at":1710352418,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[{"type":"function","function":{"name":"get_current_weather","description":"Get the current weather in a given location","parameters":{"type":"object","properties":{"location":{"type":"string","description":"The city and state, e.g. 
San Francisco, CA"},"unit":{"type":"string","enum":["celsius","fahrenheit"]}},"required":["location"]}}}],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} - - event: thread.run.in_progress - data: {"id":"run_123","object":"thread.run","created_at":1710351818,"assistant_id":"asst_123","thread_id":"thread_123","status":"in_progress","started_at":1710351818,"expires_at":1710352418,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[{"type":"function","function":{"name":"get_current_weather","description":"Get the current weather in a given location","parameters":{"type":"object","properties":{"location":{"type":"string","description":"The city and state, e.g. San Francisco, CA"},"unit":{"type":"string","enum":["celsius","fahrenheit"]}},"required":["location"]}}}],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} - - event: thread.run.step.created - data: {"id":"step_001","object":"thread.run.step","created_at":1710351819,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"tool_calls","status":"in_progress","cancelled_at":null,"completed_at":null,"expires_at":1710352418,"failed_at":null,"last_error":null,"step_details":{"type":"tool_calls","tool_calls":[]},"usage":null} - - event: thread.run.step.in_progress - data: 
{"id":"step_001","object":"thread.run.step","created_at":1710351819,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"tool_calls","status":"in_progress","cancelled_at":null,"completed_at":null,"expires_at":1710352418,"failed_at":null,"last_error":null,"step_details":{"type":"tool_calls","tool_calls":[]},"usage":null} - - event: thread.run.step.delta - data: {"id":"step_001","object":"thread.run.step.delta","delta":{"step_details":{"type":"tool_calls","tool_calls":[{"index":0,"id":"call_XXNp8YGaFrjrSjgqxtC8JJ1B","type":"function","function":{"name":"get_current_weather","arguments":"","output":null}}]}}} - - event: thread.run.step.delta - data: {"id":"step_001","object":"thread.run.step.delta","delta":{"step_details":{"type":"tool_calls","tool_calls":[{"index":0,"type":"function","function":{"arguments":"{\""}}]}}} - - event: thread.run.step.delta - data: {"id":"step_001","object":"thread.run.step.delta","delta":{"step_details":{"type":"tool_calls","tool_calls":[{"index":0,"type":"function","function":{"arguments":"location"}}]}}} + run = client.beta.threads.create_and_run( + assistant_id="asst_abc123", + thread={ + "messages": [ + {"role": "user", "content": "Explain deep learning to a 5 year old."} + ] + } + ) - ... 
+ print(run) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; - event: thread.run.step.delta - data: {"id":"step_001","object":"thread.run.step.delta","delta":{"step_details":{"type":"tool_calls","tool_calls":[{"index":0,"type":"function","function":{"arguments":"ahrenheit"}}]}}} + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); - event: thread.run.step.delta - data: {"id":"step_001","object":"thread.run.step.delta","delta":{"step_details":{"type":"tool_calls","tool_calls":[{"index":0,"type":"function","function":{"arguments":"\"}"}}]}}} + async function main() { + const run = await client.beta.threads.createAndRun({ + assistant_id: "asst_abc123", + thread: { + messages: [ + { role: "user", content: "Explain deep learning to a 5 year old." }, + ], + }, + }); - event: thread.run.requires_action - data: {"id":"run_123","object":"thread.run","created_at":1710351818,"assistant_id":"asst_123","thread_id":"thread_123","status":"requires_action","started_at":1710351818,"expires_at":1710352418,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":{"type":"submit_tool_outputs","submit_tool_outputs":{"tool_calls":[{"id":"call_XXNp8YGaFrjrSjgqxtC8JJ1B","type":"function","function":{"name":"get_current_weather","arguments":"{\"location\":\"San Francisco, CA\",\"unit\":\"fahrenheit\"}"}}]}},"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[{"type":"function","function":{"name":"get_current_weather","description":"Get the current weather in a given location","parameters":{"type":"object","properties":{"location":{"type":"string","description":"The city and state, e.g. 
San Francisco, CA"},"unit":{"type":"string","enum":["celsius","fahrenheit"]}},"required":["location"]}}}],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":{"prompt_tokens":345,"completion_tokens":11,"total_tokens":356},"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} + console.log(run); + } - event: done - data: [DONE] + main(); + response: | + { + "id": "run_abc123", + "object": "thread.run", + "created_at": 1699076792, + "assistant_id": "asst_abc123", + "thread_id": "thread_abc123", + "status": "queued", + "started_at": null, + "expires_at": 1699077392, + "cancelled_at": null, + "failed_at": null, + "completed_at": null, + "required_action": null, + "last_error": null, + "model": "gpt-4-turbo", + "instructions": "You are a helpful assistant.", + "tools": [], + "tool_resources": {}, + "metadata": {}, + "temperature": 1.0, + "top_p": 1.0, + "max_completion_tokens": null, + "max_prompt_tokens": null, + "truncation_strategy": { + "type": "auto", + "last_messages": null + }, + "incomplete_details": null, + "usage": null, + "response_format": "auto", + "tool_choice": "auto", + "parallel_tool_calls": true + } /threads/{thread_id}/runs: get: @@ -4195,151 +3743,148 @@ paths: Custom-Host: [] x-code-samples: - name: List runs - group: threads - beta: true - returns: A list of [run](https://platform.openai.com/docs/api-reference/runs/object) objects. 
- examples: - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123/runs \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123/runs \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) - runs = client.beta.threads.runs.list( - "thread_abc123" - ) + runs = client.beta.threads.runs.list( + "thread_abc123" + ) - print(runs) - node.js: | - import Portkey from 'portkey-ai'; + print(runs) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); - async function main() { - const runs = await client.beta.threads.runs.list( - "thread_abc123" - ); + async function main() { + const runs = await client.beta.threads.runs.list( + "thread_abc123" + ); - console.log(runs); - } + console.log(runs); + } - main(); - response: | + main(); + response: | + { + "object": "list", + "data": [ { - "object": "list", - "data": [ + "id": "run_abc123", + "object": "thread.run", + "created_at": 1699075072, + "assistant_id": "asst_abc123", + "thread_id": "thread_abc123", + "status": "completed", + "started_at": 1699075072, + "expires_at": null, + "cancelled_at": null, + "failed_at": null, + "completed_at": 1699075073, + "last_error": null, 
+ "model": "gpt-4-turbo", + "instructions": null, + "incomplete_details": null, + "tools": [ { - "id": "run_abc123", - "object": "thread.run", - "created_at": 1699075072, - "assistant_id": "asst_abc123", - "thread_id": "thread_abc123", - "status": "completed", - "started_at": 1699075072, - "expires_at": null, - "cancelled_at": null, - "failed_at": null, - "completed_at": 1699075073, - "last_error": null, - "model": "gpt-4-turbo", - "instructions": null, - "incomplete_details": null, - "tools": [ - { - "type": "code_interpreter" - } - ], - "tool_resources": { - "code_interpreter": { - "file_ids": [ - "file-abc123", - "file-abc456" - ] - } - }, - "metadata": {}, - "usage": { - "prompt_tokens": 123, - "completion_tokens": 456, - "total_tokens": 579 - }, - "temperature": 1.0, - "top_p": 1.0, - "max_prompt_tokens": 1000, - "max_completion_tokens": 1000, - "truncation_strategy": { - "type": "auto", - "last_messages": null - }, - "response_format": "auto", - "tool_choice": "auto", - "parallel_tool_calls": true - }, + "type": "code_interpreter" + } + ], + "tool_resources": { + "code_interpreter": { + "file_ids": [ + "file-abc123", + "file-abc456" + ] + } + }, + "metadata": {}, + "usage": { + "prompt_tokens": 123, + "completion_tokens": 456, + "total_tokens": 579 + }, + "temperature": 1.0, + "top_p": 1.0, + "max_prompt_tokens": 1000, + "max_completion_tokens": 1000, + "truncation_strategy": { + "type": "auto", + "last_messages": null + }, + "response_format": "auto", + "tool_choice": "auto", + "parallel_tool_calls": true + }, + { + "id": "run_abc456", + "object": "thread.run", + "created_at": 1699063290, + "assistant_id": "asst_abc123", + "thread_id": "thread_abc123", + "status": "completed", + "started_at": 1699063290, + "expires_at": null, + "cancelled_at": null, + "failed_at": null, + "completed_at": 1699063291, + "last_error": null, + "model": "gpt-4-turbo", + "instructions": null, + "incomplete_details": null, + "tools": [ { - "id": "run_abc456", - "object": 
"thread.run", - "created_at": 1699063290, - "assistant_id": "asst_abc123", - "thread_id": "thread_abc123", - "status": "completed", - "started_at": 1699063290, - "expires_at": null, - "cancelled_at": null, - "failed_at": null, - "completed_at": 1699063291, - "last_error": null, - "model": "gpt-4-turbo", - "instructions": null, - "incomplete_details": null, - "tools": [ - { - "type": "code_interpreter" - } - ], - "tool_resources": { - "code_interpreter": { - "file_ids": [ - "file-abc123", - "file-abc456" - ] - } - }, - "metadata": {}, - "usage": { - "prompt_tokens": 123, - "completion_tokens": 456, - "total_tokens": 579 - }, - "temperature": 1.0, - "top_p": 1.0, - "max_prompt_tokens": 1000, - "max_completion_tokens": 1000, - "truncation_strategy": { - "type": "auto", - "last_messages": null - }, - "response_format": "auto", - "tool_choice": "auto", - "parallel_tool_calls": true + "type": "code_interpreter" } ], - "first_id": "run_abc123", - "last_id": "run_abc456", - "has_more": false + "tool_resources": { + "code_interpreter": { + "file_ids": [ + "file-abc123", + "file-abc456" + ] + } + }, + "metadata": {}, + "usage": { + "prompt_tokens": 123, + "completion_tokens": 456, + "total_tokens": 579 + }, + "temperature": 1.0, + "top_p": 1.0, + "max_prompt_tokens": 1000, + "max_completion_tokens": 1000, + "truncation_strategy": { + "type": "auto", + "last_messages": null + }, + "response_format": "auto", + "tool_choice": "auto", + "parallel_tool_calls": true } + ], + "first_id": "run_abc123", + "last_id": "run_abc456", + "has_more": false + } post: operationId: createRun tags: @@ -4380,345 +3925,86 @@ paths: schema: $ref: "#/components/schemas/RunObject" x-code-samples: - name: Create run - group: threads - beta: true - returns: A [run](https://platform.openai.com/docs/api-reference/runs/object) object. 
- examples: - - title: Default - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123/runs \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "assistant_id": "asst_abc123" - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - run = client.beta.threads.runs.create( - thread_id="thread_abc123", - assistant_id="asst_abc123" - ) - - print(run) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const run = await client.beta.threads.runs.create( - "thread_abc123", - { assistant_id: "asst_abc123" } - ); - - console.log(run); - } - - main(); - response: &run_object_example | - { - "id": "run_abc123", - "object": "thread.run", - "created_at": 1699063290, - "assistant_id": "asst_abc123", - "thread_id": "thread_abc123", - "status": "queued", - "started_at": 1699063290, - "expires_at": null, - "cancelled_at": null, - "failed_at": null, - "completed_at": 1699063291, - "last_error": null, - "model": "gpt-4-turbo", - "instructions": null, - "incomplete_details": null, - "tools": [ - { - "type": "code_interpreter" - } - ], - "metadata": {}, - "usage": null, - "temperature": 1.0, - "top_p": 1.0, - "max_prompt_tokens": 1000, - "max_completion_tokens": 1000, - "truncation_strategy": { - "type": "auto", - "last_messages": null - }, - "response_format": "auto", - "tool_choice": "auto", - "parallel_tool_calls": true - } - - title: Streaming - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_123/runs \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" \ - 
-d '{ - "assistant_id": "asst_123", - "stream": true - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - stream = client.beta.threads.runs.create( - thread_id="thread_123", - assistant_id="asst_123", - stream=True - ) - - for event in stream: - print(event) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const stream = await client.beta.threads.runs.create( - "thread_123", - { assistant_id: "asst_123", stream: true } - ); - - for await (const event of stream) { - console.log(event); - } - } - - main(); - response: | - event: thread.run.created - data: {"id":"run_123","object":"thread.run","created_at":1710330640,"assistant_id":"asst_123","thread_id":"thread_123","status":"queued","started_at":null,"expires_at":1710331240,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123/runs \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" \ + -d '{ + "assistant_id": "asst_abc123" + }' + - lang: python + source: | + from portkey_ai import Portkey - event: thread.run.queued - data: 
{"id":"run_123","object":"thread.run","created_at":1710330640,"assistant_id":"asst_123","thread_id":"thread_123","status":"queued","started_at":null,"expires_at":1710331240,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) - event: thread.run.in_progress - data: {"id":"run_123","object":"thread.run","created_at":1710330640,"assistant_id":"asst_123","thread_id":"thread_123","status":"in_progress","started_at":1710330641,"expires_at":1710331240,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} + run = client.beta.threads.runs.create( + thread_id="thread_abc123", + assistant_id="asst_abc123" + ) - event: thread.run.step.created - data: {"id":"step_001","object":"thread.run.step","created_at":1710330641,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"message_creation","status":"in_progress","cancelled_at":null,"completed_at":null,"expires_at":1710331240,"failed_at":null,"last_error":null,"step_details":{"type":"message_creation","message_creation":{"message_id":"msg_001"}},"usage":null} + print(run) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; - event: thread.run.step.in_progress - data: 
{"id":"step_001","object":"thread.run.step","created_at":1710330641,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"message_creation","status":"in_progress","cancelled_at":null,"completed_at":null,"expires_at":1710331240,"failed_at":null,"last_error":null,"step_details":{"type":"message_creation","message_creation":{"message_id":"msg_001"}},"usage":null} + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); - event: thread.message.created - data: {"id":"msg_001","object":"thread.message","created_at":1710330641,"assistant_id":"asst_123","thread_id":"thread_123","run_id":"run_123","status":"in_progress","incomplete_details":null,"incomplete_at":null,"completed_at":null,"role":"assistant","content":[],"metadata":{}} + async function main() { + const run = await client.beta.threads.runs.create( + "thread_abc123", + { assistant_id: "asst_abc123" } + ); - event: thread.message.in_progress - data: {"id":"msg_001","object":"thread.message","created_at":1710330641,"assistant_id":"asst_123","thread_id":"thread_123","run_id":"run_123","status":"in_progress","incomplete_details":null,"incomplete_at":null,"completed_at":null,"role":"assistant","content":[],"metadata":{}} + console.log(run); + } - event: thread.message.delta - data: {"id":"msg_001","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":"Hello","annotations":[]}}]}} - - ... 
- - event: thread.message.delta - data: {"id":"msg_001","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":" today"}}]}} - - event: thread.message.delta - data: {"id":"msg_001","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":"?"}}]}} - - event: thread.message.completed - data: {"id":"msg_001","object":"thread.message","created_at":1710330641,"assistant_id":"asst_123","thread_id":"thread_123","run_id":"run_123","status":"completed","incomplete_details":null,"incomplete_at":null,"completed_at":1710330642,"role":"assistant","content":[{"type":"text","text":{"value":"Hello! How can I assist you today?","annotations":[]}}],"metadata":{}} - - event: thread.run.step.completed - data: {"id":"step_001","object":"thread.run.step","created_at":1710330641,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"message_creation","status":"completed","cancelled_at":null,"completed_at":1710330642,"expires_at":1710331240,"failed_at":null,"last_error":null,"step_details":{"type":"message_creation","message_creation":{"message_id":"msg_001"}},"usage":{"prompt_tokens":20,"completion_tokens":11,"total_tokens":31}} - - event: thread.run.completed - data: {"id":"run_123","object":"thread.run","created_at":1710330640,"assistant_id":"asst_123","thread_id":"thread_123","status":"completed","started_at":1710330641,"expires_at":null,"cancelled_at":null,"failed_at":null,"completed_at":1710330642,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":{"prompt_tokens":20,"completion_tokens":11,"total_tokens":31},"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} - - event: done - data: [DONE] - - - title: Streaming with Functions - 
request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123/runs \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "assistant_id": "asst_abc123", - "tools": [ - { - "type": "function", - "function": { - "name": "get_current_weather", - "description": "Get the current weather in a given location", - "parameters": { - "type": "object", - "properties": { - "location": { - "type": "string", - "description": "The city and state, e.g. San Francisco, CA" - }, - "unit": { - "type": "string", - "enum": ["celsius", "fahrenheit"] - } - }, - "required": ["location"] - } - } - } - ], - "stream": true - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - tools = [ - { - "type": "function", - "function": { - "name": "get_current_weather", - "description": "Get the current weather in a given location", - "parameters": { - "type": "object", - "properties": { - "location": { - "type": "string", - "description": "The city and state, e.g. San Francisco, CA", - }, - "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, - }, - "required": ["location"], - }, - } - } - ] - - stream = client.beta.threads.runs.create( - thread_id="thread_abc123", - assistant_id="asst_abc123", - tools=tools, - stream=True - ) - - for event in stream: - print(event) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - const tools = [ - { - "type": "function", - "function": { - "name": "get_current_weather", - "description": "Get the current weather in a given location", - "parameters": { - "type": "object", - "properties": { - "location": { - "type": "string", - "description": "The city and state, e.g. 
San Francisco, CA", - }, - "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, - }, - "required": ["location"], - }, - } - } - ]; - - async function main() { - const stream = await client.beta.threads.runs.create( - "thread_abc123", - { - assistant_id: "asst_abc123", - tools: tools, - stream: true - } - ); - - for await (const event of stream) { - console.log(event); - } - } - - main(); - response: | - event: thread.run.created - data: {"id":"run_123","object":"thread.run","created_at":1710348075,"assistant_id":"asst_123","thread_id":"thread_123","status":"queued","started_at":null,"expires_at":1710348675,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} - - event: thread.run.queued - data: {"id":"run_123","object":"thread.run","created_at":1710348075,"assistant_id":"asst_123","thread_id":"thread_123","status":"queued","started_at":null,"expires_at":1710348675,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} - - event: thread.run.in_progress - data: 
{"id":"run_123","object":"thread.run","created_at":1710348075,"assistant_id":"asst_123","thread_id":"thread_123","status":"in_progress","started_at":1710348075,"expires_at":1710348675,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} - - event: thread.run.step.created - data: {"id":"step_001","object":"thread.run.step","created_at":1710348076,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"message_creation","status":"in_progress","cancelled_at":null,"completed_at":null,"expires_at":1710348675,"failed_at":null,"last_error":null,"step_details":{"type":"message_creation","message_creation":{"message_id":"msg_001"}},"usage":null} - - event: thread.run.step.in_progress - data: {"id":"step_001","object":"thread.run.step","created_at":1710348076,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"message_creation","status":"in_progress","cancelled_at":null,"completed_at":null,"expires_at":1710348675,"failed_at":null,"last_error":null,"step_details":{"type":"message_creation","message_creation":{"message_id":"msg_001"}},"usage":null} - - event: thread.message.created - data: {"id":"msg_001","object":"thread.message","created_at":1710348076,"assistant_id":"asst_123","thread_id":"thread_123","run_id":"run_123","status":"in_progress","incomplete_details":null,"incomplete_at":null,"completed_at":null,"role":"assistant","content":[],"metadata":{}} - - event: thread.message.in_progress - data: 
{"id":"msg_001","object":"thread.message","created_at":1710348076,"assistant_id":"asst_123","thread_id":"thread_123","run_id":"run_123","status":"in_progress","incomplete_details":null,"incomplete_at":null,"completed_at":null,"role":"assistant","content":[],"metadata":{}} - - event: thread.message.delta - data: {"id":"msg_001","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":"Hello","annotations":[]}}]}} - - ... - - event: thread.message.delta - data: {"id":"msg_001","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":" today"}}]}} - - event: thread.message.delta - data: {"id":"msg_001","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":"?"}}]}} - - event: thread.message.completed - data: {"id":"msg_001","object":"thread.message","created_at":1710348076,"assistant_id":"asst_123","thread_id":"thread_123","run_id":"run_123","status":"completed","incomplete_details":null,"incomplete_at":null,"completed_at":1710348077,"role":"assistant","content":[{"type":"text","text":{"value":"Hello! 
How can I assist you today?","annotations":[]}}],"metadata":{}} - - event: thread.run.step.completed - data: {"id":"step_001","object":"thread.run.step","created_at":1710348076,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"message_creation","status":"completed","cancelled_at":null,"completed_at":1710348077,"expires_at":1710348675,"failed_at":null,"last_error":null,"step_details":{"type":"message_creation","message_creation":{"message_id":"msg_001"}},"usage":{"prompt_tokens":20,"completion_tokens":11,"total_tokens":31}} - - event: thread.run.completed - data: {"id":"run_123","object":"thread.run","created_at":1710348075,"assistant_id":"asst_123","thread_id":"thread_123","status":"completed","started_at":1710348075,"expires_at":null,"cancelled_at":null,"failed_at":null,"completed_at":1710348077,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":{"prompt_tokens":20,"completion_tokens":11,"total_tokens":31},"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} - - event: done - data: [DONE] + main(); + response: &run_object_example | + { + "id": "run_abc123", + "object": "thread.run", + "created_at": 1699063290, + "assistant_id": "asst_abc123", + "thread_id": "thread_abc123", + "status": "queued", + "started_at": 1699063290, + "expires_at": null, + "cancelled_at": null, + "failed_at": null, + "completed_at": 1699063291, + "last_error": null, + "model": "gpt-4-turbo", + "instructions": null, + "incomplete_details": null, + "tools": [ + { + "type": "code_interpreter" + } + ], + "metadata": {}, + "usage": null, + "temperature": 1.0, + "top_p": 1.0, + "max_prompt_tokens": 1000, + "max_completion_tokens": 1000, + "truncation_strategy": { + "type": "auto", + "last_messages": null + }, + 
"response_format": "auto", + "tool_choice": "auto", + "parallel_tool_calls": true + } /threads/{thread_id}/runs/{run_id}: get: @@ -4761,89 +4047,86 @@ paths: Custom-Host: [] x-code-samples: - name: Retrieve run - group: threads - beta: true - returns: The [run](https://platform.openai.com/docs/api-reference/runs/object) object matching the specified ID. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123/runs/run_abc123 \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - run = client.beta.threads.runs.retrieve( - thread_id="thread_abc123", - run_id="run_abc123" - ) - - print(run) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const run = await client.beta.threads.runs.retrieve( - "thread_abc123", - "run_abc123" - ); - - console.log(run); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123/runs/run_abc123 \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - "id": "run_abc123", - "object": "thread.run", - "created_at": 1699075072, - "assistant_id": "asst_abc123", - "thread_id": "thread_abc123", - "status": "completed", - "started_at": 1699075072, - "expires_at": null, - "cancelled_at": null, - "failed_at": null, - "completed_at": 1699075073, - "last_error": null, - "model": "gpt-4-turbo", - "instructions": null, - "incomplete_details": null, - "tools": [ - { - "type": "code_interpreter" - } - ], - "metadata": {}, - "usage": { - "prompt_tokens": 123, - 
"completion_tokens": 456, - "total_tokens": 579 - }, - "temperature": 1.0, - "top_p": 1.0, - "max_prompt_tokens": 1000, - "max_completion_tokens": 1000, - "truncation_strategy": { - "type": "auto", - "last_messages": null - }, - "response_format": "auto", - "tool_choice": "auto", - "parallel_tool_calls": true - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + run = client.beta.threads.runs.retrieve( + thread_id="thread_abc123", + run_id="run_abc123" + ) + + print(run) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const run = await client.beta.threads.runs.retrieve( + "thread_abc123", + "run_abc123" + ); + + console.log(run); + } + + main(); + response: | + { + "id": "run_abc123", + "object": "thread.run", + "created_at": 1699075072, + "assistant_id": "asst_abc123", + "thread_id": "thread_abc123", + "status": "completed", + "started_at": 1699075072, + "expires_at": null, + "cancelled_at": null, + "failed_at": null, + "completed_at": 1699075073, + "last_error": null, + "model": "gpt-4-turbo", + "instructions": null, + "incomplete_details": null, + "tools": [ + { + "type": "code_interpreter" + } + ], + "metadata": {}, + "usage": { + "prompt_tokens": 123, + "completion_tokens": 456, + "total_tokens": 579 + }, + "temperature": 1.0, + "top_p": 1.0, + "max_prompt_tokens": 1000, + "max_completion_tokens": 1000, + "truncation_strategy": { + "type": "auto", + "last_messages": null + }, + "response_format": "auto", + "tool_choice": "auto", + "parallel_tool_calls": true + } post: operationId: modifyRun tags: @@ -4890,111 +4173,108 @@ paths: Custom-Host: [] x-code-samples: - name: Modify run - group: threads - beta: true - returns: The modified [run](https://platform.openai.com/docs/api-reference/runs/object) object matching the specified ID. 
- examples: - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123/runs/run_abc123 \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "metadata": { - "user_id": "user_abc123" - } - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - run = client.beta.threads.runs.update( - thread_id="thread_abc123", - run_id="run_abc123", - metadata={"user_id": "user_abc123"}, - ) - - print(run) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const run = await client.beta.threads.runs.update( - "thread_abc123", - "run_abc123", - { - metadata: { - user_id: "user_abc123", - }, - } - ); + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123/runs/run_abc123 \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" \ + -d '{ + "metadata": { + "user_id": "user_abc123" + } + }' + - lang: python + source: | + from portkey_ai import Portkey - console.log(run); - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + run = client.beta.threads.runs.update( + thread_id="thread_abc123", + run_id="run_abc123", + metadata={"user_id": "user_abc123"}, + ) - main(); - response: | + print(run) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const run = await client.beta.threads.runs.update( + "thread_abc123", + "run_abc123", { - "id": "run_abc123", - "object": 
"thread.run", - "created_at": 1699075072, - "assistant_id": "asst_abc123", - "thread_id": "thread_abc123", - "status": "completed", - "started_at": 1699075072, - "expires_at": null, - "cancelled_at": null, - "failed_at": null, - "completed_at": 1699075073, - "last_error": null, - "model": "gpt-4-turbo", - "instructions": null, - "incomplete_details": null, - "tools": [ - { - "type": "code_interpreter" - } - ], - "tool_resources": { - "code_interpreter": { - "file_ids": [ - "file-abc123", - "file-abc456" - ] - } - }, - "metadata": { - "user_id": "user_abc123" - }, - "usage": { - "prompt_tokens": 123, - "completion_tokens": 456, - "total_tokens": 579 + metadata: { + user_id: "user_abc123", }, - "temperature": 1.0, - "top_p": 1.0, - "max_prompt_tokens": 1000, - "max_completion_tokens": 1000, - "truncation_strategy": { - "type": "auto", - "last_messages": null - }, - "response_format": "auto", - "tool_choice": "auto", - "parallel_tool_calls": true } + ); + + console.log(run); + } + + main(); + response: | + { + "id": "run_abc123", + "object": "thread.run", + "created_at": 1699075072, + "assistant_id": "asst_abc123", + "thread_id": "thread_abc123", + "status": "completed", + "started_at": 1699075072, + "expires_at": null, + "cancelled_at": null, + "failed_at": null, + "completed_at": 1699075073, + "last_error": null, + "model": "gpt-4-turbo", + "instructions": null, + "incomplete_details": null, + "tools": [ + { + "type": "code_interpreter" + } + ], + "tool_resources": { + "code_interpreter": { + "file_ids": [ + "file-abc123", + "file-abc456" + ] + } + }, + "metadata": { + "user_id": "user_abc123" + }, + "usage": { + "prompt_tokens": 123, + "completion_tokens": 456, + "total_tokens": 579 + }, + "temperature": 1.0, + "top_p": 1.0, + "max_prompt_tokens": 1000, + "max_completion_tokens": 1000, + "truncation_strategy": { + "type": "auto", + "last_messages": null + }, + "response_format": "auto", + "tool_choice": "auto", + "parallel_tool_calls": true + } 
/threads/{thread_id}/runs/{run_id}/submit_tool_outputs: post: @@ -5044,244 +4324,122 @@ paths: Custom-Host: [] x-code-samples: - name: Submit tool outputs to run - group: threads - beta: true - returns: The modified [run](https://platform.openai.com/docs/api-reference/runs/object) object matching the specified ID. - examples: - - title: Default - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_123/runs/run_123/submit_tool_outputs \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "tool_outputs": [ - { - "tool_call_id": "call_001", - "output": "70 degrees and sunny." - } - ] - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - run = client.beta.threads.runs.submit_tool_outputs( - thread_id="thread_123", - run_id="run_123", - tool_outputs=[ - { - "tool_call_id": "call_001", - "output": "70 degrees and sunny." 
- } - ] - ) - - print(run) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const run = await client.beta.threads.runs.submitToolOutputs( - "thread_123", - "run_123", - { - tool_outputs: [ - { - tool_call_id: "call_001", - output: "70 degrees and sunny.", - }, - ], - } - ); - - console.log(run); - } - - main(); - response: | - { - "id": "run_123", - "object": "thread.run", - "created_at": 1699075592, - "assistant_id": "asst_123", - "thread_id": "thread_123", - "status": "queued", - "started_at": 1699075592, - "expires_at": 1699076192, - "cancelled_at": null, - "failed_at": null, - "completed_at": null, - "last_error": null, - "model": "gpt-4-turbo", - "instructions": null, - "tools": [ - { - "type": "function", - "function": { - "name": "get_current_weather", - "description": "Get the current weather in a given location", - "parameters": { - "type": "object", - "properties": { - "location": { - "type": "string", - "description": "The city and state, e.g. San Francisco, CA" - }, - "unit": { - "type": "string", - "enum": ["celsius", "fahrenheit"] - } - }, - "required": ["location"] - } - } - } - ], - "metadata": {}, - "usage": null, - "temperature": 1.0, - "top_p": 1.0, - "max_prompt_tokens": 1000, - "max_completion_tokens": 1000, - "truncation_strategy": { - "type": "auto", - "last_messages": null - }, - "response_format": "auto", - "tool_choice": "auto", - "parallel_tool_calls": true - } - - - title: Streaming - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_123/runs/run_123/submit_tool_outputs \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "tool_outputs": [ - { - "tool_call_id": "call_001", - "output": "70 degrees and sunny." 
- } - ], - "stream": true - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - stream = client.beta.threads.runs.submit_tool_outputs( - thread_id="thread_123", - run_id="run_123", - tool_outputs=[ - { - "tool_call_id": "call_001", - "output": "70 degrees and sunny." - } - ], - stream=True - ) - - for event in stream: - print(event) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const stream = await client.beta.threads.runs.submitToolOutputs( - "thread_123", - "run_123", - { - tool_outputs: [ - { - tool_call_id: "call_001", - output: "70 degrees and sunny.", - }, - ], - } - ); - - for await (const event of stream) { - console.log(event); - } - } - - main(); - response: | - event: thread.run.step.completed - data: {"id":"step_001","object":"thread.run.step","created_at":1710352449,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"tool_calls","status":"completed","cancelled_at":null,"completed_at":1710352475,"expires_at":1710353047,"failed_at":null,"last_error":null,"step_details":{"type":"tool_calls","tool_calls":[{"id":"call_iWr0kQ2EaYMaxNdl0v3KYkx7","type":"function","function":{"name":"get_current_weather","arguments":"{\"location\":\"San Francisco, CA\",\"unit\":\"fahrenheit\"}","output":"70 degrees and sunny."}}]},"usage":{"prompt_tokens":291,"completion_tokens":24,"total_tokens":315}} - - event: thread.run.queued - data: 
{"id":"run_123","object":"thread.run","created_at":1710352447,"assistant_id":"asst_123","thread_id":"thread_123","status":"queued","started_at":1710352448,"expires_at":1710353047,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[{"type":"function","function":{"name":"get_current_weather","description":"Get the current weather in a given location","parameters":{"type":"object","properties":{"location":{"type":"string","description":"The city and state, e.g. San Francisco, CA"},"unit":{"type":"string","enum":["celsius","fahrenheit"]}},"required":["location"]}}}],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} - - event: thread.run.in_progress - data: {"id":"run_123","object":"thread.run","created_at":1710352447,"assistant_id":"asst_123","thread_id":"thread_123","status":"in_progress","started_at":1710352475,"expires_at":1710353047,"cancelled_at":null,"failed_at":null,"completed_at":null,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[{"type":"function","function":{"name":"get_current_weather","description":"Get the current weather in a given location","parameters":{"type":"object","properties":{"location":{"type":"string","description":"The city and state, e.g. 
San Francisco, CA"},"unit":{"type":"string","enum":["celsius","fahrenheit"]}},"required":["location"]}}}],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":null,"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} - - event: thread.run.step.created - data: {"id":"step_002","object":"thread.run.step","created_at":1710352476,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"message_creation","status":"in_progress","cancelled_at":null,"completed_at":null,"expires_at":1710353047,"failed_at":null,"last_error":null,"step_details":{"type":"message_creation","message_creation":{"message_id":"msg_002"}},"usage":null} - - event: thread.run.step.in_progress - data: {"id":"step_002","object":"thread.run.step","created_at":1710352476,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"message_creation","status":"in_progress","cancelled_at":null,"completed_at":null,"expires_at":1710353047,"failed_at":null,"last_error":null,"step_details":{"type":"message_creation","message_creation":{"message_id":"msg_002"}},"usage":null} - - event: thread.message.created - data: {"id":"msg_002","object":"thread.message","created_at":1710352476,"assistant_id":"asst_123","thread_id":"thread_123","run_id":"run_123","status":"in_progress","incomplete_details":null,"incomplete_at":null,"completed_at":null,"role":"assistant","content":[],"metadata":{}} - - event: thread.message.in_progress - data: {"id":"msg_002","object":"thread.message","created_at":1710352476,"assistant_id":"asst_123","thread_id":"thread_123","run_id":"run_123","status":"in_progress","incomplete_details":null,"incomplete_at":null,"completed_at":null,"role":"assistant","content":[],"metadata":{}} - - event: thread.message.delta - data: 
{"id":"msg_002","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":"The","annotations":[]}}]}} - - event: thread.message.delta - data: {"id":"msg_002","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":" current"}}]}} - - event: thread.message.delta - data: {"id":"msg_002","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":" weather"}}]}} + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_123/runs/run_123/submit_tool_outputs \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" \ + -d '{ + "tool_outputs": [ + { + "tool_call_id": "call_001", + "output": "70 degrees and sunny." + } + ] + }' + - lang: python + source: | + from portkey_ai import Portkey - ... + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) - event: thread.message.delta - data: {"id":"msg_002","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":" sunny"}}]}} + run = client.beta.threads.runs.submit_tool_outputs( + thread_id="thread_123", + run_id="run_123", + tool_outputs=[ + { + "tool_call_id": "call_001", + "output": "70 degrees and sunny." 
+ } + ] + ) - event: thread.message.delta - data: {"id":"msg_002","object":"thread.message.delta","delta":{"content":[{"index":0,"type":"text","text":{"value":"."}}]}} + print(run) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; - event: thread.message.completed - data: {"id":"msg_002","object":"thread.message","created_at":1710352476,"assistant_id":"asst_123","thread_id":"thread_123","run_id":"run_123","status":"completed","incomplete_details":null,"incomplete_at":null,"completed_at":1710352477,"role":"assistant","content":[{"type":"text","text":{"value":"The current weather in San Francisco, CA is 70 degrees Fahrenheit and sunny.","annotations":[]}}],"metadata":{}} + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); - event: thread.run.step.completed - data: {"id":"step_002","object":"thread.run.step","created_at":1710352476,"run_id":"run_123","assistant_id":"asst_123","thread_id":"thread_123","type":"message_creation","status":"completed","cancelled_at":null,"completed_at":1710352477,"expires_at":1710353047,"failed_at":null,"last_error":null,"step_details":{"type":"message_creation","message_creation":{"message_id":"msg_002"}},"usage":{"prompt_tokens":329,"completion_tokens":18,"total_tokens":347}} + async function main() { + const run = await client.beta.threads.runs.submitToolOutputs( + "thread_123", + "run_123", + { + tool_outputs: [ + { + tool_call_id: "call_001", + output: "70 degrees and sunny.", + }, + ], + } + ); - event: thread.run.completed - data: {"id":"run_123","object":"thread.run","created_at":1710352447,"assistant_id":"asst_123","thread_id":"thread_123","status":"completed","started_at":1710352475,"expires_at":null,"cancelled_at":null,"failed_at":null,"completed_at":1710352477,"required_action":null,"last_error":null,"model":"gpt-4-turbo","instructions":null,"tools":[{"type":"function","function":{"name":"get_current_weather","description":"Get the current weather in a given 
location","parameters":{"type":"object","properties":{"location":{"type":"string","description":"The city and state, e.g. San Francisco, CA"},"unit":{"type":"string","enum":["celsius","fahrenheit"]}},"required":["location"]}}}],"metadata":{},"temperature":1.0,"top_p":1.0,"max_completion_tokens":null,"max_prompt_tokens":null,"truncation_strategy":{"type":"auto","last_messages":null},"incomplete_details":null,"usage":{"prompt_tokens":20,"completion_tokens":11,"total_tokens":31},"response_format":"auto","tool_choice":"auto","parallel_tool_calls":true}} + console.log(run); + } - event: done - data: [DONE] + main(); + response: | + { + "id": "run_123", + "object": "thread.run", + "created_at": 1699075592, + "assistant_id": "asst_123", + "thread_id": "thread_123", + "status": "queued", + "started_at": 1699075592, + "expires_at": 1699076192, + "cancelled_at": null, + "failed_at": null, + "completed_at": null, + "last_error": null, + "model": "gpt-4-turbo", + "instructions": null, + "tools": [ + { + "type": "function", + "function": { + "name": "get_current_weather", + "description": "Get the current weather in a given location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. 
San Francisco, CA" + }, + "unit": { + "type": "string", + "enum": ["celsius", "fahrenheit"] + } + }, + "required": ["location"] + } + } + } + ], + "metadata": {}, + "usage": null, + "temperature": 1.0, + "top_p": 1.0, + "max_prompt_tokens": 1000, + "max_completion_tokens": 1000, + "truncation_strategy": { + "type": "auto", + "last_messages": null + }, + "response_format": "auto", + "tool_choice": "auto", + "parallel_tool_calls": true + } /threads/{thread_id}/runs/{run_id}/cancel: post: @@ -5324,84 +4482,81 @@ paths: Custom-Host: [] x-code-samples: - name: Cancel a run - group: threads - beta: true - returns: The modified [run](https://platform.openai.com/docs/api-reference/runs/object) object matching the specified ID. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123/runs/run_abc123/cancel \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "OpenAI-Beta: assistants=v2" \ - -X POST - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - run = client.beta.threads.runs.cancel( - thread_id="thread_abc123", - run_id="run_abc123" - ) - - print(run) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const run = await client.beta.threads.runs.cancel( - "thread_abc123", - "run_abc123" - ); - - console.log(run); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123/runs/run_abc123/cancel \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "OpenAI-Beta: assistants=v2" \ + -X POST + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - "id": "run_abc123", - "object": "thread.run", - "created_at": 1699076126, - "assistant_id": 
"asst_abc123", - "thread_id": "thread_abc123", - "status": "cancelling", - "started_at": 1699076126, - "expires_at": 1699076726, - "cancelled_at": null, - "failed_at": null, - "completed_at": null, - "last_error": null, - "model": "gpt-4-turbo", - "instructions": "You summarize books.", - "tools": [ - { - "type": "file_search" - } - ], - "tool_resources": { - "file_search": { - "vector_store_ids": ["vs_123"] - } - }, - "metadata": {}, - "usage": null, - "temperature": 1.0, - "top_p": 1.0, - "response_format": "auto", - "tool_choice": "auto", - "parallel_tool_calls": true - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + run = client.beta.threads.runs.cancel( + thread_id="thread_abc123", + run_id="run_abc123" + ) + + print(run) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const run = await client.beta.threads.runs.cancel( + "thread_abc123", + "run_abc123" + ); + + console.log(run); + } + + main(); + response: | + { + "id": "run_abc123", + "object": "thread.run", + "created_at": 1699076126, + "assistant_id": "asst_abc123", + "thread_id": "thread_abc123", + "status": "cancelling", + "started_at": 1699076126, + "expires_at": 1699076726, + "cancelled_at": null, + "failed_at": null, + "completed_at": null, + "last_error": null, + "model": "gpt-4-turbo", + "instructions": "You summarize books.", + "tools": [ + { + "type": "file_search" + } + ], + "tool_resources": { + "file_search": { + "vector_store_ids": ["vs_123"] + } + }, + "metadata": {}, + "usage": null, + "temperature": 1.0, + "top_p": 1.0, + "response_format": "auto", + "tool_choice": "auto", + "parallel_tool_calls": true + } /threads/{thread_id}/runs/{run_id}/steps: get: @@ -5468,84 +4623,81 @@ paths: Custom-Host: [] x-code-samples: - name: List run steps - group: threads - beta: true - returns: A list of 
[run step](https://platform.openai.com/docs/api-reference/runs/step-object) objects. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123/runs/run_abc123/steps \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - run_steps = client.beta.threads.runs.steps.list( - thread_id="thread_abc123", - run_id="run_abc123" - ) - - print(run_steps) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const runStep = await client.beta.threads.runs.steps.list( - "thread_abc123", - "run_abc123" - ); - console.log(runStep); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123/runs/run_abc123/steps \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - "object": "list", - "data": [ - { - "id": "step_abc123", - "object": "thread.run.step", - "created_at": 1699063291, - "run_id": "run_abc123", - "assistant_id": "asst_abc123", - "thread_id": "thread_abc123", + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + run_steps = client.beta.threads.runs.steps.list( + thread_id="thread_abc123", + run_id="run_abc123" + ) + + print(run_steps) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const runStep = await 
client.beta.threads.runs.steps.list( + "thread_abc123", + "run_abc123" + ); + console.log(runStep); + } + + main(); + response: | + { + "object": "list", + "data": [ + { + "id": "step_abc123", + "object": "thread.run.step", + "created_at": 1699063291, + "run_id": "run_abc123", + "assistant_id": "asst_abc123", + "thread_id": "thread_abc123", + "type": "message_creation", + "status": "completed", + "cancelled_at": null, + "completed_at": 1699063291, + "expired_at": null, + "failed_at": null, + "last_error": null, + "step_details": { "type": "message_creation", - "status": "completed", - "cancelled_at": null, - "completed_at": 1699063291, - "expired_at": null, - "failed_at": null, - "last_error": null, - "step_details": { - "type": "message_creation", - "message_creation": { - "message_id": "msg_abc123" - } - }, - "usage": { - "prompt_tokens": 123, - "completion_tokens": 456, - "total_tokens": 579 + "message_creation": { + "message_id": "msg_abc123" } + }, + "usage": { + "prompt_tokens": 123, + "completion_tokens": 456, + "total_tokens": 579 } - ], - "first_id": "step_abc123", - "last_id": "step_abc456", - "has_more": false - } + } + ], + "first_id": "step_abc123", + "last_id": "step_abc456", + "has_more": false + } /threads/{thread_id}/runs/{run_id}/steps/{step_id}: get: @@ -5594,78 +4746,75 @@ paths: Custom-Host: [] x-code-samples: - name: Retrieve run step - group: threads - beta: true - returns: The [run step](https://platform.openai.com/docs/api-reference/runs/step-object) object matching the specified ID. 
- examples: - request: - curl: | - curl https://api.portkey.ai/v1/threads/thread_abc123/runs/run_abc123/steps/step_abc123 \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - run_step = client.beta.threads.runs.steps.retrieve( - thread_id="thread_abc123", - run_id="run_abc123", - step_id="step_abc123" - ) - - print(run_step) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const runStep = await client.beta.threads.runs.steps.retrieve( - "thread_abc123", - "run_abc123", - "step_abc123" - ); - console.log(runStep); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/threads/thread_abc123/runs/run_abc123/steps/step_abc123 \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: &run_step_object_example | - { - "id": "step_abc123", - "object": "thread.run.step", - "created_at": 1699063291, - "run_id": "run_abc123", - "assistant_id": "asst_abc123", - "thread_id": "thread_abc123", + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + run_step = client.beta.threads.runs.steps.retrieve( + thread_id="thread_abc123", + run_id="run_abc123", + step_id="step_abc123" + ) + + print(run_step) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const runStep = await 
client.beta.threads.runs.steps.retrieve( + "thread_abc123", + "run_abc123", + "step_abc123" + ); + console.log(runStep); + } + + main(); + response: &run_step_object_example | + { + "id": "step_abc123", + "object": "thread.run.step", + "created_at": 1699063291, + "run_id": "run_abc123", + "assistant_id": "asst_abc123", + "thread_id": "thread_abc123", + "type": "message_creation", + "status": "completed", + "cancelled_at": null, + "completed_at": 1699063291, + "expired_at": null, + "failed_at": null, + "last_error": null, + "step_details": { "type": "message_creation", - "status": "completed", - "cancelled_at": null, - "completed_at": 1699063291, - "expired_at": null, - "failed_at": null, - "last_error": null, - "step_details": { - "type": "message_creation", - "message_creation": { - "message_id": "msg_abc123" - } - }, - "usage": { - "prompt_tokens": 123, - "completion_tokens": 456, - "total_tokens": 579 + "message_creation": { + "message_id": "msg_abc123" } + }, + "usage": { + "prompt_tokens": 123, + "completion_tokens": 456, + "total_tokens": 579 } + } /vector_stores: get: @@ -5720,79 +4869,76 @@ paths: Custom-Host: [] x-code-samples: - name: List vector stores - group: vector_stores - beta: true - returns: A list of [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) objects. 
- examples: - request: - curl: | - curl https://api.portkey.ai/v1/vector_stores \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - vector_stores = client.beta.vector_stores.list() - print(vector_stores) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const vectorStores = await client.beta.vectorStores.list(); - console.log(vectorStores); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/vector_stores \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - "object": "list", - "data": [ - { - "id": "vs_abc123", - "object": "vector_store", - "created_at": 1699061776, - "name": "Support FAQ", - "bytes": 139920, - "file_counts": { - "in_progress": 0, - "completed": 3, - "failed": 0, - "cancelled": 0, - "total": 3 - } - }, - { - "id": "vs_abc456", - "object": "vector_store", - "created_at": 1699061776, - "name": "Support FAQ v2", - "bytes": 139920, - "file_counts": { - "in_progress": 0, - "completed": 3, - "failed": 0, - "cancelled": 0, - "total": 3 - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + vector_stores = client.beta.vector_stores.list() + print(vector_stores) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const 
vectorStores = await client.beta.vectorStores.list(); + console.log(vectorStores); + } + + main(); + response: | + { + "object": "list", + "data": [ + { + "id": "vs_abc123", + "object": "vector_store", + "created_at": 1699061776, + "name": "Support FAQ", + "bytes": 139920, + "file_counts": { + "in_progress": 0, + "completed": 3, + "failed": 0, + "cancelled": 0, + "total": 3 } - ], - "first_id": "vs_abc123", - "last_id": "vs_abc456", - "has_more": false - } + }, + { + "id": "vs_abc456", + "object": "vector_store", + "created_at": 1699061776, + "name": "Support FAQ v2", + "bytes": 139920, + "file_counts": { + "in_progress": 0, + "completed": 3, + "failed": 0, + "cancelled": 0, + "total": 3 + } + } + ], + "first_id": "vs_abc123", + "last_id": "vs_abc456", + "has_more": false + } post: operationId: createVectorStore tags: @@ -5812,78 +4958,75 @@ paths: schema: $ref: "#/components/schemas/VectorStoreObject" - security: - - Portkey-Key: [] - Virtual-Key: [] - - Portkey-Key: [] - Provider-Auth: [] - Provider-Name: [] - - Portkey-Key: [] - Config: [] - - Portkey-Key: [] - Provider-Auth: [] - Provider-Name: [] - Custom-Host: [] + security: + - Portkey-Key: [] + Virtual-Key: [] + - Portkey-Key: [] + Provider-Auth: [] + Provider-Name: [] + - Portkey-Key: [] + Config: [] + - Portkey-Key: [] + Provider-Auth: [] + Provider-Name: [] + Custom-Host: [] + + x-code-samples: + - lang: curl + source: | + curl https://api.portkey.ai/v1/vector_stores \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" + -d '{ + "name": "Support FAQ" + }' + - lang: python + source: | + from portkey_ai import Portkey + + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + vector_store = client.beta.vector_stores.create( + name="Support FAQ" + ) + print(vector_store) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; 
- x-code-samples: - name: Create vector store - group: vector_stores - beta: true - returns: A [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) object. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/vector_stores \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" - -d '{ - "name": "Support FAQ" - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - vector_store = client.beta.vector_stores.create( - name="Support FAQ" - ) - print(vector_store) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const vectorStore = await client.beta.vectorStores.create({ - name: "Support FAQ" - }); - console.log(vectorStore); - } + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); - main(); - response: | - { - "id": "vs_abc123", - "object": "vector_store", - "created_at": 1699061776, - "name": "Support FAQ", - "bytes": 139920, - "file_counts": { - "in_progress": 0, - "completed": 3, - "failed": 0, - "cancelled": 0, - "total": 3 - } - } + async function main() { + const vectorStore = await client.beta.vectorStores.create({ + name: "Support FAQ" + }); + console.log(vectorStore); + } + + main(); + response: | + { + "id": "vs_abc123", + "object": "vector_store", + "created_at": 1699061776, + "name": "Support FAQ", + "bytes": 139920, + "file_counts": { + "in_progress": 0, + "completed": 3, + "failed": 0, + "cancelled": 0, + "total": 3 + } + } /vector_stores/{vector_store_id}: get: @@ -5920,52 +5063,49 @@ paths: Custom-Host: [] x-code-samples: - name: Retrieve vector store - group: vector_stores - beta: true - returns: The [vector 
store](https://platform.openai.com/docs/api-reference/vector-stores/object) object matching the specified ID. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/vector_stores/vs_abc123 \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - vector_store = client.beta.vector_stores.retrieve( - vector_store_id="vs_abc123" - ) - print(vector_store) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const vectorStore = await client.beta.vectorStores.retrieve( - "vs_abc123" - ); - console.log(vectorStore); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/vector_stores/vs_abc123 \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - "id": "vs_abc123", - "object": "vector_store", - "created_at": 1699061776 - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + vector_store = client.beta.vector_stores.retrieve( + vector_store_id="vs_abc123" + ) + print(vector_store) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const vectorStore = await client.beta.vectorStores.retrieve( + "vs_abc123" + ); + console.log(vectorStore); + } + + main(); + response: | + { + "id": "vs_abc123", + "object": "vector_store", + "created_at": 1699061776 + } 
post: operationId: modifyVectorStore tags: @@ -6006,68 +5146,65 @@ paths: Custom-Host: [] x-code-samples: - name: Modify vector store - group: vector_stores - beta: true - returns: The modified [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) object. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/vector_stores/vs_abc123 \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" - -d '{ - "name": "Support FAQ" - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - vector_store = client.beta.vector_stores.update( - vector_store_id="vs_abc123", - name="Support FAQ" - ) - print(vector_store) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const vectorStore = await client.beta.vectorStores.update( - "vs_abc123", - { - name: "Support FAQ" - } - ); - console.log(vectorStore); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/vector_stores/vs_abc123 \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" \ + -d '{ + "name": "Support FAQ" + }' + - lang: python + source: | + from portkey_ai import Portkey + + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + vector_store = client.beta.vector_stores.update( + vector_store_id="vs_abc123", + name="Support FAQ" + ) + print(vector_store) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); - main(); - response: | + async
function main() { + const vectorStore = await client.beta.vectorStores.update( + "vs_abc123", { - "id": "vs_abc123", - "object": "vector_store", - "created_at": 1699061776, - "name": "Support FAQ", - "bytes": 139920, - "file_counts": { - "in_progress": 0, - "completed": 3, - "failed": 0, - "cancelled": 0, - "total": 3 - } + name: "Support FAQ" } + ); + console.log(vectorStore); + } + + main(); + response: | + { + "id": "vs_abc123", + "object": "vector_store", + "created_at": 1699061776, + "name": "Support FAQ", + "bytes": 139920, + "file_counts": { + "in_progress": 0, + "completed": 3, + "failed": 0, + "cancelled": 0, + "total": 3 + } + } delete: operationId: deleteVectorStore @@ -6103,53 +5240,50 @@ paths: Custom-Host: [] x-code-samples: - name: Delete vector store - group: vector_stores - beta: true - returns: Deletion status - examples: - request: - curl: | - curl https://api.portkey.ai/v1/vector_stores/vs_abc123 \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" \ - -X DELETE - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - deleted_vector_store = client.beta.vector_stores.delete( - vector_store_id="vs_abc123" - ) - print(deleted_vector_store) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const deletedVectorStore = await client.beta.vectorStores.del( - "vs_abc123" - ); - console.log(deletedVectorStore); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/vector_stores/vs_abc123 \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" \ + -X DELETE + - lang: python + 
source: | + from portkey_ai import Portkey - main(); - response: | - { - id: "vs_abc123", - object: "vector_store.deleted", - deleted: true - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + deleted_vector_store = client.beta.vector_stores.delete( + vector_store_id="vs_abc123" + ) + print(deleted_vector_store) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const deletedVectorStore = await client.beta.vectorStores.del( + "vs_abc123" + ); + console.log(deletedVectorStore); + } + + main(); + response: | + { + "id": "vs_abc123", + "object": "vector_store.deleted", + "deleted": true + } /vector_stores/{vector_store_id}/files: get: @@ -6216,67 +5350,64 @@ paths: Custom-Host: [] x-code-samples: - name: List vector store files - group: vector_stores - beta: true - returns: A list of [vector store file](https://platform.openai.com/docs/api-reference/vector-stores-files/file-object) objects. 
- examples: - request: - curl: | - curl https://api.portkey.ai/v1/vector_stores/vs_abc123/files \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - vector_store_files = client.beta.vector_stores.files.list( - vector_store_id="vs_abc123" - ) - print(vector_store_files) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const vectorStoreFiles = await client.beta.vectorStores.files.list( - "vs_abc123" - ); - console.log(vectorStoreFiles); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/vector_stores/vs_abc123/files \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey + + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + vector_store_files = client.beta.vector_stores.files.list( + vector_store_id="vs_abc123" + ) + print(vector_store_files) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const vectorStoreFiles = await client.beta.vectorStores.files.list( + "vs_abc123" + ); + console.log(vectorStoreFiles); + } - main(); - response: | + main(); + response: | + { + "object": "list", + "data": [ { - "object": "list", - "data": [ - { - "id": "file-abc123", - "object": "vector_store.file", - "created_at": 1699061776, - "vector_store_id": "vs_abc123" - }, - { - "id": 
"file-abc456", - "object": "vector_store.file", - "created_at": 1699061776, - "vector_store_id": "vs_abc123" - } - ], - "first_id": "file-abc123", - "last_id": "file-abc456", - "has_more": false + "id": "file-abc123", + "object": "vector_store.file", + "created_at": 1699061776, + "vector_store_id": "vs_abc123" + }, + { + "id": "file-abc456", + "object": "vector_store.file", + "created_at": 1699061776, + "vector_store_id": "vs_abc123" } + ], + "first_id": "file-abc123", + "last_id": "file-abc456", + "has_more": false + } post: operationId: createVectorStoreFile tags: @@ -6319,63 +5450,60 @@ paths: Custom-Host: [] x-code-samples: - name: Create vector store file - group: vector_stores - beta: true - returns: A [vector store file](https://platform.openai.com/docs/api-reference/vector-stores-files/file-object) object. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/vector_stores/vs_abc123/files \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "file_id": "file-abc123" - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - vector_store_file = client.beta.vector_stores.files.create( - vector_store_id="vs_abc123", - file_id="file-abc123" - ) - print(vector_store_file) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const myVectorStoreFile = await client.beta.vectorStores.files.create( - "vs_abc123", - { - file_id: "file-abc123" - } - ); - console.log(myVectorStoreFile); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/vector_stores/vs_abc123/files \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H 
"Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" \ + -d '{ + "file_id": "file-abc123" + }' + - lang: python + source: | + from portkey_ai import Portkey + + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + vector_store_file = client.beta.vector_stores.files.create( + vector_store_id="vs_abc123", + file_id="file-abc123" + ) + print(vector_store_file) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); - main(); - response: | + async function main() { + const myVectorStoreFile = await client.beta.vectorStores.files.create( + "vs_abc123", { - "id": "file-abc123", - "object": "vector_store.file", - "created_at": 1699061776, - "usage_bytes": 1234, - "vector_store_id": "vs_abcd", - "status": "completed", - "last_error": null + file_id: "file-abc123" } + ); + console.log(myVectorStoreFile); + } + + main(); + response: | + { + "id": "file-abc123", + "object": "vector_store.file", + "created_at": 1699061776, + "usage_bytes": 1234, + "vector_store_id": "vs_abcd", + "status": "completed", + "last_error": null + } /vector_stores/{vector_store_id}/files/{file_id}: get: @@ -6420,57 +5548,54 @@ paths: Custom-Host: [] x-code-samples: - name: Retrieve vector store file - group: vector_stores - beta: true - returns: The [vector store file](https://platform.openai.com/docs/api-reference/vector-stores-files/file-object) object. 
- examples: - request: - curl: | - curl https://api.portkey.ai/v1/vector_stores/vs_abc123/files/file-abc123 \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - vector_store_file = client.beta.vector_stores.files.retrieve( - vector_store_id="vs_abc123", - file_id="file-abc123" - ) - print(vector_store_file) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const vectorStoreFile = await client.beta.vectorStores.files.retrieve( - "vs_abc123", - "file-abc123" - ); - console.log(vectorStoreFile); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/vector_stores/vs_abc123/files/file-abc123 \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - "id": "file-abc123", - "object": "vector_store.file", - "created_at": 1699061776, - "vector_store_id": "vs_abcd", - "status": "completed", - "last_error": null - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + vector_store_file = client.beta.vector_stores.files.retrieve( + vector_store_id="vs_abc123", + file_id="file-abc123" + ) + print(vector_store_file) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const vectorStoreFile = await client.beta.vectorStores.files.retrieve( + "vs_abc123", + 
"file-abc123" + ); + console.log(vectorStoreFile); + } + + main(); + response: | + { + "id": "file-abc123", + "object": "vector_store.file", + "created_at": 1699061776, + "vector_store_id": "vs_abcd", + "status": "completed", + "last_error": null + } delete: operationId: deleteVectorStoreFile tags: @@ -6511,55 +5636,52 @@ paths: Custom-Host: [] x-code-samples: - name: Delete vector store file - group: vector_stores - beta: true - returns: Deletion status - examples: - request: - curl: | - curl https://api.portkey.ai/v1/vector_stores/vs_abc123/files/file-abc123 \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" \ - -X DELETE - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - deleted_vector_store_file = client.beta.vector_stores.files.delete( - vector_store_id="vs_abc123", - file_id="file-abc123" - ) - print(deleted_vector_store_file) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const deletedVectorStoreFile = await client.beta.vectorStores.files.del( - "vs_abc123", - "file-abc123" - ); - console.log(deletedVectorStoreFile); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/vector_stores/vs_abc123/files/file-abc123 \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" \ + -X DELETE + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - id: "file-abc123", - object: "vector_store.file.deleted", - deleted: true - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + deleted_vector_store_file 
= client.beta.vector_stores.files.delete( + vector_store_id="vs_abc123", + file_id="file-abc123" + ) + print(deleted_vector_store_file) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const deletedVectorStoreFile = await client.beta.vectorStores.files.del( + "vs_abc123", + "file-abc123" + ); + console.log(deletedVectorStoreFile); + } + + main(); + response: | + { + "id": "file-abc123", + "object": "vector_store.file.deleted", + "deleted": true + } /vector_stores/{vector_store_id}/file_batches: post: @@ -6604,68 +5726,65 @@ paths: Custom-Host: [] x-code-samples: - name: Create vector store file batch - group: vector_stores - beta: true - returns: A [vector store file batch](https://platform.openai.com/docs/api-reference/vector-stores-file-batches/batch-object) object. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/vector_stores/vs_abc123/file_batches \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json \ - -H "OpenAI-Beta: assistants=v2" \ - -d '{ - "file_ids": ["file-abc123", "file-abc456"] - }' - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - vector_store_file_batch = client.beta.vector_stores.file_batches.create( - vector_store_id="vs_abc123", - file_ids=["file-abc123", "file-abc456"] - ) - print(vector_store_file_batch) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const myVectorStoreFileBatch = await client.beta.vectorStores.fileBatches.create( - "vs_abc123", - { - file_ids: ["file-abc123", "file-abc456"] - } - ); - console.log(myVectorStoreFileBatch); - } + - lang: 
curl + source: | + curl https://api.portkey.ai/v1/vector_stores/vs_abc123/file_batches \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" \ + -d '{ + "file_ids": ["file-abc123", "file-abc456"] + }' + - lang: python + source: | + from portkey_ai import Portkey + + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + vector_store_file_batch = client.beta.vector_stores.file_batches.create( + vector_store_id="vs_abc123", + file_ids=["file-abc123", "file-abc456"] + ) + print(vector_store_file_batch) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); - main(); - response: | + async function main() { + const myVectorStoreFileBatch = await client.beta.vectorStores.fileBatches.create( + "vs_abc123", { - "id": "vsfb_abc123", - "object": "vector_store.file_batch", - "created_at": 1699061776, - "vector_store_id": "vs_abc123", - "status": "in_progress", - "file_counts": { - "in_progress": 1, - "completed": 1, - "failed": 0, - "cancelled": 0, - "total": 0, - } + file_ids: ["file-abc123", "file-abc456"] + } + ); + console.log(myVectorStoreFileBatch); + } + + main(); + response: | + { + "id": "vsfb_abc123", + "object": "vector_store.file_batch", + "created_at": 1699061776, + "vector_store_id": "vs_abc123", + "status": "in_progress", + "file_counts": { + "in_progress": 1, + "completed": 1, + "failed": 0, + "cancelled": 0, + "total": 0 } + } /vector_stores/{vector_store_id}/file_batches/{batch_id}: get: @@ -6710,63 +5829,60 @@ paths: Custom-Host: [] x-code-samples: - name: Retrieve vector store file batch - group: vector_stores - beta: true - returns: The [vector store file batch](https://platform.openai.com/docs/api-reference/vector-stores-file-batches/batch-object) object.
- examples: - request: - curl: | - curl https://api.portkey.ai/v1/vector_stores/vs_abc123/files_batches/vsfb_abc123 \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - vector_store_file_batch = client.beta.vector_stores.file_batches.retrieve( - vector_store_id="vs_abc123", - batch_id="vsfb_abc123" - ) - print(vector_store_file_batch) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const vectorStoreFileBatch = await client.beta.vectorStores.fileBatches.retrieve( - "vs_abc123", - "vsfb_abc123" - ); - console.log(vectorStoreFileBatch); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/vector_stores/vs_abc123/file_batches/vsfb_abc123 \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - "id": "vsfb_abc123", - "object": "vector_store.file_batch", - "created_at": 1699061776, - "vector_store_id": "vs_abc123", - "status": "in_progress", - "file_counts": { - "in_progress": 1, - "completed": 1, - "failed": 0, - "cancelled": 0, - "total": 0, - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + vector_store_file_batch = client.beta.vector_stores.file_batches.retrieve( + vector_store_id="vs_abc123", + batch_id="vsfb_abc123" + ) + print(vector_store_file_batch) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY',
virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const vectorStoreFileBatch = await client.beta.vectorStores.fileBatches.retrieve( + "vs_abc123", + "vsfb_abc123" + ); + console.log(vectorStoreFileBatch); + } + + main(); + response: | + { + "id": "vsfb_abc123", + "object": "vector_store.file_batch", + "created_at": 1699061776, + "vector_store_id": "vs_abc123", + "status": "in_progress", + "file_counts": { + "in_progress": 1, + "completed": 1, + "failed": 0, + "cancelled": 0, + "total": 0 } + } /vector_stores/{vector_store_id}/file_batches/{batch_id}/cancel: post: @@ -6809,64 +5925,61 @@ paths: Custom-Host: [] x-code-samples: - name: Cancel vector store file batch - group: vector_stores - beta: true - returns: The modified vector store file batch object. - examples: - request: - curl: | - curl https://api.portkey.ai/v1/vector_stores/vs_abc123/files_batches/vsfb_abc123/cancel \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" \ - -X POST - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - deleted_vector_store_file_batch = client.beta.vector_stores.file_batches.cancel( - vector_store_id="vs_abc123", - file_batch_id="vsfb_abc123" - ) - print(deleted_vector_store_file_batch) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const deletedVectorStoreFileBatch = await client.vector_stores.fileBatches.cancel( - "vs_abc123", - "vsfb_abc123" - ); - console.log(deletedVectorStoreFileBatch); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/vector_stores/vs_abc123/file_batches/vsfb_abc123/cancel \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key:
$PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" \ + -X POST + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - "id": "vsfb_abc123", - "object": "vector_store.file_batch", - "created_at": 1699061776, - "vector_store_id": "vs_abc123", - "status": "cancelling", - "file_counts": { - "in_progress": 12, - "completed": 3, - "failed": 0, - "cancelled": 0, - "total": 15, - } - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + deleted_vector_store_file_batch = client.beta.vector_stores.file_batches.cancel( + vector_store_id="vs_abc123", + file_batch_id="vsfb_abc123" + ) + print(deleted_vector_store_file_batch) + - lang: javascript + source: | + import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const deletedVectorStoreFileBatch = await client.beta.vectorStores.fileBatches.cancel( + "vs_abc123", + "vsfb_abc123" + ); + console.log(deletedVectorStoreFileBatch); + } + + main(); + response: | + { + "id": "vsfb_abc123", + "object": "vector_store.file_batch", + "created_at": 1699061776, + "vector_store_id": "vs_abc123", + "status": "cancelling", + "file_counts": { + "in_progress": 12, + "completed": 3, + "failed": 0, + "cancelled": 0, + "total": 15 + } + } /vector_stores/{vector_store_id}/file_batches/{batch_id}/files: get: @@ -6939,69 +6052,66 @@ paths: Custom-Host: [] x-code-samples: - name: List vector store files in a batch - group: vector_stores - beta: true - returns: A list of [vector store file](https://platform.openai.com/docs/api-reference/vector-stores-files/file-object) objects.
- examples: - request: - curl: | - curl https://api.portkey.ai/v1/vector_stores/vs_abc123/files_batches/vsfb_abc123/files \ - -H "x-portkey-api-key: $PORTKEY_API_KEY" \ - -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ - -H "Content-Type: application/json" \ - -H "OpenAI-Beta: assistants=v2" - python: | - from portkey_ai import Portkey - - client = Portkey( - api_key = "PORTKEY_API_KEY", - virtual_key = "PROVIDER_VIRTUAL_KEY" - ) - - vector_store_files = client.beta.vector_stores.file_batches.list_files( - vector_store_id="vs_abc123", - batch_id="vsfb_abc123" - ) - print(vector_store_files) - node.js: | - import Portkey from 'portkey-ai'; - - const client = new Portkey({ - apiKey: 'PORTKEY_API_KEY', - virtualKey: 'PROVIDER_VIRTUAL_KEY' - }); - - async function main() { - const vectorStoreFiles = await client.beta.vectorStores.fileBatches.listFiles( - "vs_abc123", - "vsfb_abc123" - ); - console.log(vectorStoreFiles); - } + - lang: curl + source: | + curl https://api.portkey.ai/v1/vector_stores/vs_abc123/file_batches/vsfb_abc123/files \ + -H "x-portkey-api-key: $PORTKEY_API_KEY" \ + -H "x-portkey-virtual-key: $PORTKEY_PROVIDER_VIRTUAL_KEY" \ + -H "Content-Type: application/json" \ + -H "OpenAI-Beta: assistants=v2" + - lang: python + source: | + from portkey_ai import Portkey - main(); - response: | - { - "object": "list", - "data": [ - { - "id": "file-abc123", - "object": "vector_store.file", - "created_at": 1699061776, - "vector_store_id": "vs_abc123" - }, - { - "id": "file-abc456", - "object": "vector_store.file", - "created_at": 1699061776, - "vector_store_id": "vs_abc123" - } - ], - "first_id": "file-abc123", - "last_id": "file-abc456", - "has_more": false - } + client = Portkey( + api_key = "PORTKEY_API_KEY", + virtual_key = "PROVIDER_VIRTUAL_KEY" + ) + + vector_store_files = client.beta.vector_stores.file_batches.list_files( + vector_store_id="vs_abc123", + batch_id="vsfb_abc123" + ) + print(vector_store_files) + - lang: javascript + source: |
import Portkey from 'portkey-ai'; + + const client = new Portkey({ + apiKey: 'PORTKEY_API_KEY', + virtualKey: 'PROVIDER_VIRTUAL_KEY' + }); + + async function main() { + const vectorStoreFiles = await client.beta.vectorStores.fileBatches.listFiles( + "vs_abc123", + "vsfb_abc123" + ); + console.log(vectorStoreFiles); + } + + main(); + response: | + { + "object": "list", + "data": [ + { + "id": "file-abc123", + "object": "vector_store.file", + "created_at": 1699061776, + "vector_store_id": "vs_abc123" + }, + { + "id": "file-abc456", + "object": "vector_store.file", + "created_at": 1699061776, + "vector_store_id": "vs_abc123" + } + ], + "first_id": "file-abc123", + "last_id": "file-abc456", + "has_more": false + } /batches: post: