|
8593 | 8593 | }, |
8594 | 8594 | { |
8595 | 8595 | "$ref": "#/components/schemas/OpenAIResponseInputMessageContentImage" |
| 8596 | + }, |
| 8597 | + { |
| 8598 | + "$ref": "#/components/schemas/OpenAIResponseInputMessageContentFile" |
8596 | 8599 | } |
8597 | 8600 | ], |
8598 | 8601 | "discriminator": { |
8599 | 8602 | "propertyName": "type", |
8600 | 8603 | "mapping": { |
8601 | 8604 | "input_text": "#/components/schemas/OpenAIResponseInputMessageContentText", |
8602 | | - "input_image": "#/components/schemas/OpenAIResponseInputMessageContentImage" |
| 8605 | + "input_image": "#/components/schemas/OpenAIResponseInputMessageContentImage", |
| 8606 | + "input_file": "#/components/schemas/OpenAIResponseInputMessageContentFile" |
8603 | 8607 | } |
8604 | 8608 | } |
8605 | 8609 | }, |
| 8610 | + "OpenAIResponseInputMessageContentFile": { |
| 8611 | + "type": "object", |
| 8612 | + "properties": { |
| 8613 | + "type": { |
| 8614 | + "type": "string", |
| 8615 | + "const": "input_file", |
| 8616 | + "default": "input_file", |
| 8617 | + "description": "The type of the input item. Always `input_file`." |
| 8618 | + }, |
| 8619 | + "file_data": { |
| 8620 | + "type": "string", |
| 8621 | + "description": "(Optional) The data of the file to be sent to the model." |
| 8622 | + }, |
| 8623 | + "file_id": { |
| 8624 | + "type": "string", |
| 8625 | + "description": "(Optional) The ID of the file to be sent to the model." |
| 8626 | + }, |
| 8627 | + "file_url": { |
| 8628 | + "type": "string", |
| 8629 | + "description": "(Optional) The URL of the file to be sent to the model." |
| 8630 | + }, |
| 8631 | + "filename": { |
| 8632 | + "type": "string", |
| 8633 | + "description": "(Optional) The name of the file to be sent to the model." |
| 8634 | + } |
| 8635 | + }, |
| 8636 | + "additionalProperties": false, |
| 8637 | + "required": [ |
| 8638 | + "type" |
| 8639 | + ], |
| 8640 | + "title": "OpenAIResponseInputMessageContentFile", |
| 8641 | + "description": "File content for input messages in OpenAI response format." |
| 8642 | + }, |
8606 | 8643 | "OpenAIResponseInputMessageContentImage": { |
8607 | 8644 | "type": "object", |
8608 | 8645 | "properties": { |
|
8630 | 8667 | "default": "input_image", |
8631 | 8668 | "description": "Content type identifier, always \"input_image\"" |
8632 | 8669 | }, |
| 8670 | + "file_id": { |
| 8671 | + "type": "string", |
| 8672 | + "description": "(Optional) The ID of the file to be sent to the model." |
| 8673 | + }, |
8633 | 8674 | "image_url": { |
8634 | 8675 | "type": "string", |
8635 | 8676 | "description": "(Optional) URL of the image content" |
|
8993 | 9034 | "type": "string", |
8994 | 9035 | "description": "(Optional) ID of the previous response in a conversation" |
8995 | 9036 | }, |
| 9037 | + "prompt": { |
| 9038 | + "$ref": "#/components/schemas/Prompt", |
| 9039 | + "description": "(Optional) Prompt object with ID, version, and variables" |
| 9040 | + }, |
8996 | 9041 | "status": { |
8997 | 9042 | "type": "string", |
8998 | 9043 | "description": "Current status of the response generation" |
|
9610 | 9655 | "title": "OpenAIResponseUsage", |
9611 | 9656 | "description": "Usage information for OpenAI response." |
9612 | 9657 | }, |
| 9658 | + "Prompt": { |
| 9659 | + "type": "object", |
| 9660 | + "properties": { |
| 9661 | + "prompt": { |
| 9662 | + "type": "string", |
| 9663 | + "description": "(Optional) The system prompt text with variable placeholders. Variables are only supported when using the Responses API." |
| 9664 | + }, |
| 9665 | + "version": { |
| 9666 | + "type": "integer", |
| 9667 | + "description": "Version (integer starting at 1, incremented on save)" |
| 9668 | + }, |
| 9669 | + "prompt_id": { |
| 9670 | + "type": "string", |
| 9671 | + "description": "Unique identifier formatted as 'pmpt_<48-digit-hash>'" |
| 9672 | + }, |
| 9673 | + "variables": { |
| 9674 | + "type": "array", |
| 9675 | + "items": { |
| 9676 | + "type": "string" |
| 9677 | + }, |
| 9678 | + "description": "List of prompt variable names that can be used in the prompt template" |
| 9679 | + }, |
| 9680 | + "is_default": { |
| 9681 | + "type": "boolean", |
| 9682 | + "default": false, |
| 9683 | + "description": "Boolean indicating whether this version is the default version for this prompt" |
| 9684 | + } |
| 9685 | + }, |
| 9686 | + "additionalProperties": false, |
| 9687 | + "required": [ |
| 9688 | + "version", |
| 9689 | + "prompt_id", |
| 9690 | + "variables", |
| 9691 | + "is_default" |
| 9692 | + ], |
| 9693 | + "title": "Prompt", |
| 9694 | + "description": "A prompt resource representing a stored OpenAI Compatible prompt template in Llama Stack." |
| 9695 | + }, |
9613 | 9696 | "ResponseGuardrailSpec": { |
9614 | 9697 | "type": "object", |
9615 | 9698 | "properties": { |
|
9766 | 9849 | "title": "OpenAIResponseInputToolMCP", |
9767 | 9850 | "description": "Model Context Protocol (MCP) tool configuration for OpenAI response inputs." |
9768 | 9851 | }, |
| 9852 | + "OpenAIResponsePromptParam": { |
| 9853 | + "type": "object", |
| 9854 | + "properties": { |
| 9855 | + "id": { |
| 9856 | + "type": "string", |
| 9857 | + "description": "Unique identifier of the prompt template" |
| 9858 | + }, |
| 9859 | + "variables": { |
| 9860 | + "type": "object", |
| 9861 | + "additionalProperties": { |
| 9862 | + "$ref": "#/components/schemas/OpenAIResponseInputMessageContent" |
| 9863 | + }, |
| 9864 | + "description": "Dictionary of variable names to OpenAIResponseInputMessageContent structure for template substitution" |
| 9865 | + }, |
| 9866 | + "version": { |
| 9867 | + "type": "string", |
| 9868 | + "description": "(Optional) Version of the prompt to use as a string (defaults to latest if not specified)" |
| 9869 | + } |
| 9870 | + }, |
| 9871 | + "additionalProperties": false, |
| 9872 | + "required": [ |
| 9873 | + "id" |
| 9874 | + ], |
| 9875 | + "title": "OpenAIResponsePromptParam", |
| 9876 | + "description": "Prompt object that is used for OpenAI responses." |
| 9877 | + }, |
9769 | 9878 | "CreateOpenaiResponseRequest": { |
9770 | 9879 | "type": "object", |
9771 | 9880 | "properties": { |
|
9787 | 9896 | "type": "string", |
9788 | 9897 | "description": "The underlying LLM used for completions." |
9789 | 9898 | }, |
| 9899 | + "prompt": { |
| 9900 | + "$ref": "#/components/schemas/OpenAIResponsePromptParam", |
| 9901 | + "description": "Prompt object with ID, version, and variables." |
| 9902 | + }, |
9790 | 9903 | "instructions": { |
9791 | 9904 | "type": "string" |
9792 | 9905 | }, |
|
9875 | 9988 | "type": "string", |
9876 | 9989 | "description": "(Optional) ID of the previous response in a conversation" |
9877 | 9990 | }, |
| 9991 | + "prompt": { |
| 9992 | + "$ref": "#/components/schemas/Prompt", |
| 9993 | + "description": "(Optional) Prompt object with ID, version, and variables" |
| 9994 | + }, |
9878 | 9995 | "status": { |
9879 | 9996 | "type": "string", |
9880 | 9997 | "description": "Current status of the response generation" |
|
0 commit comments