The workaround is to disable instrumentation, but that would disable it for the entire codebase, and since it's really useful, that would be a shame.
Sorry for the long stack trace; it looks like the args include a bunch of API info.
wkok.openai-clojure.api/create-embedding api.clj: 183
wkok.openai-clojure.api/create-embedding api.clj: 188
wkok.openai-clojure.core/response-for core.clj: 26
clojure.core/deref core.clj: 2337
...
wkok.openai-clojure.openai/fn openai.clj: 81
wkok.openai-clojure.openai/bootstrap-openapi openai.clj: 78
...
martian.core/bootstrap-openapi core.cljc: 128
martian.core/bootstrap-openapi core.cljc: 132
...
orchestra.spec.test/spec-checking-fn/fn test.cljc: 36
orchestra.spec.test/spec-checking-fn/conform! test.cljc: 30
clojure.lang.ExceptionInfo: Call to martian.core/build-instance did not conform to spec.
clojure.spec.alpha/args: ("https://api.openai.com/v1"
({:description nil,
:method :get,
:produces ["application/json"],
:path-schema {:model java.lang.String},
:query-schema nil,
:parameter-aliases
{:path-schema {},
:query-schema {},
:body-schema {},
:form-schema {},
:headers-schema {}},
:form-schema nil,
:path-parts ["/models/" :model],
:headers-schema nil,
:openapi-definition
{:operationId "retrieveModel",
:tags ["OpenAI"],
:summary
"Retrieves a model instance, providing basic information about the model such as the owner and permissioning.",
:parameters
[{:in "path",
:name "model",
:required true,
:schema {:type "string", :example "text-davinci-001"},
:description "The ID of the model to use for this request"}],
:responses
{:200
{:description "OK",
:content
#:application{:json
{:schema {:$ref "#/components/schemas/Model"}}}}},
:x-oaiMeta
{:name "Retrieve model",
:group "models",
:path "retrieve",
:examples
{:curl
"curl https://api.openai.com/v1/models/VAR_model_id \\\n -H 'Authorization: Bearer YOUR_API_KEY'\n",
:python
"import os\nimport openai\nopenai.api_key = os.getenv(\"OPENAI_API_KEY\")\nopenai.Model.retrieve(\"VAR_model_id\")\n",
:node.js
"const { Configuration, OpenAIApi } = require(\"openai\");\nconst configuration = new Configuration({\n apiKey: process.env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\nconst response = await openai.retrieveModel(\"VAR_model_id\");\n"},
:response
"{\n \"id\": \"VAR_model_id\",\n \"object\": \"model\",\n \"owned_by\": \"openai\",\n \"permission\": [...]\n}\n"}},
...}
{:description nil,
:method :delete,
:produces ["application/json"],
:path-schema {:model java.lang.String},
:query-schema nil,
:parameter-aliases
{:path-schema {},
:query-schema {},
:body-schema {},
:form-schema {},
:headers-schema {}},
:form-schema nil,
:path-parts ["/models/" :model],
:headers-schema nil,
:openapi-definition
{:operationId "deleteModel",
:tags ["OpenAI"],
:summary
"Delete a fine-tuned model. You must have the Owner role in your organization.",
:parameters
[{:in "path",
:name "model",
:required true,
:schema
{:type "string", :example "curie:ft-acmeco-2021-03-03-21-44-20"},
:description "The model to delete"}],
:responses
{:200
{:description "OK",
:content
#:application{:json
{:schema
{:$ref
"#/components/schemas/DeleteModelResponse"}}}}},
:x-oaiMeta
{:name "Delete fine-tune model",
:group "fine-tunes",
:path "delete-model",
:examples
{:curl
"curl https://api.openai.com/v1/models/curie:ft-acmeco-2021-03-03-21-44-20 \\\n -X DELETE \\\n -H \"Authorization: Bearer YOUR_API_KEY\"\n",
:python
"import os\nimport openai\nopenai.api_key = os.getenv(\"OPENAI_API_KEY\")\nopenai.Model.delete(\"curie:ft-acmeco-2021-03-03-21-44-20\")\n",
:node.js
"const { Configuration, OpenAIApi } = require(\"openai\");\nconst configuration = new Configuration({\n apiKey: process.env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\nconst response = await openai.deleteModel('curie:ft-acmeco-2021-03-03-21-44-20');\n"},
:response
"{\n \"id\": \"curie:ft-acmeco-2021-03-03-21-44-20\",\n \"object\": \"model\",\n \"deleted\": true\n}\n"}},
...}
{:description nil,
:method :get,
:produces ["application/json"],
:path-schema nil,
:query-schema nil,
:parameter-aliases
{:path-schema {},
:query-schema {},
:body-schema {},
:form-schema {},
:headers-schema {}},
:form-schema nil,
:path-parts ["/engines"],
:headers-schema nil,
:openapi-definition
{:operationId "listEngines",
:deprecated true,
:tags ["OpenAI"],
:summary
"Lists the currently available (non-finetuned) models, and provides basic information about each one such as the owner and availability.",
:responses
{:200
{:description "OK",
:content
#:application{:json
{:schema
{:$ref
"#/components/schemas/ListEnginesResponse"}}}}},
:x-oaiMeta
{:name "List engines",
:group "engines",
:path "list",
:examples
{:curl
"curl https://api.openai.com/v1/engines \\\n -H 'Authorization: Bearer YOUR_API_KEY'\n",
:python
"import os\nimport openai\nopenai.api_key = os.getenv(\"OPENAI_API_KEY\")\nopenai.Engine.list()\n",
:node.js
"const { Configuration, OpenAIApi } = require(\"openai\");\nconst configuration = new Configuration({\n apiKey: process.env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\nconst response = await openai.listEngines();\n"},
:response
"{\n \"data\": [\n {\n \"id\": \"engine-id-0\",\n \"object\": \"engine\",\n \"owner\": \"organization-owner\",\n \"ready\": true\n },\n {\n \"id\": \"engine-id-2\",\n \"object\": \"engine\",\n \"owner\": \"organization-owner\",\n \"ready\": true\n },\n {\n \"id\": \"engine-id-3\",\n \"object\": \"engine\",\n \"owner\": \"openai\",\n \"ready\": false\n },\n ],\n \"object\": \"list\"\n}\n"}},
...}
{:description nil,
:method :post,
:produces ["application/json"],
:path-schema nil,
:query-schema nil,
:parameter-aliases
{:path-schema {},
:query-schema {},
:body-schema
{[:body]
{:return-metadata :return_metadata,
:search-model :search_model,
:return-prompt :return_prompt,
:logit-bias :logit_bias,
:max-examples :max_examples}},
:form-schema {},
:headers-schema {}},
:form-schema nil,
:path-parts ["/classifications"],
:headers-schema nil,
:openapi-definition
{:operationId "createClassification",
:deprecated true,
:tags ["OpenAI"],
:summary
"Classifies the specified `query` using provided examples.\n\nThe endpoint first [searches](/docs/api-reference/searches) over the labeled examples\nto select the ones most relevant for the particular query. Then, the relevant examples\nare combined with the query to construct a prompt to produce the final label via the\n[completions](/docs/api-reference/completions) endpoint.\n\nLabeled examples can be provided via an uploaded `file`, or explicitly listed in the\nrequest using the `examples` parameter for quick tests and small scale use cases.\n",
:requestBody
{:required true,
:content
#:application{:json
{:schema
{:$ref
"#/components/schemas/CreateClassificationRequest"}}}},
:responses
{:200
{:description "OK",
:content
#:application{:json
{:schema
{:$ref
"#/components/schemas/CreateClassificationResponse"}}}}},
:x-oaiMeta
{:name "Create classification",
:group "classifications",
:path "create",
:examples
{:curl
"curl https://api.openai.com/v1/classifications \\\n -X POST \\\n -H \"Authorization: Bearer YOUR_API_KEY\" \\\n -H 'Content-Type: application/json' \\\n -d '{\n \"examples\": [\n [\"A happy moment\", \"Positive\"],\n [\"I am sad.\", \"Negative\"],\n [\"I am feeling awesome\", \"Positive\"]],\n \"query\": \"It is a raining day :(\",\n \"search_model\": \"ada\",\n \"model\": \"curie\",\n \"labels\":[\"Positive\", \"Negative\", \"Neutral\"]\n }'\n",
:python
"import os\nimport openai\nopenai.api_key = os.getenv(\"OPENAI_API_KEY\")\nopenai.Classification.create(\n search_model=\"ada\",\n model=\"curie\",\n examples=[\n [\"A happy moment\", \"Positive\"],\n [\"I am sad.\", \"Negative\"],\n [\"I am feeling awesome\", \"Positive\"]\n ],\n query=\"It is a raining day :(\",\n labels=[\"Positive\", \"Negative\", \"Neutral\"],\n)\n",
:node.js
"const { Configuration, OpenAIApi } = require(\"openai\");\nconst configuration = new Configuration({\n apiKey: process.env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\nconst response = await openai.createClassification({\n search_model: \"ada\",\n model: \"curie\",\n examples: [\n [\"A happy moment\", \"Positive\"],\n [\"I am sad.\", \"Negative\"],\n [\"I am feeling awesome\", \"Positive\"]\n ],\n query:\"It is a raining day :(\",\n labels: [\"Positive\", \"Negative\", \"Neutral\"],\n});\n"},
:parameters
"{\n \"examples\": [\n [\"A happy moment\", \"Positive\"],\n [\"I am sad.\", \"Negative\"],\n [\"I am feeling awesome\", \"Positive\"]\n ],\n \"labels\": [\"Positive\", \"Negative\", \"Neutral\"],\n \"query\": \"It is a raining day :(\",\n \"search_model\": \"ada\",\n \"model\": \"curie\"\n}\n",
:response
"{\n \"completion\": \"cmpl-2euN7lUVZ0d4RKbQqRV79IiiE6M1f\",\n \"label\": \"Negative\",\n \"model\": \"curie:2020-05-03\",\n \"object\": \"classification\",\n \"search_model\": \"ada\",\n \"selected_examples\": [\n {\n \"document\": 1,\n \"label\": \"Negative\",\n \"text\": \"I am sad.\"\n },\n {\n \"document\": 0,\n \"label\": \"Positive\",\n \"text\": \"A happy moment\"\n },\n {\n \"document\": 2,\n \"label\": \"Positive\",\n \"text\": \"I am feeling awesome\"\n }\n ]\n}\n"}},
...}
{:description nil,
:method :post,
:produces ["application/json"],
:path-schema {:engine_id java.lang.String},
:query-schema nil,
:parameter-aliases
{:path-schema {[] {:engine-id :engine_id}},
:query-schema {},
:body-schema
{[:body]
{:max-rerank :max_rerank, :return-metadata :return_metadata}},
:form-schema {},
:headers-schema {}},
:form-schema nil,
:path-parts ["/engines/" :engine_id "/search"],
:headers-schema nil,
:openapi-definition
{:operationId "createSearch",
:deprecated true,
:tags ["OpenAI"],
:summary
"The search endpoint computes similarity scores between provided query and documents. Documents can be passed directly to the API if there are no more than 200 of them.\n\nTo go beyond the 200 document limit, documents can be processed offline and then used for efficient retrieval at query time. When `file` is set, the search endpoint searches over all the documents in the given file and returns up to the `max_rerank` number of documents. These documents will be returned along with their search scores.\n\nThe similarity score is a positive score that usually ranges from 0 to 300 (but can sometimes go higher), where a score above 200 usually means the document is semantically similar to the query.\n",
:parameters
[{:in "path",
:name "engine_id",
:required true,
:schema {:type "string", :example "davinci"},
:description
"The ID of the engine to use for this request. You can select one of `ada`, `babbage`, `curie`, or `davinci`."}],
:requestBody
{:required true,
:content
#:application{:json
{:schema
{:$ref
"#/components/schemas/CreateSearchRequest"}}}},
:responses
{:200
{:description "OK",
:content
#:application{:json
{:schema
{:$ref
"#/components/schemas/CreateSearchResponse"}}}}},
:x-oaiMeta
{:name "Create search",
:group "searches",
:path "create",
:examples
{:curl
"curl https://api.openai.com/v1/engines/davinci/search \\\n -H \"Content-Type: application/json\" \\\n -H 'Authorization: Bearer YOUR_API_KEY' \\\n -d '{\n \"documents\": [\"White House\", \"hospital\", \"school\"],\n \"query\": \"the president\"\n}'\n",
:python
"import os\nimport openai\nopenai.api_key = os.getenv(\"OPENAI_API_KEY\")\nopenai.Engine(\"davinci\").search(\n documents=[\"White House\", \"hospital\", \"school\"],\n query=\"the president\"\n)\n",
:node.js
"const { Configuration, OpenAIApi } = require(\"openai\");\nconst configuration = new Configuration({\n apiKey: process.env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\nconst response = await openai.createSearch(\"davinci\", {\n documents: [\"White House\", \"hospital\", \"school\"],\n query: \"the president\",\n});\n"},
:parameters
"{\n \"documents\": [\n \"White House\",\n \"hospital\",\n \"school\"\n ],\n \"query\": \"the president\"\n}\n",
:response
"{\n \"data\": [\n {\n \"document\": 0,\n \"object\": \"search_result\",\n \"score\": 215.412\n },\n {\n \"document\": 1,\n \"object\": \"search_result\",\n \"score\": 40.316\n },\n {\n \"document\": 2,\n \"object\": \"search_result\",\n \"score\": 55.226\n }\n ],\n \"object\": \"list\"\n}\n"}},
...}
{:description nil,
:method :post,
:produces ["application/json"],
:path-schema nil,
:query-schema nil,
:parameter-aliases
{:path-schema {},
:query-schema {},
:body-schema {[:body] {:response-format :response_format}},
:form-schema {},
:headers-schema {}},
:form-schema nil,
:path-parts ["/images/variations"],
:headers-schema nil,
:openapi-definition
{:operationId "createImageVariation",
:tags ["OpenAI"],
:summary "Creates a variation of a given image.",
:requestBody
{:required true,
:content
#:multipart{:form-data
{:schema
{:$ref
"#/components/schemas/CreateImageVariationRequest"}}}},
:responses
{:200
{:description "OK",
:content
#:application{:json
{:schema
{:$ref "#/components/schemas/ImagesResponse"}}}}},
:x-oaiMeta
{:name "Create image variation",
:group "images",
:path "create-variation",
:beta true,
:examples
{:curl
"curl https://api.openai.com/v1/images/variations \\\n -H 'Authorization: Bearer YOUR_API_KEY' \\\n -F image='@otter.png' \\\n -F n=2 \\\n -F size=\"1024x1024\"\n",
:python
"import os\nimport openai\nopenai.api_key = os.getenv(\"OPENAI_API_KEY\")\nopenai.Image.create_variation(\n image=open(\"otter.png\", \"rb\"),\n n=2,\n size=\"1024x1024\"\n)\n",
:node.js
"const { Configuration, OpenAIApi } = require(\"openai\");\nconst configuration = new Configuration({\n apiKey: process.env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\nconst response = await openai.createImageVariation(\n fs.createReadStream(\"otter.png\"),\n 2,\n \"1024x1024\"\n);\n"},
:response
"{\n \"created\": 1589478378,\n \"data\": [\n {\n \"url\": \"https://...\"\n },\n {\n \"url\": \"https://...\"\n }\n ]\n}\n"}},
...}
{:description nil,
:method :get,
:produces ["application/json"],
:path-schema {:fine_tune_id java.lang.String},
:query-schema nil,
:parameter-aliases
{:path-schema {[] {:fine-tune-id :fine_tune_id}},
:query-schema {},
:body-schema {},
:form-schema {},
:headers-schema {}},
:form-schema nil,
:path-parts ["/fine-tunes/" :fine_tune_id],
:headers-schema nil,
:openapi-definition
{:operationId "retrieveFineTune",
:tags ["OpenAI"],
:summary
"Gets info about the fine-tune job.\n\n[Learn more about Fine-tuning](/docs/guides/fine-tuning)\n",
:parameters
[{:in "path",
:name "fine_tune_id",
:required true,
:schema {:type "string", :example "ft-AF1WoRqd3aJAHsqc9NY7iL8F"},
:description "The ID of the fine-tune job\n"}],
:responses
{:200
{:description "OK",
:content
#:application{:json
{:schema
{:$ref "#/components/schemas/FineTune"}}}}},
:x-oaiMeta
{:name "Retrieve fine-tune",
:group "fine-tunes",
:path "retrieve",
:examples
{:curl
"curl https://api.openai.com/v1/fine-tunes/ft-AF1WoRqd3aJAHsqc9NY7iL8F \\\n -H \"Authorization: Bearer YOUR_API_KEY\"\n",
:python
"import os\nimport openai\nopenai.api_key = os.getenv(\"OPENAI_API_KEY\")\nopenai.FineTune.retrieve(id=\"ft-AF1WoRqd3aJAHsqc9NY7iL8F\")\n",
:node.js
"const { Configuration, OpenAIApi } = require(\"openai\");\nconst configuration = new Configuration({\n apiKey: process.env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\nconst response = await openai.retrieveFineTune(\"ft-AF1WoRqd3aJAHsqc9NY7iL8F\");\n"},
:response
"{\n \"id\": \"ft-AF1WoRqd3aJAHsqc9NY7iL8F\",\n \"object\": \"fine-tune\",\n \"model\": \"curie\",\n \"created_at\": 1614807352,\n \"events\": [\n {\n \"object\": \"fine-tune-event\",\n \"created_at\": 1614807352,\n \"level\": \"info\",\n \"message\": \"Job enqueued. Waiting for jobs ahead to complete. Queue number: 0.\"\n },\n {\n \"object\": \"fine-tune-event\",\n \"created_at\": 1614807356,\n \"level\": \"info\",\n \"message\": \"Job started.\"\n },\n {\n \"object\": \"fine-tune-event\",\n \"created_at\": 1614807861,\n \"level\": \"info\",\n \"message\": \"Uploaded snapshot: curie:ft-acmeco-2021-03-03-21-44-20.\"\n },\n {\n \"object\": \"fine-tune-event\",\n \"created_at\": 1614807864,\n \"level\": \"info\",\n \"message\": \"Uploaded result files: file-QQm6ZpqdNwAaVC3aSz5sWwLT.\"\n },\n {\n \"object\": \"fine-tune-event\",\n \"created_at\": 1614807864,\n \"level\": \"info\",\n \"message\": \"Job succeeded.\"\n }\n ],\n \"fine_tuned_model\": \"curie:ft-acmeco-2021-03-03-21-44-20\",\n \"hyperparams\": {\n \"batch_size\": 4,\n \"learning_rate_multiplier\": 0.1,\n \"n_epochs\": 4,\n \"prompt_loss_weight\": 0.1,\n },\n \"organization_id\": \"org-...\",\n \"result_files\": [\n {\n \"id\": \"file-QQm6ZpqdNwAaVC3aSz5sWwLT\",\n \"object\": \"file\",\n \"bytes\": 81509,\n \"created_at\": 1614807863,\n \"filename\": \"compiled_results.csv\",\n \"purpose\": \"fine-tune-results\"\n }\n ],\n \"status\": \"succeeded\",\n \"validation_files\": [],\n \"training_files\": [\n {\n \"id\": \"file-XGinujblHPwGLSztz8cPS8XY\",\n \"object\": \"file\",\n \"bytes\": 1547276,\n \"created_at\": 1610062281,\n \"filename\": \"my-data-train.jsonl\",\n \"purpose\": \"fine-tune-train\"\n }\n ],\n \"updated_at\": 1614807865,\n}\n"}},
...}
{:description nil,
:method :post,
:produces ["application/json"],
:path-schema nil,
:query-schema nil,
:parameter-aliases
{:path-schema {},
:query-schema {},
:body-schema {[:body] {:response-format :response_format}},
:form-schema {},
:headers-schema {}},
:form-schema nil,
:path-parts ["/images/generations"],
:headers-schema nil,
:openapi-definition
{:operationId "createImage",
:tags ["OpenAI"],
:summary "Creates an image given a prompt.",
:requestBody
{:required true,
:content
#:application{:json
{:schema
{:$ref
"#/components/schemas/CreateImageRequest"}}}},
:responses
{:200
{:description "OK",
:content
#:application{:json
{:schema
{:$ref "#/components/schemas/ImagesResponse"}}}}},
:x-oaiMeta
{:name "Create image",
:group "images",
:path "create",
:beta true,
:examples
{:curl
"curl https://api.openai.com/v1/images/generations \\\n -H 'Content-Type: application/json' \\\n -H 'Authorization: Bearer YOUR_API_KEY' \\\n -d '{\n \"prompt\": \"A cute baby sea otter\",\n \"n\": 2,\n \"size\": \"1024x1024\"\n}'\n",
:python
"import os\nimport openai\nopenai.api_key = os.getenv(\"OPENAI_API_KEY\")\nopenai.Image.create(\n prompt=\"A cute baby sea otter\",\n n=2,\n size=\"1024x1024\"\n)\n",
:node.js
"const { Configuration, OpenAIApi } = require(\"openai\");\nconst configuration = new Configuration({\n apiKey: process.env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\nconst response = await openai.createImage({\n prompt: \"A cute baby sea otter\",\n n: 2,\n size: \"1024x1024\",\n});\n"},
:parameters
"{\n \"prompt\": \"A cute baby sea otter\",\n \"n\": 2,\n \"size\": \"1024x1024\"\n}\n",
:response
"{\n \"created\": 1589478378,\n \"data\": [\n {\n \"url\": \"https://...\"\n },\n {\n \"url\": \"https://...\"\n }\n ]\n}\n"}},
...}
{:description nil,
:method :post,
:produces ["application/json"],
:path-schema nil,
:query-schema nil,
:parameter-aliases
{:path-schema {},
:query-schema {},
:body-schema
{[:body]
{:presence-penalty :presence_penalty,
:max-tokens :max_tokens,
:frequency-penalty :frequency_penalty,
:logit-bias :logit_bias,
:top-p :top_p}},
:form-schema {},
:headers-schema {}},
:form-schema nil,
:path-parts ["/chat/completions"],
:headers-schema nil,
:openapi-definition
{:operationId "createChatCompletion",
:tags ["OpenAI"],
:summary "Creates a completion for the chat message",
:requestBody
{:required true,
:content
#:application{:json
{:schema
{:$ref
"#/components/schemas/CreateChatCompletionRequest"}}}},
:responses
{:200
{:description "OK",
:content
#:application{:json
{:schema
{:$ref
"#/components/schemas/CreateChatCompletionResponse"}}}}},
:x-oaiMeta
{:name "Create chat completion",
:group "chat",
:path "create",
:beta true,
:examples
{:curl
"curl https://api.openai.com/v1/chat/completions \\\n -H 'Content-Type: application/json' \\\n -H 'Authorization: Bearer YOUR_API_KEY' \\\n -d '{\n \"model\": \"gpt-3.5-turbo\",\n \"messages\": [{\"role\": \"user\", \"content\": \"Hello!\"}]\n}'\n",
:python
"import os\nimport openai\nopenai.api_key = os.getenv(\"OPENAI_API_KEY\")\n\ncompletion = openai.ChatCompletion.create(\n model=\"gpt-3.5-turbo\",\n messages=[\n {\"role\": \"user\", \"content\": \"Hello!\"}\n ]\n)\n\nprint(completion.choices[0].message)\n",
:node.js
"const { Configuration, OpenAIApi } = require(\"openai\");\n\nconst configuration = new Configuration({\n apiKey: process.env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\n\nconst completion = await openai.createChatCompletion({\n model: \"gpt-3.5-turbo\",\n messages: [{role: \"user\", content: \"Hello world\"}],\n});\nconsole.log(completion.data.choices[0].message);\n"},
:parameters
"{\n \"model\": \"gpt-3.5-turbo\",\n \"messages\": [{\"role\": \"user\", \"content\": \"Hello!\"}]\n}\n",
:response
"{\n \"id\": \"chatcmpl-123\",\n \"object\": \"chat.completion\",\n \"created\": 1677652288,\n \"choices\": [{\n \"index\": 0,\n \"message\": {\n \"role\": \"assistant\",\n \"content\": \"\\n\\nHello there, how may I assist you today?\",\n },\n \"finish_reason\": \"stop\"\n }],\n \"usage\": {\n \"prompt_tokens\": 9,\n \"completion_tokens\": 12,\n \"total_tokens\": 21\n }\n}\n"}},
...}
{:description nil,
:method :get,
:produces ["application/json"],
:path-schema {:file_id java.lang.String},
:query-schema nil,
:parameter-aliases
{:path-schema {[] {:file-id :file_id}},
:query-schema {},
:body-schema {},
:form-schema {},
:headers-schema {}},
:form-schema nil,
:path-parts ["/files/" :file_id "/content"],
:headers-schema nil,
:openapi-definition
{:operationId "downloadFile",
:tags ["OpenAI"],
:summary "Returns the contents of the specified file",
:parameters
[{:in "path",
:name "file_id",
:required true,
:schema {:type "string"},
:description "The ID of the file to use for this request"}],
:responses
{:200
{:description "OK",
:content #:application{:json {:schema {:type "string"}}}}},
:x-oaiMeta
{:name "Retrieve file content",
:group "files",
:path "retrieve-content",
:examples
{:curl
"curl https://api.openai.com/v1/files/file-XjGxS3KTG0uNmNOK362iJua3/content \\\n -H 'Authorization: Bearer YOUR_API_KEY' > file.jsonl\n",
:python
"import os\nimport openai\nopenai.api_key = os.getenv(\"OPENAI_API_KEY\")\ncontent = openai.File.download(\"file-XjGxS3KTG0uNmNOK362iJua3\")\n",
:node.js
"const { Configuration, OpenAIApi } = require(\"openai\");\nconst configuration = new Configuration({\n apiKey: process.env.OPENAI_API_KEY,\n});\nconst openai = new OpenAIApi(configuration);\nconst response = await openai.downloadFile(\"file-XjGxS3KTG0uNmNOK362iJua3\");\n"}}},
...}
...)
{:interceptors
({:name :martian.interceptors/keywordize-params,
:enter
#object[martian.interceptors$fn__37838 0x17567a6d "martian.interceptors$fn__37838@17567a6d"]}
{:name :martian.interceptors/method,
:enter
#object[martian.interceptors$fn__37828 0x50856023 "martian.interceptors$fn__37828@50856023"]}
{:name :martian.interceptors/url,
:enter
#object[martian.interceptors$fn__37832 0x5e22586b "martian.interceptors$fn__37832@5e22586b"]}
{:name :martian.interceptors/query-params,
:enter
#object[martian.interceptors$fn__37841 0x4950bcbc "martian.interceptors$fn__37841@4950bcbc"]}
{:name :martian.interceptors/body-params,
:enter
#object[martian.interceptors$fn__37845 0x4bd51462 "martian.interceptors$fn__37845@4bd51462"]}
{:name :martian.interceptors/form-params,
:enter
#object[martian.interceptors$fn__37856 0x5365c557 "martian.interceptors$fn__37856@5365c557"]}
{:name :martian.interceptors/header-params,
:enter
#object[martian.interceptors$fn__37860 0x273f775d "martian.interceptors$fn__37860@273f775d"]}
{:name :martian.interceptors/enqueue-route-specific-interceptors,
:enter
#object[martian.interceptors$fn__37865 0x34d59ddc "martian.interceptors$fn__37865@34d59ddc"]}
{:name :martian.interceptors/encode-body,
:encodes
("application/transit+msgpack"
"application/transit+json"
"application/edn"
"application/json"),
:enter
#object[martian.interceptors$encode_body$fn__37872 0x2fed08a "martian.interceptors$encode_body$fn__37872@2fed08a"]}
{:name :martian.interceptors/coerce-response,
:decodes
("application/transit+msgpack"
"application/transit+json"
"application/edn"
"application/json"),
:enter
#object[martian.interceptors$coerce_response$fn__37881 0x39ae35dd "martian.interceptors$coerce_response$fn__37881@39ae35dd"],
:leave
#object[martian.interceptors$coerce_response$fn__37888 0x5e16b0ba "martian.interceptors$coerce_response$fn__37888@5e16b0ba"]}
...)})
clojure.spec.alpha/failure: :instrument
clojure.spec.alpha/fn: martian.core/build-instance
clojure.spec.alpha/problems: ({:path [:handlers :consumes :clojure.spec.alpha/nil],
:pred nil?,
:val [nil],
:via [:martian.spec/handler :martian.spec/content-types],
:in [1 0 :consumes]}
{:path [:handlers :consumes :clojure.spec.alpha/pred],
:pred string?,
:val nil,
:via [:martian.spec/handler :martian.spec/content-types],
:in [1 0 :consumes 0]}
{:path [:handlers :consumes :clojure.spec.alpha/nil],
:pred nil?,
:val [nil],
:via [:martian.spec/handler :martian.spec/content-types],
:in [1 1 :consumes]}
{:path [:handlers :consumes :clojure.spec.alpha/pred],
:pred string?,
:val nil,
:via [:martian.spec/handler :martian.spec/content-types],
:in [1 1 :consumes 0]}
{:path [:handlers :consumes :clojure.spec.alpha/nil],
:pred nil?,
:val [nil],
:via [:martian.spec/handler :martian.spec/content-types],
:in [1 2 :consumes]}
{:path [:handlers :consumes :clojure.spec.alpha/pred],
:pred string?,
:val nil,
:via [:martian.spec/handler :martian.spec/content-types],
:in [1 2 :consumes 0]}
{:path [:handlers :consumes :clojure.spec.alpha/nil],
:pred nil?,
:val [nil],
:via [:martian.spec/handler :martian.spec/content-types],
:in [1 6 :consumes]}
{:path [:handlers :consumes :clojure.spec.alpha/pred],
:pred string?,
:val nil,
:via [:martian.spec/handler :martian.spec/content-types],
:in [1 6 :consumes 0]}
{:path [:handlers :consumes :clojure.spec.alpha/nil],
:pred nil?,
:val [nil],
:via [:martian.spec/handler :martian.spec/content-types],
:in [1 9 :consumes]}
{:path [:handlers :consumes :clojure.spec.alpha/pred],
:pred string?,
:val nil,
:via [:martian.spec/handler :martian.spec/content-types],
:in [1 9 :consumes 0]}
...)