From 924d525f83d226de56f0afec332981ba2947a8f2 Mon Sep 17 00:00:00 2001 From: jacoblee93 Date: Fri, 24 Jan 2025 14:56:11 -0800 Subject: [PATCH] Update withStructuredOutput defaults --- .../docs/integrations/chat/openai.ipynb | 100 +++++++++++++++++- langchain/package.json | 2 +- libs/langchain-community/package.json | 2 +- libs/langchain-openai/package.json | 2 +- libs/langchain-openai/src/chat_models.ts | 14 +++ yarn.lock | 20 +--- 6 files changed, 119 insertions(+), 21 deletions(-) diff --git a/docs/core_docs/docs/integrations/chat/openai.ipynb b/docs/core_docs/docs/integrations/chat/openai.ipynb index 800ed7d0b568..9718c1612eb1 100644 --- a/docs/core_docs/docs/integrations/chat/openai.ipynb +++ b/docs/core_docs/docs/integrations/chat/openai.ipynb @@ -625,7 +625,7 @@ "id": "045668fe", "metadata": {}, "source": [ - "### Structured output\n", + "## Structured output\n", "\n", "We can also pass `strict: true` to the [`.withStructuredOutput()`](https://js.langchain.com/docs/how_to/structured_output/#the-.withstructuredoutput-method). Here's an example:" ] @@ -664,6 +664,104 @@ "}]);" ] }, + { + "cell_type": "markdown", + "id": "0194ec1f", + "metadata": {}, + "source": [ + "### Reasoning models\n", + "\n", + "```{=mdx}\n", + ":::caution Compatibility\n", + "\n", + "The below points apply to `@langchain/openai>=0.4.0`. Please see here for a [guide on upgrading](/docs/how_to/installation/#installing-integration-packages).\n", + "\n", + ":::\n", + "```\n", + "\n", + "When using reasoning models like `o1`, the default method for `withStructuredOutput` is OpenAI's built-in method for structured output (equivalent to passing `method: \"jsonSchema\"` as an option into `withStructuredOutput`). 
JSON schema mostly works the same as other models, but with one important caveat: when defining schema, `z.optional()` is not respected, and you should instead use `z.nullable()`.\n", + "\n", + "Here's an example:" ] }, + { + "cell_type": "code", + "execution_count": 1, + "id": "d2a04807", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{ color: 'No color mentioned' }\n" + ] + } + ], + "source": [ + "import { z } from \"zod\";\n", + "import { ChatOpenAI } from \"@langchain/openai\";\n", + "\n", + "// Will not work\n", + "const reasoningModelSchemaOptional = z.object({\n", + "  color: z.optional(z.string()).describe(\"A color mentioned in the input\"),\n", + "});\n", + "\n", + "const reasoningModelOptionalSchema = new ChatOpenAI({\n", + "  model: \"o1\",\n", + "}).withStructuredOutput(reasoningModelSchemaOptional, {\n", + "  name: \"extract_color\",\n", + "});\n", + "\n", + "await reasoningModelOptionalSchema.invoke([{\n", + "  role: \"user\",\n", + "  content: `I am 6'5\" tall and love fruit.`\n", + "}]);" ] }, + { + "cell_type": "markdown", + "id": "69854ed4", + "metadata": {}, + "source": [ + "And here's an example with `z.nullable()`:" ] }, + { + "cell_type": "code", + "execution_count": 2, + "id": "5f4bb1bc", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{ color: null }\n" + ] + } + ], + "source": [ + "import { z } from \"zod\";\n", + "import { ChatOpenAI } from \"@langchain/openai\";\n", + "\n", + "// Will work\n", + "const reasoningModelSchemaNullable = z.object({\n", + "  color: z.nullable(z.string()).describe(\"A color mentioned in the input\"),\n", + "});\n", + "\n", + "const reasoningModelNullableSchema = new ChatOpenAI({\n", + "  model: \"o1\",\n", + "}).withStructuredOutput(reasoningModelSchemaNullable, {\n", + "  name: \"extract_color\",\n", + "});\n", + "\n", + "await reasoningModelNullableSchema.invoke([{\n", + "  role: \"user\",\n", + "  
content: `I am 6'5\" tall and love fruit.`\n", + "}]);" + ] + }, { "cell_type": "markdown", "id": "af20e756", diff --git a/langchain/package.json b/langchain/package.json index 9d89d69c3eb8..3cac6a12534b 100644 --- a/langchain/package.json +++ b/langchain/package.json @@ -525,7 +525,7 @@ } }, "dependencies": { - "@langchain/openai": ">=0.1.0 <0.4.0", + "@langchain/openai": ">=0.1.0 <0.5.0", "@langchain/textsplitters": ">=0.0.0 <0.2.0", "js-tiktoken": "^1.0.12", "js-yaml": "^4.1.0", diff --git a/libs/langchain-community/package.json b/libs/langchain-community/package.json index 0bbe9f64db6a..3f82a5e45ebb 100644 --- a/libs/langchain-community/package.json +++ b/libs/langchain-community/package.json @@ -35,7 +35,7 @@ "author": "LangChain", "license": "MIT", "dependencies": { - "@langchain/openai": ">=0.2.0 <0.4.0", + "@langchain/openai": ">=0.2.0 <0.5.0", "binary-extensions": "^2.2.0", "expr-eval": "^2.0.2", "flat": "^5.0.2", diff --git a/libs/langchain-openai/package.json b/libs/langchain-openai/package.json index cc4fca466255..79903f4ef25f 100644 --- a/libs/langchain-openai/package.json +++ b/libs/langchain-openai/package.json @@ -1,6 +1,6 @@ { "name": "@langchain/openai", - "version": "0.4.0-rc.0", + "version": "0.3.17", "description": "OpenAI integrations for LangChain.js", "type": "module", "engines": { diff --git a/libs/langchain-openai/src/chat_models.ts b/libs/langchain-openai/src/chat_models.ts index f21fe993797b..b6da672416fa 100644 --- a/libs/langchain-openai/src/chat_models.ts +++ b/libs/langchain-openai/src/chat_models.ts @@ -1918,6 +1918,20 @@ export class ChatOpenAI< ); } + if ( + !this.model.startsWith("gpt-3") && + !this.model.startsWith("gpt-4-") && + this.model !== "gpt-4" + ) { + if (method === undefined) { + method = "jsonSchema"; + } + } else if (method === "jsonSchema") { + console.warn( + `[WARNING]: JSON Schema is not supported for model "${this.model}". 
Falling back to tool calling.` + ); + } + if (method === "jsonMode") { llm = this.bind({ response_format: { type: "json_object" }, diff --git a/yarn.lock b/yarn.lock index 6a1519014edb..99064c320de7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -11903,7 +11903,7 @@ __metadata: "@jest/globals": ^29.5.0 "@lancedb/lancedb": ^0.13.0 "@langchain/core": "workspace:*" - "@langchain/openai": ">=0.2.0 <0.4.0" + "@langchain/openai": ">=0.2.0 <0.5.0" "@langchain/scripts": ">=0.1.0 <0.2.0" "@langchain/standard-tests": 0.0.0 "@layerup/layerup-security": ^1.5.12 @@ -12968,21 +12968,7 @@ __metadata: languageName: unknown linkType: soft -"@langchain/openai@npm:>=0.1.0 <0.4.0, @langchain/openai@npm:>=0.2.0 <0.4.0, @langchain/openai@npm:~0.3.0": - version: 0.3.17 - resolution: "@langchain/openai@npm:0.3.17" - dependencies: - js-tiktoken: ^1.0.12 - openai: ^4.77.0 - zod: ^3.22.4 - zod-to-json-schema: ^3.22.3 - peerDependencies: - "@langchain/core": ">=0.3.29 <0.4.0" - checksum: af88894dcfa8381c0b0df924e085796995f5d5ba2a2657ea72b4181b35c5d92b0040c5cf305378c1f48a8f1f04d4a3b0b29ba1d84f80cedf5dab8bc46d7d5c6c - languageName: node - linkType: hard - -"@langchain/openai@workspace:*, @langchain/openai@workspace:^, @langchain/openai@workspace:libs/langchain-openai": +"@langchain/openai@>=0.1.0 <0.5.0, @langchain/openai@>=0.2.0 <0.5.0, @langchain/openai@workspace:*, @langchain/openai@workspace:^, @langchain/openai@workspace:libs/langchain-openai, @langchain/openai@~0.3.0": version: 0.0.0-use.local resolution: "@langchain/openai@workspace:libs/langchain-openai" dependencies: @@ -33447,7 +33433,7 @@ __metadata: "@langchain/groq": "*" "@langchain/mistralai": "*" "@langchain/ollama": "*" - "@langchain/openai": ">=0.1.0 <0.4.0" + "@langchain/openai": ">=0.1.0 <0.5.0" "@langchain/scripts": ">=0.1.0 <0.2.0" "@langchain/textsplitters": ">=0.0.0 <0.2.0" "@swc/core": ^1.3.90