|
145 | 145 | ],
|
146 | 146 | "source": [
|
147 | 147 | "const aiMsg = await llm.invoke([\n",
|
148 |
| - " [\n", |
149 |
| - " \"system\",\n", |
150 |
| - " \"You are a helpful assistant that translates English to French. Translate the user sentence.\",\n", |
151 |
| - " ],\n", |
152 |
| - " [\"human\", \"I love programming.\"],\n", |
| 148 | + " {\n", |
| 149 | + " role: \"system\",\n", |
| 150 | + " content: \"You are a helpful assistant that translates English to French. Translate the user sentence.\",\n", |
| 151 | + " },\n", |
| 152 | + " { role: \"user\", content: \"I love programming.\" },\n", |
153 | 153 | "])\n",
|
154 | 154 | "aiMsg"
|
155 | 155 | ]
|
|
174 | 174 | "console.log(aiMsg.content)"
|
175 | 175 | ]
|
176 | 176 | },
|
| 177 | + { |
| 178 | + "cell_type": "markdown", |
| 179 | + "id": "ce0414fe", |
| 180 | + "metadata": {}, |
| 181 | + "source": [ |
| 182 | + "## JSON invocation" |
| 183 | + ] |
| 184 | + }, |
| 185 | + { |
| 186 | + "cell_type": "code", |
| 187 | + "execution_count": 4, |
| 188 | + "id": "3f0a7a2a", |
| 189 | + "metadata": {}, |
| 190 | + "outputs": [ |
| 191 | + { |
| 192 | + "name": "stdout", |
| 193 | + "output_type": "stream", |
| 194 | + "text": [ |
| 195 | + "{\n", |
| 196 | + " aiInvokeMsgContent: '{\\n\"result\": 6\\n}',\n", |
| 197 | + " aiBindMsg: '{\\n\"result\": 6\\n}'\n", |
| 198 | + "}\n" |
| 199 | + ] |
| 200 | + } |
| 201 | + ], |
| 202 | + "source": [ |
| 203 | + "const messages = [\n", |
| 204 | + " {\n", |
| 205 | + " role: \"system\",\n", |
| 206 | + " content: \"You are a math tutor that handles math exercises and makes output in json in format { result: number }.\",\n", |
| 207 | + " },\n", |
| 208 | + " { role: \"user\", content: \"2 + 2 * 2\" },\n", |
| 209 | + "];\n", |
| 210 | + "\n", |
| 211 | + "const aiInvokeMsg = await llm.invoke(messages, { response_format: { type: \"json_object\" } });\n", |
| 212 | + "\n", |
| 213 | + "// if you don't want to pass response_format on every invoke, you can bind it to the instance\n", |
| 214 | + "const llmWithResponseFormat = llm.bind({ response_format: { type: \"json_object\" } });\n", |
| 215 | + "const aiBindMsg = await llmWithResponseFormat.invoke(messages);\n", |
| 216 | + "\n", |
| 217 | + "// both calls produce the same JSON content\n", |
| 218 | + "console.log({ aiInvokeMsgContent: aiInvokeMsg.content, aiBindMsg: aiBindMsg.content });" |
| 219 | + ] |
| 220 | + }, |
177 | 221 | {
|
178 | 222 | "cell_type": "markdown",
|
179 | 223 | "id": "18e2bfc0-7e78-4528-a73f-499ac150dca8",
|
|
186 | 230 | },
|
187 | 231 | {
|
188 | 232 | "cell_type": "code",
|
189 |
| - "execution_count": 4, |
| 233 | + "execution_count": 5, |
190 | 234 | "id": "e197d1d7-a070-4c96-9f8a-a0e86d046e0b",
|
191 | 235 | "metadata": {},
|
192 | 236 | "outputs": [
|
|
0 commit comments