I was running the example with a local Ollama instance, and this is the output I received:
Traceback (most recent call last):
File "/home/mkrajewski/structured_output_experiment/funcchain_test.py", line 18, in <module>
poem = analyze("I really like when my dog does a trick!")
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/mkrajewski/structured_output_experiment/funcchain_test.py", line 15, in analyze
return chain()
^^^^^^^
File "/home/mkrajewski/anaconda3/envs/ollama_structure/lib/python3.11/site-packages/funcchain/syntax/executable.py", line 64, in chain
result = chain.invoke(input_kwargs, {"run_name": get_parent_frame(2).function, "callbacks": callbacks})
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/mkrajewski/anaconda3/envs/ollama_structure/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 2053, in invoke
input = step.invoke(
^^^^^^^^^^^^
File "/home/mkrajewski/anaconda3/envs/ollama_structure/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py", line 166, in invoke
self.generate_prompt(
File "/home/mkrajewski/anaconda3/envs/ollama_structure/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py", line 544, in generate_prompt
return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/mkrajewski/anaconda3/envs/ollama_structure/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py", line 408, in generate
raise e
File "/home/mkrajewski/anaconda3/envs/ollama_structure/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py", line 398, in generate
self._generate_with_cache(
File "/home/mkrajewski/anaconda3/envs/ollama_structure/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py", line 577, in _generate_with_cache
return self._generate(
^^^^^^^^^^^^^^^
File "/home/mkrajewski/anaconda3/envs/ollama_structure/lib/python3.11/site-packages/langchain_community/chat_models/ollama.py", line 255, in _generate
final_chunk = self._chat_stream_with_aggregation(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/mkrajewski/anaconda3/envs/ollama_structure/lib/python3.11/site-packages/langchain_community/chat_models/ollama.py", line 188, in _chat_stream_with_aggregation
for stream_resp in self._create_chat_stream(messages, stop, **kwargs):
File "/home/mkrajewski/anaconda3/envs/ollama_structure/lib/python3.11/site-packages/langchain_community/chat_models/ollama.py", line 161, in _create_chat_stream
yield from self._create_stream(
^^^^^^^^^^^^^^^^^^^^
File "/home/mkrajewski/anaconda3/envs/ollama_structure/lib/python3.11/site-packages/langchain_community/llms/ollama.py", line 240, in _create_stream
raise ValueError(
ValueError: Ollama call failed with status code 400. Details: invalid options: grammar