from agno.agent import Agent
from agno.models.ollama import Ollama

agent = Agent(model=Ollama(id="phi3.5:latest"), markdown=True)
agent.print_response("Share a 2 sentence horror story")
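For context, the agno Ollama model ends up sending a chat request to the local Ollama HTTP API, which listens on http://localhost:11434 by default. A rough equivalent of the same request against the public /api/chat endpoint, sketched with the requests package (not part of the original snippet), looks like this:

import requests

# Same model and prompt as above, sent straight to the Ollama REST API.
resp = requests.post(
    "http://localhost:11434/api/chat",
    json={
        "model": "phi3.5:latest",
        "messages": [{"role": "user", "content": "Share a 2 sentence horror story"}],
        "stream": False,
    },
    timeout=30,
)
resp.raise_for_status()
print(resp.json()["message"]["content"])

If this direct call works while the agno snippet does not, the problem sits in how the Python client reaches the server rather than in the server itself.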
Here is the output of running the agno snippet:
agent.print_response("Share a 2 sentence horror story")
  File "D:\Program Files\anaconda\envs\sentinenv\Lib\site-packages\agno\agent\agent.py", line 4006, in print_response
    run_response = self.run(
                   ^^^^^^^^^
  File "D:\Program Files\anaconda\envs\sentinenv\Lib\site-packages\agno\agent\agent.py", line 994, in run
    return next(resp)
           ^^^^^^^^^^
  File "D:\Program Files\anaconda\envs\sentinenv\Lib\site-packages\agno\agent\agent.py", line 706, in _run
    model_response = self.model.response(messages=run_messages.messages)
                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "D:\Program Files\anaconda\envs\sentinenv\Lib\site-packages\agno\models\base.py", line 177, in response
    assistant_message, has_tool_calls = self._process_model_response(
                                        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "D:\Program Files\anaconda\envs\sentinenv\Lib\site-packages\agno\models\base.py", line 313, in _process_model_response
    response = self.invoke(messages=messages)
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "D:\Program Files\anaconda\envs\sentinenv\Lib\site-packages\agno\models\ollama\chat.py", line 200, in invoke
    return self.get_client().chat(
           ^^^^^^^^^^^^^^^^^^^^^^^
  File "D:\Program Files\anaconda\envs\sentinenv\Lib\site-packages\ollama\_client.py", line 333, in chat
    return self._request(
           ^^^^^^^^^^^^^^
  File "D:\Program Files\anaconda\envs\sentinenv\Lib\site-packages\ollama\_client.py", line 178, in _request
    return cls(**self._request_raw(*args, **kwargs).json())
                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "D:\Program Files\anaconda\envs\sentinenv\Lib\site-packages\ollama\_client.py", line 124, in _request_raw
    raise ConnectionError(CONNECTION_ERROR_MESSAGE) from None
ConnectionError: Failed to connect to Ollama. Please check that Ollama is downloaded, running and accessible.
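The exception is raised inside the ollama Python package (ollama\_client.py), not in agno itself, so it can be reproduced or ruled out without the agent. A minimal check from the same sentinenv environment, assuming nothing beyond the ollama package and the model tag already used above:

# Run inside the sentinenv environment that produced the traceback.
import ollama

client = ollama.Client()   # default endpoint, the same one the agno wrapper should be using
print(client.list())       # raises the same ConnectionError if the server is unreachable
print(client.chat(
    model="phi3.5:latest",
    messages=[{"role": "user", "content": "Share a 2 sentence horror story"}],
))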
The Ollama server itself is running; curl from PowerShell confirms it:
(base) PS C:\Users\18845> curl http://localhost:11434

StatusCode        : 200
StatusDescription : OK
Content           : Ollama is running
RawContent        : HTTP/1.1 200 OK
                    Content-Length: 17
                    Content-Type: text/plain; charset=utf-8
                    Date: Wed, 09 Apr 2025 10:25:00 GMT

                    Ollama is running
Forms             : {}
Headers           : {[Content-Length, 17], [Content-Type, text/plain; charset=utf-8], [Date, Wed, 09 Apr 2025 10:25:00
                    GMT]}
Images            : {}
InputFields       : {}
Links             : {}
ParsedHtml        : mshtml.HTMLDocumentClass
RawContentLength  : 17
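So curl (which PowerShell aliases to Invoke-WebRequest) reaches the server, yet the Python client inside sentinenv cannot. Two things worth checking from that environment are sketched below; the assumptions are that the ollama client is built on httpx (which honours proxy environment variables) and that agno's Ollama wrapper accepts a host argument that it forwards to the client. On Windows, localhost can also resolve to the IPv6 address ::1, which the server may not be listening on, so 127.0.0.1 is used explicitly.

# Run inside the sentinenv conda environment.
import os
import socket

# 1. Proxy or host overrides can silently redirect the client away from the local server.
for var in ("OLLAMA_HOST", "HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY", "NO_PROXY"):
    value = os.environ.get(var) or os.environ.get(var.lower())
    if value:
        print(f"{var} = {value}")

# 2. Raw TCP check, bypassing httpx and any proxy settings entirely.
with socket.create_connection(("127.0.0.1", 11434), timeout=3):
    print("TCP connection to 127.0.0.1:11434 succeeded")

# 3. If both look clean, try pinning the host explicitly on the agno side
#    (the host parameter is an assumption about the wrapper's constructor,
#    passed through to the underlying ollama client).
from agno.agent import Agent
from agno.models.ollama import Ollama

agent = Agent(
    model=Ollama(id="phi3.5:latest", host="http://127.0.0.1:11434"),
    markdown=True,
)
agent.print_response("Share a 2 sentence horror story")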