python, nix, ollama, flake

How can I adjust the Nix Flake configuration for my virtual environment to ensure the successful execution of a Python script reliant on Ollama?


I want to create a virtual environment where I can run this c.py using Ollama (as in this example: https://python.langchain.com/docs/integrations/llms/ollama).

c.py:

from langchain_community.llms import Ollama
llm = Ollama(model="llama2")
llm.invoke("Tell me a joke")

I created the virtual environment with this flake:

{
  description = "Python environment with ollama";

  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
  inputs.flake-utils.url = "github:numtide/flake-utils";

  outputs = { self, nixpkgs, flake-utils }:
    flake-utils.lib.eachDefaultSystem (system:
      let
        pkgs = nixpkgs.legacyPackages.${system};
        python = pkgs.python311;
        ollama = pkgs.ollama;
        Py = python.withPackages (ps: with ps; [
          langchain
        ]);
      in {
        devShells.default = pkgs.mkShell {
          buildInputs = [
            ollama
            Py
          ];
        };
      });
}

The environment seems to be created without problems after running nix develop.

But when I try to run the code, I get the following error:

  python c.py



Traceback (most recent call last):
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/urllib3/connection.py", line 203, in _new_conn
    sock = connection.create_connection(
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/urllib3/util/connection.py", line 85, in create_connection
    raise err
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/urllib3/util/connection.py", line 73, in create_connection
    sock.connect(sa)
ConnectionRefusedError: [Errno 111] Connection refused

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/urllib3/connectionpool.py", line 790, in urlopen
    response = self._make_request(
               ^^^^^^^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/urllib3/connectionpool.py", line 496, in _make_request
    conn.request(
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/urllib3/connection.py", line 395, in request
    self.endheaders()
  File "/nix/store/3v2ch16fkl50i85n05h5ckss8pxx6836-python3-3.11.8/lib/python3.11/http/client.py", line 1293, in endheaders
    self._send_output(message_body, encode_chunked=encode_chunked)
  File "/nix/store/3v2ch16fkl50i85n05h5ckss8pxx6836-python3-3.11.8/lib/python3.11/http/client.py", line 1052, in _send_output
    self.send(msg)
  File "/nix/store/3v2ch16fkl50i85n05h5ckss8pxx6836-python3-3.11.8/lib/python3.11/http/client.py", line 990, in send
    self.connect()
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/urllib3/connection.py", line 243, in connect
    self.sock = self._new_conn()
                ^^^^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/urllib3/connection.py", line 218, in _new_conn
    raise NewConnectionError(
urllib3.exceptions.NewConnectionError: <urllib3.connection.HTTPConnection object at 0x7ffff4b1d510>: Failed to establish a new connection: [Errno 111] Connection refused

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/requests/adapters.py", line 486, in send
    resp = conn.urlopen(
           ^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/urllib3/connectionpool.py", line 844, in urlopen
    retries = retries.increment(
              ^^^^^^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/urllib3/util/retry.py", line 515, in increment
    raise MaxRetryError(_pool, url, reason) from reason  # type: ignore[arg-type]
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
urllib3.exceptions.MaxRetryError: HTTPConnectionPool(host='localhost', port=11434): Max retries exceeded with url: /api/generate (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ffff4b1d510>: Failed to establish a new connection: [Errno 111] Connection refused'))

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/mnt/c/Users/Pierre-Olivier/Documents/python/llm/ollama/c.py", line 3, in <module>
    llm.invoke("Tell me a joke")
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/langchain_core/language_models/llms.py", line 273, in invoke
    self.generate_prompt(
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/langchain_core/language_models/llms.py", line 568, in generate_prompt
    return self.generate(prompt_strings, stop=stop, callbacks=callbacks, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/langchain_core/language_models/llms.py", line 741, in generate
    output = self._generate_helper(
             ^^^^^^^^^^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/langchain_core/language_models/llms.py", line 605, in _generate_helper
    raise e
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/langchain_core/language_models/llms.py", line 592, in _generate_helper
    self._generate(
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/langchain_community/llms/ollama.py", line 408, in _generate
    final_chunk = super()._stream_with_aggregation(
                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/langchain_community/llms/ollama.py", line 317, in _stream_with_aggregation
    for stream_resp in self._create_generate_stream(prompt, stop, **kwargs):
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/langchain_community/llms/ollama.py", line 159, in _create_generate_stream
    yield from self._create_stream(
               ^^^^^^^^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/langchain_community/llms/ollama.py", line 220, in _create_stream
    response = requests.post(
               ^^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/requests/api.py", line 115, in post
    return request("post", url, data=data, json=json, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/requests/api.py", line 59, in request
    return session.request(method=method, url=url, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/requests/sessions.py", line 589, in request
    resp = self.send(prep, **send_kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/requests/sessions.py", line 703, in send
    r = adapter.send(request, **kwargs)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/nix/store/rjh6glh0f6l27f893pknrg7p87ajhp65-python3-3.11.8-env/lib/python3.11/site-packages/requests/adapters.py", line 519, in send
    raise ConnectionError(e, request=request)
requests.exceptions.ConnectionError: HTTPConnectionPool(host='localhost', port=11434): Max retries exceeded with url: /api/generate (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7ffff4b1d510>: Failed to establish a new connection: [Errno 111] Connection refused'))

Solution

  • The traceback shows that nothing is listening on localhost:11434: nix develop only puts the ollama binary on PATH, it does not start the server, so the script has nothing to connect to. Starting ollama serve before invoking the model fixes this. The following code works, but it can probably be improved:

    import subprocess
    import atexit

    # Start the Ollama server in the background and make sure it is
    # terminated again when the script exits.
    ollama_server = subprocess.Popen(["ollama", "serve"])
    atexit.register(ollama_server.terminate)

    from langchain_community.chat_models import ChatOllama
    from langchain_core.output_parsers import StrOutputParser
    from langchain_core.prompts import ChatPromptTemplate

    # Build a simple prompt -> model -> string-output chain and run it.
    llm = ChatOllama(model="mistral")
    prompt = ChatPromptTemplate.from_template("compute {topic}")
    chain = prompt | llm | StrOutputParser()

    print(chain.invoke({"topic": "1+1"}))
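  • One possible improvement, as a minimal sketch: subprocess.Popen returns as soon as the server process is spawned, so the first request can still race the startup of ollama serve. Waiting until the server accepts connections avoids that. The snippet below assumes the default endpoint http://localhost:11434 (the address shown in the traceback); place it right after the Popen call and the rest of the snippet stays unchanged.

    import time
    import requests

    # Poll the Ollama HTTP endpoint until it accepts connections, so the
    # first chain/llm call does not race the server startup. Assumes the
    # default http://localhost:11434; adjust if the server runs elsewhere.
    for _ in range(30):
        try:
            requests.get("http://localhost:11434", timeout=1)
            break
        except requests.exceptions.ConnectionError:
            time.sleep(1)
    else:
        raise RuntimeError("ollama serve did not become reachable on localhost:11434")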