
    ug                     @    d dl Z d dlZd dlZd dlmZmZ d dlmZ ddZy)    N)	HTTPErrorConnectError)OllamaEmbeddingFunctionc                  V   t         j                  j                  d      t         j                  j                  d      t        j                  d       	 t        j                  t         j                  j                  dd            } | j                          t        t         j                  j                  d      xs dt         j                  j                  d       d	      } |d
dg      }t        |      dk(  sJ y# t        t        f$ r t        j                  d       Y w xY w)z
    To set up the Ollama server, follow instructions at: https://github.com/ollama/ollama?tab=readme-ov-file
    Export the OLLAMA_SERVER_URL and OLLAMA_MODEL environment variables.
    OLLAMA_SERVER_URLNOLLAMA_MODELzNOLLAMA_SERVER_URL or OLLAMA_MODEL environment variable not set. Skipping test. z)Ollama server not running. Skipping test.znomic-embed-textz/embeddings)
model_nameurlz"Here is an article about llamas...zthis is another article   )osenvirongetpytestskiphttpxraise_for_statusr   r   r   len)responseef
embeddingss      T/var/www/openai/venv/lib/python3.12/site-packages/chromadb/test/ef/test_ollama_ef.pytest_ollamar   
   s     	

*+3::>>.)1\	
A99RZZ^^,?DE!!# 
!::>>.1G5Gzz~~123;?
B 9;TUVJz?a |$ A?@As   AD $D('D()returnN)r   r   r   r   r   "chromadb.utils.embedding_functionsr   r        r   <module>r      s    	   ) F r   