fixing local llm

2025-10-08 22:52:49 -04:00
parent ebf39480b6
commit 51b9932389
2 changed files with 6 additions and 5 deletions

llm.py

@@ -9,10 +9,13 @@ import logging
 logging.basicConfig(level=logging.INFO)
 class LLMClient:
     def __init__(self):
         try:
-            self.ollama_client = ollama.Client(host=os.getenv("OLLAMA_URL", "http://localhost:11434"))
+            self.ollama_client = Client(
+                host=os.getenv("OLLAMA_URL", "http://localhost:11434")
+            )
             client.chat(
                 model="gemma3:4b", messages=[{"role": "system", "content": "test"}]
             )
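Note that after the rename, the probe still calls client.chat(...), a name this hunk no longer defines; unless client is bound elsewhere in the file, that line raises a NameError. A minimal sketch of what the connectivity check presumably intends, assuming the ollama Python package and its Client.chat API (the except branch below is hypothetical, since the diff truncates before the handler):

    import os

    from ollama import Client


    class LLMClient:
        def __init__(self):
            try:
                # Point the client at the local Ollama server (default port 11434).
                self.ollama_client = Client(
                    host=os.getenv("OLLAMA_URL", "http://localhost:11434")
                )
                # Fire a throwaway request so a dead server fails fast here,
                # not later in the middle of real work.
                self.ollama_client.chat(
                    model="gemma3:4b",
                    messages=[{"role": "system", "content": "test"}],
                )
            except Exception:
                # Hypothetical fallback; the shown hunk does not include
                # the actual failure handling.
                self.ollama_client = None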
@@ -43,10 +46,7 @@ class LLMClient:
"role": "system", "role": "system",
"content": system_prompt, "content": system_prompt,
}, },
{ {"role": "user", "content": prompt},
"role": "user",
"content": prompt
},
], ],
) )
output = response.output_text output = response.output_text
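The call wrapping this messages list is cut off by the hunk, but response.output_text matches the OpenAI Python SDK's Responses API. One way the full call might look, assuming that SDK; the client setup, model name, and the two placeholder prompt strings are assumptions, not shown in the diff:

    from openai import OpenAI

    client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

    system_prompt = "You are a document indexing assistant."  # placeholder
    prompt = "Summarize this document."  # placeholder

    response = client.responses.create(
        model="gpt-4o-mini",  # assumed model; the diff does not name one
        input=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": prompt},
        ],
    )
    output = response.output_text  # flattened text of the model's reply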


@@ -42,6 +42,7 @@ ppngx = PaperlessNGXService()
 llm_client = LLMClient()
 def index_using_pdf_llm():
     files = ppngx.get_data()
     for file in files:
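The diff cuts off inside the loop body. A hypothetical sketch of how index_using_pdf_llm might continue, building on the module-level ppngx and llm_client from the hunk above; the "content" field and the summarize method are both assumptions, as neither appears in the shown hunks:

    def index_using_pdf_llm():
        files = ppngx.get_data()
        for file in files:
            # "content" assumes get_data() returns Paperless-ngx document
            # dicts carrying the stored OCR text.
            text = file.get("content", "")
            if not text:
                continue
            # summarize is a hypothetical LLMClient method standing in for
            # whatever prompt the real code sends per document.
            summary = llm_client.summarize(text)
            print(summary)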