From 51b9932389bc0074dd1f60e078a02edb31431d35 Mon Sep 17 00:00:00 2001
From: Ryan Chen
Date: Wed, 8 Oct 2025 22:52:49 -0400
Subject: [PATCH] fixing local llm

---
 llm.py  | 10 +++++-----
 main.py |  1 +
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/llm.py b/llm.py
index fbb05ca..003b667 100644
--- a/llm.py
+++ b/llm.py
@@ -9,10 +9,13 @@ import logging
 
 logging.basicConfig(level=logging.INFO)
 
+
 class LLMClient:
     def __init__(self):
         try:
-            self.ollama_client = ollama.Client(host=os.getenv("OLLAMA_URL", "http://localhost:11434"))
+            self.ollama_client = Client(
+                host=os.getenv("OLLAMA_URL", "http://localhost:11434")
+            )
             client.chat(
                 model="gemma3:4b", messages=[{"role": "system", "content": "test"}]
             )
@@ -43,10 +46,7 @@ class LLMClient:
                     "role": "system",
                     "content": system_prompt,
                 },
-                {
-                    "role": "user",
-                    "content": prompt
-                },
+                {"role": "user", "content": prompt},
             ],
         )
         output = response.output_text
diff --git a/main.py b/main.py
index 9e0369e..8ec4f34 100644
--- a/main.py
+++ b/main.py
@@ -42,6 +42,7 @@ ppngx = PaperlessNGXService()
 
 llm_client = LLMClient()
 
+
 def index_using_pdf_llm():
     files = ppngx.get_data()
     for file in files: