2 Commits

Author SHA1 Message Date
Ryan Chen
7161c09a4e do not fully delete lol 2025-10-24 08:47:59 -04:00
Ryan Chen
68d73b62e8 Instituting LLM fallback to OpenAI if gaming PC is not on 2025-10-24 08:44:08 -04:00
2 changed files with 33 additions and 28 deletions

13
llm.py
View File

@@ -4,9 +4,14 @@ from ollama import Client
from openai import OpenAI
import logging
from dotenv import load_dotenv
load_dotenv()
logging.basicConfig(level=logging.INFO)
TRY_OLLAMA = os.getenv("TRY_OLLAMA", False)
class LLMClient:
    def __init__(self):
@@ -30,7 +35,9 @@ class LLMClient:
prompt: str,
system_prompt: str,
):
# Instituting a fallback if my gaming PC is not on
if self.PROVIDER == "ollama":
try:
response = self.ollama_client.chat(
model="gemma3:4b",
messages=[
@@ -41,9 +48,11 @@ class LLMClient:
{"role": "user", "content": prompt},
],
)
print(response)
output = response.message.content
elif self.PROVIDER == "openai": return output
except Exception as e:
logging.error(f"Could not connect to OLLAMA: {str(e)}")
response = self.openai_client.responses.create(
model="gpt-4o-mini",
input=[

View File

@@ -197,10 +197,6 @@ def filter_indexed_files(docs):
if __name__ == "__main__":
args = parser.parse_args()
if args.reindex:
with sqlite3.connect("./visited.db") as conn:
c = conn.cursor()
c.execute("DELETE FROM indexed_documents")
logging.info("Fetching documents from Paperless-NGX")
ppngx = PaperlessNGXService()
docs = ppngx.get_data()