2 Commits

Author SHA1 Message Date
Ryan Chen
7161c09a4e do not fully delete lol 2025-10-24 08:47:59 -04:00
Ryan Chen
68d73b62e8 Instituting LLM fallback to OpenAI if gaming PC is not on 2025-10-24 08:44:08 -04:00
2 changed files with 33 additions and 28 deletions

57
llm.py
View File

@@ -4,9 +4,14 @@ from ollama import Client
 from openai import OpenAI
 import logging
+from dotenv import load_dotenv
+
+load_dotenv()
 logging.basicConfig(level=logging.INFO)
+TRY_OLLAMA = os.getenv("TRY_OLLAMA", False)

 class LLMClient:
     def __init__(self):
@@ -30,31 +35,35 @@ class LLMClient:
         prompt: str,
         system_prompt: str,
     ):
+        # Instituting a fallback if my gaming PC is not on
         if self.PROVIDER == "ollama":
-            response = self.ollama_client.chat(
-                model="gemma3:4b",
-                messages=[
-                    {
-                        "role": "system",
-                        "content": system_prompt,
-                    },
-                    {"role": "user", "content": prompt},
-                ],
-            )
-            print(response)
-            output = response.message.content
-        elif self.PROVIDER == "openai":
-            response = self.openai_client.responses.create(
-                model="gpt-4o-mini",
-                input=[
-                    {
-                        "role": "system",
-                        "content": system_prompt,
-                    },
-                    {"role": "user", "content": prompt},
-                ],
-            )
-            output = response.output_text
+            try:
+                response = self.ollama_client.chat(
+                    model="gemma3:4b",
+                    messages=[
+                        {
+                            "role": "system",
+                            "content": system_prompt,
+                        },
+                        {"role": "user", "content": prompt},
+                    ],
+                )
+                output = response.message.content
+                return output
+            except Exception as e:
+                logging.error(f"Could not connect to OLLAMA: {str(e)}")
+
+        response = self.openai_client.responses.create(
+            model="gpt-4o-mini",
+            input=[
+                {
+                    "role": "system",
+                    "content": system_prompt,
+                },
+                {"role": "user", "content": prompt},
+            ],
+        )
+        output = response.output_text
         return output

View File

@@ -197,10 +197,6 @@ def filter_indexed_files(docs):
 if __name__ == "__main__":
     args = parser.parse_args()
     if args.reindex:
-        with sqlite3.connect("./visited.db") as conn:
-            c = conn.cursor()
-            c.execute("DELETE FROM indexed_documents")
         logging.info("Fetching documents from Paperless-NGX")
         ppngx = PaperlessNGXService()
         docs = ppngx.get_data()