Compare commits
5 Commits
user-suppo...7161c09a4e
| Author | SHA1 | Date |
|---|---|---|
| | 7161c09a4e | |
| | 68d73b62e8 | |
| | 6b616137d3 | |
| | 841b6ebd4f | |
| | 45a5e92aee | |
@@ -23,6 +23,7 @@ RUN uv pip install --system -e .

 # Copy application code
 COPY *.py ./
+COPY blueprints ./blueprints
 COPY startup.sh ./
 RUN chmod +x startup.sh

@@ -43,4 +44,4 @@ ENV PYTHONPATH=/app
 ENV CHROMADB_PATH=/app/chromadb

 # Run the startup script
-CMD ["./startup.sh"]
+CMD ["./startup.sh"]
1 blueprints/__init__.py Normal file
@@ -0,0 +1 @@
+# Blueprints package
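The new `blueprints` package pairs with the `COPY blueprints ./blueprints` line added to the Dockerfile above: without that COPY, any import from `blueprints` would fail inside the container even though it works locally. The compare view only shows the package marker, so as a rough illustration, a module in a package like this might look as follows, assuming Flask blueprints (the framework, module name, and route here are all hypothetical, not taken from this diff):

```python
# Hypothetical blueprints/search.py -- the real modules in this
# package are not shown in the compare view.
from flask import Blueprint, jsonify

# A Blueprint groups related routes so the app can register them as a
# unit, e.g. app.register_blueprint(search_bp).
search_bp = Blueprint("search", __name__, url_prefix="/search")


@search_bp.get("/health")
def health():
    # Trivial endpoint so the blueprint can be smoke-tested.
    return jsonify({"status": "ok"})
```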
57 llm.py
@@ -4,9 +4,14 @@ from ollama import Client
 from openai import OpenAI

+import logging
+from dotenv import load_dotenv
+
+load_dotenv()

+logging.basicConfig(level=logging.INFO)

 TRY_OLLAMA = os.getenv("TRY_OLLAMA", False)


 class LLMClient:
     def __init__(self):
@@ -30,31 +35,35 @@ class LLMClient:
         prompt: str,
         system_prompt: str,
     ):
         # Instituting a fallback if my gaming PC is not on
-        if self.PROVIDER == "ollama":
-            response = self.ollama_client.chat(
-                model="gemma3:4b",
-                messages=[
-                    {
-                        "role": "system",
-                        "content": system_prompt,
-                    },
-                    {"role": "user", "content": prompt},
-                ],
-            )
-            print(response)
-            output = response.message.content
-        elif self.PROVIDER == "openai":
-            response = self.openai_client.responses.create(
-                model="gpt-4o-mini",
-                input=[
-                    {
-                        "role": "system",
-                        "content": system_prompt,
-                    },
-                    {"role": "user", "content": prompt},
-                ],
-            )
-            output = response.output_text
+        try:
+            response = self.ollama_client.chat(
+                model="gemma3:4b",
+                messages=[
+                    {
+                        "role": "system",
+                        "content": system_prompt,
+                    },
+                    {"role": "user", "content": prompt},
+                ],
+            )
+            output = response.message.content
+            return output
+        except Exception as e:
+            logging.error(f"Could not connect to OLLAMA: {str(e)}")
+
+            response = self.openai_client.responses.create(
+                model="gpt-4o-mini",
+                input=[
+                    {
+                        "role": "system",
+                        "content": system_prompt,
+                    },
+                    {"role": "user", "content": prompt},
+                ],
+            )
+            output = response.output_text
+
+        return output
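The llm.py change replaces the startup-time `PROVIDER` switch with a per-call fallback: each request first tries the local Ollama server and only falls back to the OpenAI Responses API when that call raises (per the comment, when the gaming PC hosting Ollama is off). A minimal self-contained sketch of the same pattern, using only the calls visible in the diff:

```python
# A sketch of the fallback pattern from llm.py, assuming the ollama
# and openai packages and the models named in the diff.
import logging

from ollama import Client
from openai import OpenAI

logging.basicConfig(level=logging.INFO)


def generate(prompt: str, system_prompt: str) -> str:
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": prompt},
    ]
    try:
        # First choice: the local Ollama server. Client() defaults to
        # http://localhost:11434 and raises if it is unreachable.
        response = Client().chat(model="gemma3:4b", messages=messages)
        return response.message.content
    except Exception as e:
        logging.error(f"Could not connect to OLLAMA: {str(e)}")
        # Fallback: the hosted OpenAI Responses API, which accepts the
        # same role/content message list as its input.
        response = OpenAI().responses.create(model="gpt-4o-mini", input=messages)
        return response.output_text
```

One consequence of catching bare `Exception` is that any Ollama failure, not just an unreachable host, routes the request to OpenAI.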
4 main.py
@@ -197,10 +197,6 @@ def filter_indexed_files(docs):
 if __name__ == "__main__":
     args = parser.parse_args()
-    if args.reindex:
-        with sqlite3.connect("./visited.db") as conn:
-            c = conn.cursor()
-            c.execute("DELETE FROM indexed_documents")

     logging.info("Fetching documents from Paperless-NGX")
     ppngx = PaperlessNGXService()
     docs = ppngx.get_data()
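The main.py change drops the `--reindex` handling from the entry point; nothing in this compare view shows where, or whether, that behavior was reintroduced. For reference, the deleted branch cleared the tracking table so every document would be re-indexed on the next run, equivalent to this standalone snippet built from the removed lines:

```python
import sqlite3

# Equivalent of the deleted --reindex branch: forget which documents
# have already been indexed so the next run processes all of them.
with sqlite3.connect("./visited.db") as conn:
    c = conn.cursor()
    c.execute("DELETE FROM indexed_documents")
# sqlite3's connection context manager commits on a clean exit.
```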