Docker support and Ollama support (#47)

- Added support for running CLI and Ollama server via Docker
- Introduced tests for local embeddings model and standalone Docker setup
- Enabled conditional Ollama server launch via LLM_PROVIDER (sketched below)
Geeta Chauhan authored 2025-06-25 20:57:05 -07:00, committed by GitHub
parent 7abff0f354, commit 78ea029a0b
23 changed files with 2141 additions and 19 deletions
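For reference, a minimal sketch (in Python, for illustration only) of the conditional launch mentioned in the commit message. The commit's actual startup logic may live in a shell entrypoint or compose file, so treat the helper function and its name as assumptions; only the LLM_PROVIDER variable from the message and the standard `ollama serve` command are taken as given.

import os
import subprocess

def maybe_start_ollama():
    # Hypothetical helper: start the local Ollama server only when the
    # configured provider is Ollama; `ollama serve` listens on 11434 by default.
    if os.environ.get("LLM_PROVIDER", "").strip().lower() == "ollama":
        return subprocess.Popen(["ollama", "serve"])
    return None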


@@ -7,6 +7,7 @@ class FinancialSituationMemory:
    def __init__(self, name, config):
        if config["backend_url"] == "http://localhost:11434/v1":
            self.embedding = "nomic-embed-text"
            self.client = OpenAI(base_url=config["backend_url"])
        else:
            self.embedding = "text-embedding-3-small"
            self.client = OpenAI()
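Below is a hedged usage sketch of the branch above; the memory name and config dicts are assumed example values, and the sketch assumes the rest of the constructor needs nothing beyond the keys shown. Pointing backend_url at a local Ollama server selects the nomic-embed-text embedding model, while any other backend falls back to text-embedding-3-small and the default OpenAI client.

# Assumed example values; only the "backend_url" key is inspected by the code above.
local_config = {"backend_url": "http://localhost:11434/v1"}
local_memory = FinancialSituationMemory("situation_memory", local_config)
# local_memory.embedding == "nomic-embed-text"; its client targets the Ollama server.

hosted_config = {"backend_url": "https://api.openai.com/v1"}
hosted_memory = FinancialSituationMemory("situation_memory", hosted_config)
# hosted_memory.embedding == "text-embedding-3-small"; the default OpenAI() client
# reads OPENAI_API_KEY from the environment.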


@@ -2,7 +2,10 @@ import os
DEFAULT_CONFIG = {
    "project_dir": os.path.abspath(os.path.join(os.path.dirname(__file__), ".")),
-    "data_dir": "/Users/yluo/Documents/Code/ScAI/FR1-data",
+    "data_dir": os.path.join(
+        os.path.abspath(os.path.join(os.path.dirname(__file__), ".")),
+        "data",
+    ),
    "data_cache_dir": os.path.join(
        os.path.abspath(os.path.join(os.path.dirname(__file__), ".")),
        "dataflows/data_cache",