Docker support and Ollama support (#47)

- Added support for running the CLI and the Ollama server via Docker
- Introduced tests for the local embeddings model and the standalone Docker setup
- Enabled conditional Ollama server launch via LLM_PROVIDER
Geeta Chauhan authored on 2025-06-25 20:57:05 -07:00, committed by GitHub
parent 7abff0f354, commit 78ea029a0b
23 changed files with 2141 additions and 19 deletions

docker-compose.yml (new file, 74 lines)

@@ -0,0 +1,74 @@
version: "3.8"
services:
  # Ollama service for local LLM
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    network_mode: host
    volumes:
      - ./ollama_data:/root/.ollama
    # Uncomment for GPU support
    # deploy:
    #   resources:
    #     reservations:
    #       devices:
    #         - capabilities: ["gpu"]
    profiles:
      - ollama

  # App container for Ollama setup
  app-ollama:
    build:
      context: .
    container_name: trading-agents-ollama
    network_mode: host
    volumes:
      - .:/app
      - ./data:/app/data
    env_file:
      - .env
    environment:
      - LLM_BACKEND_URL=http://localhost:11434/v1
      - LLM_PROVIDER=ollama
    depends_on:
      - ollama
    tty: true
    stdin_open: true
    profiles:
      - ollama

  # App container for OpenAI setup (no Ollama dependency)
  app-openai:
    build:
      context: .
    container_name: trading-agents-openai
    network_mode: host
    volumes:
      - .:/app
      - ./data:/app/data
    env_file:
      - .env
    environment:
      - LLM_PROVIDER=openai
      - LLM_BACKEND_URL=https://api.openai.com/v1
    tty: true
    stdin_open: true
    profiles:
      - openai

  # Generic app container (uses .env settings as-is)
  app:
    build:
      context: .
    container_name: trading-agents
    network_mode: host
    volumes:
      - .:/app
      - ./data:/app/data
    env_file:
      - .env
    tty: true
    stdin_open: true
    profiles:
      - default
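
The Compose profiles above are what make the Ollama launch conditional: the ollama server only starts when the ollama profile is selected, while the openai and default profiles skip it entirely. A minimal usage sketch, assuming Docker Compose v2 and a .env file in the repo root (the LLM_PROVIDER and LLM_BACKEND_URL variables come from this diff; anything else in .env is an assumption):

# Local LLM via Ollama: starts the ollama server plus trading-agents-ollama
docker compose --profile ollama up -d
# Open a shell in the app container (assumes the image ships bash)
docker compose --profile ollama exec app-ollama bash

# OpenAI backend: no Ollama service is started
docker compose --profile openai up -d

# Generic container: uses .env settings as-is, e.g.
#   LLM_PROVIDER=ollama
#   LLM_BACKEND_URL=http://localhost:11434/v1
docker compose --profile default up -d

# Stop and remove whichever profile is running
docker compose --profile ollama down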