---
services:
  # API container: runs the AutoRAG code mounted from the host, with GPU access.
  autorag-api:
    image: autorag-base
    container_name: autorag-api
    environment:
      - CUDA_VISIBLE_DEVICES=0
      # SECURITY: a previous revision committed a literal OpenAI API key here.
      # That key is compromised and must be revoked. Inject the value from the
      # host environment (or an .env file) via Compose variable interpolation.
      - OPENAI_API_KEY=${OPENAI_API_KEY}
      - OLLAMA_API_BASE_URL=http://autorag-ollama:11434  # routed to the Ollama container
    volumes:
      - ~/.cache/huggingface:/root/.cache/huggingface  # share the HF model cache with the host
      - ./:/usr/src/app/  # mount the project source into the container
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    stdin_open: true
    tty: true
    working_dir: /usr/src/app
    depends_on:
      - autorag-ollama  # ensure Ollama is started before the API container
    networks:
      - autorag_network
    command: ["bash"]  # default command: interactive shell

  # Ollama container: serves local models over its HTTP API, with GPU access.
  autorag-ollama:
    image: ollama/ollama
    container_name: autorag-ollama
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    ports:
      - "11434:11434"  # expose the Ollama API port (quoted to avoid YAML number parsing)
    networks:
      - autorag_network
    restart: always

networks:
  autorag_network:
    driver: bridge