Commit 9143294
Merge pull request #3 from openradx/fix-llamacpp_server
Fix llama.cpp server by building it in CI
medihack authored Mar 17, 2024
2 parents 821f4cd + 5702dec commit 9143294
Showing 4 changed files with 27 additions and 17 deletions.
10 changes: 9 additions & 1 deletion .github/workflows/ci.yml
@@ -19,7 +19,15 @@ jobs:
uses: actions/checkout@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Build and cache Docker images
- name: Build and cache llama.cpp server image
uses: docker/build-push-action@v3
with:
file: ./compose/llamacpp/llamacpp.Dockerfile
load: true
tags: llamacpp-server:latest
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and cache main RADIS image
uses: docker/build-push-action@v3
with:
context: .
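For reference, the image the new CI step produces can also be built locally with the plain docker CLI. The tag and Dockerfile path come from the workflow above; using compose/llamacpp as the build context mirrors the compose build configuration shown below rather than the CI step itself, which leaves the context at its default. This is a sketch, not the project's official workflow:

    # Local equivalent of the CI-built image (context taken from the compose build config)
    docker build -f compose/llamacpp/llamacpp.Dockerfile -t llamacpp-server:latest compose/llamacpp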
10 changes: 5 additions & 5 deletions compose/docker-compose.base.yml
@@ -53,11 +53,11 @@ services:
celery --broker=amqp://rabbit/ flower --url_prefix=flower
"
# llamacpp:
# build:
# context: llamacpp
# dockerfile: llamacpp.Dockerfile
# hostname: llamacpp.local
llamacpp:
build:
context: llamacpp
dockerfile: llamacpp.Dockerfile
hostname: llamacpp.local

postgres:
image: postgres:16.2
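With this change the base file always declares the llamacpp service and its build configuration, while the dev and prod overrides below pin the image name to the tag produced in CI. A quick way to confirm that the prebuilt image is present under that tag before starting the stack (plain docker CLI, not project-specific):

    # List the image the compose overrides refer to; an empty result means it still needs to be built
    docker image ls llamacpp-server:latest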
23 changes: 12 additions & 11 deletions compose/docker-compose.dev.yml
@@ -64,17 +64,18 @@ services:
profiles:
- full

# llamacpp:
# ports:
# - 9610:8080
# volumes:
# - ../models:/models
# command: >
# bash -c '
# /server -m /models/model.gguf -c 512 --host 0.0.0.0 --port 8080
# '
# profiles:
# - full
llamacpp:
image: llamacpp-server:latest
ports:
- 9610:8080
volumes:
- ../models:/models
command: >
bash -c '
/server -m /models/model.gguf -c 512 --host 0.0.0.0 --port 8080
'
profiles:
- full

postgres:
environment:
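Once the image exists, the dev override exposes the server on host port 9610. The invocation below is an assumption (the project may wrap compose in its own tooling); the file names, profile, service name, and port mapping all come from the diffs above, and the final check assumes the bundled llama.cpp server exposes its /health endpoint:

    # Start only the llamacpp service from the dev stack (assumed compose invocation)
    docker compose -f compose/docker-compose.base.yml -f compose/docker-compose.dev.yml --profile full up -d llamacpp
    # Host port 9610 is mapped to the server's port 8080 in the dev override
    curl http://localhost:9610/health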
1 change: 1 addition & 0 deletions compose/docker-compose.prod.yml
@@ -58,6 +58,7 @@ services:
replicas: 1

llamacpp:
image: llamacpp-server:latest
restart: always
volumes:
- models_data:/models
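Both setups mount a models directory into the container, and the dev command expects a GGUF file at /models/model.gguf. A minimal sketch of preparing that file for the dev setup, assuming ../models in the compose file resolves to a models/ folder at the repository root and that any compatible GGUF model will do (the source path is a placeholder):

    # Put a GGUF model where the server command expects it (source path is hypothetical)
    mkdir -p models
    cp /path/to/your-model.gguf models/model.gguf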