
Commit
fix: naming
jaluma committed Aug 5, 2024
1 parent e5ccf1f commit 93b5023
Showing 4 changed files with 7 additions and 7 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/generate-release.yml

@@ -17,7 +17,7 @@ jobs:
 
     strategy:
       matrix:
-        type: [ llamacpp, ollama ]
+        type: [ llamacpp-cpu, ollama ]
 
     permissions:
       contents: read
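The rename implies that each matrix value doubles as the Dockerfile suffix and the tag suffix of the published release image. The build and push steps sit outside this hunk, so the commands below are only a sketch under that assumption, with the `zylonai/private-gpt` repository and `0.6.1` tag borrowed from the docker-compose defaults further down:

```sh
# Hypothetical resulting images, assuming the matrix value becomes the tag suffix;
# the tags the workflow actually publishes are not visible in this diff.
docker pull zylonai/private-gpt:0.6.1-llamacpp-cpu
docker pull zylonai/private-gpt:0.6.1-ollama
```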
File renamed without changes.
10 changes: 5 additions & 5 deletions docker-compose.yaml

@@ -7,7 +7,7 @@ services:
   # Private-GPT service for the Ollama CPU and GPU modes
   # This service builds from an external Dockerfile and runs the Ollama mode.
   private-gpt-ollama:
-    image: ${PGPT_IMAGE:-zylonai/private-gpt}${PGPT_TAG:-0.6.1}-external
+    image: ${PGPT_IMAGE:-zylonai/private-gpt}${PGPT_TAG:-0.6.1}-ollama
     build:
       context: .
       dockerfile: Dockerfile.ollama
@@ -30,11 +30,11 @@ services:
 
   # Private-GPT service for the local mode
   # This service builds from a local Dockerfile and runs the application in local mode.
-  private-gpt-local:
-    image: ${PGPT_IMAGE:-zylonai/private-gpt}${PGPT_TAG:-0.6.1}-local
+  private-gpt-llamacpp-cpu:
+    image: ${PGPT_IMAGE:-zylonai/private-gpt}${PGPT_TAG:-0.6.1}-llamacpp-cpu
     build:
       context: .
-      dockerfile: Dockerfile.llamacpp
+      dockerfile: Dockerfile.llamacpp-cpu
     volumes:
       - ./local_data/:/home/worker/app/local_data
       - ./models/:/home/worker/app/models
@@ -46,7 +46,7 @@
       PGPT_PROFILES: local
       HF_TOKEN: ${HF_TOKEN}
     profiles:
-      - llamacpp
+      - llamacpp-cpu
 
   #-----------------------------------
   #---- Ollama services --------------
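Since both the service name and its Compose profile changed, anything that previously used `--profile llamacpp` must switch to the new name. A minimal sketch of starting only the renamed service, assuming Docker Compose v2 (where `--profile` is a top-level flag) and a valid Hugging Face token:

```sh
# Build and start only the llama.cpp CPU service via its renamed profile;
# the old profile name "llamacpp" no longer matches any service.
HF_TOKEN=<your_hf_token> docker compose --profile llamacpp-cpu up --build
```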
2 changes: 1 addition & 1 deletion fern/docs/pages/quickstart/quickstart.mdx

@@ -78,7 +78,7 @@ A **Hugging Face Token (HF_TOKEN)** is required for accessing Hugging Face model
 **Run:**
 Start the services with your Hugging Face token using pre-built images:
 ```sh
-HF_TOKEN=<your_hf_token> docker-compose up --profile llamacpp
+HF_TOKEN=<your_hf_token> docker-compose up --profile llamacpp-cpu
 ```
 Replace `<your_hf_token>` with your actual Hugging Face token.
 
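If the profile is easier to pass through the environment than on the command line, the standard `COMPOSE_PROFILES` variable selects the same renamed profile; this is a sketch, not part of the documented quickstart:

```sh
# COMPOSE_PROFILES is read by Docker Compose itself and is equivalent to --profile.
HF_TOKEN=<your_hf_token> COMPOSE_PROFILES=llamacpp-cpu docker compose up
```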
