ci: Docker workflow to build and slim images, update README for usage instructions

remsky 2025-01-14 01:27:10 -07:00
parent 2649a0ba0c
commit da324b0959
2 changed files with 53 additions and 32 deletions


@@ -1,4 +1,4 @@
-name: Docker Build and Publish
+name: Docker Build, Slim, and Publish
 
 on:
   push:
@@ -35,42 +35,64 @@ jobs:
           echo "GPU_IMAGE_NAME=${{ env.REGISTRY }}/$(echo ${{ env.IMAGE_NAME }} | tr '[:upper:]' '[:lower:]')-gpu" >> $GITHUB_ENV
           echo "CPU_IMAGE_NAME=${{ env.REGISTRY }}/$(echo ${{ env.IMAGE_NAME }} | tr '[:upper:]' '[:lower:]')-cpu" >> $GITHUB_ENV
           echo "UI_IMAGE_NAME=${{ env.REGISTRY }}/$(echo ${{ env.IMAGE_NAME }} | tr '[:upper:]' '[:lower:]')-ui" >> $GITHUB_ENV
+          echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
 
-      # Extract version tag if it exists
-      - name: Set version tag
-        if: startsWith(github.ref, 'refs/tags/')
-        run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
-
-      # Build and push GPU version
-      - name: Build and push GPU Docker image
+      # Build GPU version
+      - name: Build GPU Docker image
         uses: docker/build-push-action@v5
         with:
          context: .
           file: ./docker/gpu/Dockerfile
-          push: true
-          tags: |
-            ${{ env.GPU_IMAGE_NAME }}:latest
-            ${{ env.GPU_IMAGE_NAME }}:${{ env.VERSION }}
+          push: false
+          load: true
+          tags: ${{ env.GPU_IMAGE_NAME }}:v0.1.0
           build-args: |
             DOCKER_BUILDKIT=1
           platforms: linux/amd64
 
-      # Build and push CPU version
-      - name: Build and push CPU Docker image
+      # Slim GPU version
+      - name: Slim GPU Docker image
+        uses: kitabisa/docker-slim-action@v1
+        env:
+          DSLIM_HTTP_PROBE: false
+        with:
+          target: ${{ env.GPU_IMAGE_NAME }}:v0.1.0
+          tag: v0.1.0-slim
+
+      # Push GPU versions
+      - name: Push GPU Docker images
+        run: |
+          docker push ${{ env.GPU_IMAGE_NAME }}:v0.1.0
+          docker push ${{ env.GPU_IMAGE_NAME }}:v0.1.0-slim
+
+      # Build CPU version
+      - name: Build CPU Docker image
         uses: docker/build-push-action@v5
         with:
           context: .
           file: ./docker/cpu/Dockerfile
-          push: true
-          tags: |
-            ${{ env.CPU_IMAGE_NAME }}:latest
-            ${{ env.CPU_IMAGE_NAME }}:${{ env.VERSION }}
+          push: false
+          load: true
+          tags: ${{ env.CPU_IMAGE_NAME }}:v0.1.0
           build-args: |
             DOCKER_BUILDKIT=1
           platforms: linux/amd64
 
-      # Build and push UI version
+      # Slim CPU version
+      - name: Slim CPU Docker image
+        uses: kitabisa/docker-slim-action@v1
+        env:
+          DSLIM_HTTP_PROBE: false
+        with:
+          target: ${{ env.CPU_IMAGE_NAME }}:v0.1.0
+          tag: v0.1.0-slim
+
+      # Push CPU versions
+      - name: Push CPU Docker images
+        run: |
+          docker push ${{ env.CPU_IMAGE_NAME }}:v0.1.0
+          docker push ${{ env.CPU_IMAGE_NAME }}:v0.1.0-slim
+
+      # Build and push UI version (unchanged)
       - name: Build and push UI Docker image
         uses: docker/build-push-action@v5
         with:
@@ -78,8 +100,7 @@ jobs:
           file: ./ui/Dockerfile
           push: true
           tags: |
-            ${{ env.UI_IMAGE_NAME }}:latest
-            ${{ env.UI_IMAGE_NAME }}:${{ env.VERSION }}
+            ${{ env.UI_IMAGE_NAME }}:v0.1.0
           build-args: |
             DOCKER_BUILDKIT=1
           platforms: linux/amd64
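For reference, the new Slim and Push steps amount to roughly the following local sequence. This is only a sketch of what the workflow does, assuming the standalone docker-slim CLI (the tool wrapped by kitabisa/docker-slim-action) is installed; exact flag spellings can differ between docker-slim releases, and IMAGE is just an illustrative variable.

```bash
# Sketch of the build -> slim -> push sequence done locally (not the action itself).
# IMAGE is hypothetical here; the workflow derives it from the repository name.
IMAGE=ghcr.io/remsky/kokoro-fastapi-gpu

# Build the full image (mirrors the "Build GPU Docker image" step with load: true)
docker build -f docker/gpu/Dockerfile -t "$IMAGE:v0.1.0" .

# Slim it; --http-probe=false mirrors DSLIM_HTTP_PROBE: false in the workflow
docker-slim build --http-probe=false --tag "$IMAGE:v0.1.0-slim" "$IMAGE:v0.1.0"

# Push both the full and the slimmed tags (mirrors the "Push GPU Docker images" step)
docker push "$IMAGE:v0.1.0"
docker push "$IMAGE:v0.1.0-slim"
```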


@@ -24,7 +24,7 @@ Dockerized FastAPI wrapper for [Kokoro-82M](https://huggingface.co/hexgrad/Kokor
 The service can be accessed through either the API endpoints or the Gradio web interface.
 
 1. Install prerequisites:
-   - Install [Docker Desktop](https://www.docker.com/products/docker-desktop/) + [Git](https://git-scm.com/downloads)
+   - Install [Docker Desktop](https://www.docker.com/products/docker-desktop/)
    - Clone the repository:
      ```bash
      git clone https://github.com/remsky/Kokoro-FastAPI.git
@@ -33,20 +33,20 @@ The service can be accessed through either the API endpoints or the Gradio web i
 2. Start the service:
 
-   a. Using Docker Compose (recommended for full setup including UI):
+   - Using Docker Compose (Full setup including UI):
      ```bash
      docker compose up --build # for GPU
-     #docker compose -f docker-compose.cpu.yml up --build # for CPU
+     docker compose -f docker-compose.cpu.yml up --build # for CPU
      ```
-
-   b. Running the API alone using Docker:
+   - OR running the API alone using Docker (model + voice packs baked in):
      ```bash
-     # For CPU version
-     docker run -p 8880:8880 kokoro-fastapi-cpu
+     docker run -p 8880:8880 ghcr.io/remsky/kokoro-fastapi-cpu:latest # CPU
+     docker run --gpus all -p 8880:8880 ghcr.io/remsky/kokoro-fastapi-gpu:latest # Nvidia GPU
 
-     # For GPU version (requires NVIDIA Container Toolkit)
-     docker run --gpus all -p 8880:8880 kokoro-fastapi-gpu
+     # Minified versions are available with `:latest-slim` tag.
      ```
 
 2. Run locally as an OpenAI-Compatible Speech Endpoint
    ```python
    from openai import OpenAI
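Once one of the containers above is running, the API can be smoke-tested directly from the shell. This is a minimal sketch rather than something from the README diff: it assumes the service exposes the OpenAI-style /v1/audio/speech route on port 8880, and that "kokoro" and "af_bella" are valid model and voice names for this deployment.

```bash
# Hypothetical smoke test against a locally running container on port 8880.
# The route, model name, and voice name are assumptions, not confirmed by this commit.
curl -s -X POST http://localhost:8880/v1/audio/speech \
  -H "Content-Type: application/json" \
  -d '{"model": "kokoro", "input": "Hello world!", "voice": "af_bella", "response_format": "mp3"}' \
  -o output.mp3
```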