From 7212f2f6320c4525180340fc80ac882bd12137e2 Mon Sep 17 00:00:00 2001
From: Reza Fatahi
Date: Mon, 25 Nov 2024 17:16:29 -0800
Subject: [PATCH] Added Docker support with wrapper

---
 README.md          | 72 ++++++++++++++++++++++++++++++++++-
 docker/Dockerfile  | 58 ++++++++++++++++++++++++++++
 docker/flux-cli.sh | 94 ++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 223 insertions(+), 1 deletion(-)
 create mode 100644 docker/Dockerfile
 create mode 100755 docker/flux-cli.sh

diff --git a/README.md b/README.md
index e73c1f13..0153decd 100644
--- a/README.md
+++ b/README.md
@@ -60,7 +60,7 @@ from flux.api import ImageRequest
 
 # this will create an api request directly but not block until the generation is finished
 request = ImageRequest("A beautiful beach", name="flux.1.1-pro")
-# or: request = ImageRequest("A beautiful beach", name="flux.1.1-pro", api_key="your_key_here")
+# or: request = ImageRequest("A beautiful beach", name="flux.1.1-pro", api_key="<your_api_key>")
 
 # any of the following will block until the generation is finished
 request.url
@@ -85,3 +85,73 @@ $ python -m flux.api --prompt="A beautiful beach" save outputs/api
 # open the image directly
 $ python -m flux.api --prompt="A beautiful beach" image show
 ```
+
+## Docker Usage
+
+We provide Docker support for both local model inference and API access, so you can run FLUX without installing its dependencies directly on your system.
+
+### Building the Docker Image
+
+Build the image for the platform that matches your hardware:
+
+```bash
+# Clone the repository
+git clone https://github.com/black-forest-labs/flux
+cd flux
+
+# For Apple Silicon (M1/M2/M3)
+docker build --platform linux/arm64 -t flux-project -f docker/Dockerfile .
+
+# For Intel/AMD with NVIDIA GPU
+docker build --platform linux/amd64 -t flux-project -f docker/Dockerfile .
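+
+# Optional sanity check (assumes the image tag used above): confirm which
+# architecture the image was built for
+docker image inspect flux-project --format '{{.Architecture}}'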
+```
+
+### Using the CLI Wrapper
+
+The `docker/flux-cli.sh` wrapper hides the `docker run` invocation and works in either API or local mode.
+
+#### Apple Silicon Macs
+
+```bash
+# Install the CLI tool
+cp docker/flux-cli.sh /usr/local/bin/flux-cli
+chmod +x /usr/local/bin/flux-cli
+
+# API usage (recommended for M-series Macs)
+flux-cli --api-key "your-api-key" \
+    --prompt "A beautiful sunset" \
+    --output sunset.jpg
+
+# Local model usage (runs on the CPU; the container cannot use the Mac's GPU)
+flux-cli --local \
+    --model flux.1-schnell \
+    --prompt "A beautiful forest" \
+    --output forest.jpg
+```
+
+#### NVIDIA GPU Systems
+
+```bash
+# API usage
+flux-cli --api-key "your-api-key" \
+    --prompt "A beautiful sunset" \
+    --output sunset.jpg
+
+# Local model usage with GPU acceleration (requires the NVIDIA Container Toolkit)
+flux-cli --local \
+    --model flux.1-schnell \
+    --prompt "A beautiful forest" \
+    --output forest.jpg \
+    --gpu
+```
+
+### Output Formats
+
+The CLI supports multiple output formats:
+
+```bash
+# Save to file (default); images are written to ./outputs/ on the host
+flux-cli --api-key "your-key" --prompt "prompt" --output image.jpg
+
+# Get URL (API mode only)
+flux-cli --api-key "your-key" --prompt "prompt" --format url
+
+# Display image directly (requires a display reachable from the container)
+flux-cli --api-key "your-key" --prompt "prompt" --format image
+```
diff --git a/docker/Dockerfile b/docker/Dockerfile
new file mode 100644
index 00000000..f48fb261
--- /dev/null
+++ b/docker/Dockerfile
@@ -0,0 +1,58 @@
+# Multi-platform base image
+FROM --platform=$TARGETPLATFORM ubuntu:22.04 AS base
+
+# Platform build args (these go out of scope at the end of a stage and must be
+# re-declared in any stage that uses them)
+ARG TARGETPLATFORM
+ARG BUILDPLATFORM
+
+# Set up Python and basic dependencies
+FROM base AS python-deps
+ARG TARGETPLATFORM
+ENV PYTHONUNBUFFERED=1 \
+    DEBIAN_FRONTEND=noninteractive
+
+# Install Python and build tooling
+RUN apt-get update && apt-get install -y \
+    python3.10 \
+    python3.10-venv \
+    python3-pip \
+    git \
+    && rm -rf /var/lib/apt/lists/*
+
+# Create venv and install dependencies
+RUN python3.10 -m venv /opt/venv
+ENV PATH="/opt/venv/bin:$PATH"
+
+WORKDIR /app
+COPY requirements.txt .
+
+# Install PyTorch based on the target architecture
+RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
+    # CPU wheels for arm64 (e.g. Apple Silicon hosts)
+    pip install --no-cache-dir torch torchvision torchaudio; \
+    else \
+    # CUDA 11.8 wheels for NVIDIA GPUs
+    pip install --no-cache-dir torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118; \
+    fi
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Final stage
+FROM base
+
+# The bare Ubuntu image ships without Python; install the interpreter that the
+# copied venv's symlinks point to
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends python3.10 \
+    && rm -rf /var/lib/apt/lists/*
+
+COPY --from=python-deps /opt/venv /opt/venv
+ENV PATH="/opt/venv/bin:$PATH"
+
+WORKDIR /app
+COPY . .
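+
+# Note: `COPY . .` pulls in the whole build context; a .dockerignore that excludes
+# local outputs/ and models/ directories (if present) keeps the image small.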
+
+# Install the package
+RUN pip install -e ".[all]"
+
+# Create volume mount points
+VOLUME ["/app/outputs", "/app/models"]
+
+# Cache downloaded model weights under the mounted models directory
+ENV TORCH_HOME=/app/models
+ENV HF_HOME=/app/models
+
+# Entrypoint for the API client; arguments passed to `docker run` are appended
+ENTRYPOINT ["python3", "-m", "flux.api"]
diff --git a/docker/flux-cli.sh b/docker/flux-cli.sh
new file mode 100755
index 00000000..5385452f
--- /dev/null
+++ b/docker/flux-cli.sh
@@ -0,0 +1,94 @@
+#!/bin/bash
+
+# CLI wrapper for the FLUX Docker image
+# Supports both local model inference and API access
+
+set -e
+
+# Default values
+USE_LOCAL=false
+API_KEY="${BFL_API_KEY:-}"
+MODEL="flux.1-pro"
+PROMPT=""
+OUTPUT="flux-output.jpg"
+OUTPUT_FORMAT="save"
+GPU_SUPPORT=""
+
+usage() {
+    cat << EOF
+Usage: $0 [options]
+
+Options:
+  --local           Use local model instead of API
+  --api-key KEY     API key for API mode (or set BFL_API_KEY)
+  --model NAME      Model name to use (default: flux.1-pro)
+  --prompt TEXT     Prompt for image generation
+  --output NAME     Output file name (default: flux-output.jpg, written to ./outputs/)
+  --format FORMAT   Output format: save|url|image (default: save)
+  --gpu             Enable GPU support (NVIDIA only)
+  -h, --help        Show this help message
+
+Examples:
+  $0 --prompt "A beautiful sunset" --output sunset.jpg
+  $0 --local --model flux.1-schnell --prompt "A forest" --gpu
+EOF
+}
+
+# Parse arguments
+while [[ "$#" -gt 0 ]]; do
+    case $1 in
+        --local) USE_LOCAL=true ;;
+        --api-key) API_KEY="$2"; shift ;;
+        --model) MODEL="$2"; shift ;;
+        --prompt) PROMPT="$2"; shift ;;
+        --output) OUTPUT="$2"; shift ;;
+        --format) OUTPUT_FORMAT="$2"; shift ;;
+        --gpu) GPU_SUPPORT="--gpus all" ;;
+        -h|--help) usage; exit 0 ;;
+        *) echo "Unknown parameter: $1"; usage; exit 1 ;;
+    esac
+    shift
+done
+
+# Validate required arguments
+if [ -z "$PROMPT" ]; then
+    echo "Error: --prompt is required"
+    usage
+    exit 1
+fi
+
+if [ "$USE_LOCAL" = true ] && [ -z "$MODEL" ]; then
+    echo "Error: --model is required when using local mode"
+    usage
+    exit 1
+fi
+
+if [ "$USE_LOCAL" = false ] && [ -z "$API_KEY" ]; then
+    echo "Error: --api-key is required when using API mode"
+    usage
+    exit 1
+fi
+
+# Ensure the host directories backing the volume mounts exist
+mkdir -p outputs models
+
+# Build Docker command
+DOCKER_CMD="docker run --rm ${GPU_SUPPORT} \
+    -v $(pwd)/outputs:/app/outputs \
+    -v $(pwd)/models:/app/models"
+
+if [ "$USE_LOCAL" = false ]; then
+    DOCKER_CMD="$DOCKER_CMD -e BFL_API_KEY=$API_KEY"
+fi
+
+# Map --format onto the flux.api CLI actions shown in the README; saved images
+# go to /app/outputs so they persist in ./outputs on the host
+case "$OUTPUT_FORMAT" in
+    save)  ACTION="save /app/outputs/$(basename "$OUTPUT")" ;;
+    url)   ACTION="url" ;;
+    image) ACTION="image show" ;;
+    *) echo "Error: unknown format '$OUTPUT_FORMAT' (expected save|url|image)"; exit 1 ;;
+esac
+
+# Execute Docker command
+if [ "$USE_LOCAL" = true ]; then
+    $DOCKER_CMD flux-project \
+        --model "$MODEL" \
+        --prompt "$PROMPT" \
+        $ACTION
+else
+    $DOCKER_CMD flux-project \
+        --prompt "$PROMPT" \
+        $ACTION
+fi
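+
+# For reference, an API-mode invocation such as:
+#   flux-cli --api-key "$BFL_API_KEY" --prompt "A beautiful sunset" --output sunset.jpg
+# expands to roughly:
+#   docker run --rm -v "$PWD/outputs:/app/outputs" -v "$PWD/models:/app/models" \
+#       -e BFL_API_KEY=... flux-project --prompt "A beautiful sunset" save /app/outputs/sunset.jpg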