feat: add Docker support for offline deployment with qwen3:14b
Major additions:
- All-in-One Docker image with Ollama + models bundled
- Separate deployment option for existing Ollama installations
- Changed default model from qwen3:8b to qwen3:14b
- Comprehensive deployment documentation

Files added:
- Dockerfile: basic app-only image
- Dockerfile.allinone: complete image with Ollama + models
- docker-compose.yml: easy deployment configuration
- docker-entrypoint.sh: startup script for the all-in-one image
- requirements.txt: Python dependencies
- .dockerignore: exclude unnecessary files from the image

Scripts:
- export-ollama-models.sh: export models from a local Ollama install
- build-allinone.sh: build the complete offline-deployable image
- build-and-export.sh: build and export the basic image

Documentation:
- DEPLOYMENT.md: comprehensive deployment guide
- QUICK_START.md: quick reference for common tasks

Configuration:
- Updated config.py: DEFAULT_CHAT_MODEL = qwen3:14b
- Updated frontend/opro.html: page title set to 系统提示词优化 ("System Prompt Optimization")
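For reference, a minimal sketch of the intended end-to-end flow, pieced together from the scripts and ports introduced in this commit (the /opt/deploy transfer path is only an example):

    # On a machine with internet access and a local Ollama install:
    ./export-ollama-models.sh      # exports qwen3:14b and qwen3-embedding:4b into ./ollama-models
    ./build-allinone.sh            # builds and saves system-prompt-optimizer-allinone.tar

    # Transfer the tarball to the offline target server:
    scp system-prompt-optimizer-allinone.tar user@server:/opt/deploy/

    # On the target server, load and run the all-in-one image:
    docker load -i /opt/deploy/system-prompt-optimizer-allinone.tar
    docker run -d \
      --name system-prompt-optimizer \
      -p 8010:8010 \
      -p 11434:11434 \
      -v "$(pwd)/outputs:/app/outputs" \
      --restart unless-stopped \
      system-prompt-optimizer:allinone

    # The UI is then served at http://<server-ip>:8010/ui/opro.html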
build-allinone.sh (new executable file, 98 lines)
@@ -0,0 +1,98 @@
#!/bin/bash

# Build all-in-one Docker image with Ollama and models
# This creates a complete offline-deployable image

set -e

IMAGE_NAME="system-prompt-optimizer"
IMAGE_TAG="allinone"
EXPORT_FILE="${IMAGE_NAME}-${IMAGE_TAG}.tar"

echo "=========================================="
echo "Building All-in-One Docker Image"
echo "=========================================="
echo ""
echo "This will create a Docker image containing:"
echo "  - Python application"
echo "  - Ollama service"
echo "  - qwen3:14b model"
echo "  - qwen3-embedding:4b model"
echo ""
echo "WARNING: The final image will be 10-20GB in size!"
echo ""

# Check if ollama-models directory exists
if [ ! -d "ollama-models" ]; then
    echo "ERROR: ollama-models directory not found!"
    echo ""
    echo "Please run ./export-ollama-models.sh first to export the models."
    exit 1
fi

echo "✓ Found ollama-models directory"
echo ""

# Check disk space
AVAILABLE_SPACE=$(df -h . | awk 'NR==2 {print $4}')
echo "Available disk space: $AVAILABLE_SPACE"
echo "Required: ~20GB for build process"
echo ""

read -p "Continue with build? (y/n) " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    echo "Build cancelled."
    exit 1
fi

echo ""
echo "=========================================="
echo "Building Docker image..."
echo "=========================================="
docker build -f Dockerfile.allinone -t "${IMAGE_NAME}:${IMAGE_TAG}" .

echo ""
echo "=========================================="
echo "Build complete!"
echo "=========================================="
docker images | grep "${IMAGE_NAME}"

echo ""
echo "=========================================="
echo "Exporting image to ${EXPORT_FILE}..."
echo "=========================================="
echo "This will take several minutes..."
docker save -o "${EXPORT_FILE}" "${IMAGE_NAME}:${IMAGE_TAG}"

echo ""
echo "=========================================="
echo "Export complete!"
echo "=========================================="
ls -lh "${EXPORT_FILE}"

echo ""
echo "=========================================="
echo "Deployment Instructions"
echo "=========================================="
echo ""
echo "1. Transfer ${EXPORT_FILE} to target server:"
echo "   scp ${EXPORT_FILE} user@server:/path/"
echo ""
echo "2. On target server, load the image:"
echo "   docker load -i ${EXPORT_FILE}"
echo ""
echo "3. Run the container:"
echo "   docker run -d \\"
echo "     --name system-prompt-optimizer \\"
echo "     -p 8010:8010 \\"
echo "     -p 11434:11434 \\"
echo "     -v \$(pwd)/outputs:/app/outputs \\"
echo "     --restart unless-stopped \\"
echo "     ${IMAGE_NAME}:${IMAGE_TAG}"
echo ""
echo "4. Access the application:"
echo "   http://<server-ip>:8010/ui/opro.html"
echo ""
echo "See DEPLOYMENT.md for more details."