toolboxes
MINE/cmds (new file, 4 lines)
@@ -0,0 +1,4 @@
root@db-NucBox-EVO-X2:/home/db/Downloads/SRBMiner-Multi-2-9-6# ./SRBMiner-MULTI --algorithm randomx --pool xmr-us-east1.nanopool.org:14444 --wallet bc1qjn4m6rmrveuxhk02a5qhe4r6kdcsvvt3vhdn9j --disable-gpu --cpu-threads 28
portainer-compose-stacks/amd-strix-halo-toolboxes.yml (new file, 100 lines)
@@ -0,0 +1,100 @@
version: '3.8'

services:
  amd-strix-halo-llama-rocm:
    image: kyuz0/amd-strix-halo-toolboxes:rocm-7rc-rocwmma
    container_name: amd-strix-halo-llama-rocm
    restart: unless-stopped
    privileged: true
    devices:
      - /dev/kfd:/dev/kfd
      - /dev/dri:/dev/dri
    group_add:
      - video
    volumes:
      - ./models:/models
      - ./data:/data
      - /home/${USER}:/home/${USER}:rslave
      - /home/db/Downloads/:/mnt/dl
    environment:
      - DISPLAY=${DISPLAY}
      - NVIDIA_VISIBLE_DEVICES=all
      - NVIDIA_DRIVER_CAPABILITIES=all
    ports:
      - "8080:8080" # For web UI if available
    working_dir: /models
    command: /bin/bash
    stdin_open: true
    tty: true

  # Alternative Vulkan backend
  amd-strix-halo-llama-vulkan-radv:
    image: kyuz0/amd-strix-halo-toolboxes:vulkan-radv
    container_name: amd-strix-halo-llama-vulkan-radv
    restart: unless-stopped
    privileged: true
    devices:
      - /dev/dri:/dev/dri
    group_add:
      - video
    volumes:
      - ./models:/models
      - ./data:/data
      - /home/${USER}:/home/${USER}:rslave
      - /home/db/Downloads/:/mnt/dl
    environment:
      - DISPLAY=${DISPLAY}
    ports:
      - "8081:8080" # Different port to avoid conflicts
    working_dir: /models
    command: /bin/bash
    stdin_open: true
    tty: true

  # Alternative Vulkan AMDVLK backend
  amd-strix-halo-llama-vulkan-amdvlk:
    image: kyuz0/amd-strix-halo-toolboxes:vulkan-amdvlk
    container_name: amd-strix-halo-llama-vulkan-amdvlk
    restart: unless-stopped
    privileged: true
    devices:
      - /dev/dri:/dev/dri
    group_add:
      - video
    volumes:
      - ./models:/models
      - ./data:/data
      - /home/${USER}:/home/${USER}:rslave
      # - /home/db/Downloads/xmrig-6.21.0:/mnt/xmrig
      - /home/db/Downloads/:/mnt/dl
    environment:
      - DISPLAY=${DISPLAY}
    ports:
      - "8082:8080" # Different port to avoid conflicts
    working_dir: /models
    command: /bin/bash
    stdin_open: true
    tty: true

  amdopencl:
    image: pbsprotest/amdopencl:24
    container_name: amdopencl
    devices:
      - /dev/dri
      - /dev/kfd
    volumes:
      - ./workspace:/workspace
      # - /home/db/Downloads/xmrig-6.21.0:/mnt/xmrig
      - /home/db/Downloads/:/mnt/dl
    stdin_open: true
    tty: true

volumes:
  models:
    driver: local
  data:
    driver: local

networks:
  default:
    name: amd-strix-halo-network
portainer-compose-stacks/amd-strix-halo-toolboxes/README.md (new file, 109 lines)
@@ -0,0 +1,109 @@
# AMD Strix Halo Toolboxes Docker Compose

This Docker Compose setup provides pre-built containers for running LLMs on AMD Ryzen AI Max "Strix Halo" integrated GPUs.

## Prerequisites

- AMD Ryzen AI Max "Strix Halo" system (e.g., Ryzen AI MAX+ 395)
- Docker and Docker Compose installed
- At least 128GB RAM recommended for larger models
- Proper kernel configuration for unified memory

## Kernel Configuration

Add these boot parameters to `/etc/default/grub`:

```bash
amd_iommu=off amdgpu.gttsize=131072 ttm.pages_limit=33554432
```
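
For reference, the parameters are typically appended to the existing `GRUB_CMDLINE_LINUX` value (a sketch; the variable name can differ by distro, and `...` stands for whatever options your file already has):

```bash
# /etc/default/grub -- append to the existing kernel command line
GRUB_CMDLINE_LINUX="... amd_iommu=off amdgpu.gttsize=131072 ttm.pages_limit=33554432"
```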

Then apply (the `grub2-mkconfig` path shown is for Fedora-family systems; Debian/Ubuntu use `update-grub` instead):
```bash
sudo grub2-mkconfig -o /boot/grub2/grub.cfg
sudo reboot
```
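
After the reboot, you can sanity-check that the parameters took effect. The sysfs counters below are exposed by the amdgpu driver (the glob assumes the iGPU is the only DRM device; adjust it otherwise):

```bash
# Reported GTT and VRAM pool sizes in bytes; GTT should reflect amdgpu.gttsize
cat /sys/class/drm/card*/device/mem_info_gtt_total
cat /sys/class/drm/card*/device/mem_info_vram_total

# Confirm the boot parameters were actually applied
cat /proc/cmdline
```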

## Usage

### Start all services
```bash
docker-compose -f amd-strix-halo-toolboxes.yml up -d
```

### Start specific backend
```bash
# ROCm backend (best for prompt processing)
docker-compose -f amd-strix-halo-toolboxes.yml up -d amd-strix-halo-llama-rocm

# Vulkan RADV backend (fastest token generation)
docker-compose -f amd-strix-halo-toolboxes.yml up -d amd-strix-halo-llama-vulkan-radv

# Vulkan AMDVLK backend
docker-compose -f amd-strix-halo-toolboxes.yml up -d amd-strix-halo-llama-vulkan-amdvlk
```

### Access containers
```bash
# Enter ROCm container
docker exec -it amd-strix-halo-llama-rocm bash

# Enter Vulkan RADV container
docker exec -it amd-strix-halo-llama-vulkan-radv bash

# Enter Vulkan AMDVLK container
docker exec -it amd-strix-halo-llama-vulkan-amdvlk bash
```

## Directory Structure

```
amd-strix-halo-toolboxes/
├── models/                      # Mount point for GGUF models
├── data/                        # Mount point for data
└── amd-strix-halo-toolboxes.yml
```

## Download Models

Inside the container, download GGUF models:

```bash
# Example: Download Llama-2-7B
wget https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q4_K_M.gguf

# Run the model
./llama.cpp/main -m llama-2-7b-chat.Q4_K_M.gguf -n 128 --repeat_penalty 1.1
```
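
To make use of the published ports, the model can also be served over HTTP rather than run interactively. A minimal sketch, assuming the image provides llama.cpp's `llama-server` on the PATH (binary name and flags are upstream llama.cpp defaults, not confirmed for these images):

```bash
# Serve the model on the container port mapped by the compose file (8080/8081/8082 on the host)
llama-server -m /models/llama-2-7b-chat.Q4_K_M.gguf \
  --host 0.0.0.0 --port 8080 \
  -ngl 99 -c 4096
```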

## Backend Performance

Based on benchmarks (the example below can be used to reproduce them on your own models):
- **ROCm 6.4.3 + ROCWMMA (hipBLASLt)**: Best for prompt processing
- **Vulkan RADV**: Fastest for token generation
- **Vulkan AMDVLK**: Good balance
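
To reproduce the comparison on your own hardware, llama.cpp's `llama-bench` can be run inside each container in turn (assuming the binary is available in the image; `-p` exercises prompt processing, `-n` token generation):

```bash
llama-bench -m /models/llama-2-7b-chat.Q4_K_M.gguf -p 512 -n 128
```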

## Memory Planning

Use the VRAM estimator inside containers:
```bash
python3 gguf-vram-estimator.py your-model.gguf --contexts 4096 32768 1048576
```
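
As a rough cross-check of the estimator: the fp16 KV cache adds roughly 2 × layers × context × KV heads × head dim × 2 bytes on top of the weights. For Llama-2-7B (32 layers, 32 KV heads, head dim 128, roughly 4.1 GB of Q4_K_M weights) at a 4096-token context:

```bash
# 2 (K and V) * 32 layers * 4096 ctx * 32 KV heads * 128 head_dim * 2 bytes (fp16)
echo $((2 * 32 * 4096 * 32 * 128 * 2))   # 2147483648 bytes, about 2 GiB of KV cache
```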

## Ports

- ROCm backend: `8080`
- Vulkan RADV backend: `8081`
- Vulkan AMDVLK backend: `8082`
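
Once a server is running inside a container (see the serving sketch under Download Models), a quick smoke test from the host, assuming llama.cpp's default HTTP endpoints:

```bash
curl http://localhost:8080/health
curl http://localhost:8080/completion -d '{"prompt": "Hello", "n_predict": 16}'
```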

## Troubleshooting

1. **Permission issues**: Ensure your user is in the `video` group (see the snippet after this list)
2. **GPU not detected**: Check kernel parameters and reboot
3. **Out of memory**: Use the VRAM estimator to plan model sizes
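
A hedged fix for the permission case, plus a quick check that the GPU device nodes the compose file binds are actually present (group names and paths assume a standard amdgpu setup):

```bash
# Add your user to the video (and, on many distros, render) groups, then log out and back in
sudo usermod -aG video,render "$USER"

# The compute and display nodes should exist and be group-accessible
ls -l /dev/kfd /dev/dri/
```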

## References

- [Original Repository](https://github.com/kyuz0/amd-strix-halo-toolboxes)
- [Strix Halo Hardware Database](https://strixhalo-homelab.d7.wtf/)
portainer-compose-stacks/amd-strix-halo-toolboxes/start.sh (new file, 40 lines)
@@ -0,0 +1,40 @@
#!/bin/bash

# AMD Strix Halo Toolboxes Startup Script
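# Usage (assumes this script sits in the stack directory, next to amd-strix-halo-toolboxes.yml):
#   chmod +x start.sh && ./start.sh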

# Stop on the first failing step so the success message below is accurate
set -e

echo "Starting AMD Strix Halo Toolboxes..."

# Check if Docker is running
if ! docker info > /dev/null 2>&1; then
    echo "Error: Docker is not running. Please start Docker first."
    exit 1
fi

# Check if we're in the right directory
if [ ! -f "amd-strix-halo-toolboxes.yml" ]; then
    echo "Error: amd-strix-halo-toolboxes.yml not found. Please run this script from the amd-strix-halo-toolboxes directory."
    exit 1
fi

# Pull the latest images
echo "Pulling latest images..."
docker-compose -f amd-strix-halo-toolboxes.yml pull

# Start the services
echo "Starting services..."
docker-compose -f amd-strix-halo-toolboxes.yml up -d

echo "Services started successfully!"
echo ""
echo "Available containers:"
echo "- amd-strix-halo-llama-rocm (ROCm backend)"
echo "- amd-strix-halo-llama-vulkan-radv (Vulkan RADV backend)"
echo "- amd-strix-halo-llama-vulkan-amdvlk (Vulkan AMDVLK backend)"
echo ""
echo "To access a container:"
echo "docker exec -it amd-strix-halo-llama-rocm bash"
echo ""
echo "To view logs:"
echo "docker-compose -f amd-strix-halo-toolboxes.yml logs -f"