
FROM ollama/ollama
# FROM ubuntu:20.04
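# Note: the ollama/ollama base image is Ubuntu-based and bundles the Ollama server
# (which listens on 11434 by default); plain ubuntu:20.04 is a leaner alternative
# if only LM Studio is needed.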
# Avoid prompts from apt during build
ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y \
    wget \
    x11vnc \
    xvfb \
    net-tools \
    git \
    python3 \
    python3-numpy \
    novnc
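# The Ubuntu novnc package installs the web client under /usr/share/novnc; the
# startup script below expects a checkout at /opt/noVNC instead (see the commented
# git clone near the end of this file), so keep one of the two paths consistent.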
# Create the /app directory to hold the application
WORKDIR /app
# https://medium.com/@renswunnink/why-i-love-appimage-for-linux-distros-924769510ec5
RUN wget -O lmstudio.AppImage "https://releases.lmstudio.ai/linux/0.2.18/beta/LM_Studio-0.2.18.AppImage" && \
    chmod u+x lmstudio.AppImage && \
    ./lmstudio.AppImage --appimage-extract && \
    rm lmstudio.AppImage && \
    mv squashfs-root lmstudio
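# --appimage-extract unpacks the image so it can run via the extracted AppRun
# entry point, avoiding the FUSE mount that AppImages normally need and that is
# usually unavailable inside containers.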
# or add dependencies in another layer
RUN apt-get update && apt-get install -y \
    dbus pciutils \
    libglib2.0-0 \
    libnss3 \
    libgbm1 \
    libxshmfence1 \
    libgl1-mesa-glx \
    libegl1-mesa \
    libatk1.0-0 \
    libatk-bridge2.0-0 \
    libgtk-3-0 \
    libasound2 \
    && rm -rf /var/lib/apt/lists/*
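# These are the usual X11/GTK/Chromium runtime libraries that an Electron-based
# GUI such as LM Studio needs in order to start under the virtual display.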
# The virtual display (Xvfb) is started at container runtime by /start-app.sh below;
# launching it in a RUN layer here would not persist into the final image.
ENV DISPLAY=:99
EXPOSE 8080
# Expose port 5980 for noVNC
EXPOSE 5980
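# Once this port is published (e.g. docker run -p 5980:5980 ...), the noVNC client
# is typically reachable in a browser at http://localhost:5980/vnc.html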
RUN ln -s /app/lmstudio/lm-studio /lm-studio
#? RUN chmod +x /app/lmstudio/AppRun
# Create a startup script that launches Xvfb, x11vnc, noVNC, and the application
RUN echo '#!/bin/bash\n\
Xvfb :99 -screen 0 1024x768x16 &\n\
export DISPLAY=:99\n\
# Start X11VNC\n\
x11vnc -display :99 -nopw -listen localhost -xkb -forever &\n\
# Start noVNC in the background so the application below still gets launched\n\
/opt/noVNC/utils/launch.sh --vnc localhost:5900 --listen 5980 &\n\
# Start the application\n\
exec /app/lmstudio/AppRun --no-sandbox\n\
' > /start-app.sh && chmod +x /start-app.sh
CMD ["/start-app.sh"]
#> apt-get update && apt-get install -y git x11vnc
#> git clone https://github.com/novnc/noVNC.git /opt/noVNC
#? x11vnc -display :99 -nopw -listen localhost -xkb -forever &
# Run LM Studio (assumes LM Studio can be run headlessly or in a server mode)
#CMD ["./lmstudio/AppRun"]
#CMD ["./lmstudio/AppRun", "--no-sandbox"]
#CMD ["/bin/bash"] # interactive shell
# build: docker build -t llmstudio .
# run: docker run (-dit) -p 8980:8080 llmstudio
# docker build -t llmstudio . && docker run -it -p 8980:8080 llmstudio
# cd /mnt/storage/DEV/workspace/repos/git.d-popov.com/ai-kevin/lmstudio/
# docker run --runtime=nvidia -e NVIDIA_VISIBLE_DEVICES=all -it llmstudio
# docker build -t llmstudio . && docker run -dit -p 8980:8080 --volume /var/run/dbus:/var/run/dbus llmstudio
# docker build -t llmstudio . && docker run -it -p 8980:8080 --volume /var/run/dbus:/var/run/dbus --runtime=nvidia -e NVIDIA_VISIBLE_DEVICES=all --security-opt apparmor=unconfined llmstudio
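# On newer Docker setups with the NVIDIA Container Toolkit (untested here), the GPU
# flags above can usually be replaced with --gpus all, e.g.:
# docker run --gpus all -it -p 8980:8080 -p 5980:5980 llmstudio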