diff --git a/bgpt/Dockerfile.bgpt b/bgpt/Dockerfile.bgpt
new file mode 100644
--- /dev/null
+++ b/bgpt/Dockerfile.bgpt
@@ -0,0 +1,91 @@
+# syntax=docker/dockerfile:1
+FROM nvidia/cuda:11.6.1-devel-ubuntu20.04
+
+# Build-time only: keeps apt non-interactive without baking it into the runtime env.
+ARG DEBIAN_FRONTEND=noninteractive
+
+# ENV does not expand `~` -- use an absolute path.
+ENV MAMBA_ROOT_PREFIX=/root/micromamba
+WORKDIR /root
+SHELL ["/bin/bash", "-o", "pipefail", "-c"]
+
+COPY requirements.txt /root/
+# Public key only; install it directly as authorized_keys so key-based login works.
+COPY id_rsa.pub /root/.ssh/authorized_keys
+
+RUN <<EOT
+set -eux
+# --- system packages + sshd ---
+# NOTE(review): this hunk was truncated in the original patch; the install list
+# below is reconstructed from the tools the script uses (sshd, wget, curl).
+apt-get update
+apt-get install -y --no-install-recommends \
+    ca-certificates curl openssh-server wget
+rm -rf /var/lib/apt/lists/*
+chmod 700 /root/.ssh
+chmod 600 /root/.ssh/authorized_keys
+echo "PermitRootLogin yes" >> /etc/ssh/sshd_config
+echo "PasswordAuthentication yes" >> /etc/ssh/sshd_config
+echo "PubkeyAuthentication yes" >> /etc/ssh/sshd_config
+echo "Port 22" >> /etc/ssh/sshd_config
+mkdir -p /var/run/sshd
+# NOTE(review): a hard-coded root password + password auth is a security hole;
+# for anything beyond a throwaway dev box, disable PasswordAuthentication.
+echo 'root:cdcdocker' | chpasswd
+# --- miniconda ---
+wget -q https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O /tmp/miniconda.sh
+bash /tmp/miniconda.sh -b -p /opt/conda
+rm /tmp/miniconda.sh
+/opt/conda/bin/conda init bash
+ln -s /opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh
+echo ". /opt/conda/etc/profile.d/conda.sh" >> ~/.bashrc
+# conda channel configuration
+cat <<EOF > ~/.condarc
+channels:
+  - conda-forge
+  - bioconda
+  - pytorch
+  - pytorch-nightly
+  - nvidia
+  - defaults
+show_channel_urls: true
+EOF
+# --- micromamba ---
+# NOTE(review): piping an unpinned CDN script into bash is unverified supply
+# chain; prefer the official micromamba release tarball with a checksum.
+echo 1 | bash <(curl -s https://cdn.jsdelivr.net/gh/hotwa/MicroMamba_Installer@main/install.sh)
+micromamba shell init -s bash -p ~/micromamba
+cat <<'EOF' >> ~/.bashrc
+source ~/micromamba/etc/profile.d/micromamba.sh
+alias mamba=micromamba
+alias mba=mamba
+EOF
+cat <<EOF > ~/.mambarc
+channels:
+  - conda-forge
+  - bioconda
+  - pytorch
+  - pytorch-nightly
+  - nvidia
+EOF
+# --- pip mirror ---
+mkdir -p ~/.pip
+cat <<EOF > ~/.pip/pip.conf
+[global]
+index-url = https://mirrors.aliyun.com/pypi/simple/
+
+[install]
+trusted-host = mirrors.aliyun.com
+EOF
+# --- bgpt environment ---
+micromamba create -n bgpt -c conda-forge python=3.7.9 -y
+micromamba run -n bgpt pip install --no-cache-dir -r requirements.txt
+micromamba run -n bgpt pip install --no-cache-dir ipykernel attrs seaborn
+micromamba run -n bgpt python -m ipykernel install --user --name="bgpt" --display-name="bgpt_env"
+micromamba run -n bgpt pip install --no-cache-dir torch==1.13.1+cu116 torchvision==0.14.1+cu116 torchaudio==0.13.1 --extra-index-url https://download.pytorch.org/whl/cu116
+echo "micromamba activate bgpt" >> ~/.bashrc
+EOT
+
+# sshd listens on 22 inside the container; host port 3222 is mapped in compose.
+EXPOSE 22
+
+# Run sshd in the foreground as PID 1.
+CMD ["/usr/sbin/sshd", "-D"]
diff --git a/bgpt/docker-compose-bgpt.yml b/bgpt/docker-compose-bgpt.yml
new file mode 100644
--- /dev/null
+++ b/bgpt/docker-compose-bgpt.yml
@@ -0,0 +1,30 @@
+services:
+  ubuntu-ssh:
+    build:
+      context: .
+      dockerfile: Dockerfile.bgpt
+    image: zly/cuda-bgpt:latest
+    container_name: ubuntu-ssh
+    pull_policy: missing  # spec value; "if_not_present" is a non-standard alias
+    tty: true
+    restart: unless-stopped
+    volumes:
+      - /data:/data
+    ports:
+      - "3222:22"
+    environment:
+      - NVIDIA_VISIBLE_DEVICES=all
+      - NVIDIA_DRIVER_CAPABILITIES=compute,utility
+    networks:
+      - network_bgpt
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - driver: nvidia
+              count: all
+              capabilities: [gpu]
+
+networks:
+  network_bgpt:
+    name: network_bgpt