# syntax=docker/dockerfile:1

FROM nvidia/cuda:11.6.1-devel-ubuntu20.04

# Silence apt prompts during the build. NOTE: mirroring the ARG into ENV also
# leaks DEBIAN_FRONTEND into the runtime environment; kept for compatibility
# with the original image contract.
ARG DEBIAN_FRONTEND="noninteractive"
ENV DEBIAN_FRONTEND=${DEBIAN_FRONTEND}

# `~` is NOT tilde-expanded inside ENV values, so the original
# `MAMBA_ROOT_PREFIX=~/micromamba` produced a literal "~/micromamba" path.
# Use the absolute path (HOME is /root in this image).
ENV MAMBA_ROOT_PREFIX=/root/micromamba

WORKDIR /root

# -o pipefail so failures on the left side of pipes (curl | bash,
# echo | chpasswd below) abort the build instead of being masked.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Copy only the dependency manifest first so the heavy install layer below
# is cached until requirements.txt changes.
COPY requirements.txt /root/
RUN <<EOT
#!/bin/bash
# Fail fast: heredoc lines are NOT implicitly &&-chained, so without -e any
# failed step in this long script would be silently ignored; -u catches
# undefined variables, -o pipefail catches failures inside pipes.
set -euo pipefail

# --- OS packages (one layer: update + install + cleanup) -------------------
apt-get update
apt-get install -y --no-install-recommends \
    bash build-essential curl git git-lfs jq libaio-dev lsof \
    openssh-server python3 python3-pip vim wget
rm -rf /var/lib/apt/lists/*

# --- SSH daemon: root login with password (dev/teaching image) -------------
# WARNING: a hard-coded root password is baked into the image layers — never
# expose this container to an untrusted network; rotate via `docker run`-time
# provisioning if this image is ever published.
echo "PermitRootLogin yes" >> /etc/ssh/sshd_config
echo "PasswordAuthentication yes" >> /etc/ssh/sshd_config
echo "PubkeyAuthentication yes" >> /etc/ssh/sshd_config
echo "Port 22" >> /etc/ssh/sshd_config
mkdir -p /var/run/sshd
echo 'root:cdcdocker' | chpasswd

# --- Miniconda --------------------------------------------------------------
# Plain `wget -q URL -O file`: the original `-qO- … -O file` asked for stdout
# output and a file at once; only the last -O winning made it work by accident.
wget -q https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O /tmp/miniconda.sh
bash /tmp/miniconda.sh -b -p /opt/conda
rm /tmp/miniconda.sh
# conda is not on PATH in this non-interactive build shell — use the full path
# (the original bare `conda init bash` could only fail, which the missing
# `set -e` then hid).
/opt/conda/bin/conda init bash
ln -s /opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh
echo ". /opt/conda/etc/profile.d/conda.sh" >> ~/.bashrc
# NOTE: the original appended "conda activate ${CONDA_ENV_NAME}" but
# CONDA_ENV_NAME is never defined anywhere in this Dockerfile, yielding a
# broken bare `conda activate ` line in ~/.bashrc (and a hard failure under
# `set -u`). Dropped: the interactive default env is `bgpt`, activated via
# micromamba at the end of this script.

# --- .condarc ---------------------------------------------------------------
cat <<EOF > ~/.condarc
channels:
  - conda-forge
  - bioconda
  - pytorch
  - pytorch-nightly
  - nvidia
  - defaults
show_channel_urls: true
EOF

# --- micromamba + .mambarc --------------------------------------------------
# SECURITY NOTE(review): piping an unpinned remote script into bash is neither
# reproducible nor auditable — pin a tagged release / verify a checksum if
# this image matters. -fsS (vs -s) makes curl fail loudly on HTTP errors.
echo 1 | bash <(curl -fsS https://cdn.jsdelivr.net/gh/hotwa/MicroMamba_Installer@main/install.sh)
micromamba shell init -s bash -p ~/micromamba
cat <<'EOF' >> ~/.bashrc
source ~/micromamba/etc/profile.d/micromamba.sh
alias mamba=micromamba
alias mba=mamba
EOF

cat <<EOF > ~/.mambarc
channels:
  - conda-forge
  - bioconda
  - pytorch
  - pytorch-nightly
  - nvidia
EOF

# --- pip mirror (Aliyun, for faster installs inside CN networks) ------------
# (single mkdir; the original created ~/.pip twice)
mkdir -p ~/.pip
cat <<EOF > ~/.pip/pip.conf
[global]
index-url = https://mirrors.aliyun.com/pypi/simple/

[install]
trusted-host=mirrors.aliyun.com
EOF

# --- project env: python 3.7.9 + requirements + CUDA 11.6 torch wheels ------
# --no-cache-dir keeps the pip wheel cache out of the image layer.
micromamba create -n bgpt -c conda-forge python=3.7.9 -y
micromamba run -n bgpt pip install --no-cache-dir -r requirements.txt
micromamba run -n bgpt pip install --no-cache-dir ipykernel attrs seaborn
micromamba run -n bgpt python -m ipykernel install --user --name="bgpt" --display-name="bgpt_env"
# (seaborn/attrs already installed above; original repeated them here)
micromamba run -n bgpt pip install --no-cache-dir torch==1.13.1+cu116 torchvision==0.14.1+cu116 torchaudio==0.13.1 --extra-index-url https://download.pytorch.org/whl/cu116
echo "micromamba activate bgpt" >> ~/.bashrc
EOT
|
|
# Expose SSH port
|
|
EXPOSE 3222
|
|
|
|
# Keep the container running
|
|
CMD ["/usr/sbin/sshd", "-D"]
|