Move to bgpt dir
This commit is contained in:
@@ -1,81 +0,0 @@
|
||||
# syntax=docker/dockerfile:1
# CUDA 11.6 dev image with SSH access and a micromamba-managed "bgpt" Python env.
FROM nvidia/cuda:11.6.1-devel-ubuntu20.04

# Build-time only: keep apt non-interactive without baking it into the runtime env.
ARG DEBIAN_FRONTEND=noninteractive

# ENV does not expand "~"; use the absolute path so micromamba finds its root.
ENV MAMBA_ROOT_PREFIX=/root/micromamba

WORKDIR /root
# pipefail so `curl | bash`-style pipelines fail loudly instead of masking errors.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# The original copied src/requirements.txt and then requirements.txt over it;
# only the second ever took effect, so keep just that one.
COPY requirements.txt /root/requirements.txt
# Public key must be named authorized_keys for sshd to honor it.
COPY id_rsa.pub /root/.ssh/authorized_keys

RUN <<EOT
#!/bin/bash
# Heredoc lines are NOT &&-chained: fail fast on any error / unset var / pipe failure.
set -euxo pipefail

apt-get update
apt-get install -y --no-install-recommends \
    bash \
    build-essential \
    curl \
    git \
    git-lfs \
    jq \
    libaio-dev \
    lsof \
    openssh-server \
    python3 \
    python3-pip \
    vim \
    wget
rm -rf /var/lib/apt/lists/*

# --- sshd: root login over port 22 (host maps 3222 -> 22 in compose) ---
{
    echo "PermitRootLogin yes"
    echo "PasswordAuthentication yes"
    echo "PubkeyAuthentication yes"
    echo "Port 22"
} >> /etc/ssh/sshd_config
mkdir -p /var/run/sshd
chmod 700 /root/.ssh
chmod 600 /root/.ssh/authorized_keys
# NOTE(review): hard-coded root password baked into the image — acceptable only
# for a private dev box; rotate or disable PasswordAuthentication for anything shared.
echo 'root:cdcdocker' | chpasswd

# --- pip mirror ---
mkdir -p /root/.pip
cat <<EOF > /root/.pip/pip.conf
[global]
index-url = https://mirrors.aliyun.com/pypi/simple/

[install]
trusted-host=mirrors.aliyun.com
EOF

# --- miniconda (kept for interactive use; env management is micromamba below) ---
# Original used conflicting "wget -qO- ... -O file"; fetch straight to the file.
wget -q https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O /tmp/miniconda.sh
bash /tmp/miniconda.sh -b -p /opt/conda
rm /tmp/miniconda.sh
# conda is not on PATH yet — call it by absolute path (bare `conda init` failed silently before).
/opt/conda/bin/conda init bash
ln -s /opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh
echo ". /opt/conda/etc/profile.d/conda.sh" >> /root/.bashrc
# Dropped: `conda activate ${CONDA_ENV_NAME}` — CONDA_ENV_NAME was never defined,
# which wrote a broken "conda activate " line into .bashrc.

# Channel configuration shared by conda and micromamba.
cat <<EOF > /root/.condarc
channels:
  - conda-forge
  - bioconda
  - pytorch
  - pytorch-nightly
  - nvidia
  - defaults
show_channel_urls: true
EOF

# --- micromamba install + shell hook ---
# NOTE(review): curl | bash of an unpinned third-party installer — pin a release
# tag or verify a checksum for reproducible builds.
echo 1 | bash <(curl -fsSL https://cdn.jsdelivr.net/gh/hotwa/MicroMamba_Installer@main/install.sh)
micromamba shell init -s bash -p "${MAMBA_ROOT_PREFIX}"
cat <<'EOF' >> /root/.bashrc
source ~/micromamba/etc/profile.d/micromamba.sh
alias mamba=micromamba
alias mba=mamba
EOF

cat <<EOF > /root/.mambarc
channels:
  - conda-forge
  - bioconda
  - pytorch
  - pytorch-nightly
  - nvidia
EOF

# --- bgpt environment ---
micromamba create -n bgpt -c conda-forge python=3.7.9 -y
micromamba run -n bgpt pip install --no-cache-dir -r /root/requirements.txt
micromamba run -n bgpt pip install --no-cache-dir ipykernel attrs seaborn
micromamba run -n bgpt python -m ipykernel install --user --name="bgpt" --display-name="bgpt_env"
micromamba run -n bgpt pip install --no-cache-dir \
    torch==1.13.1+cu116 torchvision==0.14.1+cu116 torchaudio==0.13.1 \
    --extra-index-url https://download.pytorch.org/whl/cu116
echo "micromamba activate bgpt" >> /root/.bashrc
EOT

# sshd listens on 22 inside the container; the host-side 3222 mapping lives in compose.
EXPOSE 22

# Run sshd in the foreground as PID 1 to keep the container alive.
CMD ["/usr/sbin/sshd", "-D"]
|
||||
@@ -1,54 +0,0 @@
|
||||
# Compose file for the bgpt dev container: SSH-accessible CUDA environment.
# (The top-level `version:` key is obsolete in the Compose Specification and
# only produces a warning, so it is omitted.)

services:
  ubuntu-ssh:
    build:
      context: .
      dockerfile: Dockerfile.bgpt
    image: zly/cuda-bgpt:latest
    container_name: ubuntu-ssh
    pull_policy: if_not_present
    tty: true
    restart: unless-stopped
    volumes:
      - ./src:/root/data
      - /mnt/sdb/zly/datas/Datas:/mnt/sdb/zly/datas
      - /data:/data
    ports:
      # Quoted to avoid YAML's base-60 number parsing of xx:yy mappings.
      # Host 3222 -> container 22, where sshd listens.
      - "3222:22"
    environment:
      - NVIDIA_VISIBLE_DEVICES=all
      - NVIDIA_DRIVER_CAPABILITIES=compute,utility
      # List-form env vars are taken verbatim: the original's embedded double
      # quotes became part of the value. No quotes needed here.
      - OLLAMA_ORIGINS=chrome-extension://*
    networks:
      - network3
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]

  # openai-whisper-asr-webservice:
  #   ports:
  #     - '9000:9000'
  #   volumes:
  #     - './whisper-large-v3:/data/whisper'
  #   environment:
  #     - ASR_MODEL=large
  #     - ASR_ENGINE=openai_whisper
  #     - ASR_MODEL_PATH=/data/whisper
  #   image: 'onerahmet/openai-whisper-asr-webservice:latest-gpu'
  #   deploy:
  #     resources:
  #       reservations:
  #         devices:
  #           - driver: nvidia
  #             count: 1
  #             capabilities: [gpu]

networks:
  network3:
    name: network3
||||
Reference in New Issue
Block a user