# syntax=docker/dockerfile:1

# CUDA 12.1.1 + cuDNN 8 development image on Ubuntu 22.04.
# Tag is pinned (good); for fully deterministic builds consider also pinning
# by digest (@sha256:…).
FROM nvidia/cuda:12.1.1-cudnn8-devel-ubuntu22.04
|
|
|
|
|
ENV DEBIAN_FRONTEND=noninteractive |
|
|
|
|
|
|
|
|
# OS-level dependencies: ffmpeg plus the X11/GL shared libraries needed for
# headless video/OpenCV rendering. --no-install-recommends keeps the image
# lean (ca-certificates is listed explicitly since it is no longer pulled in
# as a recommend and the later curl download needs TLS); the apt list cache
# is removed in the same layer so it never ships in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
        ffmpeg \
        git \
        libgl1-mesa-glx \
        libglib2.0-0 \
        libsm6 \
        libxext6 \
        libxrender-dev \
        wget \
    && rm -rf /var/lib/apt/lists/*
|
|
|
|
|
|
|
|
ENV CONDA_DIR=/opt/conda

# Install Miniconda. -f makes curl exit non-zero on an HTTP error instead of
# silently saving the error page as the "installer"; caches and the installer
# itself are removed in the same layer.
# NOTE(review): "latest" is unpinned — consider a fixed installer version
# (and verifying its sha256) for reproducible builds.
RUN curl -fsSLo ~/miniconda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh && \
    bash ~/miniconda.sh -b -p $CONDA_DIR && \
    rm ~/miniconda.sh && \
    $CONDA_DIR/bin/conda clean -afy

ENV PATH=$CONDA_DIR/bin:$PATH

# Use bash with pipefail for all subsequent RUNs so a failure early in a
# pipeline is not masked by the last command's exit status (hadolint DL4006).
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
|
|
|
|
|
|
|
|
# Bring the build context into the image. WORKDIR creates /workspace if it
# does not exist; all later relative paths resolve against it.
# NOTE(review): ensure a .dockerignore excludes .git, checkpoints, and other
# large/transient files so the context stays small and cache-friendly.
WORKDIR /workspace
COPY . .
|
|
|
|
|
|
|
|
# Non-interactively accept Anaconda's Terms of Service for the default
# channels; newer conda releases refuse to fetch from repo.anaconda.com
# until the ToS has been accepted, which would hang/fail the build.
RUN conda tos accept --override-channels --channel https://repo.anaconda.com/pkgs/main && \
    conda tos accept --override-channels --channel https://repo.anaconda.com/pkgs/r
|
|
|
|
|
|
|
|
# Create the "lyra" conda environment from the spec, dropping package caches
# in the same layer to keep the image smaller.
RUN conda env create -f lyra.yaml && \
    conda clean -afy

# Make the env the default interpreter for every later RUN/CMD. CONDA_PREFIX
# is exported explicitly because `conda activate` never runs in Docker's
# non-interactive RUN shells, yet later steps reference $CONDA_PREFIX
# (apex build, header symlinks) — without this it would expand empty.
# key=value form is used; legacy space-separated ENV is deprecated
# (build-check LegacyKeyValueFormat).
ENV CONDA_PREFIX=/opt/conda/envs/lyra
ENV PATH=/opt/conda/envs/lyra/bin:$PATH

# Convenience for interactive `docker exec`/`docker run -it … bash` sessions.
RUN echo "conda activate lyra" >> ~/.bashrc
|
|
|
|
|
|
|
|
# Python dependencies for the gen3c and lyra stacks, plus a pinned
# transformer-engine. --no-cache-dir (hadolint DL3042) keeps pip's download
# cache out of the layer.
RUN pip install --no-cache-dir -r requirements_gen3c.txt && \
    pip install --no-cache-dir -r requirements_lyra.txt && \
    pip install --no-cache-dir transformer-engine[pytorch]==1.12.0
|
|
|
|
|
|
|
|
# Build NVIDIA Apex with its C++/CUDA extensions; the cloned source tree is
# deleted in the same layer so it does not ship in the image.
# NOTE(review): the clone is unpinned (tracks apex master) — consider pinning
# a commit for reproducible builds.
# NOTE(review): $CONDA_PREFIX is empty unless exported via ENV or an activated
# env — verify CUDA_HOME resolves to the intended CUDA installation here.
RUN git clone https://github.com/NVIDIA/apex && \
    CUDA_HOME=$CONDA_PREFIX pip install -v --disable-pip-version-check --no-cache-dir \
        --no-build-isolation --config-settings "--build-option=--cpp_ext" \
        --config-settings "--build-option=--cuda_ext" ./apex && \
    rm -rf apex
|
|
|
|
|
|
|
|
# MoGe (monocular geometry). NOTE(review): unpinned — tracks the default
# branch; consider pinning a tag/commit like mamba-ssm below.
RUN pip install --no-cache-dir git+https://github.com/microsoft/MoGe.git

# mamba-ssm pinned to v2.2.4; --no-build-isolation so its CUDA extension
# builds against the already-installed torch rather than a fresh isolated env.
RUN pip install --no-cache-dir --no-build-isolation "git+https://github.com/state-spaces/[email protected]"
|
|
|
|
|
|
|
|
# Expose the pip-installed NVIDIA component headers (under
# site-packages/nvidia/*/include) in the env's include/ directories so
# source builds that expect a conventional prefix layout can find them.
# The prefix is resolved explicitly inside the RUN: $CONDA_PREFIX is not set
# in non-interactive Docker shells (`conda activate` never runs), so relying
# on it here would expand to "" and link against "/".
RUN PREFIX=/opt/conda/envs/lyra && \
    ln -sf $PREFIX/lib/python3.10/site-packages/nvidia/*/include/* $PREFIX/include/ && \
    ln -sf $PREFIX/lib/python3.10/site-packages/nvidia/*/include/* $PREFIX/include/python3.10
|
|
|
|
|
|
|
|
# Authenticate to Hugging Face via a BuildKit secret mount so the token is
# never written into an image layer or visible in `docker history` (never
# pass credentials as literal RUN arguments or via ARG/ENV).
# Build with: docker build --secret id=hf_token,src=/path/to/token .
# NOTE(review): $CONDA_PREFIX is empty unless exported via ENV — verify
# CUDA_HOME is resolved as intended by the download scripts.
RUN --mount=type=secret,id=hf_token \
    huggingface-cli login --token "$(cat /run/secrets/hf_token)" && \
    python3 scripts/download_tokenizer_checkpoints.py --checkpoint_dir checkpoints/cosmos_predict1 --tokenizer_types CV8x8x8-720p && \
    CUDA_HOME=$CONDA_PREFIX PYTHONPATH=$(pwd) python scripts/download_gen3c_checkpoints.py --checkpoint_dir checkpoints && \
    CUDA_HOME=$CONDA_PREFIX PYTHONPATH=$(pwd) python scripts/download_lyra_checkpoints.py --checkpoint_dir checkpoints
|
|
|
|
|
|
|
|
EXPOSE 7860 |
|
|
|
|
|
|
|
|
CMD ["python", "main_gradio.py"] |
|
|
|