Skip to content

Commit

Permalink
[docker] Add Dockerfiles for Rocky9 build
Browse files Browse the repository at this point in the history
CCTag build has been disabled, as well as the SWIG binding.
  • Loading branch information
cbentejac committed Dec 10, 2024
1 parent 868e745 commit 273b998
Show file tree
Hide file tree
Showing 2 changed files with 182 additions and 0 deletions.
73 changes: 73 additions & 0 deletions docker/Dockerfile_rocky
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
ARG AV_DEPS_VERSION
ARG AV_VERSION
ARG CUDA_VERSION
ARG ROCKY_VERSION
FROM alicevision/alicevision-deps:${AV_DEPS_VERSION}-rocky${ROCKY_VERSION}-cuda${CUDA_VERSION}
LABEL maintainer="AliceVision Team alicevision-team@googlegroups.com"
ARG TARGET_ARCHITECTURE=core

# use CUDA_VERSION to select the image version to use
# see https://hub.docker.com/r/nvidia/cuda/
#
# AV_VERSION=2.2.8
# CUDA_VERSION=11.0
# ROCKY_VERSION=9
# docker build \
#    --build-arg CUDA_VERSION=${CUDA_VERSION} \
#    --build-arg ROCKY_VERSION=${ROCKY_VERSION} \
#    --build-arg AV_VERSION=2.2.8.develop \
#    --tag alicevision/alicevision:${AV_VERSION}-rocky${ROCKY_VERSION}-cuda${CUDA_VERSION} \
#    -f Dockerfile_rocky .
#
# then execute with nvidia docker (https://github.com/nvidia/nvidia-docker/wiki/Installation-(version-2.0))
# docker run -it --runtime=nvidia alicevision/alicevision:${AV_VERSION}-rocky${ROCKY_VERSION}-cuda${CUDA_VERSION}


# OS/Version (FILE): cat /etc/issue.net
# Cuda version (ENV): $CUDA_VERSION

# Build-tree layout: sources in AV_DEV, out-of-source build in AV_BUILD,
# install prefix in AV_INSTALL, relocatable runtime bundle in AV_BUNDLE.
ENV AV_DEV=/opt/AliceVision_git \
    AV_BUILD=/tmp/AliceVision_build \
    AV_INSTALL=/opt/AliceVision_install \
    AV_BUNDLE=/opt/AliceVision_bundle \
    PATH="${PATH}:${AV_BUNDLE}" \
    VERBOSE=1

COPY CMakeLists.txt *.md ${AV_DEV}/
COPY src ${AV_DEV}/src

WORKDIR "${AV_BUILD}"

COPY docker ${AV_DEV}/docker

# NOTE: CCTag and the SWIG binding are disabled for the Rocky build (see
# ALICEVISION_USE_CCTAG / ALICEVISION_BUILD_SWIG_BINDING below); the SWIG_DIR
# hints are kept so the binding can be re-enabled without further edits.
RUN cmake -DCMAKE_BUILD_TYPE=Release \
          -DBUILD_SHARED_LIBS:BOOL=ON \
          -DTARGET_ARCHITECTURE=${TARGET_ARCHITECTURE} \
          -DALICEVISION_BUILD_DEPENDENCIES:BOOL=OFF \
          -DCMAKE_PREFIX_PATH:PATH="${AV_INSTALL}" \
          -DCMAKE_INSTALL_PREFIX:PATH="${AV_INSTALL}" \
          -DALICEVISION_BUNDLE_PREFIX="${AV_BUNDLE}" \
          -DALICEVISION_USE_ALEMBIC:BOOL=ON \
          -DMINIGLOG:BOOL=ON \
          -DALICEVISION_USE_CCTAG:BOOL=OFF \
          -DALICEVISION_USE_OPENCV:BOOL=ON \
          -DALICEVISION_USE_OPENGV:BOOL=ON \
          -DALICEVISION_USE_POPSIFT:BOOL=ON \
          -DALICEVISION_USE_CUDA:BOOL=ON \
          -DALICEVISION_USE_ONNX_GPU:BOOL=OFF \
          -DALICEVISION_BUILD_DOC:BOOL=OFF \
          -DALICEVISION_BUILD_SWIG_BINDING:BOOL=OFF \
          -DSWIG_DIR:PATH="${AV_INSTALL}/share/swig/4.3.0" -DSWIG_EXECUTABLE:PATH="${AV_INSTALL}/bin-deps/swig" \
          "${AV_DEV}"

# Compute the parallelism level in the same shell that consumes it: a variable
# exported in a separate RUN instruction does not persist into later layers,
# so `make -j${CPU_CORES}` from a prior `RUN export ...` would expand to an
# unbounded `make -j`.
RUN make install -j"$(${AV_DEV}/docker/check-cpu.sh)"

RUN make bundle

# Drop sources and build tree from the final image, and expose the sensor
# database / bundle root to login shells.
RUN rm -rf "${AV_BUILD}" "${AV_DEV}" && \
    echo "export ALICEVISION_SENSOR_DB=${AV_BUNDLE}/share/aliceVision/cameraSensors.db" >> /etc/profile.d/alicevision.sh && \
    echo "export ALICEVISION_ROOT=${AV_BUNDLE}" >> /etc/profile.d/alicevision.sh
109 changes: 109 additions & 0 deletions docker/Dockerfile_rocky_deps
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
ARG CUDA_VERSION
ARG ROCKY_VERSION
FROM nvidia/cuda:${CUDA_VERSION}-devel-rockylinux${ROCKY_VERSION}
LABEL maintainer="AliceVision Team alicevision-team@googlegroups.com"

# use CUDA_VERSION to select the image version to use
# see https://hub.docker.com/r/nvidia/cuda/
#
# AV_VERSION=2.2.8.develop
# CUDA_VERSION=11.0
# ROCKY_VERSION=9
# docker build \
#    --build-arg CUDA_VERSION=${CUDA_VERSION} \
#    --build-arg ROCKY_VERSION=${ROCKY_VERSION} \
#    --tag alicevision/alicevision-deps:${AV_VERSION}-rocky${ROCKY_VERSION}-cuda${CUDA_VERSION} \
#    -f Dockerfile_rocky_deps .

# OS/Version (FILE): cat /etc/issue.net
# Cuda version (ENV): $CUDA_VERSION

# Install the toolchain and build prerequisites in a single layer so that the
# dnf metadata/cache cleanup actually shrinks the image (removing it in a
# later layer would not). Order matters: config-manager must be available
# before enabling the devel repo, and EPEL before installing packages it ships.
RUN dnf groupinstall -y "Development Tools" && \
    dnf install -y 'dnf-command(config-manager)' && \
    dnf config-manager --set-enabled devel && \
    dnf install -y epel-release && \
    dnf install -y \
        bison \
        ca-certificates \
        cmake \
        diffutils \
        file \
        gfortran \
        git \
        libasan \
        libubsan \
        pcre2-devel \
        python3-pip \
        unzip \
        wget \
        which \
        xerces-c-devel \
        yasm && \
    dnf update -y && \
    dnf clean all
# RUN dnf install -y gcc-toolset-13-gcc-c++
# RUN dnf remove -y gcc-g++ gcc
# RUN update-alternatives --install /usr/bin/gcc gcc /opt/rh/gcc-toolset-13/root/usr/bin/gcc 60
# RUN update-alternatives --install /usr/bin/g++ g++ /opt/rh/gcc-toolset-13/root/usr/bin/g++ 60
# RUN update-alternatives --install /usr/bin/cpp cpp /opt/rh/gcc-toolset-13/root/usr/bin/cpp 60
# RUN scl enable gcc-toolset-13 bash

RUN python3 -m pip install numpy

# Build-tree layout: sources in AV_DEV, out-of-source build in AV_BUILD,
# install prefix in AV_INSTALL. Dependency executables end up in bin-deps
# (see the final RUN); AV_BUNDLE exists only in the main image, so it must
# not be referenced here (an undefined variable would append an empty — i.e.
# current-directory — entry to PATH).
ENV AV_DEV=/opt/AliceVision_git \
    AV_BUILD=/tmp/AliceVision_build \
    AV_INSTALL=/opt/AliceVision_install \
    PATH="${PATH}:/opt/AliceVision_install/bin-deps"

# Pre-downloaded data files; each export is APPENDED (>>) after the first
# write — using ">" for all three would leave only the last export in
# /etc/profile.d/alicevision.sh.
COPY dl/vlfeat_K80L3.SIFT.tree ${AV_INSTALL}/share/aliceVision/
RUN echo "export ALICEVISION_VOCTREE=${AV_INSTALL}/share/aliceVision/vlfeat_K80L3.SIFT.tree" > /etc/profile.d/alicevision.sh

COPY dl/sphereDetection_Mask-RCNN.onnx ${AV_INSTALL}/share/aliceVision/
RUN echo "export ALICEVISION_SPHERE_DETECTION_MODEL=${AV_INSTALL}/share/aliceVision/sphereDetection_Mask-RCNN.onnx" >> /etc/profile.d/alicevision.sh

COPY dl/fcn_resnet50.onnx ${AV_INSTALL}/share/aliceVision/
RUN echo "export ALICEVISION_SEMANTIC_SEGMENTATION_MODEL=${AV_INSTALL}/share/aliceVision/fcn_resnet50.onnx" >> /etc/profile.d/alicevision.sh

COPY docker/check-cpu.sh ${AV_DEV}/docker/check-cpu.sh
RUN echo "Build multithreading number of cores: $(${AV_DEV}/docker/check-cpu.sh)"

COPY CMakeLists.txt ${AV_DEV}/
COPY src/cmake/Dependencies.cmake ${AV_DEV}/src/cmake/

# Pre-fetched third-party archives so the superbuild does not hit the network.
COPY dl/deps ${AV_BUILD}/external/download/

WORKDIR "${AV_BUILD}"
# Configure the dependency superbuild only (AliceVision itself is built in the
# main image); CCTag is disabled for the Rocky build.
RUN cmake "${AV_DEV}" \
    -DCMAKE_BUILD_TYPE=Release \
    -DALICEVISION_BUILD_DEPENDENCIES:BOOL=ON \
    -DAV_BUILD_ZLIB:BOOL=ON \
    -DAV_BUILD_ALICEVISION:BOOL=OFF \
    -DAV_BUILD_CCTAG:BOOL=OFF \
    -DCMAKE_INSTALL_PREFIX="${AV_INSTALL}"

# Symlink lib64 to lib as qtOIIO expects to find OIIO in lib64
RUN mkdir -p "${AV_INSTALL}/lib" && \
    ln -s lib "${AV_INSTALL}/lib64"

# Some CUDA images ship libcublas outside /usr/local/cuda/lib64; provide the
# expected path if it is missing.
RUN test -e /usr/local/cuda/lib64/libcublas.so || ln -s /usr/lib/x86_64-linux-gnu/libcublas.so /usr/local/cuda/lib64/libcublas.so

# Per-target invocations kept for debugging individual dependencies:
# RUN make -j ${CPU_CORES} onnxruntime
# RUN make -j ${CPU_CORES} turbojpeg
# RUN make -j ${CPU_CORES} boost
# RUN make -j ${CPU_CORES} openexr
# RUN make -j ${CPU_CORES} tbb
# RUN make -j ${CPU_CORES} assimp
# RUN make -j ${CPU_CORES} geogram
# RUN make -j ${CPU_CORES} eigen
# RUN make -j ${CPU_CORES} opengv
# RUN make -j ${CPU_CORES} lapack
# RUN make -j ${CPU_CORES} suitesparse
# RUN make -j ${CPU_CORES} ceres
# RUN make -j ${CPU_CORES} tiff
# RUN make -j ${CPU_CORES} png
# RUN make -j ${CPU_CORES} libraw
# RUN make -j ${CPU_CORES} openimageio
# RUN make -j ${CPU_CORES} alembic
# RUN make -j ${CPU_CORES} ffmpeg
# RUN make -j ${CPU_CORES} opencv
# RUN make -j ${CPU_CORES} expat
# RUN make -j ${CPU_CORES} SWIG
# RUN make -j ${CPU_CORES} E57Format

# RUN make -j ${CPU_CORES} popsift
# RUN make -j ${CPU_CORES} cctag

# Compute the parallelism level in the same shell that consumes it: a variable
# exported in an earlier RUN layer is not visible here, so `-j ${CPU_CORES}`
# would have expanded to an unbounded `-j`. Rename bin -> bin-deps so the
# dependency tools do not shadow the AliceVision binaries installed later.
RUN cmake --build . -j "$(${AV_DEV}/docker/check-cpu.sh)" && \
    mv "${AV_INSTALL}/bin" "${AV_INSTALL}/bin-deps" && \
    rm -rf "${AV_BUILD}"

0 comments on commit 273b998

Please sign in to comment.