diff --git a/.github/workflows/continuous-integration.yml b/.github/workflows/continuous-integration.yml index 2b7cecb328..9ea65817d5 100644 --- a/.github/workflows/continuous-integration.yml +++ b/.github/workflows/continuous-integration.yml @@ -21,8 +21,11 @@ on: jobs: build-linux: runs-on: ubuntu-latest + strategy: + matrix: + container: ["alicevision/alicevision-deps:2024.12.03-ubuntu22.04-cuda12.1.0", "alicevision/alicevision-deps:2024.12.09-rocky9-cuda12.1.0"] container: - image: alicevision/alicevision-deps:2024.11.25-ubuntu22.04-cuda12.1.0 + image: ${{ matrix.container }} env: DEPS_INSTALL_DIR: /opt/AliceVision_install BUILD_TYPE: Release @@ -30,6 +33,7 @@ jobs: ALICEVISION_ROOT: ${{ github.workspace }}/../AV_install ALICEVISION_SENSOR_DB: ${{ github.workspace }}/../AV_install/share/aliceVision/cameraSensors.db ALICEVISION_LENS_PROFILE_INFO: "" + BUILD_CCTAG: "${{ matrix.container == 'alicevision/alicevision-deps:2024.12.03-ubuntu22.04-cuda12.1.0' && 'ON' || 'OFF' }}" steps: - uses: actions/checkout@v1 @@ -53,7 +57,7 @@ jobs: -DALICEVISION_BUILD_SWIG_BINDING=ON \ -DALICEVISION_USE_OPENCV=ON \ -DALICEVISION_USE_CUDA=ON \ - -DALICEVISION_USE_CCTAG=ON \ + -DALICEVISION_USE_CCTAG="${BUILD_CCTAG}" \ -DALICEVISION_USE_POPSIFT=ON \ -DALICEVISION_USE_ALEMBIC=ON \ -DOpenCV_DIR="${DEPS_INSTALL_DIR}/share/OpenCV" \ diff --git a/INSTALL.md b/INSTALL.md index 467a891a48..0b1191e33e 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -384,33 +384,33 @@ Check the sample in [samples](src/samples/aliceVisionAs3rdParty) for an example ### Docker image -A docker image can be built using the CentOS or Ubuntu Dockerfiles. +A docker image can be built using the Ubuntu or Rocky Linux Dockerfiles. The Dockerfiles are based on `nvidia/cuda` images (https://hub.docker.com/r/nvidia/cuda/) To generate the docker image, just run: ``` -./docker/build-centos.sh +./docker/build-rocky.sh ``` -To do it manually, parameters `OS_TAG` and `CUDA_TAG` should be passed to choose the OS and CUDA version. 
-For example, the first line of below's commands shows the example to create docker for a CentOS 7 with Cuda 11.3.1 and second line for Ubuntu 16.04 with Cuda 11.0: +To do it manually, parameters `ROCKY_VERSION`/`UBUNTU_VERSION` and `CUDA_TAG` should be passed to choose the OS and CUDA versions. +For example, the first line of the commands below shows the example to create docker for a Rocky 9 with Cuda 12.1.0 and the second line for Ubuntu 22.04 with Cuda 12.1.0: ``` -docker build --build-arg OS_TAG=7 --build-arg CUDA_TAG=11.3.1 --tag alicevision:centos7-cuda11.3.1 . -docker build --build-arg OS_TAG=16.04 --build-arg CUDA_TAG=11.0 --build-arg NPROC=8 --tag alicevision:ubuntu16.04-cuda11.0 -f Dockerfile_ubuntu . +docker build --build-arg ROCKY_VERSION=9 --build-arg CUDA_TAG=12.1.0 --tag alicevision:rocky9-cuda12.1.0 -f Dockerfile_rocky . +docker build --build-arg UBUNTU_VERSION=22.04 --build-arg CUDA_TAG=12.1.0 --build-arg NPROC=8 --tag alicevision:ubuntu22.04-cuda12.1.0 -f Dockerfile_ubuntu . ``` In order to run the image [nvidia docker](https://github.com/nvidia/nvidia-docker/wiki/Installation-(version-2.0)) is needed. ``` -docker run -it --runtime=nvidia alicevision:centos7-cuda9.2 +docker run -it --runtime=nvidia alicevision:rocky9-cuda12.1.0 ``` To retrieve the generated files: ``` # Create an instance of the image, copy the files and remove the temporary docker instance. -CID=$(docker create alicevision:centos7-cuda11.3.1) && docker cp ${CID}:/opt/AliceVision_install . && docker cp ${CID}:/opt/AliceVision_bundle . && docker rm ${CID} +CID=$(docker create alicevision:rocky9-cuda12.1.0) && docker cp ${CID}:/opt/AliceVision_install . && docker cp ${CID}:/opt/AliceVision_bundle . 
&& docker rm ${CID} ``` Environment variable diff --git a/docker/Dockerfile_centos b/docker/Dockerfile_centos deleted file mode 100644 index 538770aaa5..0000000000 --- a/docker/Dockerfile_centos +++ /dev/null @@ -1,49 +0,0 @@ -ARG AV_DEPS_VERSION -ARG AV_VERSION -ARG CUDA_VERSION -ARG CENTOS_VERSION -FROM alicevision/alicevision-deps:${AV_DEPS_VERSION}-centos${CENTOS_VERSION}-cuda${CUDA_VERSION} -LABEL maintainer="AliceVision Team alicevision-team@googlegroups.com" -ARG TARGET_ARCHITECTURE=core - -# use CUDA_TAG to select the image version to use -# see https://hub.docker.com/r/nvidia/cuda/ -# -# CUDA_TAG=8.0-devel -# docker build --build-arg CUDA_TAG=$CUDA_TAG --tag alicevision:$CUDA_TAG . -# -# then execute with nvidia docker (https://github.com/nvidia/nvidia-docker/wiki/Installation-(version-2.0)) -# docker run -it --runtime=nvidia alicevision - -ENV AV_DEV=/opt/AliceVision_git \ - AV_BUILD=/tmp/AliceVision_build \ - AV_INSTALL=/opt/AliceVision_install \ - AV_BUNDLE=/opt/AliceVision_bundle \ - VERBOSE=1 - -COPY CMakeLists.txt *.md ${AV_DEV}/ -COPY src ${AV_DEV}/src -COPY docker ${AV_DEV}/docker - -WORKDIR "${AV_BUILD}" - -RUN cmake -DCMAKE_BUILD_TYPE=Release \ - -DBUILD_SHARED_LIBS:BOOL=ON \ - -DTARGET_ARCHITECTURE=${TARGET_ARCHITECTURE} \ - -DALICEVISION_BUILD_DEPENDENCIES:BOOL=OFF \ - -DALICEVISION_BUILD_SWIG_BINDING:BOOL=ON \ - -DCMAKE_PREFIX_PATH:PATH="${AV_INSTALL}" \ - -DCMAKE_INSTALL_PREFIX:PATH="${AV_INSTALL}" \ - -DALICEVISION_BUNDLE_PREFIX="${AV_BUNDLE}" \ - -DALICEVISION_USE_ALEMBIC=ON -DMINIGLOG=ON -DALICEVISION_USE_CCTAG=ON -DALICEVISION_USE_APRILTAG=ON -DALICEVISION_USE_OPENCV=ON -DALICEVISION_USE_OPENGV=ON \ - -DALICEVISION_USE_POPSIFT=ON -DALICEVISION_USE_CUDA=ON -DALICEVISION_USE_ONNX_GPU=OFF -DALICEVISION_BUILD_DOC=OFF \ - -DSWIG_DIR="${AV_INSTALL}/share/swig/4.3.0" -DSWIG_EXECUTABLE="${AV_INSTALL}/bin-deps/swig" \ - "${AV_DEV}" || (cat "${AV_BUILD}/CMakeFiles/CMakeOutput.log" "${AV_BUILD}/CMakeFiles/CMakeError.log" && false) - -RUN export 
CPU_CORES=`${AV_DEV}/docker/check-cpu.sh`; \ - make -j"${CPU_CORES}" && \ - make install && \ - make bundle && \ - rm -rf "${AV_DEV}" "${AV_BUILD}" && \ - echo "export ALICEVISION_SENSOR_DB=${AV_BUNDLE}/share/aliceVision/cameraSensors.db" >> /etc/profile.d/alicevision.sh && \ - echo "export ALICEVISION_ROOT=${AV_BUNDLE}" >> /etc/profile.d/alicevision.sh diff --git a/docker/Dockerfile_centos_deps b/docker/Dockerfile_centos_deps deleted file mode 100644 index 817f947117..0000000000 --- a/docker/Dockerfile_centos_deps +++ /dev/null @@ -1,149 +0,0 @@ -ARG CUDA_VERSION -ARG CENTOS_VERSION -FROM nvidia/cuda:${CUDA_VERSION}-devel-centos${CENTOS_VERSION} -LABEL maintainer="AliceVision Team alicevision-team@googlegroups.com" - -# use CUDA_VERSION to select the image version to use -# see https://hub.docker.com/r/nvidia/cuda/ -# -# AV_VERSION=2.2.8.develop -# CUDA_VERSION=10.2 -# CENTOS_VERSION=20.04 -# docker build \ -# --build-arg CUDA_VERSION=${CUDA_VERSION} \ -# --build-arg CENTOS_VERSION=${CENTOS_VERSION} \ -# --tag alicevision/alicevision-deps:${AV_VERSION}-centos${CENTOS_VERSION}-cuda${CUDA_TAG} \ -# -f Dockerfile_deps . - -# OS/Version (FILE): cat /etc/issue.net -# Cuda version (ENV): $CUDA_VERSION - -ENV AV_DEV=/opt/AliceVisionDeps_git \ - AV_BUILD=/tmp/AliceVisionDeps_build \ - AV_INSTALL=/opt/AliceVision_install \ - VERBOSE=1 \ - YUM_INSTALL='yum install -y --setopt=tsflags=nodocs' - -# -# Install all compilation tools -# - file and openssl are needed for cmake -# -# Workaround to give access to deprecated devtoolset-6 is taken from ASWF container: -# https://github.com/AcademySoftwareFoundation/aswf-docker/blob/master/scripts/common/install_yumpackages.sh#L119 -# -# Install packages one by one with yum to ensure that it creates an error if a package is missing. 
-# -RUN $YUM_INSTALL centos-release-scl-rh && \ - $YUM_INSTALL yum-utils && \ - $YUM_INSTALL python3 && \ - $YUM_INSTALL python3-devel && \ - yum clean all && \ - $YUM_INSTALL devtoolset-10-toolchain devtoolset-10-libatomic-devel \ - devtoolset-10-libasan-devel devtoolset-10-libubsan-devel --nogpgcheck && \ - $YUM_INSTALL --enablerepo=extras epel-release && \ - $YUM_INSTALL file \ - git \ - wget \ - unzip \ - yasm \ - pkgconfig \ - libtool \ - nasm \ - automake \ - which \ - openssl-devel \ - pcre2-devel \ - xerces-c-devel - -# Install numpy for Python3 -RUN pip3 install numpy - -# Manually install Bison, as Centos7 does not natively support versions beyond 3.0.4 -RUN wget http://ftp.gnu.org/gnu/bison/bison-3.6.tar.gz -RUN tar -zxvf bison-3.6.tar.gz -RUN cd bison-3.6 && ./configure && make && make install - - -# Okay, change our shell to specifically use our software collections. -# (default was SHELL [ "/bin/sh", "-c" ]) -# https://docs.docker.com/engine/reference/builder/#shell -# -# See also `scl` man page for enabling multiple packages if desired: -# https://linux.die.net/man/1/scl -# SHELL [ "/usr/bin/scl", "enable", "devtoolset-10" ] -ENV PATH="/opt/rh/devtoolset-10/root/usr/bin:${PATH}" \ - LD_LIBRARY_PATH="/opt/rh/devtoolset-10/root/usr/lib:/opt/rh/devtoolset-10/root/usr/lib64:/opt/rh/devtoolset-10/root/usr/lib64/dyninst:${LD_LIBRARY_PATH}" \ - MAKE=/opt/rh/devtoolset-10/root/usr/bin/make \ - CMAKE_VERSION=3.26.0 - -COPY dl/vlfeat_K80L3.SIFT.tree ${AV_INSTALL}/share/aliceVision/ -RUN echo "export ALICEVISION_VOCTREE=${AV_INSTALL}/share/aliceVision/vlfeat_K80L3.SIFT.tree" > /etc/profile.d/alicevision.sh - -COPY dl/sphereDetection_Mask-RCNN.onnx ${AV_INSTALL}/share/aliceVision/ -RUN echo "export ALICEVISION_SPHERE_DETECTION_MODEL=${AV_INSTALL}/share/aliceVision/sphereDetection_Mask-RCNN.onnx" > /etc/profile.d/alicevision.sh - -COPY dl/fcn_resnet50.onnx ${AV_INSTALL}/share/aliceVision/ -RUN echo "export 
ALICEVISION_SEMANTIC_SEGMENTATION_MODEL=${AV_INSTALL}/share/aliceVision/fcn_resnet50.onnx" > /etc/profile.d/alicevision.sh - -COPY docker/check-cpu.sh ${AV_DEV}/docker/check-cpu.sh -RUN export CPU_CORES=`${AV_DEV}/docker/check-cpu.sh` && echo "Build multithreading number of cores: ${CPU_CORES}" - -# Manually install cmake -WORKDIR /opt -COPY dl/cmake-${CMAKE_VERSION}.tar.gz /tmp -RUN tar zxf /tmp/cmake-${CMAKE_VERSION}.tar.gz && \ - rm /tmp/cmake-${CMAKE_VERSION}.tar.gz && \ - cd cmake-${CMAKE_VERSION} && \ - ./bootstrap --parallel=${CPU_CORES} --prefix=/usr/local -- -DCMAKE_BUILD_TYPE:STRING=Release -DCMAKE_USE_OPENSSL:BOOL=ON && \ - make -j ${CPU_CORES} && \ - make install - -COPY CMakeLists.txt ${AV_DEV}/ -COPY src/cmake/Dependencies.cmake ${AV_DEV}/src/cmake/ - -COPY dl/deps ${AV_BUILD}/external/download/ - -WORKDIR "${AV_BUILD}" -RUN cmake "${AV_DEV}" \ - -DCMAKE_BUILD_TYPE=Release \ - -DALICEVISION_BUILD_DEPENDENCIES:BOOL=ON \ - -DAV_BUILD_ALICEVISION:BOOL=OFF \ - -DCMAKE_INSTALL_PREFIX="${AV_INSTALL}" - -# Symlink lib64 to lib as qtOIIO expects to find OIIO in lib64 -RUN mkdir -p "${AV_INSTALL}/lib" && \ - ln -s lib "${AV_INSTALL}/lib64" - -RUN test -e /usr/local/cuda/lib64/libcublas.so || ln -s /usr/lib64/libcublas.so /usr/local/cuda/lib64/libcublas.so - -# RUN make -j ${CPU_CORES} onnxruntime -# RUN make -j ${CPU_CORES} pcl -# RUN make -j ${CPU_CORES} turbojpeg -# RUN make -j ${CPU_CORES} boost -# RUN make -j ${CPU_CORES} openexr -# RUN make -j ${CPU_CORES} tbb -# RUN make -j ${CPU_CORES} assimp -# RUN make -j ${CPU_CORES} geogram -# RUN make -j ${CPU_CORES} eigen -# RUN make -j ${CPU_CORES} opengv -# RUN make -j ${CPU_CORES} lapack -# RUN make -j ${CPU_CORES} suitesparse -# RUN make -j ${CPU_CORES} ceres -# RUN make -j ${CPU_CORES} tiff -# RUN make -j ${CPU_CORES} png -# RUN make -j ${CPU_CORES} libraw -# RUN make -j ${CPU_CORES} boost -# RUN make -j ${CPU_CORES} openimageio -# RUN make -j ${CPU_CORES} alembic -# RUN make -j ${CPU_CORES} ffmpeg -# RUN 
make -j ${CPU_CORES} opencv -# RUN make -j ${CPU_CORES} expat -# RUN make -j ${CPU_CORES} SWIG -# RUN make -j ${CPU_CORES} E57Format - -# RUN make -j ${CPU_CORES} cctag -# RUN make -j ${CPU_CORES} popsift - -RUN cmake --build . -j ${CPU_CORES} && \ - mv "${AV_INSTALL}/bin" "${AV_INSTALL}/bin-deps" && \ - rm -rf "${AV_BUILD}" diff --git a/docker/Dockerfile_rocky b/docker/Dockerfile_rocky new file mode 100644 index 0000000000..39023159a0 --- /dev/null +++ b/docker/Dockerfile_rocky @@ -0,0 +1,71 @@ +ARG AV_DEPS_VERSION +ARG AV_VERSION +ARG CUDA_VERSION +ARG ROCKY_VERSION +FROM alicevision/alicevision-deps:${AV_DEPS_VERSION}-rocky${ROCKY_VERSION}-cuda${CUDA_VERSION} +LABEL maintainer="AliceVision Team alicevision-team@googlegroups.com" +ARG TARGET_ARCHITECTURE=core + +# use CUDA_VERSION to select the image version to use +# see https://hub.docker.com/r/nvidia/cuda/ +# +# AV_VERSION=2.2.8 +# CUDA_VERSION=11.0 +# ROCKY_VERSION=9 +# docker build \ +# --build-arg CUDA_VERSION=${CUDA_VERSION} \ +# --build-arg ROCKY_VERSION=${ROCKY_VERSION} \ +# --build-arg AV_VERSION=2.2.8.develop \ +# --tag alicevision/alicevision:${AV_VERSION}-rocky${ROCKY_VERSION}-cuda${CUDA_VERSION} \ +# -f Dockerfile_rocky . 
+# +# then execute with nvidia docker (https://github.com/nvidia/nvidia-docker/wiki/Installation-(version-2.0)) +# docker run -it --runtime=nvidia alicevision/alicevision:{AV_VERSION}-rocky${ROCKY_VERSION}-cuda${CUDA_VERSION} + + +# OS/Version (FILE): cat /etc/issue.net +# Cuda version (ENV): $CUDA_VERSION + +ENV AV_DEV=/opt/AliceVision_git \ + AV_BUILD=/tmp/AliceVision_build \ + AV_INSTALL=/opt/AliceVision_install \ + AV_BUNDLE=/opt/AliceVision_bundle \ + PATH="${PATH}:${AV_BUNDLE}" \ + VERBOSE=1 + +COPY CMakeLists.txt *.md ${AV_DEV}/ +COPY src ${AV_DEV}/src + +WORKDIR "${AV_BUILD}" + +COPY docker ${AV_DEV}/docker + +RUN export CPU_CORES=`${AV_DEV}/docker/check-cpu.sh` + +RUN cmake -DCMAKE_BUILD_TYPE=Release \ + -DBUILD_SHARED_LIBS:BOOL=ON \ + -DTARGET_ARCHITECTURE=${TARGET_ARCHITECTURE} \ + -DALICEVISION_BUILD_DEPENDENCIES:BOOL=OFF \ + -DCMAKE_PREFIX_PATH:PATH="${AV_INSTALL}" \ + -DCMAKE_INSTALL_PREFIX:PATH="${AV_INSTALL}" \ + -DALICEVISION_BUNDLE_PREFIX="${AV_BUNDLE}" \ + -DALICEVISION_USE_ALEMBIC:BOOL=ON \ + -DMINIGLOG:BOOL=ON \ + -DALICEVISION_USE_CCTAG:BOOL=OFF \ + -DALICEVISION_USE_OPENCV:BOOL=ON \ + -DALICEVISION_USE_OPENGV:BOOL=ON \ + -DALICEVISION_USE_POPSIFT:BOOL=ON \ + -DALICEVISION_USE_CUDA:BOOL=ON \ + -DALICEVISION_USE_ONNX_GPU:BOOL=OFF \ + -DALICEVISION_BUILD_DOC:BOOL=OFF \ + -DALICEVISION_BUILD_SWIG_BINDING:BOOL=ON \ + -DSWIG_DIR:PATH="${AV_INSTALL}/share/swig/4.3.0" -DSWIG_EXECUTABLE:PATH="${AV_INSTALL}/bin-deps/swig" \ + "${AV_DEV}" + +RUN make install -j${CPU_CORES} + +RUN make bundle + +RUN rm -rf "${AV_BUILD}" "${AV_DEV}" && \ + echo "export ALICEVISION_SENSOR_DB=${AV_BUNDLE}/share/aliceVision/cameraSensors.db" >> /etc/profile.d/alicevision.sh && \ + echo "export ALICEVISION_ROOT=${AV_BUNDLE}" >> /etc/profile.d/alicevision.sh diff --git a/docker/Dockerfile_rocky_deps b/docker/Dockerfile_rocky_deps new file mode 100644 index 0000000000..7e19553887 --- /dev/null +++ b/docker/Dockerfile_rocky_deps @@ -0,0 +1,108 @@ +ARG CUDA_VERSION +ARG 
ROCKY_VERSION +FROM nvidia/cuda:${CUDA_VERSION}-devel-rockylinux${ROCKY_VERSION} +LABEL maintainer="AliceVision Team alicevision-team@googlegroups.com" + +# use CUDA_VERSION to select the image version to use +# see https://hub.docker.com/r/nvidia/cuda/ +# +# AV_VERSION=2.2.8.develop +# CUDA_VERSION=11.0 +# ROCKY_VERSION=9 +# docker build \ +# --build-arg CUDA_VERSION=${CUDA_VERSION} \ +# --build-arg ROCKY_VERSION=${ROCKY_VERSION} \ +# --tag alicevision/alicevision-deps:${AV_VERSION}-rocky${ROCKY_VERSION}-cuda${CUDA_TAG} \ +# -f Dockerfile_rocky_deps . + +# OS/Version (FILE): cat /etc/issue.net +# Cuda version (ENV): $CUDA_VERSION + +RUN dnf install -y 'dnf-command(config-manager)' +RUN dnf config-manager --set-enabled devel +RUN dnf install -y epel-release +# RUN dnf install -y gcc-toolset-13-gcc-c++ +# RUN dnf remove -y gcc-g++ gcc +# RUN update-alternatives --install /usr/bin/gcc gcc /opt/rh/gcc-toolset-13/root/usr/bin/gcc 60 +# RUN update-alternatives --install /usr/bin/g++ g++ /opt/rh/gcc-toolset-13/root/usr/bin/g++ 60 +# RUN update-alternatives --install /usr/bin/cpp cpp /opt/rh/gcc-toolset-13/root/usr/bin/cpp 60 +RUN dnf install -y ca-certificates wget +RUN dnf install -y cmake git unzip +RUN dnf install -y python3-devel python3-pip +RUN dnf install -y pcre2-devel +RUN dnf install -y xerces-c-devel +RUN dnf install -y bison +RUN dnf install -y diffutils which file +RUN dnf install -y yasm +RUN dnf install -y gfortran libasan libubsan +RUN dnf update -y +# RUN scl enable gcc-toolset-13 bash + +RUN python3 -m pip install numpy + +ENV AV_DEV=/opt/AliceVision_git \ + AV_BUILD=/tmp/AliceVision_build \ + AV_INSTALL=/opt/AliceVision_install \ + PATH="${PATH}:${AV_BUNDLE}" + +COPY dl/vlfeat_K80L3.SIFT.tree ${AV_INSTALL}/share/aliceVision/ +RUN echo "export ALICEVISION_VOCTREE=${AV_INSTALL}/share/aliceVision/vlfeat_K80L3.SIFT.tree" > /etc/profile.d/alicevision.sh + +COPY dl/sphereDetection_Mask-RCNN.onnx ${AV_INSTALL}/share/aliceVision/ +RUN echo "export 
ALICEVISION_SPHERE_DETECTION_MODEL=${AV_INSTALL}/share/aliceVision/sphereDetection_Mask-RCNN.onnx" > /etc/profile.d/alicevision.sh + +COPY dl/fcn_resnet50.onnx ${AV_INSTALL}/share/aliceVision/ +RUN echo "export ALICEVISION_SEMANTIC_SEGMENTATION_MODEL=${AV_INSTALL}/share/aliceVision/fcn_resnet50.onnx" > /etc/profile.d/alicevision.sh + +COPY docker/check-cpu.sh ${AV_DEV}/docker/check-cpu.sh +RUN export CPU_CORES=`${AV_DEV}/docker/check-cpu.sh` && echo "Build multithreading number of cores: ${CPU_CORES}" + +COPY CMakeLists.txt ${AV_DEV}/ +COPY src/cmake/Dependencies.cmake ${AV_DEV}/src/cmake/ + +COPY dl/deps ${AV_BUILD}/external/download/ + +WORKDIR "${AV_BUILD}" +RUN cmake "${AV_DEV}" \ + -DCMAKE_BUILD_TYPE=Release \ + -DALICEVISION_BUILD_DEPENDENCIES:BOOL=ON \ + -DAV_BUILD_ZLIB:BOOL=ON \ + -DAV_BUILD_ALICEVISION:BOOL=OFF \ + -DAV_BUILD_CCTAG:BOOL=OFF \ + -DCMAKE_INSTALL_PREFIX="${AV_INSTALL}" + +# Symlink lib64 to lib as qtOIIO expects to find OIIO in lib64 +RUN mkdir -p "${AV_INSTALL}/lib" && \ + ln -s lib "${AV_INSTALL}/lib64" + +RUN test -e /usr/local/cuda/lib64/libcublas.so || ln -s /usr/lib64/libcublas.so /usr/local/cuda/lib64/libcublas.so + +# RUN make -j ${CPU_CORES} onnxruntime +# RUN make -j ${CPU_CORES} turbojpeg +# RUN make -j ${CPU_CORES} boost +# RUN make -j ${CPU_CORES} openexr +# RUN make -j ${CPU_CORES} tbb +# RUN make -j ${CPU_CORES} assimp +# RUN make -j ${CPU_CORES} geogram +# RUN make -j ${CPU_CORES} eigen +# RUN make -j ${CPU_CORES} opengv +# RUN make -j ${CPU_CORES} lapack +# RUN make -j ${CPU_CORES} suitesparse +# RUN make -j ${CPU_CORES} ceres +# RUN make -j ${CPU_CORES} tiff +# RUN make -j ${CPU_CORES} png +# RUN make -j ${CPU_CORES} libraw +# RUN make -j ${CPU_CORES} openimageio +# RUN make -j ${CPU_CORES} alembic +# RUN make -j ${CPU_CORES} ffmpeg +# RUN make -j ${CPU_CORES} opencv +# RUN make -j ${CPU_CORES} expat +# RUN make -j ${CPU_CORES} SWIG +# RUN make -j ${CPU_CORES} E57Format + +# RUN make -j ${CPU_CORES} popsift +# 
RUN make -j ${CPU_CORES} cctag + +RUN cmake --build . -j ${CPU_CORES} && \ + mv "${AV_INSTALL}/bin" "${AV_INSTALL}/bin-deps" && \ + rm -rf "${AV_BUILD}" diff --git a/docker/build-all.sh b/docker/build-all.sh index 276eb1f420..0fd2467f1f 100755 --- a/docker/build-all.sh +++ b/docker/build-all.sh @@ -9,10 +9,5 @@ test -e docker/fetch.sh || { exit 1 } -CUDA_VERSION=11.0 UBUNTU_VERSION=20.04 docker/build-ubuntu.sh -CUDA_VERSION=11.0 UBUNTU_VERSION=18.04 docker/build-ubuntu.sh -CUDA_VERSION=10.2 UBUNTU_VERSION=18.04 docker/build-ubuntu.sh -CUDA_VERSION=9.2 UBUNTU_VERSION=18.04 docker/build-ubuntu.sh - -CUDA_VERSION=10.2 CENTOS_VERSION=7 docker/build-centos.sh -CUDA_VERSION=9.2 CENTOS_VERSION=7 docker/build-centos.sh +CUDA_VERSION=12.1.0 UBUNTU_VERSION=22.04 docker/build-ubuntu.sh +CUDA_VERSION=12.1.0 ROCKY_VERSION=9 docker/build-rocky.sh diff --git a/docker/build-centos.sh b/docker/build-centos.sh deleted file mode 100755 index f9fe4b25bb..0000000000 --- a/docker/build-centos.sh +++ /dev/null @@ -1,67 +0,0 @@ -#!/bin/bash - -set -ex - -test -e docker/fetch.sh || { - echo This script must be run from the top level of the AliceVision tree - exit 1 -} - -test -z "$AV_DEPS_VERSION" && AV_DEPS_VERSION=2024.10.22 -test -z "$AV_VERSION" && AV_VERSION="$(git rev-parse --abbrev-ref HEAD)-$(git rev-parse --short HEAD)" -test -z "$CUDA_VERSION" && CUDA_VERSION=11.3.1 -test -z "$CENTOS_VERSION" && CENTOS_VERSION=7 -test -z "$REPO_OWNER" && REPO_OWNER=alicevision -test -z "$DOCKER_REGISTRY" && DOCKER_REGISTRY=docker.io - - -echo "AV_VERSION: $AV_VERSION" -echo "AV_DEPS_VERSION: $AV_DEPS_VERSION" -echo "CUDA_VERSION: $CUDA_VERSION" -echo "CENTOS_VERSION: $CENTOS_VERSION" - -echo "--== FETCH DEPENDENCIES ==--" - -docker/fetch.sh - -DEPS_DOCKER_TAG=${REPO_OWNER}/alicevision-deps:${AV_DEPS_VERSION}-centos${CENTOS_VERSION}-cuda${CUDA_VERSION} - -echo "--== BUILD DEPENDENCIES ==--" - -## DEPENDENCIES -docker build \ - --progress plain \ - --rm \ - --build-arg CUDA_VERSION=${CUDA_VERSION} 
\ - --build-arg CENTOS_VERSION=${CENTOS_VERSION} \ - --tag ${DEPS_DOCKER_TAG} \ - -f docker/Dockerfile_centos_deps . - -echo "" -echo " To upload results:" -echo "docker push ${DEPS_DOCKER_TAG}" -echo "" - - -DOCKER_TAG=${REPO_OWNER}/alicevision:${AV_VERSION}-centos${CENTOS_VERSION}-cuda${CUDA_VERSION} - -echo "--== BUILD ALICEVISION ==--" - -## ALICEVISION -docker build \ - --progress plain \ - --rm \ - --build-arg CUDA_VERSION=${CUDA_VERSION} \ - --build-arg CENTOS_VERSION=${CENTOS_VERSION} \ - --build-arg AV_DEPS_VERSION=${AV_DEPS_VERSION} \ - --build-arg AV_VERSION=${AV_VERSION} \ - --tag ${DOCKER_TAG} \ - -f docker/Dockerfile_centos . - -echo "" -echo " To upload results:" -echo "" -echo "docker push ${DEPS_DOCKER_TAG}" -echo "docker push ${DOCKER_TAG}" -echo "" - diff --git a/docker/build-rocky.sh b/docker/build-rocky.sh new file mode 100755 index 0000000000..55cd9d8e7a --- /dev/null +++ b/docker/build-rocky.sh @@ -0,0 +1,62 @@ +#!/bin/bash +set -e + +test -e docker/fetch.sh || { + echo This script must be run from the top level of the AliceVision tree + exit 1 +} + +test -z "$AV_DEPS_VERSION" && AV_DEPS_VERSION=2024.12.09 +test -z "$AV_VERSION" && AV_VERSION="$(git rev-parse --abbrev-ref HEAD)-$(git rev-parse --short HEAD)" +test -z "$CUDA_VERSION" && CUDA_VERSION=12.1.0 +test -z "$ROCKY_VERSION" && ROCKY_VERSION=9 +test -z "$REPO_OWNER" && REPO_OWNER=alicevision +test -z "$DOCKER_REGISTRY" && DOCKER_REGISTRY=docker.io + +echo "AV_VERSION: $AV_VERSION" +echo "AV_DEPS_VERSION: $AV_DEPS_VERSION" +echo "CUDA_VERSION: $CUDA_VERSION" +echo "ROCKY_VERSION: $ROCKY_VERSION" + +echo "--== FETCH DEPENDENCIES ==--" + +./docker/fetch.sh + +DEPS_DOCKER_TAG=${REPO_OWNER}/alicevision-deps:${AV_DEPS_VERSION}-rocky${ROCKY_VERSION}-cuda${CUDA_VERSION} + +echo "--== BUILD DEPENDENCIES ==--" + +## DEPENDENCIES +docker build \ + --rm \ + --build-arg CUDA_VERSION=${CUDA_VERSION} \ + --build-arg ROCKY_VERSION=${ROCKY_VERSION} \ + --tag ${DEPS_DOCKER_TAG} \ + -f 
docker/Dockerfile_rocky_deps . + +echo "" +echo " To upload results:" +echo "docker push ${DEPS_DOCKER_TAG}" +echo "" + + +DOCKER_TAG=${REPO_OWNER}/alicevision:${AV_VERSION}-rocky${ROCKY_VERSION}-cuda${CUDA_VERSION} + +echo "--== BUILD ALICEVISION ==--" + +## ALICEVISION +docker build \ + --rm \ + --build-arg CUDA_VERSION=${CUDA_VERSION} \ + --build-arg ROCKY_VERSION=${ROCKY_VERSION} \ + --build-arg AV_DEPS_VERSION=${AV_DEPS_VERSION} \ + --build-arg AV_VERSION=${AV_VERSION} \ + --tag ${DOCKER_TAG} \ + -f docker/Dockerfile_rocky . + +echo "" +echo " To upload results:" +echo "" +echo "docker push ${DEPS_DOCKER_TAG}" +echo "docker push ${DOCKER_TAG}" +echo "" diff --git a/docker/build-ubuntu.sh b/docker/build-ubuntu.sh index c4b797e33e..63721a53eb 100755 --- a/docker/build-ubuntu.sh +++ b/docker/build-ubuntu.sh @@ -6,7 +6,7 @@ test -e docker/fetch.sh || { exit 1 } -test -z "$AV_DEPS_VERSION" && AV_DEPS_VERSION=2024.10.22 +test -z "$AV_DEPS_VERSION" && AV_DEPS_VERSION=2024.12.03 test -z "$AV_VERSION" && AV_VERSION="$(git rev-parse --abbrev-ref HEAD)-$(git rev-parse --short HEAD)" test -z "$CUDA_VERSION" && CUDA_VERSION=12.1.0 test -z "$UBUNTU_VERSION" && UBUNTU_VERSION=22.04 diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index a7423b71dd..fa16428c58 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -791,7 +791,7 @@ set(ALICEVISION_HAVE_CCTAG 0) if(ALICEVISION_BUILD_SFM) if(NOT ALICEVISION_USE_CCTAG STREQUAL "OFF") if(ALICEVISION_HAVE_OPENCV) - find_package(CCTag 1.0.0 CONFIG) + find_package(CCTag 1.0.4 CONFIG) if(CCTag_FOUND) set(ALICEVISION_HAVE_CCTAG 1)