From 63bd275d56f8688a3df88c9865cddade30bb436b Mon Sep 17 00:00:00 2001 From: Yumeng Chen Date: Thu, 25 Jan 2024 16:35:56 +0000 Subject: [PATCH] allow for conda package distribution --- README.md | 37 +++++--- conda.recipe/bld.bat | 3 + conda.recipe/build.sh | 1 + conda.recipe/conda_build_config.yaml | 7 ++ conda.recipe/meta.yaml | 11 ++- docs/source/index.rst | 4 +- docs/source/install.md | 37 ++++---- docs/source/introduction.rst | 6 +- setup.cfg | 10 +++ setup.py | 130 +++++++++++++++------------ setup_intel.cfg | 22 ----- setup_mac.cfg | 16 +++- setup_win.cfg | 14 ++- 13 files changed, 175 insertions(+), 123 deletions(-) create mode 100644 conda.recipe/bld.bat delete mode 100644 setup_intel.cfg diff --git a/README.md b/README.md index cdb00907..721d5703 100644 --- a/README.md +++ b/README.md @@ -4,19 +4,30 @@ A Python interface to the Fortran-written data assimilation library - [PDAF](htt ![GitHub Workflow Status](https://github.com/yumengch/pyPDAF/actions/workflows/test_build.yaml/badge.svg) -## Prerequisite: -- `Fortran compiler: e.g.:gfortran/intel fortran` -- `a message passing interface (MPI) implementation: e.g. openMPI/MPICH` -- `Python>=3.8` - - ## Installation: -- pyPDAF uses `[PDAF V2.1](https://github.com/PDAF/PDAF/tree/PDAF_V2.1)` which can be obtained by: -`git submodule update --init --recursive` -- Currently, Fortran-written PDAF is compiled together with pyPDAF. Hence, the Fortran compiler options need to be specified in the PDAF section of [`setup.cfg`](setup.cfg). -- Options in pyPDAF section of `setup.cfg` are related to the current pyPDAF directory (`pwd`) and C compiler used by Cython, e.g. (`CC=mpicc` for GNU compiler or `CC=mpiicc` for Intel compiler) -- It is recommended to use a clean conda environment to install pyPDAF to avoid any package conflicts -- Install Python package: ```pip install .``` +There are two ways of installing pyPDAF. +- The easiest approach is using `conda`. 
Currently, `pyPDAF` is available from `conda` for `Windows`, `Linux` and `MacOS (arm64)`. The installation can be obtained via: +```bash +conda create -n pyPDAF -c yumengch -c conda-forge pyPDAF +``` +You can start to use `pyPDAF` by `conda activate pyPDAF`. +- In an HPC or cluster environment, it might not be desirable to use compilers and MPI implementation provided by conda. In this case, pyPDAF can be installed from source +```bash +git clone https://github.com/yumengch/pyPDAF.git +cd pyPDAF +git submodule update --init --recursive +pip install -v . +``` +The `pip` command compiles both `PDAF V2.1` and its C interface. To customise the compiler options with the local machine, it is necessary to specify the compiler, compiler options, path to the dependent libraries. In our case, the dependent libraries are `BLAS`, `LAPACK`, and an `MPI` implementation. + - The installation requires the `Cython`, `mpi4py`, and `numpy` packages. + - The Fortran compiler options need to be specified in the PDAF section of [`setup.cfg`](setup.cfg). Note that the `-fPIC` compiler option is required to create a Python package. Note that these are only relevant on non-Windows machines. For Windows machines, `MSVC` and `Intel Fortran compilers` are used by default and adaptations for other compilers will need changes in `CMakeLists.txt` in [PDAFBuild/CMakeLists.txt](PDAFBuild/CMakeLists.txt) and [pyPDAF/fortran/CMakeLists.txt](pyPDAF/fortran/CMakeLists.txt). + - The pyPDAF section of `setup.cfg` requires the following options: + - `pwd` is the absolute path to the pyPDAF repository directory + - `CC` is the C compiler used by Cython, e.g. `CC=mpicc` for GNU compiler or `CC=mpiicc` for Intel compiler. This option is not usable in Windows as only `MSVC` is supported. + - `condaBuild` -- ignore this option as it is only relevant for the `conda build` scenario + - `useMKL` decides if you use Intel's Math Kernel Library (MKL). 
If `True` is given, `MKLROOT` must be specified which is the absolute path to the static MKL library + - `LAPACK_PATH` and `LAPACK_LIBRARY` are the path to the BLAS and LAPACK directory and the linking flag respectively. They can be delimited by `,`. For example, we can have `LAPACK_LIBRARY=blas,lapack`. Do not give `-lblas` as `setuptools` deals with the format to the linker. + - `MPI_INC_PATH`, `MPI_MOD_PATH`, and `MPI_LIB_PATH` are only relevant in Windows, which are the paths to the `.h` file, `.f90` file, and `.lib` file respectively. These paths are usually `C:\Program Files (x86)\Microsoft SDKs\MPI\Include\x64`, `C:\Program Files (x86)\Microsoft SDKs\MPI\Include`, and `C:\Program Files (x86)\Microsoft SDKs\MPI\Lib\x64` respectively. ## Run example: ```bash mpiexec -n 4 python -u example/main.py ``` @@ -32,6 +43,6 @@ Currently, it interfaces with subroutines of ```PDAF-V2.1``` with an example for ## Contributors: Yumeng Chen, Lars Nerger -pyPDAF is mainly developed and maintainde by National Centre for Earth Observation and University of Reading. +pyPDAF is mainly developed and maintained by National Centre for Earth Observation and University of Reading. diff --git a/conda.recipe/bld.bat b/conda.recipe/bld.bat new file mode 100644 index 00000000..6d121506 --- /dev/null +++ b/conda.recipe/bld.bat @@ -0,0 +1,3 @@ +call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 vs2022 + +%PYTHON% -m pip install . --no-deps --ignore-installed --no-cache-dir -vvv diff --git a/conda.recipe/build.sh b/conda.recipe/build.sh index 4c22600f..58262bca 100644 --- a/conda.recipe/build.sh +++ b/conda.recipe/build.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash set -ex + # Install the Python package, but without dependencies, # because Conda takes care of that $PYTHON -m pip install . 
--no-deps --ignore-installed --no-cache-dir -vvv \ No newline at end of file diff --git a/conda.recipe/conda_build_config.yaml b/conda.recipe/conda_build_config.yaml index 87fed87a..9343590d 100644 --- a/conda.recipe/conda_build_config.yaml +++ b/conda.recipe/conda_build_config.yaml @@ -3,3 +3,10 @@ python: - 3.9 - 3.10 - 3.11 + +c_compiler: + - vs2022 # [win] + +mpi: + - mpich # [not win] + - msmpi # [win] \ No newline at end of file diff --git a/conda.recipe/meta.yaml b/conda.recipe/meta.yaml index cf6401e1..b3fcbe00 100644 --- a/conda.recipe/meta.yaml +++ b/conda.recipe/meta.yaml @@ -18,13 +18,16 @@ requirements: - pip - setuptools - numpy - - blas-devel - - liblapack - mpi4py + - {{ mpi }} + - mkl-static # [x86] + - blas-devel # [not x86] + - liblapack # [not x86] build: - make # [not win] - {{ compiler('c') }} - {{ compiler('fortran') }} # [not win] + - cmake run: - python - {{ pin_compatible('numpy') }} @@ -36,5 +39,5 @@ about: description: | pyPDAF is a python interface to the Fortran-based PDAF library license: GPL - doc_url: https://github.com/BoldingBruggeman/eat/wiki - dev_url: https://github.com/BoldingBruggeman/eat \ No newline at end of file + doc_url: https://yumengch.github.io/pyPDAF/index.html + dev_url: https://github.com/yumengch/pyPDAF \ No newline at end of file diff --git a/docs/source/index.rst b/docs/source/index.rst index 58fd582f..0a7b5151 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -3,8 +3,8 @@ You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -Welcome to pyPDAF's documentation! -================================== +pyPDAF - A Python interface to Parallel Data Assimilation Framework +=================================================================== .. include:: introduction.rst .. 
toctree:: diff --git a/docs/source/install.md b/docs/source/install.md index 94afb6d5..46848855 100644 --- a/docs/source/install.md +++ b/docs/source/install.md @@ -1,20 +1,25 @@ # Installation -## Prerequisite: -- `Fortran compiler: e.g.:gfortran/intel fortran` -- `a message passing interface (MPI) implementation: e.g. openMPI/MPICH/MS-MPI` -- `BLAS and LAPACK installation or Intel MKL library compatiable with the Fortran compiler` -- `Python>=3.8` - ---- -**NOTE** -- pyPDAF uses [MPI4py](https://mpi4py.readthedocs.io/en/stable/). The MPI4py and the compile-time MPI should use the same MPI implementations to avoid any issues. To specify the MPI implementation for MPI4py, the following method can be used: +There are two ways of installing pyPDAF. +- The easiest approach is using `conda`. Currently, `pyPDAF` is available from `conda` for `Windows`, `Linux` and `MacOS (arm64)`. The installation can be obtained via: ```bash -export CC=/path/to/mpicc python -env MPICC=/path/to/mpicc python -m pip install mpi4py +conda create -n pyPDAF -c yumengch -c conda-forge pyPDAF ``` ---- - -## Install pyPDAF: -- First, provide path to the compiler and libraries in `setup.cfg` -- ```pip install -e .``` +You can start to use `pyPDAF` by `conda activate pyPDAF`. +- In HPC or cluster environment, it might not be desirable to use compilers and MPI implementation provided by conda. In this case, pyPDAF can be installed from source +```bash +git clone https://github.com/yumengch/pyPDAF.git +cd pyPDAF +git submodule update --init --recursive +pip install -v . +``` +The `pip` command compiles both `PDAF V2.1` and its C interface. To customise the compiler options with the local machine, it is necessary to specify the compiler, compiler options, path to the dependent libraries. In our case, the dependent library is `BLAS`, `LAPACK`, and `MPI` implementation. + - The installation requires `Cython`, `mpi4py`, and `numpy` package. 
+ - The Fortran compiler options need to be specified in the PDAF section of [`setup.cfg`](setup.cfg). Note that the `-fPIC` compiler option is required to create a Python package. Note that these are only relevant on non-Windows machines. For Windows machines, `MSVC` and `Intel Fortran compilers` are used by default and adaptations for other compilers will need changes in `CMakeLists.txt` in [PDAFBuild/CMakeLists.txt](PDAFBuild/CMakeLists.txt) and [pyPDAF/fortran/CMakeLists.txt](pyPDAF/fortran/CMakeLists.txt). + - The pyPDAF section of `setup.cfg` requires the following options: + - `pwd` is the absolute path to the pyPDAF repository directory + - `CC` is the C compiler used by Cython, e.g. `CC=mpicc` for GNU compiler or `CC=mpiicc` for Intel compiler. This option is not usable in Windows as only `MSVC` is supported. + - `condaBuild` -- ignore this option as it is only relevant for the `conda build` scenario + - `useMKL` decides if you use Intel's Math Kernel Library (MKL). If `True` is given, `MKLROOT` must be specified which is the absolute path to the static MKL library + - `LAPACK_PATH` and `LAPACK_LIBRARY` are the path to the BLAS and LAPACK directory and the linking flag respectively. They can be delimited by `,`. For example, we can have `LAPACK_LIBRARY=blas,lapack`. Do not give `-lblas` as `setuptools` deals with the format to the linker. + - `MPI_INC_PATH`, `MPI_MOD_PATH`, and `MPI_LIB_PATH` are only relevant in Windows, which are the paths to the `.h` file, `.f90` file, and `.lib` file respectively. These paths are usually `C:\Program Files (x86)\Microsoft SDKs\MPI\Include\x64`, `C:\Program Files (x86)\Microsoft SDKs\MPI\Include`, and `C:\Program Files (x86)\Microsoft SDKs\MPI\Lib\x64` respectively. 
\ No newline at end of file diff --git a/docs/source/introduction.rst b/docs/source/introduction.rst index b5867c3e..cb05c48f 100644 --- a/docs/source/introduction.rst +++ b/docs/source/introduction.rst @@ -1,6 +1,8 @@ pyPDAF ====== -pyPDAF is a Python interface to the `Parallel Data Assimilation Framwork (PDAF) `_ library written in Fortran. The latest pyPDAF supports PDAF-V2.0. +pyPDAF is a Python interface to the `Parallel Data Assimilation Framework (PDAF) `_ library written in Fortran. The latest pyPDAF supports PDAF-V2.1. -With a variety of packages in Python, it allows a simpler coding style for user-supplied functions, such as I/O of observations and post-processing. It can also benefit many Python-based numerical models with parallel and efficient data assimilation capability. \ No newline at end of file +With a variety of packages in Python, it allows a simpler coding style for user-supplied functions, such as I/O of observations and post-processing. This is helpful for prototyping data assimilation systems and offline data assimilation systems. It can also benefit many Python-based numerical models, or models that can be interfaced with Python, with parallel and efficient data assimilation capability. + +The core DA algorithm is as efficient as the Fortran implementation in the interface. The efficiency of the Python-based user-supplied functions can be improved if sufficient optimisations are used. 
\ No newline at end of file diff --git a/setup.cfg b/setup.cfg index f215616a..f3d9f46d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -24,5 +24,15 @@ CPP_DEFS = -DUSE_PDAF [pyPDAF] pwd = /home/runner/work/pyPDAF/pyPDAF/ CC = mpicc +condaBuild = +# if MKL is used, give the path to the static MKL library +use_MKL= +MKLROOT= +# if dynamic/shared liblapack and libblas library is used, +# give the library path and flags LAPACK_PATH= LAPACK_Flag=lapack,blas +# GIVE MPI information +MPI_INC_PATH= +MPI_MOD_PATH= +MPI_LIB_PATH= diff --git a/setup.py b/setup.py index eef9e756..3c892963 100644 --- a/setup.py +++ b/setup.py @@ -45,6 +45,7 @@ if not os.path.isabs(PDAFdir): PDAFdir = os.path.join(pwd, PDAFdir) print ('input PDAF directory is not absolute path, changing to: ', PDAFdir) + # set up C compiler for cython and Python if os.name == 'nt': compiler = 'msvc' @@ -67,78 +68,91 @@ else: print ('....using GNU compiler....') -# compiler options for cython +condaBuild = dist.get_option_dict('pyPDAF')['condaBuild'][1] + extra_compile_args=[] +extra_link_args = [] +extra_objects = [] +library_dirs=[] +libraries = [] + +# compiler options for cython if compiler == 'gnu': extra_compile_args+=['-Wno-unreachable-code-fallthrough'] -# linking static PDAF library and interface objects -extra_objects = [] -if sys.platform == 'darwin': - extra_objects+=['-Wl,-force_load', f'{PDAFdir}/lib/libpdaf-var.a', - '-Wl,-force_load', f'{pwd}/lib/libPDAFc.a',] -elif os.name != 'nt': - extra_objects+=['-Wl,--whole-archive', f'{PDAFdir}/lib/libpdaf-var.a', - f'{pwd}/lib/libPDAFc.a', '-Wl,--no-whole-archive'] -if compiler == 'intel': - MKLROOT=dist.get_option_dict('pyPDAF')['MKLROOT'][1] - extra_objects+=['-Wl,--start-group', - f'{MKLROOT}/lib/intel64/libmkl_intel_lp64.a', - f'{MKLROOT}/lib/intel64/libmkl_sequential.a', - f'{MKLROOT}/lib/intel64/libmkl_core.a', - '-Wl,--end-group'] +# linking static PDAF library and interface objects +if os.name == 'nt': + library_dirs+=[os.path.join(PDAFdir, 'lib', 
'Release'), + os.path.join(pwd, 'pyPDAF', 'fortran', 'build', 'Release'), + ] + libraries += ['pdaf-var', 'pdafc'] +else: + if sys.platform == 'darwin': + extra_objects+=['-Wl,-force_load', f'{PDAFdir}/lib/libpdaf-var.a', + '-Wl,-force_load', f'{pwd}/lib/libPDAFc.a',] + else: + extra_objects+=['-Wl,--whole-archive', f'{PDAFdir}/lib/libpdaf-var.a', + f'{pwd}/lib/libPDAFc.a', '-Wl,--no-whole-archive'] -# PDAF library contains multiple same .o files -# multiple-definition is thus necessary -extra_link_args = [] -# setup library to MPI-fortran -LAPACK_PATH=dist.get_option_dict('pyPDAF')['LAPACK_PATH'][1] -print ('LAPACK_PATH', LAPACK_PATH) -library_dirs=[] -if LAPACK_PATH != '': library_dirs += LAPACK_PATH.split(',') # add mpi library path -if os.name != 'nt': - if compiler == 'intel': - result = subprocess.run(['mpiifort', '-show'], stdout=subprocess.PIPE) - else: - result = subprocess.run(['mpifort', '-show'], stdout=subprocess.PIPE) +if os.name == 'nt': + # always use external msmpi as msmpi from conda cannot be linked + MPI_LIB_PATH=dist.get_option_dict('pyPDAF')['MPI_LIB_PATH'][1] + if MPI_LIB_PATH != '': library_dirs += MPI_LIB_PATH.split(',') + libraries += ['msmpi', 'msmpifec'] +else: + mpifortran = 'mpiifort' if compiler == 'intel' else 'mpifort' + result = subprocess.run([mpifortran, '-show'], stdout=subprocess.PIPE) result = result.stdout.decode()[:-1].split(' ') s = [l[2:].replace('"', '') for l in result if l[:2] == '-L'] if len(s) > 0: library_dirs += s - # add gfortran library path - if sys.platform == 'darwin': - result = subprocess.run(['gfortran', '--print-file', 'libgfortran.dylib'], stdout=subprocess.PIPE) - result = result.stdout.decode()[:-18] + s = [l[2:] for l in result if l[:2] == '-l'] + if len(s) > 0: libraries += s + +# linking BLAS/LAPACK +use_MKL=dist.get_option_dict('pyPDAF')['use_MKL'][1] +if use_MKL == 'True': + if condaBuild == 'True': + MKLROOT = os.environ['LIBRARY_LIB'] if os.name == 'nt' else \ + os.path.join(os.environ['PREFIX'], 
'lib') else: - result = subprocess.run(['gfortran', '--print-file', 'libgfortran.so'], stdout=subprocess.PIPE) - result = result.stdout.decode()[:-15] - library_dirs+=[result,] - library_dirs+=['/usr/lib', ] + MKLROOT = dist.get_option_dict('pyPDAF')['MKLROOT'][1] + assert MKLROOT != '', 'MKLROOT must not be empty, check setup.cfg file' + if os.name == 'nt': + library_dirs+=[MKLROOT,] + libraries += ['mkl_core', 'mkl_sequential', 'mkl_intel_lp64'] + else: + extra_objects+=['-Wl,--start-group', + f'{MKLROOT}/libmkl_intel_lp64.a', + f'{MKLROOT}/libmkl_sequential.a', + f'{MKLROOT}/libmkl_core.a', + '-Wl,--end-group'] else: - library_dirs+=[os.path.join(PDAFdir, 'lib', 'Release'), - os.path.join(pwd, 'pyPDAF', 'fortran', 'build', 'Release'), - ] -print ('library_dirs', library_dirs) + # setup library to MPI-fortran + LAPACK_PATH=dist.get_option_dict('pyPDAF')['LAPACK_PATH'][1] + if LAPACK_PATH != '': library_dirs += LAPACK_PATH.split(',') + LAPACK_Flag=dist.get_option_dict('pyPDAF')['LAPACK_Flag'][1] + print ('LAPACK_Flag', LAPACK_Flag) + if LAPACK_Flag != '': libraries += LAPACK_Flag.split(',') +# add fortran library to the linking if os.name != 'nt': - if compiler == 'intel': - # somehow gfortran is always necessary - libraries = ['ifcore', 'ifcoremt', 'gfortran', 'm'] - else: - libraries=['gfortran', 'm'] + suffix = 'dylib' if sys.platform == 'darwin' else 'so' + FC = os.environ['FC'] if condaBuild == 'True' else 'gfortran' + result = subprocess.run([FC, '--print-file', + 'libgfortran.'+suffix], stdout=subprocess.PIPE) + result = result.stdout.decode() + result = result[:-18] if sys.platform == 'darwin' else result[:-15] + library_dirs+=[result,] + library_dirs+=['/usr/lib', ] + # somehow gfortran is always necessary + libraries += ['gfortran', 'm'] + if compiler == 'intel': libraries += ['ifcore', 'ifcoremt'] - if compiler == 'intel': - result = subprocess.run(['mpiifort', '-show'], stdout=subprocess.PIPE) - else: - result = subprocess.run(['mpifort', '-show'], 
stdout=subprocess.PIPE) - result = result.stdout.decode()[:-1].split(' ') - s = [l[2:] for l in result if l[:2] == '-l'] - if len(s) > 0: libraries += s -else: - libraries = ['msmpi', 'msmpifec', 'pdaf-var', 'pdafc', 'mkl_core', 'mkl_sequential', 'mkl_intel_lp64'] -LAPACK_Flag=dist.get_option_dict('pyPDAF')['LAPACK_Flag'][1] -print ('LAPACK_Flag', LAPACK_Flag) -if LAPACK_Flag != '': libraries += LAPACK_Flag.split(',') +print ('extra_compile_args', extra_compile_args) +print ('extra_link_args', extra_link_args) +print ('extra_objects', extra_objects) +print ('library_dirs', library_dirs) print ('libraries', libraries) def compilePDAFLibraryInterface(): diff --git a/setup_intel.cfg b/setup_intel.cfg deleted file mode 100644 index 3bceab74..00000000 --- a/setup_intel.cfg +++ /dev/null @@ -1,22 +0,0 @@ -[PDAF] -# PDAF source file directory -directory= PDAF_V2.1 -# fortran compiler -FC = mpiifort -# fortran linker -LD = mpiifort -AR = ar -RANLIB = ranlib -CPP = cpp -OPT = -O3 -r8 -fPIC -OPT_LNK = -INC = -IPDAF_V2.1/include -LINK_LIBS = -llapack -lblas -CPP_DEFS = -DUSE_PDAF - -[pyPDAF] -pwd = /scratch/local/yumeng/testpyPDAF/pyPDAF -CC = mpiicc -MKLROOT=/opt/intel/oneapi/mkl/2022.0.2 -LAPACK_PATH=/opt/intel/oneapi/mkl/2022.0.2/lib/intel64 -LAPACK_Flag=mkl_intel_lp64,mkl_sequential,mkl_core,pthread,m,dl diff --git a/setup_mac.cfg b/setup_mac.cfg index f6d17fef..666d1bf4 100644 --- a/setup_mac.cfg +++ b/setup_mac.cfg @@ -26,14 +26,24 @@ LD = mpif90 AR = ar RANLIB = ranlib CPP = /usr/bin/cpp -OPT = -O3 -fdefault-real-8 -fPIC +OPT = -O3 -fdefault-real-8 -fPIC -mmacosx-version-min=10.6 OPT_LNK = INC = -IPDAF_V2.1/include -LINK_LIBS = -llapack -lblas +LINK_LIBS = -llapack -lblas CPP_DEFS = -DUSE_PDAF [pyPDAF] pwd = /Users/runner/work/pyPDAF/pyPDAF/ CC = mpicc +condaBuild = +# if MKL is used, give the path to the static MKL library +use_MKL= +MKLROOT= +# if dynamic/shared liblapack and libblas library is used, +# give the library path and flags LAPACK_PATH= 
-LAPACK_Flag=lapack,blas \ No newline at end of file +LAPACK_Flag=lapack,blas +# GIVE MPI information +MPI_INC_PATH= +MPI_MOD_PATH= +MPI_LIB_PATH= diff --git a/setup_win.cfg b/setup_win.cfg index ffe42aed..f735980b 100644 --- a/setup_win.cfg +++ b/setup_win.cfg @@ -22,9 +22,17 @@ LINK_LIBS = CPP_DEFS = [pyPDAF] -pwd = C:\Users\cymji\Documents\pyPDAF +pwd = C:\Users\john\Documents\pyPDAF CC = cl +condaBuild = +# if MKL is used, give the path to the static MKL library +use_MKL= +MKLROOT= +# if dynamic/shared liblapack and libblas library is used, +# give the library path and flags +LAPACK_PATH= +LAPACK_Flag=lapack,blas +# GIVE MPI information MPI_INC_PATH=C:\Program Files (x86)\Microsoft SDKs\MPI\Include\x64 MPI_MOD_PATH=C:\Program Files (x86)\Microsoft SDKs\MPI\Include -LAPACK_PATH=C:\Program Files (x86)\Microsoft SDKs\MPI\Lib\x64,C:\Users\cymji\anaconda3\envs\pyPDAF\Library\lib -LAPACK_Flag= +MPI_LIB_PATH=C:\Program Files (x86)\Microsoft SDKs\MPI\Lib\x64