#
# Global architecture independent build settings
#
#set(CMAKE_BUILD_TYPE "Debug" CACHE STRING "Build type (Release|RelWithDebInfo|Debug|MinSizeRel)")
# CMAKE_BUILD_TYPE is commented out in order to allow for multi-configuration builds. It will
# automatically default to RelWithDebInfo if used in a single configuration build. Uncomment or
# override it only if you want a non-default single configuration build.
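# As an illustrative example (the build folder name _build is arbitrary), a non-default single
# configuration build such as Debug could be requested either by uncommenting the line above or by
# pre-populating the cache on the command line:
#   cmake -B _build -DCMAKE_BUILD_TYPE=Debug .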
option(WITH_OMP "Whether OpenMP thread parallelisation should be enabled" TRUE)
option(WITH_MPI "Whether DFTB+ should support MPI-parallelism" FALSE)
# If you build an MPI-parallelised binary, consider setting WITH_OMP (OpenMP thread
# parallelisation) to FALSE unless you want hybrid parallelisation (for experts only).
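# A minimal sketch of an MPI-only configuration following the advice above; values passed with -D
# pre-populate the cache and therefore take precedence over the defaults in this file:
#   cmake -B _build -DWITH_MPI=TRUE -DWITH_OMP=FALSE .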
option(WITH_GPU "Whether DFTB+ should support GPU-acceleration" FALSE)
# For serial builds, the GPU support requires the MAGMA library. For MPI parallel builds it
# requires the ELSI library built with GPU support.
option(WITH_ELSI "Whether DFTB+ with MPI-parallelism should use the ELSI libraries" FALSE)
# Works only with MPI-parallel build. If WITH_GPU was selected above, the ELSI library must be
# enabled (and must have been built with GPU support).
option(WITH_TRANSPORT "Whether transport via libNEGF should be included." FALSE)
# Works only when building static libraries (see option BUILD_SHARED_LIBS)
option(WITH_POISSON "Whether the Poisson-solver should be included" ${WITH_TRANSPORT})
# The Poisson-solver is mostly used in transport calculations. Enable this option
# if you want to use it in a non-transport build. Note that the Poisson-solver is not
# multi-instance safe and is therefore not allowed if WITH_API (see below) is on.
option(WITH_TBLITE "Whether xTB support should be included via tblite." FALSE)
option(WITH_SOCKETS "Whether socket communication should be allowed for" FALSE)
option(WITH_ARPACK "Whether the ARPACK library should be included (needed for TD-DFTB)" FALSE)
# Works only with non-MPI (serial) build, needed for Casida linear response
option(WITH_SDFTD3 "Whether the s-dftd3 library should be included" FALSE)
option(WITH_MBD "Whether DFTB+ should be built with many-body-dispersion support" FALSE)
option(WITH_PLUMED "Whether metadynamics via the PLUMED2 library should be allowed for" FALSE)
option(WITH_CHIMES "Whether repulsive corrections via the ChIMES library should be enabled" FALSE)
option(WITH_API "Whether public API should be included and the DFTB+ library installed" TRUE)
# Turn this on, if you want to use the DFTB+ library to integrate DFTB+ into other software
# packages. (Otherwise only a stripped down version of the library without the public API is built.)
# This will also install necessary include and module files and further libraries needed to link the
# DFTB+ library.
option(WITH_PYTHON "Whether the Python components of DFTB+ should be tested and installed" FALSE)
# Use this option to test and install the Python components of DFTB+. Note that the Python-API
# based tools will only be considered if shared library building (BUILD_SHARED_LIBS), support for
# the general API (WITH_API) and dynamic loading (ENABLE_DYNAMIC_LOADING) have been enabled.
# Otherwise only the file I/O based tools (dptools) will be tested and installed.
option(INSTANCE_SAFE_BUILD "Whether build should support concurrent DFTB+ instances" FALSE)
# Turn this on, if you want to create multiple concurrent DFTB+ instances **within one process** via
# the API. This option will ensure that only components without writable global variables are
# included in the build, so that multiple instances can safely coexist. There are components
# (e.g. Poisson, DFT-D3, ARPACK) which cannot be included if this option is on. Note that this
# option is not relevant for the standalone DFTB+ binary, only for the API (if WITH_API had been
# turned on).
option(BUILD_SHARED_LIBS "Whether the libraries built should be shared" FALSE)
# Turn this on, if the DFTB+ library (and other compiled libraries) should be shared libraries and
# dynamically linked to their applications. This results in smaller applications, but the libraries
# must be present at run-time (and the correct LD_LIBRARY_PATH environment variable must be set, so
# that they can be found by the operating system). If you want to use the DFTB+ library from other
# software packages (see WITH_API option above), they may also require a shared library (e.g. when
# calling DFTB+ functions from Python or Julia). Note that in order to use the library from Python
# and Julia, you also need to turn on the ENABLE_DYNAMIC_LOADING option.
option(ENABLE_DYNAMIC_LOADING "Whether the library should be dynamically loadable" FALSE)
# Turn this on, if you wish to load the library dynamically (typically when you want to use
# the library from Python or Julia). Only makes sense in combination with BUILD_SHARED_LIBS and
# WITH_API set to True.
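# For illustration, using the library from Python or Julia would combine the three related options
# (WITH_API, BUILD_SHARED_LIBS and ENABLE_DYNAMIC_LOADING), e.g. by editing them above or by
# overriding them on the command line:
#   cmake -B _build -DWITH_API=TRUE -DBUILD_SHARED_LIBS=TRUE -DENABLE_DYNAMIC_LOADING=TRUE .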
#
# Test environment settings
#
set(TEST_MPI_PROCS "1" CACHE STRING "Nr. of MPI processes used for testing")
set(TEST_OMP_THREADS "1" CACHE STRING "Nr. of OpenMP-threads used for testing")
# Command line used to launch the test code.
# The escaped variables (\${VARIABLE}) will be substituted by the corresponding CMake variables.
if(WITH_MPI)
set(TEST_RUNNER_TEMPLATE "env OMP_NUM_THREADS=\${TEST_OMP_THREADS} mpiexec -n \${TEST_MPI_PROCS}"
CACHE STRING "How to run the tests")
else()
set(TEST_RUNNER_TEMPLATE "env OMP_NUM_THREADS=\${TEST_OMP_THREADS}" CACHE STRING
"How to run the tests")
set(MODES_RUNNER_TEMPLATE "env OMP_NUM_THREADS=\${TEST_OMP_THREADS}" CACHE STRING
"How to run the modes code for tests")
endif()
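# As an example (the values are arbitrary), the test parallelism can be adjusted at configuration
# time and the regression tests run afterwards with ctest from within the build folder:
#   cmake -B _build -DTEST_MPI_PROCS=2 -DTEST_OMP_THREADS=2 .
#   ctest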
# Turn it on to include the unit tests (needs FyTest)
option(WITH_UNIT_TESTS "Whether the unit tests should be built" FALSE)
#
# Installation options
#
set(CMAKE_INSTALL_PREFIX "${CMAKE_BINARY_DIR}/_install" CACHE STRING
"Directory to install the compiled code into")
set(INSTALL_INCLUDEDIR "dftbplus" CACHE PATH
"Name of the project specific sub-folder within the install folder for include files")
set(INSTALL_MODULEDIR "${INSTALL_INCLUDEDIR}/modfiles" CACHE PATH
"Installation directory for Fortran module files (within the install folder for include files)")
set(PKGCONFIG_LANGUAGE "Fortran" CACHE STRING
"Compiler and Linker language to assume when creating the pkg-config export file (C or Fortran)")
# The pkg-config export file (lib/pkgconfig/dftbplus.pc) contains the compiler and linker options
# needed to link the DFTB+ library to an application. (It can be queried with the pkg-config tool.)
# Depending on the language setting ("C" or "Fortran"), you get the flags for the case where that
# compiler is used for the linking.
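# As an example of the intended use (the install prefix below is a placeholder), the flags can be
# queried after installation with the standard pkg-config tool:
#   PKG_CONFIG_PATH=<install-prefix>/lib/pkgconfig pkg-config --cflags --libs dftbplus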
#
# Advanced options (e.g. for developers and packagers)
#
#set(TOOLCHAIN "gnu" CACHE STRING "Prefix of the toolchain file to be read from the sys/ folder")
# Uncomment and set it if you want to override the automatic, compiler-based toolchain file
# selection.
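# For example (assuming a matching toolchain file exists in the sys/ folder), the selection could
# also be overridden on the command line instead of editing this file:
#   cmake -B _build -DTOOLCHAIN=gnu .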
set(HYBRID_CONFIG_METHODS "Submodule;Find;Fetch" CACHE STRING
"Configuration methods to try in order to satisfy hybrid dependencies")
#
# This list can be used to control how hybrid dependencies (external dependencies which can
# optionally be built during the build process) are configured. The listed methods are applied in
# the specified order. The following methods are available:
#
# Submodule: Use the source in external/*/origin and build the dependency as part of the build
# process. If the source is not present, try to retrieve it via the 'git submodule' command
# (provided the source tree is a Git repository and git is available).
#
# Find: Find the dependency as an already installed package in the system.
#
# Fetch: Fetch the source into the build folder and build the dependency as part of the build
# process (this also works in cases where the source tree is not a Git repository).
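# For example, to prefer packages already installed on the system over the bundled sources, the
# order could be changed (illustrative only) either here or on the command line:
#   cmake -B _build -DHYBRID_CONFIG_METHODS="Find;Submodule;Fetch" .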
#
# Developer settings
#
option(LCOV_REPORT "Whether coverage report should be generated via lcov/genhtml" FALSE)
# Only makes sense for build type 'Coverage'. Requires lcov and optionally genhtml to be
# installed on the system. After building the code, you have to manually build the 'lcov_init'
# target (e.g. `make lcov_init`), then run the tests (e.g. `ctest`) and finally generate the report
# with the 'lcov_report' target (e.g. `make lcov_report`). If you only need the evaluated coverage
# data, but no HTML report, build the 'lcov_eval' target instead.
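# A sketch of the full coverage workflow described above, assuming a Makefile-based generator and
# an arbitrary build folder name:
#   cmake -B _build -DCMAKE_BUILD_TYPE=Coverage -DLCOV_REPORT=TRUE .
#   cd _build && make && make lcov_init && ctest && make lcov_report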