diff --git a/docs/source/conf.py b/docs/source/conf.py deleted file mode 100644 index aff496089..000000000 --- a/docs/source/conf.py +++ /dev/null @@ -1,290 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# -# nonAdiabatic_coupling documentation build configuration file, created by -# sphinx-quickstart on Wed Mar 23 11:55:12 2016. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - 'sphinx.ext.mathjax', - 'sphinx.ext.viewcode', -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. 
-project = 'nonAdiabatic_coupling' -copyright = '2016, felipeZ' -author = 'felipeZ' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '0.1' -# The full version, including alpha/beta/rc tags. -release = '0.1' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. 
-todo_include_todos = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'alabaster' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -#html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. 
-#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' -#html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = 'nonAdiabatic_couplingdoc' - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). 
-#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', - -# Latex figure (float) alignment -#'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'nonAdiabatic_coupling.tex', 'nonAdiabatic\\_coupling Documentation', - 'felipeZ', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'nonadiabatic_coupling', 'nonAdiabatic_coupling Documentation', - [author], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'nonAdiabatic_coupling', 'nonAdiabatic_coupling Documentation', - author, 'nonAdiabatic_coupling', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. 
-#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'https://docs.python.org/': None} diff --git a/jupyterNotebooks/coupling_workflow.ipynb b/jupyterNotebooks/coupling_workflow.ipynb index 6280f4ecd..e1a48a818 100644 --- a/jupyterNotebooks/coupling_workflow.ipynb +++ b/jupyterNotebooks/coupling_workflow.ipynb @@ -86,8 +86,9 @@ } ], "metadata": { + "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python 3", + "display_name": "Python [default]", "language": "python", "name": "python3" }, diff --git a/nac/integrals/__init__.py b/nac/integrals/__init__.py index 38a013773..efc1b9f19 100644 --- a/nac/integrals/__init__.py +++ b/nac/integrals/__init__.py @@ -1,4 +1,7 @@ from .electronTransfer import photoExcitationRate +from .fourierTransform import (calculate_fourier_trasform_cartesian, + fun_density_real, real_to_reciprocal_space, + transform_to_spherical) from .multipoleIntegrals import (calcMtxMultipoleP, general_multipole_matrix) from .nonAdiabaticCoupling import calculateCoupling3Points from .overlapIntegral import calcMtxOverlapP @@ -6,5 +9,7 @@ __all__ = ['calc_transf_matrix', 'calcMtxMultipoleP', 'calcMtxOverlapP', - 'calculateCoupling3Points', 'general_multipole_matrix', - 'photoExcitationRate'] + 'calculateCoupling3Points', 'calculate_fourier_trasform_cartesian', + 'fun_density_real', 'general_multipole_matrix', + 'photoExcitationRate', 'real_to_reciprocal_space', + 'transform_to_spherical'] diff --git a/nac/integrals/fourierTransform.py b/nac/integrals/fourierTransform.py new file mode 100644 index 000000000..51a0c5535 --- /dev/null +++ b/nac/integrals/fourierTransform.py @@ -0,0 +1,185 @@ + +__all__ = ["calculate_fourier_trasform_cartesian", "fun_density_real", + 
"real_to_reciprocal_space", "transform_to_spherical"] + +from cmath import (exp, pi, sqrt) +from functools import partial +from nac.common import retrieve_hdf5_data +from os.path import join + +import numpy as np + + +# Some Hint about the types +from typing import Callable, Dict, List, NamedTuple, Tuple +Vector = np.ndarray +Matrix = np.ndarray + + +def fun_density_real(function: Callable, k: float) -> float: + """ Compute the momentum density""" + xs = function(k) + print("Orbital transformation is: ", xs) + return np.dot(xs, np.conjugate(xs)).real + + +def transform_to_spherical(fun_fourier: Callable, path_hdf5: str, + project_name: str, orbital: str, + k: Vector) -> complex: + """ + Calculate the Fourier transform in Cartesian, convert it to Spherical + multiplying by the `trans_mtx` and finally multiply the coefficients + in Spherical coordinates. + """ + trans_mtx = retrieve_hdf5_data(path_hdf5, join(project_name, 'trans_mtx')) + path_to_mo = join(project_name, 'point_0/cp2k/mo/coefficients') + molecular_orbital_i = retrieve_hdf5_data(path_hdf5, path_to_mo)[:, orbital] + + return np.dot(molecular_orbital_i, np.dot(trans_mtx, fun_fourier(k))) + + +def calculate_fourier_trasform_cartesian(atomic_symbols: Vector, + atomic_coords: Vector, + dictCGFs: Dict, + number_of_basis: int, + ks: Vector) -> Vector: + """ + Calculate the Fourier transform projecting the MO in a set of plane waves + + mo_fourier(k) = < phi(r) | exp(i k . r)> + + :param atomic_symbols: Atomic symbols + :type atomic_symbols: Numpy Array [String] + :param ks: The vector in k-space where the fourier transform is evaluated. + :type ks: Numpy Array + :paramter dictCGFS: Dictionary from Atomic Label to basis set. 
+ :type dictCGFS: Dict String [CGF], CGF = ([Primitives], + AngularMomentum), Primitive = (Coefficient, Exponent) + + returns: Numpy array + """ + print("K-vector: ", ks) + stream_coord = chunksOf(atomic_coords, 3) + stream_cgfs = yieldCGF(dictCGFs, atomic_symbols) + fun = partial(calculate_fourier_trasform_atom, ks) + molecular_orbital_transformed = np.empty(number_of_basis, dtype=np.complex128) + acc = 0 + for cgfs, xyz in zip(stream_cgfs, stream_coord): + dim_cgfs = len(cgfs) + molecular_orbital_transformed[acc: acc + dim_cgfs] = fun(cgfs, xyz) + acc += dim_cgfs + + return molecular_orbital_transformed + + +def chunksOf(xs, n): + """Yield successive n-sized chunks from xs""" + for i in range(0, len(xs), n): + yield xs[i:i + n] + + +def yieldCGF(dictCGFs, symbols): + """ Stream of CGFs """ + for symb in symbols: + yield dictCGFs[symb] + + +def calculate_fourier_trasform_atom(ks: Vector, cgfs: List, + xyz: Vector)-> Vector: + """ + Calculate the Fourier transform for the set of CGFs in an Atom. + """ + arr = np.empty(len(cgfs), dtype=np.complex128) + for i, cgf in enumerate(cgfs): + arr[i] = calculate_fourier_trasform_contracted(cgf, xyz, ks) + + return arr + + +def calculate_fourier_trasform_contracted(cgf: NamedTuple, xyz: Vector, + ks: Vector) -> complex: + """ + Compute the fourier transform for a given CGF. + Implementation note: the function loops over the x,y and z coordinates + while operate in the whole set of Contracted Gaussian primitives. 
+ """ + cs, es = cgf.primitives + label = cgf.orbType + angular_momenta = compute_angular_momenta(label) + acc = np.ones(cs.shape, dtype=np.complex128) + + # Accumlate x, y and z for each one of the primitves + for l, x, k in zip(angular_momenta, xyz, ks): + fun_primitive = partial(calculate_fourier_trasform_primitive, l, x, k) + rs = np.apply_along_axis(np.vectorize(fun_primitive), 0, es) + acc *= rs + + # The result is the summation of the primitive multiplied by is corresponding + # coefficients + return np.dot(acc, cs) + + +def compute_angular_momenta(label) -> Vector: + """ + Compute the exponents l,m and n for the CGF: x^l y^m z^n exp(-a (r-R)^2) + """ + orbitalIndexes = {("S", 0): 0, ("S", 1): 0, ("S", 2): 0, + ("Px", 0): 1, ("Px", 1): 0, ("Px", 2): 0, + ("Py", 0): 0, ("Py", 1): 1, ("Py", 2): 0, + ("Pz", 0): 0, ("Pz", 1): 0, ("Pz", 2): 1, + ("Dxx", 0): 2, ("Dxx", 1): 0, ("Dxx", 2): 0, + ("Dxy", 0): 1, ("Dxy", 1): 1, ("Dxy", 2): 0, + ("Dxz", 0): 1, ("Dxz", 1): 0, ("Dxz", 2): 1, + ("Dyy", 0): 0, ("Dyy", 1): 2, ("Dyy", 2): 0, + ("Dyz", 0): 0, ("Dyz", 1): 1, ("Dyz", 2): 1, + ("Dzz", 0): 0, ("Dzz", 1): 0, ("Dzz", 2): 2} + lookup = lambda i: orbitalIndexes[(label, i)] + + return np.apply_along_axis(np.vectorize(lookup), 0, np.arange(3)) + + +def calculate_fourier_trasform_primitive(l: int, x: float, k: float, + alpha: float) -> complex: + """ + Compute the fourier transform for primitive Gaussian Type Orbitals. 
+ """ + pik = pi * k + f = exp(-alpha * x ** 2 + complex(alpha * x, - pik) ** 2 / alpha) + if l == 0: + return sqrt(pi / alpha) * f + elif l == 1: + f = k * exp(-pik * complex(pik / alpha, 2 * x)) + return (pi / alpha) ** 1.5 * f + elif l == 2: + f = exp(-pik * complex(pik / alpha, 2 * x)) + return sqrt(pi / (alpha ** 5)) * (alpha / 2 - pik ** 2) * f + else: + msg = ("there is not implementation for the primivite fourier " + "transform of l: {}".format(l)) + raise NotImplementedError(msg) + + +def real_to_reciprocal_space(tup: Tuple) -> tuple: + """ + Transform a 3D point from real space to reciprocal space. + """ + a1, a2, a3 = tup + cte = 2 * pi / np.dot(a1, cross(a2, a3)) + + b1 = cte * cross(a2, a3) + b2 = cte * cross(a3, a1) + b3 = cte * cross(a1, a2) + + return b1, b2, b3 + + +def cross(a: Vector, b: Vector) -> Vector: + """ Cross product""" + x1, y1, z1 = a + x2, y2, z2 = b + + x = y1 * z2 - y2 * z1 + y = x2 * z1 - x1 * z2 + z = x1 * y2 - x2 * y1 + + return np.array([x, y, z]) diff --git a/nac/schedule/components.py b/nac/schedule/components.py index 3fbb4f633..004027609 100644 --- a/nac/schedule/components.py +++ b/nac/schedule/components.py @@ -97,9 +97,14 @@ def search_data_in_hdf5(i): else: point_dir = folders[j] job_files = create_file_names(point_dir, k) + # A job is a restart if guess_job is None and the list of + # wf guesses are not empty + is_restart = bool((guess_job is None) and + calc_new_wf_guess_on_points) # Calculating initial guess - if k in calc_new_wf_guess_on_points: - guess_job = call_schedule_qm(package_name, guess_args, path_hdf5, + if k in calc_new_wf_guess_on_points or is_restart: + guess_job = call_schedule_qm(package_name, guess_args, + path_hdf5, point_dir, job_files, k, gs, nHOMOS, nLUMOS, project_name=project_name, diff --git a/nac/workflows/initialization.py b/nac/workflows/initialization.py index ef665d932..e6eb12f81 100644 --- a/nac/workflows/initialization.py +++ b/nac/workflows/initialization.py @@ -10,6 +10,7 @@ from 
qmworks.parsers import parse_string_xyz from subprocess import (PIPE, Popen) +import fnmatch import getpass import h5py import os @@ -147,6 +148,9 @@ def split_trajectory(path, nBlocks, pathOut): cmd = 'split -a 1 -l {} {} {}'.format(lines_per_block, path, prefix) subprocess.run(cmd, shell=True) p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, shell=True) - rs, err = p.communicate() - print("Submission Output: ", rs) - print("Submission Errors: ", err) + rs = p.communicate() + err = rs[1] + if err: + raise RuntimeError("Submission Errors: {}".format(err)) + else: + return fnmatch.filter(os.listdir(), "chunk_xyz*") diff --git a/scripts/Analysis/MOs_energies.py b/scripts/Analysis/MOs_energies.py index 168c3a3af..37e0cc947 100644 --- a/scripts/Analysis/MOs_energies.py +++ b/scripts/Analysis/MOs_energies.py @@ -1,35 +1,13 @@ +import argparse import matplotlib matplotlib.use('Agg') from os.path import join -from interactive import ask_question import h5py import matplotlib.pyplot as plt import numpy as np -# ======================================<>==================================== - - -def obtain_data(): - project = ask_question('What is the project name? ') - f5 = ask_question('What is the path of the hdf5-file? ') - nh = ask_question('What is the number of HOMOs to plot? [Default: 10] ', - special='int', default='10') - nl = ask_question('What is the number of LUMOs to plot? [Default: 10] ', - special='int', default='10') - save_fig = ask_question('Do you want to save the plot (y/n)? [Default: n] ', - special='bool', default='n') - y_lower = ask_question('What is the lower limit for the y-axis? [Default: None] ', - special='float') - y_upper = ask_question('What is the upper limit for the y-axis? 
[Default: None] ', - special='float') - - print(y_lower, y_upper, isinstance(y_lower, str)) - - return project, f5, nh, nl, save_fig, y_lower, y_upper - - # ======================================<>==================================== h2ev = 27.2114 # hartrees to electronvolts @@ -50,12 +28,12 @@ def fetch_data(project, path_HDF5): sh = len(xs) points = map(lambda x: join(project, 'point_{}'.format(x), 'cp2k/mo/eigenvalues'), range(sh)) - ess = list(map(lambda x: f5[x].value, points)) + ess = list(map(lambda x: f5[x].value, points)) return list(map(lambda x: x.dot(h2ev), ess)) -def plot_data(project, pathHDF5, nHOMOS, nLUMOS, save_fig, y_lower, y_upper): +def plot_data(project, pathHDF5, homo, nHOMOS, nLUMOS, y_lower, y_upper): """ Generates a PDF containing the representantion of the eigenvalues for a molecular system called `project` and stored in `pathHDF5`. @@ -76,26 +54,44 @@ def plot_data(project, pathHDF5, nHOMOS, nLUMOS, save_fig, y_lower, y_upper): if y_lower is not None and y_upper is not None: plt.ylim(y_lower, y_upper) for i in range(nHOMOS): - plt.plot(ts, rs[99 - i], 'b') + plt.plot(ts, rs[homo - i], 'b') for i in range(nLUMOS): - plt.plot(ts, rs[100 + i], 'g') + plt.plot(ts, rs[homo + 1 + i], 'g') plt.tight_layout() for tic in ax.xaxis.get_major_ticks(): tic.tick1On = tic.tick2On = False for tic in ax.yaxis.get_major_ticks(): tic.tick1On = tic.tick2On = False - if save_fig: - plt.savefig('Eigenvalues.png', dpi=300 / magnifying_factor, format='png') + plt.savefig('Eigenvalues.png', dpi=300 / magnifying_factor, format='png') plt.show() -def main(): - project, f5, nh, nl, save_fig, y_lower, y_upper = obtain_data() - plot_data(project, f5, nh, nl, save_fig, y_lower, y_upper) +def read_cmd_line(parser): + """ + Parse Command line options. 
+ """ + args = parser.parse_args() + + attributes = ['p', 'hdf5', 'homo', 'nh', 'nl', 'yl', 'yu'] + + return [getattr(args, p) for p in attributes] -# =================<>================================ if __name__ == "__main__": - main() + optional = "[-homo n -nh nh -nl nl -yl float -yu float]" + msg = " script -p project_name -hdf5 " + optional + + parser = argparse.ArgumentParser(description=msg) + parser.add_argument('-p', required=True, help='Project name') + parser.add_argument('-hdf5', required=True, help='path to the HDF5 file') + parser.add_argument('-homo', help='homo index', type=int, default=19) + parser.add_argument('-nh', help='Number of HOMOS', type=int, default=10) + parser.add_argument('-nl', help='Number of LUMOS', type=int, default=10) + parser.add_argument('-yl', help='Lower limit of y-axis (ev)', type=float, + default=-6) + parser.add_argument('-yu', help='upper limit of y-axis (ev)', type=float, + default=1) + + plot_data(*read_cmd_line(parser)) diff --git a/scripts/Analysis/kspace_density.py b/scripts/Analysis/kspace_density.py index 7b7161cdd..360a2f828 100644 --- a/scripts/Analysis/kspace_density.py +++ b/scripts/Analysis/kspace_density.py @@ -1,19 +1,17 @@ -from cmath import (exp, pi, sqrt) from functools import partial -from nac.basisSet.basisNormalization import createNormalizedCGFs +from multiprocessing import Pool +from nac.integrals.fourierTransform import (calculate_fourier_trasform_cartesian, + fun_density_real, transform_to_spherical) +from nac.schedule.components import create_dict_CGFs from os.path import join from qmworks.parsers.xyzParser import readXYZ - import argparse -import h5py import numpy as np import os - # Some Hint about the types -from typing import Callable, Dict, NamedTuple Vector = np.ndarray Matrix = np.ndarray @@ -33,12 +31,21 @@ def main(parser): coords_angstrom = np.concatenate([at.xyz for at in atoms]) au_to_angstrom = 1.889725989 coords = au_to_angstrom * coords_angstrom - - dictCGFs = 
create_dict_CGFs(path_hdf5, basis_name, atoms) + + # Dictionary containing as key the atomic symbols and as values the set of CGFs + home = os.path.expanduser('~') + basiscp2k = join(home, "Cp2k/cp2k_basis/BASIS_MOLOPT") + potcp2k = join(home, "Cp2k/cp2k_basis/GTH_POTENTIALS") + cp2k_config = {"basis": basiscp2k, "potential": potcp2k} + dictCGFs = create_dict_CGFs(path_hdf5, basis_name, atoms, + package_config=cp2k_config) + count_cgfs = np.vectorize(lambda s: len(dictCGFs[s])) + number_of_basis = np.sum(np.apply_along_axis(count_cgfs, 0, symbols)) + # K-space grid to calculate the fuzzy band - initial = (0., 0., 0.) # Gamma point - final = (0., 1., 1.) # X point - nPoints = 20 + initial = (0., 1., 1.) # Gamma point + final = (0., 0., 0.) # X point + nPoints = 10 grid_k_vectors = grid_kspace(initial, final, nPoints) # Calculate what part of the grid is computed by each process @@ -50,164 +57,25 @@ def main(parser): # Compute the fourier transformation in cartesian coordinates fun_fourier = partial(calculate_fourier_trasform_cartesian, symbols, - coords, dictCGFs) + coords, dictCGFs, number_of_basis) # Apply the fourier transform then covert it to spherical - fun_sphericals = lambda k: transform_to_spherical(fun_fourier, - path_hdf5, - project_name, - orbital, k) + fun_sphericals = partial(transform_to_spherical, fun_fourier, + path_hdf5, project_name, orbital) # Compute the momentum density (an Scalar) - momentum_density = lambda k: fun_density_real(fun_sphericals, k) + momentum_density = partial(fun_density_real, fun_sphericals) # Apply the whole fourier transform to the subset of the grid # correspoding to each process - result = np.apply_along_axis(momentum_density, 1, k_vectors) - print("Results: ", result) - -def fun_density_real(function: Callable, k: float) -> float: - """ Compute the momentum density""" - xs = function(k) - print("Orbital transformation is: ", xs) - return np.dot(xs, np.conjugate(xs)).real - - -def transform_to_spherical(fun_fourier: 
Callable, path_hdf5: str, - project_name: str, orbital: str, - k: Vector) -> complex: - """ - Calculate the Fourier transform in Cartesian, convert it to Spherical - multiplying by the `trans_mtx` and finally multiply the coefficients - in Spherical coordinates. - """ - trans_mtx = read_hdf5(path_hdf5, join(project_name, 'trans_mtx')) - path_to_mo = join(project_name, 'point_0/cp2k/mo/coefficients') - molecular_orbital_i = read_hdf5(path_hdf5, path_to_mo)[:, orbital] - - return np.dot(molecular_orbital_i, np.dot(trans_mtx, fun_fourier(k))) - - -def calculate_fourier_trasform_cartesian(atomic_symbols: Vector, - atomic_coords: Vector, - dictCGFs: Dict, - ks: Vector) -> Vector: - """ - Calculate the Fourier transform projecting the MO in a set of plane waves - - mo_fourier(k) = < phi(r) | exp(i k . r)> - - :param atomic_symbols: Atomic symbols - :type atomic_symbols: Numpy Array [String] - :param ks: The vector in k-space where the fourier transform is evaluated. - :type ks: Numpy Array - :paramter dictCGFS: Dictionary from Atomic Label to basis set. - :type dictCGFS: Dict String [CGF], CGF = ([Primitives], - AngularMomentum), Primitive = (Coefficient, Exponent) - - returns: Numpy array - """ - print("K-vector: ", ks) - fun = np.vectorize(lambda s: len(dictCGFs[s])) - dim_mo = np.sum(np.apply_along_axis(fun, 0, atomic_symbols)) - molecular_orbital_transformed = np.empty(int(dim_mo), dtype=np.complex128) - - acc = 0 - for i, symb in enumerate(atomic_symbols): - num_CGFs = len(dictCGFs[symb]) - i3, i3_1 = i * 3, 3 * (i + 1) - xyz = atomic_coords[i3: i3_1] - arr = calculate_fourier_trasform_atom(dictCGFs[symb], xyz, ks) - molecular_orbital_transformed[acc: acc + num_CGFs] = arr - acc += num_CGFs - - return molecular_orbital_transformed - - -def calculate_fourier_trasform_atom(cgfs: Dict, xyz: Vector, - ks: Vector) -> Vector: - """ - Calculate the Fourier transform for the set of CGFs in an Atom. 
- """ - arr = np.empty(len(cgfs), dtype=np.complex128) - for i, cgf in enumerate(cgfs): - arr[i] = calculate_fourier_trasform_contracted(cgf, xyz, ks) - - return arr - + with Pool() as p: + rss = p.map(momentum_density, k_vectors) -def calculate_fourier_trasform_contracted(cgf: NamedTuple, xyz: Vector, - ks: Vector) -> complex: - """ - Compute the fourier transform for a given CGF. - Implementation note: the function loops over the x,y and z coordinates - while operate in the whole set of Contracted Gaussian primitives. - """ - cs, es = cgf.primitives - label = cgf.orbType - angular_momenta = compute_angular_momenta(label) - acc = np.ones(cs.shape, dtype=np.complex128) - - # Accumlate x, y and z for each one of the primitves - for l, x, k in zip(angular_momenta, xyz, ks): - fun_primitive = partial(calculate_fourier_trasform_primitive, l, x, k) - rs = np.apply_along_axis(np.vectorize(fun_primitive), 0, es) - acc *= rs - - # The result is the summation of the primitive multiplied by is corresponding - # coefficients - return np.dot(acc, cs) - - -def compute_angular_momenta(label) -> Vector: - """ - Compute the exponents l,m and n for the CGF: x^l y^m z^n exp(-a (r-R)^2) - """ - orbitalIndexes = {("S", 0): 0, ("S", 1): 0, ("S", 2): 0, - ("Px", 0): 1, ("Px", 1): 0, ("Px", 2): 0, - ("Py", 0): 0, ("Py", 1): 1, ("Py", 2): 0, - ("Pz", 0): 0, ("Pz", 1): 0, ("Pz", 2): 1, - ("Dxx", 0): 2, ("Dxx", 1): 0, ("Dxx", 2): 0, - ("Dxy", 0): 1, ("Dxy", 1): 1, ("Dxy", 2): 0, - ("Dxz", 0): 1, ("Dxz", 1): 0, ("Dxz", 2): 1, - ("Dyy", 0): 0, ("Dyy", 1): 2, ("Dyy", 2): 0, - ("Dyz", 0): 0, ("Dyz", 1): 1, ("Dyz", 2): 1, - ("Dzz", 0): 0, ("Dzz", 1): 0, ("Dzz", 2): 2} - lookup = lambda i: orbitalIndexes[(label, i)] - - return np.apply_along_axis(np.vectorize(lookup), 0, np.arange(3)) - - -def calculate_fourier_trasform_primitive(l: int, x: float, k: float, - alpha: float) -> complex: - """ - Compute the fourier transform for primitive Gaussian Type Orbitals. 
- """ - pik = pi * k - f = exp(-alpha * x ** 2 + complex(alpha * x, - pik) ** 2 / alpha) - if l == 0: - return sqrt(pi / alpha) * f - elif l == 1: - f = k * exp(-pik * complex(pik / alpha, 2 * x)) - return (pi / alpha) ** 1.5 * f - elif l == 2: - f = exp(-pik * complex(pik / alpha, 2 * x)) - return sqrt(pi / (alpha ** 5)) * (alpha / 2 - pik ** 2) * f - else: - msg = ("there is not implementation for the primivite fourier " - "transform of l: {}".format(l)) - raise NotImplementedError(msg) - - -def read_hdf5(path_hdf5, path_to_prop): - """ - Read an array using the MPI interface of HDF5. - """ - with h5py.File(path_hdf5, "r") as f5: - return f5[path_to_prop].value + # result = np.apply_along_axis(momentum_density, 1, k_vectors) + print("Results: ", rss) def point_number_to_compute(size, points) -> Vector: - """ Compute how many grid points is computed in a given mpi worker """ + """ Compute how many grid points is computed in a given worker """ res = points % size n = points // size @@ -239,38 +107,6 @@ def grid_kspace(initial, final, points) -> Matrix: return np.transpose(mtx) -def createCGFs(path_hdf5, atoms, basis_name) -> Dict: - """ - Create a dictionary containing the primitives Gaussian functions for - each atom involved in the calculation. - """ - home = os.path.expanduser('~') - basiscp2k = join(home, "Cp2k/cp2k_basis/BASIS_MOLOPT") - potcp2k = join(home, "Cp2k/cp2k_basis/GTH_POTENTIALS") - cp2k_config = {"basis": basiscp2k, "potential": potcp2k} - return create_dict_CGFs(path_hdf5, basis_name, atoms, - package_config=cp2k_config) - - -def create_dict_CGFs(path_hdf5, basisname, xyz, package_name='cp2k', - package_config=None): - """ - Try to read the basis from the HDF5 otherwise read it from a file and store - it in the HDF5 file. Finally, it reads the basis Set from HDF5 and calculate - the CGF for each atom. - - :param path_hdf5: Path to the HDF5 file that contains the - numerical results. 
- type path_hdf5: String - :param basisname: Name of the Gaussian basis set. - :type basisname: String - :param xyz: List of Atoms. - :type xyz: [nac.common.AtomXYZ] - """ - with h5py.File(path_hdf5, "r") as f5: - return createNormalizedCGFs(f5, basisname, package_name, xyz) - - def read_cmd_line(parser): """ Parse Command line options. diff --git a/scripts/Submission/distribute_jobs.py b/scripts/Submission/distribute_jobs.py new file mode 100644 index 000000000..9d4188ae7 --- /dev/null +++ b/scripts/Submission/distribute_jobs.py @@ -0,0 +1,209 @@ + +from collections import namedtuple +from nac.workflows.initialization import split_trajectory +from os.path import join +from qmworks import Settings +from qmworks.utils import settings2Dict + +import getpass +import os +import shutil +import string +import subprocess + +SLURM = namedtuple("SLURM", ("nodes", "tasks", "time", "name")) + + +def main(): + """ + THE USER MUST CHANGES THESE VARIABLES ACCORDING TO HER/HIS NEEDS: + * project_name + * path to the basis and Cp2k Potential + * CP2K: + - Range of Molecular oribtals printed by CP2K + - Cell parameter + * Settings to Run Cp2k simulations + * Path to the trajectory in XYZ + + The slurm configuration is optional but the user can edit it: + property default + * nodes 2 + * tasks 24 + * time 48:00:00 + * name namd + + """ + # USER DEFINED CONFIGURATION + project_name = 'distribute_Cd33Se33' # name use to create folders + + # Path to the basis set used by Cp2k + home = os.path.expanduser('~') + basisCP2K = join(home, "Cp2k/cp2k_basis/BASIS_MOLOPT") + potCP2K = join(home, "Cp2k/cp2k_basis/GTH_POTENTIALS") + lower_orbital, upper_orbital = 278, 317 + cp2k_main, cp2k_guess = cp2k_input(lower_orbital, upper_orbital, + cell_parameters=28) + + # Trajectory splitting + path_to_trajectory = "traj1000.xyz" + blocks = 5 # Number of chunks to split the trajectory + + # SLURM Configuration + slurm = SLURM( + nodes=2, + tasks=24, + time="48:00:00", + name="namd" + ) + + 
distribute_computations(project_name, basisCP2K, potCP2K, cp2k_main, + cp2k_guess, path_to_trajectory, blocks, slurm) + + +def cp2k_input(lower_orbital, upper_orbital, cell_parameters=None): + """ + # create ``Settings`` for the Cp2K Jobs. + """ + # Main Cp2k Jobs + cp2k_args = Settings() + cp2k_args.basis = "DZVP-MOLOPT-SR-GTH" + cp2k_args.potential = "GTH-PBE" + cp2k_args.cell_parameters = [cell_parameters] * 3 + main_dft = cp2k_args.specific.cp2k.force_eval.dft + main_dft.scf.added_mos = 20 + main_dft.scf.max_scf = 200 + main_dft.scf.eps_scf = 1e-5 + main_dft['print']['mo']['mo_index_range'] = "{} {}".format(lower_orbital, + upper_orbital) + cp2k_args.specific.cp2k.force_eval.subsys.cell.periodic = 'None' + + # Setting to calculate the wave function used as guess + cp2k_OT = Settings() + cp2k_OT.basis = "DZVP-MOLOPT-SR-GTH" + cp2k_OT.potential = "GTH-PBE" + cp2k_OT.cell_parameters = [cell_parameters] * 3 + ot_dft = cp2k_OT.specific.cp2k.force_eval.dft + ot_dft.scf.scf_guess = 'atomic' + ot_dft.scf.ot.minimizer = 'DIIS' + ot_dft.scf.ot.n_diis = 7 + ot_dft.scf.ot.preconditioner = 'FULL_SINGLE_INVERSE' + ot_dft.scf.added_mos = 0 + ot_dft.scf.eps_scf = 1e-05 + ot_dft.scf.scf_guess = 'restart' + cp2k_OT.specific.cp2k.force_eval.subsys.cell.periodic = 'None' + + return cp2k_args, cp2k_OT + + +# ============================> Distribution <================================= + + +def distribute_computations(project_name, basisCP2K, potCP2K, cp2k_main, + cp2k_guess, path_to_trajectory, blocks, slurm): + + script_name = "script_remote_function.py" + # Split the trajectory in Chunks and move each chunk to its corresponding + # directory. 
+ chunks_trajectory = split_trajectory(path_to_trajectory, blocks, '.')
+ chunks_trajectory.sort()
+ enumerate_from = 0
+ for file_xyz, l in zip(chunks_trajectory, string.ascii_lowercase):
+ folder = 'chunk_{}'.format(l)
+ os.mkdir(folder)
+ shutil.move(file_xyz, folder)
+ # function to be executed remotely
+ write_python_script(folder, file_xyz, project_name,
+ basisCP2K, potCP2K, cp2k_main,
+ cp2k_guess, enumerate_from, script_name)
+ write_slurm_script(folder, slurm, script_name)
+ enumerate_from += number_of_geometries(join(folder, file_xyz))
+
+
+def write_python_script(folder, file_xyz, project_name, basisCP2K, potCP2K, cp2k_main,
+ cp2k_guess, enumerate_from, script_name):
+ """ Write the python script to compute the PYXAID hamiltonians"""
+ scratch = '/scratch-shared'
+ user = getpass.getuser()
+ path_hdf5 = join(scratch, user, project_name, '{}.hdf5'.format(folder))
+
+ xs = """
+from nac.workflows.workflow_coupling import generate_pyxaid_hamiltonians
+from nac.workflows.initialization import initialize
+from qmworks.utils import dict2Setting
+import plams
+
+plams.init()
+
+project_name = '{}'
+path_basis = '{}'
+path_potential = '{}'
+path_hdf5 = '{}'
+path_traj_xyz = '{}'
+basisname = '{}'
+
+initial_config = initialize(project_name, path_traj_xyz,
+ basisname=basisname,
+ path_basis=path_basis,
+ path_potential=path_potential,
+ enumerate_from={},
+ calculate_guesses='first',
+ path_hdf5=path_hdf5)
+
+cp2k_main = dict2Setting({})
+cp2k_guess = dict2Setting({})
+
+generate_pyxaid_hamiltonians('cp2k', project_name, cp2k_main,
+ guess_args=cp2k_guess, nCouplings=40,
+ **initial_config)
+plams.finish()
+ """.format(project_name, basisCP2K, potCP2K, path_hdf5, file_xyz, cp2k_main.basis,
+ enumerate_from, settings2Dict(cp2k_main), settings2Dict(cp2k_guess))
+
+ with open(join(folder, script_name), 'w') as f:
+ f.write(xs)
+
+ return script_name
+
+
+def write_slurm_script(folder, slurm, python_script):
+ """
+ write a Slurm launch script
+ """
+ sbatch = 
lambda x, y: "#SBATCH -{} {}\n".format(x, y)
+
+ header = "#! /bin/bash\n"
+ modules = "\nmodule load cp2k/3.0\nsource activate qmworks\n\n"
+ time = sbatch('t', slurm.time)
+ nodes = sbatch('N', slurm.nodes)
+ tasks = sbatch('n', slurm.tasks)
+ name = sbatch('J', slurm.name)
+ python = "python {}".format(python_script)
+
+ # Script content
+ content = header + time + nodes + tasks + name + modules + python
+
+ file_name = join(folder, "launch.sh")
+
+ with open(file_name, 'w') as f:
+ f.write(content)
+ return file_name
+
+
+def number_of_geometries(file_name):
+ """
+ Count the number of geometries in XYZ format in a given file.
+ """
+
+ with open(file_name, 'r') as f:
+ numat = int(f.readline())
+
+ cmd = "wc -l {}".format(file_name)
+ wc = subprocess.getoutput(cmd).split()[0]
+
+ lines_per_geometry = numat + 2
+
+ return int(wc) // lines_per_geometry
+
+
+if __name__ == "__main__":
+ main()
diff --git a/setup.py b/setup.py
index e26034f1b..38e1892fe 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@
 setup(
 name='NonAdiabaticCouling',
- version='0.1.6',
+ version='0.1.7',
 description='Automation of computations in quantum chemistry',
 license='MIT',
 url='https://github.com/felipeZ/nonAdiabaticCoupling',
@@ -22,7 +22,7 @@
 'intended audience :: science/research',
 'topic :: scientific/engineering :: chemistry'
 ],
- install_requires=['cython', 'numpy', 'h5py', 'noodles', 'qmworks', 'pymonad', 'mpi4py'],
+ install_requires=['cython', 'numpy', 'h5py', 'noodles', 'qmworks', 'pymonad'],
 cmdclass={'build_ext': build_ext},
 ext_modules=[Extension('multipoleObaraSaika', ['nac/integrals/multipoleObaraSaika.pyx'])],
 extras_require={'test': ['nose', 'coverage']}
diff --git a/test/test_obaraSaika.py b/test/test_obaraSaika.py
index 4a22a28c8..664fc7d55 100644
--- a/test/test_obaraSaika.py
+++ b/test/test_obaraSaika.py
@@ -22,7 +22,7 @@
 def create_paths_mos(project_name, i):
 def test_obaraSaika():
 """
- Test the Obara-Saika scheme to compute overlap integrals and
+ Test the 
Obara-Saika scheme to compute overlap integrals and then cumputed the derivated coupling using them. """ project_name = 'ethylene'