diff --git a/.gitchangelog.rc b/.gitchangelog.rc new file mode 100644 index 0000000..e2130b7 --- /dev/null +++ b/.gitchangelog.rc @@ -0,0 +1,309 @@ +# -*- coding: utf-8; mode: python -*- +# +# Format +# +# ACTION: [AUDIENCE:] COMMIT_MSG [!TAG ...] +# +# Description +# +# ACTION is one of 'chg', 'fix', 'new' +# +# Is WHAT the change is about. +# +# 'chg' is for refactor, small improvement, cosmetic changes... +# 'fix' is for bug fixes +# 'new' is for new features, big improvement +# +# AUDIENCE is optional and one of 'dev', 'usr', 'pkg', 'test', 'doc' +# +# Is WHO is concerned by the change. +# +# 'dev' is for developers (API changes, refactors...) +# 'usr' is for final users (UI changes) +# 'pkg' is for packagers (packaging changes) +# 'test' is for testers (test only related changes) +# 'doc' is for doc guys (doc only changes) +# +# COMMIT_MSG is ... well ... the commit message itself. +# +# TAGs are additional adjectives such as 'refactor' 'minor' 'cosmetic' +# +# They are preceded with a '!' or a '@' (prefer the former, as the +# latter is wrongly interpreted in github.) Commonly used tags are: +# +# 'refactor' is obviously for refactoring code only +# 'minor' is for a very meaningless change (a typo, adding a comment) +# 'cosmetic' is for cosmetic driven change (re-indentation, 80-col...) +# 'wip' is for partial functionality but complete subfunctionality. +# +# Example: +# +# new: usr: support of bazaar implemented +# chg: re-indented some lines !cosmetic +# new: dev: updated code to be compatible with last version of killer lib. +# fix: pkg: updated year of licence coverage. +# new: test: added a bunch of tests around user usability of feature X. +# fix: typo in spelling my name in comment. !minor +# +# Please note that multi-line commit messages are supported, and only the +# first line will be considered as the "summary" of the commit message. So +# tags, and other rules only apply to the summary. 
The body of the commit +# message will be displayed in the changelog without reformatting. + +# +# ``ignore_regexps`` is a line of regexps +# +# Any commit having its full commit message matching any regexp listed here +# will be ignored and won't be reported in the changelog. +# + +# r'[mM]inor.*', +# r'[tT]est fix.*', +# r'[tT]ravis fix.*', +# r'[fF]ix .*', +# r'[tT]ry .*', +# r'[tT]est.', +# r'[dD]oc fix.*', +# r'Coverage.*', +# r'^$' + +ignore_regexps = [ + r'^[tT]est$', + r'^[tT]est fix.*$', + r'^[tT]ravis fix.*$', + r'^[yY]apf (test)? fix.*$', + r'^[pP]re-commit fix.*$', + r'^[fF]ix [tT]est[s]?.*$', + r'^[fF]ix [cC]overall[s]?.*$', + r'^[iI]mprove[d]? [tT]est[s]?.*$', + r'^[cC]overage fix.*$', + r'^[cC]overage$', + r'^[tT]ry.*$', + r'^[bB]ump version.*$', + r'^[dD]oc test fix.*$', + r'^.*Merge pull request.*$', + r'@minor', + r'!minor', + r'@cosmetic', + r'!cosmetic', + r'@refactor', + r'!refactor', + r'@wip', + r'!wip', + r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[p|P]kg:', + r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[d|D]ev:', + r'^(.{3,3}\s*:)?\s*[fF]irst commit.?\s*$', + r'^$', # ignore commits with empty messages +] + +# ``section_regexps`` is a list of 2-tuples associating a string label and a +# list of regexp +# +# Commit messages will be classified in sections thanks to this. Section +# titles are the label, and a commit is classified under this section if any +# of the regexps associated is matching. +# +# Please note that ``section_regexps`` will only classify commits and won't +# make any changes to the contents. So you'll probably want to go check +# ``subject_process`` (or ``body_process``) to do some changes to the subject, +# whenever you are tweaking this variable. 
+# +section_regexps = [ + ('New', [ + r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', + ]), + ('Changes', [ + r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', + ]), + ('Fix', [ + r'^[fF]ix\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', + ]), + ( + 'Other', + None # Match all lines + ), +] + +# ``body_process`` is a callable +# +# This callable will be given the original body and result will +# be used in the changelog. +# +# Available constructs are: +# +# - any python callable that take one txt argument and return txt argument. +# +# - ReSub(pattern, replacement): will apply regexp substitution. +# +# - Indent(chars=" "): will indent the text with the prefix +# Please remember that template engines gets also to modify the text and +# will usually indent themselves the text if needed. +# +# - Wrap(regexp=r"\n\n"): re-wrap text in separate paragraph to fill 80-Columns +# +# - noop: do nothing +# +# - ucfirst: ensure the first letter is uppercase. +# (usually used in the ``subject_process`` pipeline) +# +# - final_dot: ensure text finishes with a dot +# (usually used in the ``subject_process`` pipeline) +# +# - strip: remove any spaces before or after the content of the string +# +# - SetIfEmpty(msg="No commit message."): will set the text to +# whatever given ``msg`` if the current text is empty. +# +# Additionally, you can `pipe` the provided filters, for instance: +# body_process = Wrap(regexp=r'\n(?=\w+\s*:)') | Indent(chars=" ") +# body_process = Wrap(regexp=r'\n(?=\w+\s*:)') + +body_process = (strip | ReSub(r'\*', r'\\*') | ReSub(r'((^|\n)[A-Z]\w+(-\w+)*: .*(\n\s+.*)*)+$', r'') | strip | # noqa + final_dot | ucfirst) # noqa + +# ``subject_process`` is a callable +# +# This callable will be given the original subject and result will +# be used in the changelog. +# +# Available constructs are those listed in ``body_process`` doc. 
+subject_process = ( + strip | ReSub(r'\*', r'\\*') | ReSub( # noqa + r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$', r'\4') | + SetIfEmpty("No commit message.") | ucfirst | final_dot) # noqa + +# ``tag_filter_regexp`` is a regexp +# +# Tags that will be used for the changelog must match this regexp. +# +tag_filter_regexp = r'^[v]?[0-9]+\.[0-9]+(\.[0-9]+)?([ab][0-9])?$' + +# ``unreleased_version_label`` is a string or a callable that outputs a string +# +# This label will be used as the changelog Title of the last set of changes +# between last valid tag and HEAD if any. +unreleased_version_label = "(unreleased)" + +# ``output_engine`` is a callable +# +# This will change the output format of the generated changelog file +# +# Available choices are: +# +# - rest_py +# +# Legacy pure python engine, outputs ReSTructured text. +# This is the default. +# +# - mustache() +# +# Template name could be any of the available templates in +# ``templates/mustache/*.tpl``. +# Requires python package ``pystache``. +# Examples: +# - mustache("markdown") +# - mustache("restructuredtext") +# +# - makotemplate() +# +# Template name could be any of the available templates in +# ``templates/mako/*.tpl``. +# Requires python package ``mako``. +# Examples: +# - makotemplate("restructuredtext") +# +output_engine = rest_py # noqa +# output_engine = mustache("restructuredtext") +# output_engine = mustache("markdown") +# output_engine = makotemplate("restructuredtext") + +# ``include_merge`` is a boolean +# +# This option tells git-log whether to include merge commits in the log. +# The default is to include them. +include_merge = True + +# ``log_encoding`` is a string identifier +# +# This option tells gitchangelog what encoding is outputed by ``git log``. +# The default is to be clever about it: it checks ``git config`` for +# ``i18n.logOutputEncoding``, and if not found will default to git's own +# default: ``utf-8``. 
+# log_encoding = 'utf-8' + +# ``publish`` is a callable +# +# Sets what ``gitchangelog`` should do with the output generated by +# the output engine. ``publish`` is a callable taking one argument +# that is an iterator on lines from the output engine. +# +# Some helper callables are provided: +# +# Available choices are: +# +# - stdout +# +# Outputs directly to standard output +# (This is the default) +# +# - FileInsertAtFirstRegexMatch(file, pattern, idx=lambda m: m.start()) +# +# Creates a callable that will parse given file for the given +# regex pattern and will insert the output in the file. +# ``idx`` is a callable that receives the matching object and +# must return an integer index point where to insert the +# output in the file. Default is to return the position of +# the start of the matched string. +# +# - FileRegexSubst(file, pattern, replace, flags) +# +# Apply a replace inplace in the given file. Your regex pattern must +# take care of everything and might be more complex. Check the README +# for a complete copy-pastable example. +# +# publish = FileInsertAtFirstRegexMatch( +# "CHANGELOG.rst", +# r'/(?P[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n/', +# idx=lambda m: m.start(1) +# ) +# publish = stdout + +# ``revs`` is a list of callables or a list of strings +# +# callable will be called to resolve as strings and allow dynamic +# computation of these. The result will be used as revisions for +# gitchangelog (as if directly stated on the command line). This allows +# to filter exactly which commits will be read by gitchangelog. +# +# To get a full documentation on the format of these strings, please +# refer to the ``git rev-list`` arguments. There are many examples. +# +# Using callables is especially useful, for instance, if you +# are using gitchangelog to generate incrementally your changelog. 
+# +# Some helpers are provided, you can use them:: +# +# - FileFirstRegexMatch(file, pattern): will return a callable that will +# return the first string match for the given pattern in the given file. +# If you use named sub-patterns in your regex pattern, it'll output only +# the string matching the regex pattern named "rev". +# +# - Caret(rev): will return the rev prefixed by a "^", which is a +# way to remove the given revision and all its ancestor. +# +# Please note that if you provide a rev-list on the command line, it'll +# replace this value (which will then be ignored). +# +# If empty, then ``gitchangelog`` will act as it had to generate a full +# changelog. +# +# The default is to use all commits to make the changelog. +# revs = ["^1.0.3", ] +# revs = [ +# Caret( +# FileFirstRegexMatch( +# "CHANGELOG.rst", +# r"(?P[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n")), +# "HEAD" +# ] +revs = [] diff --git a/.travis.yml b/.travis.yml index ee16d0a..5d8c5fc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,4 @@ +dist: trusty # note rabbitmq is currently not available on xenial language: python cache: pip @@ -22,7 +23,7 @@ matrix: env: TEST_TYPE="pytest" TEST_AIIDA_BACKEND="django" MOCK_CRY17_EXECUTABLES=true PYPI_DEPLOY=true - python: 2.7 env: TEST_TYPE="pytest" TEST_AIIDA_BACKEND="django" MOCK_CRY17_EXECUTABLES=true - - python: 3.6 + - python: 3.6 env: TEST_TYPE="pytest" TEST_AIIDA_BACKEND="sqlalchemy" MOCK_CRY17_EXECUTABLES=true - python: 3.6 env: TEST_TYPE="docs" READTHEDOCS="True" diff --git a/LICENSE b/LICENSE index 386bbe0..b081979 100644 --- a/LICENSE +++ b/LICENSE @@ -1,21 +1,165 @@ -MIT License - -Copyright (c) 2018 Chris Sewell. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. + GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2019 Chris Sewell. + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. 
+Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. + + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. 
You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. + + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. 
A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. 
+ + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. + + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. diff --git a/aiida_crystal17/__init__.py b/aiida_crystal17/__init__.py index 25de1c8..296b69c 100644 --- a/aiida_crystal17/__init__.py +++ b/aiida_crystal17/__init__.py @@ -1,7 +1,22 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
""" aiida_crystal17 AiiDA plugin for running the CRYSTAL17 code """ -__version__ = '0.9.1b5' +__version__ = '0.9.2b5' diff --git a/aiida_crystal17/calcfunctions/__init__.py b/aiida_crystal17/calcfunctions/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/calcfunctions/__init__.py +++ b/aiida_crystal17/calcfunctions/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. diff --git a/aiida_crystal17/calcfunctions/band_gap.py b/aiida_crystal17/calcfunctions/band_gap.py index 3982c78..0f43f96 100644 --- a/aiida_crystal17/calcfunctions/band_gap.py +++ b/aiida_crystal17/calcfunctions/band_gap.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
from collections import namedtuple import traceback @@ -6,16 +21,10 @@ from aiida.orm import ArrayData, Dict, Float, List from aiida.engine import calcfunction, ExitCode -BandResult = namedtuple('BandResult', - ['fermi', 'left_edge', 'right_edge', 'non_zero_fermi']) +BandResult = namedtuple('BandResult', ['fermi', 'left_edge', 'right_edge', 'non_zero_fermi']) -def calculate_band_gap(energies, - densities, - fermi=0, - dtol=1e-8, - try_fshifts=(), - missing_edge=None): +def calculate_band_gap(energies, densities, fermi=0, dtol=1e-8, try_fshifts=(), missing_edge=None): """calculate the band gap, given an energy vs density plot Parameters @@ -40,11 +49,11 @@ def calculate_band_gap(energies, energies = np.array(energies, float) densities = np.abs(np.array(densities, float)) if not len(energies) == len(densities): - raise AssertionError("the energies and densities arrays are of different lengths") + raise AssertionError('the energies and densities arrays are of different lengths') if not fermi < energies.max(): - raise AssertionError("the energies range does not contain the fermi energy") + raise AssertionError('the energies range does not contain the fermi energy') if not fermi > energies.min(): - raise AssertionError("the energies range does not contain the fermi energy") + raise AssertionError('the energies range does not contain the fermi energy') # sort energies order = np.argsort(energies) @@ -68,8 +77,7 @@ def calculate_band_gap(energies, fermi_idx = new_index if fermi_non_zero: - return BandResult( - energies[fermi_idx], missing_edge, missing_edge, True) + return BandResult(energies[fermi_idx], missing_edge, missing_edge, True) # find left edge found_left = False @@ -85,10 +93,8 @@ def calculate_band_gap(energies, found_right = True break - return BandResult(energies[fermi_idx], - energies[left_idx] if found_left else missing_edge, - energies[right_idx] if found_right else missing_edge, - False) + return BandResult(energies[fermi_idx], energies[left_idx] if 
found_left else missing_edge, + energies[right_idx] if found_right else missing_edge, False) @calcfunction @@ -107,53 +113,42 @@ def calcfunction_band_gap(doss_results, doss_array, dtol=None, try_fshifts=None) """ if not isinstance(doss_results, Dict): - return ExitCode( - 101, 'doss_results is not of type `aiida.orm.Dict`: {}'.format(doss_results)) - if "fermi_energy" not in doss_results.get_dict(): - return ExitCode( - 102, '`fermi_energy` not in doss_results') - if "energy_units" not in doss_results.get_dict(): - return ExitCode( - 102, '`energy_units` not in doss_results') + return ExitCode(101, 'doss_results is not of type `aiida.orm.Dict`: {}'.format(doss_results)) + if 'fermi_energy' not in doss_results.get_dict(): + return ExitCode(102, '`fermi_energy` not in doss_results') + if 'energy_units' not in doss_results.get_dict(): + return ExitCode(102, '`energy_units` not in doss_results') if not isinstance(doss_array, ArrayData): - return ExitCode( - 103, 'doss_array is not of type `aiida.orm.ArrayData`: {}'.format(doss_array)) + return ExitCode(103, 'doss_array is not of type `aiida.orm.ArrayData`: {}'.format(doss_array)) - kwargs = { - "fermi": doss_results.get_dict()["fermi_energy"] - } + kwargs = {'fermi': doss_results.get_dict()['fermi_energy']} if dtol is not None: if not isinstance(dtol, Float): - return ExitCode( - 104, 'dtol is not of type `aiida.orm.Float`: {}'.format(dtol)) - kwargs["dtol"] = dtol.value + return ExitCode(104, 'dtol is not of type `aiida.orm.Float`: {}'.format(dtol)) + kwargs['dtol'] = dtol.value if try_fshifts is not None: if not isinstance(try_fshifts, List): - return ExitCode( - 105, 'try_fshifts is not of type `aiida.orm.List`: {}'.format(try_fshifts)) - kwargs["try_fshifts"] = try_fshifts.get_list() + return ExitCode(105, 'try_fshifts is not of type `aiida.orm.List`: {}'.format(try_fshifts)) + kwargs['try_fshifts'] = try_fshifts.get_list() array_names = doss_array.get_arraynames() - if "energies" not in array_names: - return 
ExitCode( - 111, 'doss_array does not contain array `energies`: {}'.format(doss_array)) - if "total" in array_names: - if "total_alpha" in array_names and "total_beta" in array_names: - return ExitCode( - 112, ('doss_array does not contains both array `total` and ' - '`total_alpha`, `total_beta`: {}'.format(doss_array))) - elif "total_alpha" in array_names and "total_beta" in array_names: - if "total" in array_names: - return ExitCode( - 112, ('doss_array does not contains both array `total` and ' - '`total_alpha`, `total_beta`: {}'.format(doss_array))) + if 'energies' not in array_names: + return ExitCode(111, 'doss_array does not contain array `energies`: {}'.format(doss_array)) + if 'total' in array_names: + if 'total_alpha' in array_names and 'total_beta' in array_names: + return ExitCode(112, ('doss_array does not contains both array `total` and ' + '`total_alpha`, `total_beta`: {}'.format(doss_array))) + elif 'total_alpha' in array_names and 'total_beta' in array_names: + if 'total' in array_names: + return ExitCode(112, ('doss_array does not contains both array `total` and ' + '`total_alpha`, `total_beta`: {}'.format(doss_array))) else: return ExitCode( 113, 'doss_array does not contain array `total` or `total_alpha` and `total_beta`: {}'.format(doss_array)) - if "total" in array_names: + if 'total' in array_names: calcs = {'total': doss_array.get_array('total')} else: alpha_density = doss_array.get_array('total_alpha') @@ -161,16 +156,11 @@ def calcfunction_band_gap(doss_results, doss_array, dtol=None, try_fshifts=None) total_density = np.abs(alpha_density) + np.abs(beta_density) calcs = {'alpha': alpha_density, 'beta': beta_density, 'total': total_density} - final_dict = { - "energy_units": doss_results.get_dict()["energy_units"] - } + final_dict = {'energy_units': doss_results.get_dict()['energy_units']} for name, density in calcs.items(): try: - result = calculate_band_gap( - doss_array.get_array('energies'), - density, - **kwargs) + result = 
calculate_band_gap(doss_array.get_array('energies'), density, **kwargs) except Exception: traceback.print_exc() return ExitCode(201, 'calculate_band_gap failed') @@ -180,14 +170,12 @@ def calcfunction_band_gap(doss_results, doss_array, dtol=None, try_fshifts=None) bandgap = None else: bandgap = result.right_edge - result.left_edge - final_dict.update( - { - name+'_fermi': result.fermi, - name+'_left_edge': result.left_edge, - name+'_right_edge': result.right_edge, - name+'_zero_fermi': not result.non_zero_fermi, - name+'_bandgap': bandgap - } - ) - - return {"results": Dict(dict=final_dict)} + final_dict.update({ + name + '_fermi': result.fermi, + name + '_left_edge': result.left_edge, + name + '_right_edge': result.right_edge, + name + '_zero_fermi': not result.non_zero_fermi, + name + '_bandgap': bandgap + }) + + return {'results': Dict(dict=final_dict)} diff --git a/aiida_crystal17/calculations/__init__.py b/aiida_crystal17/calculations/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/calculations/__init__.py +++ b/aiida_crystal17/calculations/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
diff --git a/aiida_crystal17/calculations/cry_abstract.py b/aiida_crystal17/calculations/cry_abstract.py index 23ae2c3..cfcabff 100644 --- a/aiida_crystal17/calculations/cry_abstract.py +++ b/aiida_crystal17/calculations/cry_abstract.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ Plugin to create a CRYSTAL17 output file from a supplied input file. """ @@ -24,102 +39,86 @@ def define(cls, spec): super(CryAbstractCalculation, cls).define(spec) - spec.input('metadata.options.input_file_name', - valid_type=six.string_types, default='INPUT') - spec.input('metadata.options.output_main_file_name', - valid_type=six.string_types, default='main.out') + spec.input('metadata.options.input_file_name', valid_type=six.string_types, default='INPUT') + spec.input('metadata.options.output_main_file_name', valid_type=six.string_types, default='main.out') - spec.input('metadata.options.parser_name', - valid_type=six.string_types, default='crystal17.main') + spec.input('metadata.options.parser_name', valid_type=six.string_types, default='crystal17.main') # TODO review aiidateam/aiida_core#2997, when closed, for exit code formalization # Unrecoverable errors: resources like the retrieved folder or its expected contents are missing spec.exit_code( - 200, 'ERROR_NO_RETRIEVED_FOLDER', - message='The retrieved folder data node could not be accessed.') - spec.exit_code( - 210, 'ERROR_OUTPUT_FILE_MISSING', - message='the main (stdout) output file was not found') - 
spec.exit_code( - 211, 'ERROR_TEMP_FOLDER_MISSING', - message='the temporary retrieved folder was not found') + 200, 'ERROR_NO_RETRIEVED_FOLDER', message='The retrieved folder data node could not be accessed.') + spec.exit_code(210, 'ERROR_OUTPUT_FILE_MISSING', message='the main (stdout) output file was not found') + spec.exit_code(211, 'ERROR_TEMP_FOLDER_MISSING', message='the temporary retrieved folder was not found') # Unrecoverable errors: required retrieved files could not be read, parsed or are otherwise incomplete spec.exit_code( - 300, 'ERROR_PARSING_STDOUT', + 300, + 'ERROR_PARSING_STDOUT', message=('An error was flagged trying to parse the ' 'crystal exec stdout file')) spec.exit_code( # TODO is this an unrecoverable error? - 301, 'ERROR_PARSING_OPTIMISATION_GEOMTRIES', - message=("An error occurred parsing the 'opta'/'optc' geomerty files")) + 301, + 'ERROR_PARSING_OPTIMISATION_GEOMTRIES', + message=("An error occurred parsing the 'opta'/'optc' geometry files")) spec.exit_code( - 302, 'TESTGEOM_DIRECTIVE', - message=('The crystal exec stdout file denoted that the run was a testgeom')) + 302, 'TESTGEOM_DIRECTIVE', message=('The crystal exec stdout file denoted that the run was a testgeom')) + spec.exit_code(350, 'ERROR_CRYSTAL_INPUT', message='the input file could not be read by CRYSTAL') spec.exit_code( - 350, 'ERROR_CRYSTAL_INPUT', - message='the input file could not be read by CRYSTAL') - spec.exit_code( - 351, 'ERROR_WAVEFUNCTION_NOT_FOUND', - message='CRYSTAL could not find the required wavefunction file') + 351, 'ERROR_WAVEFUNCTION_NOT_FOUND', message='CRYSTAL could not find the required wavefunction file') # Significant errors but calculation can be used to restart spec.exit_code( - 400, 'ERROR_OUT_OF_WALLTIME', - message='The calculation stopped prematurely because it ran out of walltime.') + 400, 'ERROR_OUT_OF_WALLTIME', message='The calculation stopped prematurely because it ran out of walltime.') spec.exit_code( - 401, 'ERROR_OUT_OF_MEMORY', - 
message='The calculation stopped prematurely because it ran out of memory.') + 401, 'ERROR_OUT_OF_MEMORY', message='The calculation stopped prematurely because it ran out of memory.') spec.exit_code( - 402, 'ERROR_OUT_OF_VMEMORY', + 402, + 'ERROR_OUT_OF_VMEMORY', message='The calculation stopped prematurely because it ran out of virtual memory.') spec.exit_code( - 411, 'UNCONVERGED_SCF', - message='SCF convergence did not finalise (usually due to reaching step limit)') + 411, 'UNCONVERGED_SCF', message='SCF convergence did not finalise (usually due to reaching step limit)') spec.exit_code( - 412, 'UNCONVERGED_GEOMETRY', + 412, + 'UNCONVERGED_GEOMETRY', message='Geometry convergence did not finalise (usually due to reaching step limit)') spec.exit_code( - 413, 'BASIS_SET_LINEARLY_DEPENDENT', - message='an error encountered usually during geometry optimisation') - spec.exit_code( - 414, 'ERROR_SCF_ABNORMAL_END', - message='an error was encountered during an SCF computation') - spec.exit_code( - 415, 'ERROR_MPI_ABORT', - message='an unknown error was encountered, causing the MPI to abort') - spec.exit_code( - 499, 'ERROR_CRYSTAL_RUN', - message='The main crystal output file flagged an unhandled error') + 413, 'BASIS_SET_LINEARLY_DEPENDENT', message='an error encountered usually during geometry optimisation') + spec.exit_code(414, 'ERROR_SCF_ABNORMAL_END', message='an error was encountered during an SCF computation') + spec.exit_code(415, 'ERROR_MPI_ABORT', message='an unknown error was encountered, causing the MPI to abort') + spec.exit_code(499, 'ERROR_CRYSTAL_RUN', message='The main crystal output file flagged an unhandled error') # errors in symmetry node consistency checks - spec.exit_code( - 510, 'ERROR_SYMMETRY_INCONSISTENCY', - message=('inconsistency in the input and output symmetry')) - spec.exit_code( - 520, 'ERROR_SYMMETRY_NOT_FOUND', - message=('primitive symmops were not found in the output file')) - - spec.output(cls.link_output_results, - 
valid_type=DataFactory('dict'), - required=True, - help='the data extracted from the main output file') + spec.exit_code(510, 'ERROR_SYMMETRY_INCONSISTENCY', message=('inconsistency in the input and output symmetry')) + spec.exit_code(520, 'ERROR_SYMMETRY_NOT_FOUND', message=('primitive symmops were not found in the output file')) + + spec.output( + cls.link_output_results, + valid_type=DataFactory('dict'), + required=True, + help='the data extracted from the main output file') spec.default_output_node = cls.link_output_results - spec.output(cls.link_output_structure, - valid_type=DataFactory('structure'), - required=False, - help='the structure output from the calculation') - spec.output(cls.link_output_symmetry, - valid_type=DataFactory('crystal17.symmetry'), - required=False, - help='the symmetry data from the calculation') - - def create_calc_info( - self, tempfolder, - local_copy_list=None, remote_copy_list=None, remote_symlink_list=None, - retrieve_list=None, retrieve_temporary_list=None): + spec.output( + cls.link_output_structure, + valid_type=DataFactory('structure'), + required=False, + help='the structure output from the calculation') + spec.output( + cls.link_output_symmetry, + valid_type=DataFactory('crystal17.symmetry'), + required=False, + help='the symmetry data from the calculation') + + def create_calc_info(self, + tempfolder, + local_copy_list=None, + remote_copy_list=None, + remote_symlink_list=None, + retrieve_list=None, + retrieve_temporary_list=None): """Prepare CalcInfo object for aiida, to describe how the computation will be executed and recovered """ @@ -130,11 +129,10 @@ def create_calc_info( if self.metadata.options.withmpi: # parallel versions of crystal (Pcrystal, Pproperties & MPPcrystal) # read data specifically from a file called INPUT - if self.metadata.options.input_file_name != "INPUT": + if self.metadata.options.input_file_name != 'INPUT': tempfolder.insert_path( - os.path.join(tempfolder.abspath, - 
self.metadata.options.input_file_name), - dest_name="INPUT", + os.path.join(tempfolder.abspath, self.metadata.options.input_file_name), + dest_name='INPUT', ) else: codeinfo.stdin_name = self.metadata.options.input_file_name diff --git a/aiida_crystal17/calculations/cry_basic.py b/aiida_crystal17/calculations/cry_basic.py index 7639da2..882364b 100644 --- a/aiida_crystal17/calculations/cry_basic.py +++ b/aiida_crystal17/calculations/cry_basic.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ Plugin to create a CRYSTAL17 output file from a supplied input file. 
""" @@ -12,22 +27,21 @@ class CryBasicCalculation(CryAbstractCalculation): AiiDA calculation plugin to run the runcry17 executable, by supplying a normal .d12 input file and (optional) .gui file """ + @classmethod def define(cls, spec): super(CryBasicCalculation, cls).define(spec) - spec.input('metadata.options.external_file_name', - valid_type=six.string_types, default='fort.34') + spec.input('metadata.options.external_file_name', valid_type=six.string_types, default='fort.34') # TODO this has to be fort.34 for crystal exec (but not for parser), # so maybe should be fixed spec.input( - 'input_file', valid_type=DataFactory('singlefile'), - required=True, - help='the input .d12 file content.') + 'input_file', valid_type=DataFactory('singlefile'), required=True, help='the input .d12 file content.') spec.input( - 'input_external', valid_type=DataFactory('singlefile'), + 'input_external', + valid_type=DataFactory('singlefile'), required=False, help=('optional input fort.34 (gui) file content ' '(for use with EXTERNAL keyword).')) @@ -41,20 +55,16 @@ def prepare_for_submission(self, tempfolder): where the plugin should put all its files. 
""" # pylint: disable=too-many-locals,too-many-statements,too-many-branches - local_copy_list = [ - [self.inputs.input_file.uuid, - self.inputs.input_file.filename, - self.metadata.options.input_file_name]] - if "input_external" in self.inputs: + local_copy_list = [[ + self.inputs.input_file.uuid, self.inputs.input_file.filename, self.metadata.options.input_file_name + ]] + if 'input_external' in self.inputs: local_copy_list.append([ - self.inputs.input_external.uuid, - self.inputs.input_external.filename, - self.metadata.options.external_file_name]) + self.inputs.input_external.uuid, self.inputs.input_external.filename, + self.metadata.options.external_file_name + ]) return self.create_calc_info( tempfolder, local_copy_list=local_copy_list, - retrieve_list=[ - self.metadata.options.output_main_file_name, - self.metadata.options.external_file_name] - ) + retrieve_list=[self.metadata.options.output_main_file_name, self.metadata.options.external_file_name]) diff --git a/aiida_crystal17/calculations/cry_doss.py b/aiida_crystal17/calculations/cry_doss.py index d3eb1d5..b29b808 100644 --- a/aiida_crystal17/calculations/cry_doss.py +++ b/aiida_crystal17/calculations/cry_doss.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
import os import six diff --git a/aiida_crystal17/calculations/cry_fermi.py b/aiida_crystal17/calculations/cry_fermi.py index 2a13178..d61614c 100644 --- a/aiida_crystal17/calculations/cry_fermi.py +++ b/aiida_crystal17/calculations/cry_fermi.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. import os import six @@ -9,7 +24,7 @@ def _validate_shrink(int_data): if not int_data.value > 0: - raise InputValidationError("kpoint must be > 0") + raise InputValidationError('kpoint must be > 0') class CryFermiCalculation(CryAbstractCalculation): @@ -17,38 +32,27 @@ class CryFermiCalculation(CryAbstractCalculation): AiiDA calculation plugin to run the runprop17 executable, for NEWK calculations (to return the fermi energy) """ + @classmethod def define(cls, spec): super(CryFermiCalculation, cls).define(spec) - spec.input('metadata.options.input_wf_name', - valid_type=six.string_types, default='fort.9') - spec.input('metadata.options.symlink_wf', - valid_type=bool, default=False) + spec.input('metadata.options.input_wf_name', valid_type=six.string_types, default='fort.9') + spec.input('metadata.options.symlink_wf', valid_type=bool, default=False) - spec.input('metadata.options.parser_name', - valid_type=six.string_types, default='crystal17.fermi') + spec.input('metadata.options.parser_name', valid_type=six.string_types, default='crystal17.fermi') + spec.input('shrink_is', valid_type=Int, required=True, validator=_validate_shrink) + spec.input('shrink_isp', 
valid_type=Int, required=True, validator=_validate_shrink) spec.input( - 'shrink_is', valid_type=Int, - required=True, validator=_validate_shrink) - spec.input( - 'shrink_isp', valid_type=Int, - required=True, validator=_validate_shrink) - spec.input( - 'wf_folder', valid_type=RemoteData, + 'wf_folder', + valid_type=RemoteData, required=True, help='the folder containing the wavefunction fort.9 file') - spec.output("fermi_energy", - valid_type=Float, - required=True, - help='The fermi energy (in eV)') - spec.output("results", - valid_type=Dict, - required=True, - help='result from the parser') - spec.default_output_node = "results" + spec.output('fermi_energy', valid_type=Float, required=True, help='The fermi energy (in eV)') + spec.output('results', valid_type=Dict, required=True, help='result from the parser') + spec.default_output_node = 'results' def prepare_for_submission(self, tempfolder): """ @@ -59,29 +63,17 @@ def prepare_for_submission(self, tempfolder): where the plugin should put all its files. 
""" - input_lines = [ - "NEWK", - "{} {}".format(self.inputs.shrink_is.value, - self.inputs.shrink_isp.value), - "1 0", - "END" - ] + input_lines = ['NEWK', '{} {}'.format(self.inputs.shrink_is.value, self.inputs.shrink_isp.value), '1 0', 'END'] with tempfolder.open(self.metadata.options.input_file_name, 'w') as f: - f.write(six.ensure_text("\n".join(input_lines))) + f.write(six.ensure_text('\n'.join(input_lines))) - remote_files = [( - self.inputs.wf_folder.computer.uuid, - os.path.join(self.inputs.wf_folder.get_remote_path(), - self.metadata.options.input_wf_name), - 'fort.9' - )] + remote_files = [(self.inputs.wf_folder.computer.uuid, + os.path.join(self.inputs.wf_folder.get_remote_path(), self.metadata.options.input_wf_name), + 'fort.9')] return self.create_calc_info( tempfolder, remote_copy_list=remote_files if not self.metadata.options.symlink_wf else None, remote_symlink_list=remote_files if self.metadata.options.symlink_wf else None, - retrieve_list=[ - self.metadata.options.output_main_file_name - ] - ) + retrieve_list=[self.metadata.options.output_main_file_name]) diff --git a/aiida_crystal17/calculations/cry_main.py b/aiida_crystal17/calculations/cry_main.py index dda4770..1c283ed 100644 --- a/aiida_crystal17/calculations/cry_main.py +++ b/aiida_crystal17/calculations/cry_main.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
""" Plugin to create a CRYSTAL17 output file, from input files created via data nodes @@ -10,9 +25,8 @@ from aiida.plugins import DataFactory from aiida_crystal17.calculations.cry_abstract import CryAbstractCalculation -from aiida_crystal17.parsers.raw.gui_parse import gui_file_write -from aiida_crystal17.parsers.raw.inputd12_write import ( - write_input, create_atom_properties) +from aiida_crystal17.parsers.raw.parse_fort34 import gui_file_write +from aiida_crystal17.parsers.raw.inputd12_write import (write_input, create_atom_properties) class CryMainCalculation(CryAbstractCalculation): @@ -21,41 +35,49 @@ class CryMainCalculation(CryAbstractCalculation): by supplying aiida nodes, with data sufficient to create the .d12 input file and .gui file """ + @classmethod def define(cls, spec): super(CryMainCalculation, cls).define(spec) spec.input( - 'parameters', valid_type=DataFactory('crystal17.parameters'), + 'parameters', + valid_type=DataFactory('crystal17.parameters'), required=True, serializer=lambda x: DataFactory('crystal17.parameters')(data=x), help='the input parameters to create the .d12 file content.') spec.input( - 'structure', valid_type=StructureData, + 'structure', + valid_type=StructureData, required=True, help='structure used to construct the input fort.34 (gui) file') spec.input( - 'symmetry', valid_type=DataFactory('crystal17.symmetry'), + 'symmetry', + valid_type=DataFactory('crystal17.symmetry'), required=False, help=('the symmetry of the structure, ' 'used to construct the input .gui file (fort.34)')) spec.input( - 'kinds', valid_type=DataFactory('crystal17.kinds'), + 'kinds', + valid_type=DataFactory('crystal17.kinds'), required=False, help=('additional structure kind specific data ' '(e.g. initial spin)')) spec.input_namespace( 'basissets', - valid_type=DataFactory('crystal17.basisset'), dynamic=True, - help=("Use a node for the basis set of one of " - "the elements in the structure. 
You have to pass " + valid_type=DataFactory('crystal17.basisset'), + dynamic=True, + help=('Use a node for the basis set of one of ' + 'the elements in the structure. You have to pass ' "an additional parameter ('element') specifying the " - "atomic element symbol for which you want to use this " - "basis set.")) + 'atomic element symbol for which you want to use this ' + 'basis set.')) spec.input( - 'wf_folder', valid_type=RemoteData, required=False, + 'wf_folder', + valid_type=RemoteData, + required=False, help=('An optional working directory, ' 'of a previously completed calculation, ' 'containing a fort.9 wavefunction file to restart from')) @@ -67,14 +89,21 @@ def define(cls, spec): spec.output( 'optimisation', - valid_type=TrajectoryData, required=False, - help="atomic configurations, for each optimisation step") + valid_type=TrajectoryData, + required=False, + help='atomic configurations, for each optimisation step') # pylint: disable=too-many-arguments @classmethod - def create_builder(cls, parameters, structure, bases, - symmetry=None, kinds=None, - code=None, metadata=None, unflatten=False): + def create_builder(cls, + parameters, + structure, + bases, + symmetry=None, + kinds=None, + code=None, + metadata=None, + unflatten=False): """ prepare and validate the inputs to the calculation, and return a builder pre-populated with the calculation inputs @@ -119,20 +148,18 @@ def create_builder(cls, parameters, structure, bases, # validate parameters atom_props = create_atom_properties(structure, kinds) - write_input(parameters.get_dict(), ["test_basis"], atom_props) + write_input(parameters.get_dict(), ['test_basis'], atom_props) # validate basis sets basis_cls = DataFactory('crystal17.basisset') if isinstance(bases, six.string_types): - symbol_to_basis_map = basis_cls.get_basissets_from_structure( - structure, bases, by_kind=False) + symbol_to_basis_map = basis_cls.get_basissets_from_structure(structure, bases, by_kind=False) else: elements_required = 
set([kind.symbol for kind in structure.kinds]) if set(bases.keys()) != elements_required: - err_msg = ( - "Mismatch between the defined basissets and the list of " - "elements of the structure. Basissets: {}; elements: {}". - format(set(bases.keys()), elements_required)) + err_msg = ('Mismatch between the defined basissets and the list of ' + 'elements of the structure. Basissets: {}; elements: {}'.format( + set(bases.keys()), elements_required)) raise InputValidationError(err_msg) symbol_to_basis_map = bases @@ -152,12 +179,10 @@ def prepare_for_submission(self, tempfolder): # for each symbol present in the `StructureData` symbols = [kind.symbol for kind in self.inputs.structure.kinds] if set(symbols) != set(self.inputs.basissets.keys()): - raise InputValidationError( - 'Mismatch between the defined basissets ' - 'and the list of symbols of the structure.\n' - 'Basissets: {};\nSymbols: {}'.format( - ', '.join(self.inputs.basissets.keys()), - ', '.join(list(symbols)))) + raise InputValidationError('Mismatch between the defined basissets ' + 'and the list of symbols of the structure.\n' + 'Basissets: {};\nSymbols: {}'.format(', '.join(self.inputs.basissets.keys()), + ', '.join(list(symbols)))) # set the initial parameters parameters = self.inputs.parameters.get_dict() @@ -165,38 +190,30 @@ def prepare_for_submission(self, tempfolder): remote_copy_list = [] # deal with scf restarts - if "wf_folder" in self.inputs: + if 'wf_folder' in self.inputs: # TODO it would be good to check if the fort.9 exists and is not empty # (fort.9 is present but empty if crystal is killed by SIGTERM (e.g. 
when walltime reached)) # but this would involve connecting to the remote computer, which could fail # Ideally would want to use the process exponential backoff & pause functionality - remote_copy_list.append(( - self.inputs.wf_folder.computer.uuid, - os.path.join(self.inputs.wf_folder.get_remote_path(), 'fort.9'), - 'fort.20')) + remote_copy_list.append((self.inputs.wf_folder.computer.uuid, + os.path.join(self.inputs.wf_folder.get_remote_path(), 'fort.9'), 'fort.20')) restart_fnames.append('fort.20') # modify parameters to use restart files parameters = self._modify_parameters(parameters, restart_fnames) # create fort.34 external geometry file and place it in tempfolder - gui_content = gui_file_write(self.inputs.structure, - self.inputs.get("symmetry", None)) - with tempfolder.open("fort.34", 'w') as f: - f.write(six.u("\n".join(gui_content))) + gui_content = gui_file_write(self.inputs.structure, self.inputs.get('symmetry', None)) + with tempfolder.open('fort.34', 'w') as f: + f.write(six.u('\n'.join(gui_content))) # create .d12 input file and place it in tempfolder - atom_props = create_atom_properties( - self.inputs.structure, self.inputs.get("kinds", None)) + atom_props = create_atom_properties(self.inputs.structure, self.inputs.get('kinds', None)) try: d12_filecontent = write_input( - parameters, - [self.inputs.basissets[k] for k in sorted(self.inputs.basissets.keys())], - atom_props) + parameters, [self.inputs.basissets[k] for k in sorted(self.inputs.basissets.keys())], atom_props) except (ValueError, NotImplementedError) as err: - raise InputValidationError( - "an input file could not be created from the parameters: {}". 
- format(err)) + raise InputValidationError('an input file could not be created from the parameters: {}'.format(err)) with tempfolder.open(self.metadata.options.input_file_name, 'w') as f: f.write(d12_filecontent) @@ -204,13 +221,8 @@ def prepare_for_submission(self, tempfolder): return self.create_calc_info( tempfolder, remote_copy_list=remote_copy_list, - retrieve_list=[ - self.metadata.options.output_main_file_name, - "fort.34", - "HESSOPT.DAT" - ], - retrieve_temporary_list=["opt[ac][0-9][0-9][0-9]"] - ) + retrieve_list=[self.metadata.options.output_main_file_name, 'fort.34', 'HESSOPT.DAT'], + retrieve_temporary_list=['opt[ac][0-9][0-9][0-9]']) @staticmethod def _modify_parameters(parameters, restart_fnames): @@ -220,20 +232,20 @@ def _modify_parameters(parameters, restart_fnames): if not restart_fnames: return parameters - if "fort.20" in restart_fnames: - parameters["scf"]["GUESSP"] = True + if 'fort.20' in restart_fnames: + parameters['scf']['GUESSP'] = True - if "HESSOPT.DAT" in restart_fnames: - if parameters.get("geometry", {}).get("optimise", False): - if isinstance(parameters["geometry"]["optimise"], bool): - parameters["geometry"]["optimise"] = {} - parameters["geometry"]["optimise"]["hessian"] = "HESSOPT" + if 'HESSOPT.DAT' in restart_fnames: + if parameters.get('geometry', {}).get('optimise', False): + if isinstance(parameters['geometry']['optimise'], bool): + parameters['geometry']['optimise'] = {} + parameters['geometry']['optimise']['hessian'] = 'HESSOPT' - if "OPTINFO.DAT" in restart_fnames: - if parameters.get("geometry", {}).get("optimise", False): - if isinstance(parameters["geometry"]["optimise"], bool): - parameters["geometry"]["optimise"] = {} - parameters["geometry"]["optimise"]["restart"] = True + if 'OPTINFO.DAT' in restart_fnames: + if parameters.get('geometry', {}).get('optimise', False): + if isinstance(parameters['geometry']['optimise'], bool): + parameters['geometry']['optimise'] = {} + parameters['geometry']['optimise']['restart'] 
= True return parameters @@ -265,8 +277,7 @@ def _check_remote(remote_folder, file_names): trans = remote_folder.get_authinfo().get_transport() with trans: if not trans.isdir(remote_folder.get_remote_path()): - raise IOError( - "the remote_folder's path does not exist on the remote computer") + raise IOError("the remote_folder's path does not exist on the remote computer") trans.chdir(remote_folder.get_remote_path()) remote_fnames = trans.listdir() for file_name in file_names: diff --git a/aiida_crystal17/cmndline/__init__.py b/aiida_crystal17/cmndline/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/cmndline/__init__.py +++ b/aiida_crystal17/cmndline/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. diff --git a/aiida_crystal17/cmndline/basis_set.py b/aiida_crystal17/cmndline/basis_set.py index 5c8460e..2cfd3fb 100644 --- a/aiida_crystal17/cmndline/basis_set.py +++ b/aiida_crystal17/cmndline/basis_set.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU Lesser General Public License for more details. import click import tabulate from click_spinner import spinner as cli_spinner @@ -15,17 +30,14 @@ def basisset(): @basisset.command() -@click.argument( - 'node', - type=types.DataParamType(sub_classes=('aiida.data:crystal17.basisset',))) -@click.option( - '--content', '-c', is_flag=True, help="include full basis content") +@click.argument('node', type=types.DataParamType(sub_classes=('aiida.data:crystal17.basisset',))) +@click.option('--content', '-c', is_flag=True, help='include full basis content') @decorators.with_dbenv() def show(node, content): """show the contents of a basis set node""" edict.pprint(node.metadata, depth=None, print_func=click.echo) if content: - click.echo("---") + click.echo('---') click.echo(node.content) @@ -43,22 +55,17 @@ def try_grab_description(ctx, param, value): if group_name in existing_group_names: return basis_data_cls.get_basis_group(group_name).description else: - raise click.MissingParameter( - 'A new group must be given a description.', param=param) + raise click.MissingParameter('A new group must be given a description.', param=param) return value # pylint: disable=too-many-arguments @basisset.command() @options.PATH(help='Path to a folder containing the Basis Set files') -@click.option('--ext', default="basis", help="the file extension to filter by") +@click.option('--ext', default='basis', help='the file extension to filter by') @options.FAMILY_NAME() -@options.DESCRIPTION( - help='A description for the family', callback=try_grab_description) -@click.option( - '--stop-if-existing', - is_flag=True, - help='Abort when encountering a previously uploaded Basis Set file') +@options.DESCRIPTION(help='A description for the family', callback=try_grab_description) +@click.option('--stop-if-existing', is_flag=True, help='Abort when encountering a previously uploaded Basis Set file') @options.DRY_RUN() @decorators.with_dbenv() def uploadfamily(path, ext, name, description, 
stop_if_existing, dry_run): @@ -67,26 +74,17 @@ def uploadfamily(path, ext, name, description, stop_if_existing, dry_run): basis_data_cls = DataFactory('crystal17.basisset') with cli_spinner(): nfiles, num_uploaded = basis_data_cls.upload_basisset_family( - path, - name, - description, - stop_if_existing=stop_if_existing, - extension=".{}".format(ext), - dry_run=dry_run) + path, name, description, stop_if_existing=stop_if_existing, extension='.{}'.format(ext), dry_run=dry_run) - click.echo( - "Basis Set files found and added to family: {}, of those {} " - "were newly uploaded".format(nfiles, num_uploaded)) + click.echo('Basis Set files found and added to family: {}, of those {} ' + 'were newly uploaded'.format(nfiles, num_uploaded)) if dry_run: click.echo('No files were uploaded due to --dry-run.') @basisset.command() @click.option( - '-e', - '--element', - multiple=True, - help='Filter for families containing potentials for all given elements.') + '-e', '--element', multiple=True, help='Filter for families containing potentials for all given elements.') @click.option('-d', '--with-description', is_flag=True) @click.option('-p', '--list-pks', is_flag=True) @decorators.with_dbenv() @@ -94,8 +92,7 @@ def listfamilies(element, with_description, list_pks): """List available families of CRYSTAL Basis Set files.""" basis_data_cls = DataFactory('crystal17.basisset') - groups = basis_data_cls.get_basis_groups( - filter_elements=None if not element else element) + groups = basis_data_cls.get_basis_groups(filter_elements=None if not element else element) table = [['Family', 'Num Basis Sets']] if with_description: @@ -107,13 +104,12 @@ def listfamilies(element, with_description, list_pks): if with_description: row.append(group.description) if list_pks: - row.append(",".join([str(n.pk) for n in group.nodes])) + row.append(','.join([str(n.pk) for n in group.nodes])) table.append(row) if len(table) > 1: click.echo(tabulate.tabulate(table, headers='firstrow')) click.echo() elif 
element: - click.echo( - 'No Basis Set family contains all given elements and symbols.') + click.echo('No Basis Set family contains all given elements and symbols.') else: click.echo('No Basis Set family available.') diff --git a/aiida_crystal17/cmndline/cmd_parser.py b/aiida_crystal17/cmndline/cmd_parser.py index a2aa344..3424681 100644 --- a/aiida_crystal17/cmndline/cmd_parser.py +++ b/aiida_crystal17/cmndline/cmd_parser.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. import io from aiida.cmdline.commands.cmd_verdi import verdi @@ -36,3 +51,17 @@ def stdout(input_file, keys, fmt): if keys is not None: data = {k: v for k, v in data.items() if k in keys} options.echo_dictionary(data, fmt=fmt) + + +@parse.command('doss-f25') +@arguments.INPUT_FILE() +@options.DICT_KEYS() +@options.DICT_FORMAT() +def doss_f25(input_file, keys, fmt): + """Parse an existing fort.25 file, created from a crystal properties DOSS calculation.""" + from aiida_crystal17.parsers.raw.crystal_fort25 import parse_crystal_fort25 + with io.open(input_file) as handle: + data = parse_crystal_fort25(handle.read()) + if keys is not None: + data = {k: v for k, v in data.items() if k in keys} + options.echo_dictionary(data, fmt=fmt) diff --git a/aiida_crystal17/cmndline/options.py b/aiida_crystal17/cmndline/options.py index 04818ec..1ddcbf7 100644 --- a/aiida_crystal17/cmndline/options.py +++ b/aiida_crystal17/cmndline/options.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 
-*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """Common click options for verdi commands""" from collections import OrderedDict import json diff --git a/aiida_crystal17/cmndline/symmetry.py b/aiida_crystal17/cmndline/symmetry.py index b25422e..2079b19 100644 --- a/aiida_crystal17/cmndline/symmetry.py +++ b/aiida_crystal17/cmndline/symmetry.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
import click from jsonextended import edict from aiida.orm import load_node @@ -12,8 +27,7 @@ def symmetry(): @symmetry.command() -@click.option( - '--symmetries', '-s', is_flag=True, help="show full symmetry operations") +@click.option('--symmetries', '-s', is_flag=True, help='show full symmetry operations') @click.argument('pk', type=int) @decorators.with_dbenv() def show(pk, symmetries): @@ -21,8 +35,7 @@ def show(pk, symmetries): node = load_node(pk) if not isinstance(node, DataFactory('crystal17.symmetry')): - click.echo( - "The node was not of type 'crystal17.symmetry'", err=True) + click.echo("The node was not of type 'crystal17.symmetry'", err=True) elif symmetries: edict.pprint(node.data, print_func=click.echo, round_floats=5) else: diff --git a/aiida_crystal17/common/__init__.py b/aiida_crystal17/common/__init__.py index 8062dbf..2a6b9a3 100644 --- a/aiida_crystal17/common/__init__.py +++ b/aiida_crystal17/common/__init__.py @@ -1,5 +1,19 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
""" common functionality """ -from .atoms import ATOMIC_NUM2SYMBOL # noqa: F401 +from .atoms import SYMBOLS # noqa: F401 from .dict_funcs import ( # noqa: F401 - get_keys, flatten_dict, unflatten_dict, display_json, - map_nested_dicts, recursive_round) + get_keys, flatten_dict, unflatten_dict, display_json, map_nested_dicts, recursive_round) diff --git a/aiida_crystal17/common/atoms.py b/aiida_crystal17/common/atoms.py index eaf0be6..3a0aa43 100644 --- a/aiida_crystal17/common/atoms.py +++ b/aiida_crystal17/common/atoms.py @@ -1,4 +1,21 @@ -ATOMIC_NUM2SYMBOL = { +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
+from collections import OrderedDict + +SYMBOLS = { 1: 'H', 2: 'He', 3: 'Li', @@ -80,3 +97,603 @@ 108: 'Hs', 109: 'Mt' } + +SYMBOLS_R = {v: k for k, v in SYMBOLS.items()} + +NAMES = { + 1: 'Hydrogen', + 2: 'Helium', + 3: 'Lithium', + 4: 'Beryllium', + 5: 'Boron', + 6: 'Carbon', + 7: 'Nitrogen', + 8: 'Oxygen', + 9: 'Fluorine', + 10: 'Neon', + 11: 'Sodium', + 12: 'Magnesium', + 13: 'Aluminum', + 14: 'Silicon', + 15: 'Phosphorus', + 16: 'Sulfur', + 17: 'Chlorine', + 18: 'Argon', + 19: 'Potassium', + 20: 'Calcium', + 21: 'Scandium', + 22: 'Titanium', + 23: 'Vanadium', + 24: 'Chromium', + 25: 'Manganese', + 26: 'Iron', + 27: 'Cobalt', + 28: 'Nickel', + 29: 'Copper', + 30: 'Zinc', + 31: 'Gallium', + 32: 'Germanium', + 33: 'Arsenic', + 34: 'Selenium', + 35: 'Bromine', + 36: 'Krypton', + 37: 'Rubidium', + 38: 'Strontium', + 39: 'Yttrium', + 40: 'Zirconium', + 41: 'Niobium', + 42: 'Molybdenum', + 43: 'Technetium', + 44: 'Ruthenium', + 45: 'Rhodium', + 46: 'Palladium', + 47: 'Silver', + 48: 'Cadmium', + 49: 'Indium', + 50: 'Tin', + 51: 'Antimony', + 52: 'Tellurium', + 53: 'Iodine', + 54: 'Xenon', + 55: 'Cesium', + 56: 'Barium', + 57: 'Lanthanum', + 58: 'Cerium', + 59: 'Praseodymium', + 60: 'Neodymium', + 61: 'Promethium', + 62: 'Samarium', + 63: 'Europium', + 64: 'Gadolinium', + 65: 'Terbium', + 66: 'Dysprosium', + 67: 'Holmium', + 68: 'Erbium', + 69: 'Thulium', + 70: 'Ytterbium', + 71: 'Lutetium', + 72: 'Hafnium', + 73: 'Tantalum', + 74: 'Tungsten', + 75: 'Rhenium', + 76: 'Osmium', + 77: 'Iridium', + 78: 'Platinum', + 79: 'Gold', + 80: 'Mercury', + 81: 'Thallium', + 82: 'Lead', + 83: 'Bismuth', + 84: 'Polonium', + 85: 'Astatine', + 86: 'Radon', + 87: 'Francium', + 88: 'Radium', + 89: 'Actinium', + 90: 'Thorium', + 91: 'Protactinium', + 92: 'Uranium', + 93: 'Neptunium', + 94: 'Plutonium', + 95: 'Americium', + 96: 'Curium', + 97: 'Berkelium', + 98: 'Californium', + 99: 'Einsteinium', + 100: 'Fermium', + 101: 'Mendelevium', + 102: 'Nobelium', + 103: 'Lawrencium', + 104: 
'Rutherfordium', + 105: 'Dubnium', + 106: 'Seaborgium', + 107: 'Bohrium', + 108: 'Hassium', + 109: 'Meitnerium', + 110: 'Darmstadtium', + 111: 'Roentgenium', + 112: 'Copernium', + 113: 'Nihonium', + 114: 'Flerovium', + 115: 'Moscovium', + 116: 'Livermorium', + 117: 'Tennessine', + 118: 'Oganesson' +} + +GAUSSIAN_ORBITALS = OrderedDict((('S', 1), ('P', 3), ('SP', 4), ('D', 5), ('F', 7))) # TODO G + +ELECTRON_CONFIGURATIONS = { + 1: { + 'inner': None, + 'outer': (('1s', 1),) + }, + 2: { + 'inner': None, + 'outer': (('1s', 2),) + }, + 3: { + 'inner': 2, + 'outer': (('2s', 1),) + }, + 4: { + 'inner': 2, + 'outer': (('2s', 2),) + }, + 5: { + 'inner': 2, + 'outer': (('2s', 2), ('2p', 1)) + }, + 6: { + 'inner': 2, + 'outer': (('2s', 2), ('2p', 2)) + }, + 7: { + 'inner': 2, + 'outer': (('2s', 2), ('2p', 3)) + }, + 8: { + 'inner': 2, + 'outer': (('2s', 2), ('2p', 4)) + }, + 9: { + 'inner': 2, + 'outer': (('2s', 2), ('2p', 5)) + }, + 10: { + 'inner': 2, + 'outer': (('2s', 2), ('2p', 6)) + }, + 11: { + 'inner': 10, + 'outer': (('3s', 1),) + }, + 12: { + 'inner': 10, + 'outer': (('3s', 2),) + }, + 13: { + 'inner': 10, + 'outer': (('3s', 2), ('3p', 1)) + }, + 14: { + 'inner': 10, + 'outer': (('3s', 2), ('3p', 2)) + }, + 15: { + 'inner': 10, + 'outer': (('3s', 2), ('3p', 3)) + }, + 16: { + 'inner': 10, + 'outer': (('3s', 2), ('3p', 4)) + }, + 17: { + 'inner': 10, + 'outer': (('3s', 2), ('3p', 5)) + }, + 18: { + 'inner': 10, + 'outer': (('3s', 2), ('3p', 6)) + }, + 19: { + 'inner': 18, + 'outer': (('4s', 1),) + }, + 20: { + 'inner': 18, + 'outer': (('4s', 2),) + }, + 21: { + 'inner': 18, + 'outer': (('3d', 1), ('4s', 2)) + }, + 22: { + 'inner': 18, + 'outer': (('3d', 2), ('4s', 2)) + }, + 23: { + 'inner': 18, + 'outer': (('3d', 3), ('4s', 2)) + }, + 24: { + 'inner': 18, + 'outer': (('3d', 5), ('4s', 1)) + }, + 25: { + 'inner': 18, + 'outer': (('3d', 5), ('4s', 2)) + }, + 26: { + 'inner': 18, + 'outer': (('3d', 6), ('4s', 2)) + }, + 27: { + 'inner': 18, + 'outer': (('3d', 7), 
('4s', 2)) + }, + 28: { + 'inner': 18, + 'outer': (('3d', 8), ('4s', 2)) + }, + 29: { + 'inner': 18, + 'outer': (('3d', 10), ('4s', 1)) + }, + 30: { + 'inner': 18, + 'outer': (('3d', 10), ('4s', 2)) + }, + 31: { + 'inner': 18, + 'outer': (('3d', 10), ('4s', 2), ('4p', 1)) + }, + 32: { + 'inner': 18, + 'outer': (('3d', 10), ('4s', 2), ('4p', 2)) + }, + 33: { + 'inner': 18, + 'outer': (('3d', 10), ('4s', 2), ('4p', 3)) + }, + 34: { + 'inner': 18, + 'outer': (('3d', 10), ('4s', 2), ('4p', 4)) + }, + 35: { + 'inner': 18, + 'outer': (('3d', 10), ('4s', 2), ('4p', 5)) + }, + 36: { + 'inner': 18, + 'outer': (('3d', 10), ('4s', 2), ('4p', 6)) + }, + 37: { + 'inner': 36, + 'outer': (('5s', 1),) + }, + 38: { + 'inner': 36, + 'outer': (('5s', 2),) + }, + 39: { + 'inner': 36, + 'outer': (('4d', 1), ('5s', 2)) + }, + 40: { + 'inner': 36, + 'outer': (('4d', 2), ('5s', 2)) + }, + 41: { + 'inner': 36, + 'outer': (('4d', 4), ('5s', 1)) + }, + 42: { + 'inner': 36, + 'outer': (('4d', 5), ('5s', 1)) + }, + 43: { + 'inner': 36, + 'outer': (('4d', 5), ('5s', 2)) + }, + 44: { + 'inner': 36, + 'outer': (('4d', 7), ('5s', 1)) + }, + 45: { + 'inner': 36, + 'outer': (('4d', 8), ('5s', 1)) + }, + 46: { + 'inner': 36, + 'outer': (('4d', 10),) + }, + 47: { + 'inner': 36, + 'outer': (('4d', 10), ('5s', 1)) + }, + 48: { + 'inner': 36, + 'outer': (('4d', 10), ('5s', 2)) + }, + 49: { + 'inner': 36, + 'outer': (('4d', 10), ('5s', 2), ('5p', 1)) + }, + 50: { + 'inner': 36, + 'outer': (('4d', 10), ('5s', 2), ('5p', 2)) + }, + 51: { + 'inner': 36, + 'outer': (('4d', 10), ('5s', 2), ('5p', 3)) + }, + 52: { + 'inner': 36, + 'outer': (('4d', 10), ('5s', 2), ('5p', 4)) + }, + 53: { + 'inner': 36, + 'outer': (('4d', 10), ('5s', 2), ('5p', 5)) + }, + 54: { + 'inner': 36, + 'outer': (('4d', 10), ('5s', 2), ('5p', 6)) + }, + 55: { + 'inner': 54, + 'outer': (('6s', 1),) + }, + 56: { + 'inner': 54, + 'outer': (('6s', 2),) + }, + 57: { + 'inner': 54, + 'outer': (('5d', 1), ('6s', 2)) + }, + 58: { + 'inner': 54, + 
'outer': (('4f', 1), ('5d', 1), ('6s', 2)) + }, + 59: { + 'inner': 54, + 'outer': (('4f', 3), ('6s', 2)) + }, + 60: { + 'inner': 54, + 'outer': (('4f', 4), ('6s', 2)) + }, + 61: { + 'inner': 54, + 'outer': (('4f', 5), ('6s', 2)) + }, + 62: { + 'inner': 54, + 'outer': (('4f', 6), ('6s', 2)) + }, + 63: { + 'inner': 54, + 'outer': (('4f', 7), ('6s', 2)) + }, + 64: { + 'inner': 54, + 'outer': (('4f', 7), ('5d', 1), ('6s', 2)) + }, + 65: { + 'inner': 54, + 'outer': (('4f', 9), ('6s', 2)) + }, + 66: { + 'inner': 54, + 'outer': (('4f', 10), ('6s', 2)) + }, + 67: { + 'inner': 54, + 'outer': (('4f', 11), ('6s', 2)) + }, + 68: { + 'inner': 54, + 'outer': (('4f', 12), ('6s', 2)) + }, + 69: { + 'inner': 54, + 'outer': (('4f', 13), ('6s', 2)) + }, + 70: { + 'inner': 54, + 'outer': (('4f', 14), ('6s', 2)) + }, + 71: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 1), ('6s', 2)) + }, + 72: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 2), ('6s', 2)) + }, + 73: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 3), ('6s', 2)) + }, + 74: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 4), ('6s', 2)) + }, + 75: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 5), ('6s', 2)) + }, + 76: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 6), ('6s', 2)) + }, + 77: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 7), ('6s', 2)) + }, + 78: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 9), ('6s', 1)) + }, + 79: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 10), ('6s', 1)) + }, + 80: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 10), ('6s', 2)) + }, + 81: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 10), ('6s', 2), ('6p', 1)) + }, + 82: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 10), ('6s', 2), ('6p', 2)) + }, + 83: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 10), ('6s', 2), ('6p', 3)) + }, + 84: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 10), ('6s', 2), ('6p', 4)) + }, + 85: { + 'inner': 54, + 'outer': (('4f', 14), ('5d', 10), ('6s', 2), ('6p', 5)) + }, + 86: { + 'inner': 
54, + 'outer': (('4f', 14), ('5d', 10), ('6s', 2), ('6p', 6)) + }, + 87: { + 'inner': 86, + 'outer': (('7s', 1),) + }, + 88: { + 'inner': 86, + 'outer': (('7s', 2),) + }, + 89: { + 'inner': 86, + 'outer': (('6d', 1), ('7s', 2)) + }, + 90: { + 'inner': 86, + 'outer': (('6d', 2), ('7s', 2)) + }, + 91: { + 'inner': 86, + 'outer': (('5f', 2), ('6d', 1), ('7s', 2)) + }, + 92: { + 'inner': 86, + 'outer': (('5f', 3), ('6d', 1), ('7s', 2)) + }, + 93: { + 'inner': 86, + 'outer': (('5f', 4), ('6d', 1), ('7s', 2)) + }, + 94: { + 'inner': 86, + 'outer': (('5f', 6), ('7s', 2)) + }, + 95: { + 'inner': 86, + 'outer': (('5f', 7), ('7s', 2)) + }, + 96: { + 'inner': 86, + 'outer': (('5f', 7), ('6d', 1), ('7s', 2)) + }, + 97: { + 'inner': 86, + 'outer': (('5f', 9), ('7s', 2)) + }, + 98: { + 'inner': 86, + 'outer': (('5f', 10), ('7s', 2)) + }, + 99: { + 'inner': 86, + 'outer': (('5f', 11), ('7s', 2)) + }, + 100: { + 'inner': 86, + 'outer': (('5f', 12), ('7s', 2)) + }, + 101: { + 'inner': 86, + 'outer': (('5f', 13), ('7s', 2)) + }, + 102: { + 'inner': 86, + 'outer': (('5f', 14), ('7s', 2)) + }, + 103: { + 'inner': 86, + 'outer': (('5f', 14), ('7s', 2), ('7p', 1)) + }, + 104: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 2), ('7s', 2)) + }, + 105: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 3), ('7s', 2)) + }, + 106: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 4), ('7s', 2)) + }, + 107: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 5), ('7s', 2)) + }, + 108: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 6), ('7s', 2)) + }, + 109: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 7), ('7s', 2)) + }, + 110: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 9), ('7s', 1)) + }, + 111: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 10), ('7s', 1)) + }, + 112: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 10), ('7s', 2)) + }, + 113: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 10), ('7s', 2), ('7p', 1)) + }, + 114: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 10), ('7s', 
2), ('7p', 2)) + }, + 115: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 10), ('7s', 2), ('7p', 3)) + }, + 116: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 10), ('7s', 2), ('7p', 4)) + }, + 117: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 10), ('7s', 2), ('7p', 5)) + }, + 118: { + 'inner': 86, + 'outer': (('5f', 14), ('6d', 10), ('7s', 2), ('7p', 6)) + } +} diff --git a/aiida_crystal17/common/dict_funcs.py b/aiida_crystal17/common/dict_funcs.py index ddf19a8..a861ec1 100644 --- a/aiida_crystal17/common/dict_funcs.py +++ b/aiida_crystal17/common/dict_funcs.py @@ -1,14 +1,29 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
from collections import Mapping import json from textwrap import wrap from jsonextended import edict -def unflatten_dict(indict, delimiter="."): +def unflatten_dict(indict, delimiter='.'): return edict.unflatten(indict, key_as_tuple=False, delim=delimiter) -def flatten_dict(indict, delimiter="."): +def flatten_dict(indict, delimiter='.'): return edict.flatten(indict, key_as_tuple=False, sep=delimiter) @@ -27,8 +42,7 @@ def get_keys(dct, keys, default=None, raise_error=False): subdct = subdct[key] except (KeyError, IndexError): if raise_error: - raise ValueError("could not find key path: {}".format( - keys[0:i + 1])) + raise ValueError('could not find key path: {}'.format(keys[0:i + 1])) else: return default return subdct @@ -37,8 +51,7 @@ def get_keys(dct, keys, default=None, raise_error=False): def map_nested_dicts(ob, func, apply_lists=False): """ map a function on to all values of a nested dictionary """ if isinstance(ob, Mapping): - return {k: map_nested_dicts(v, func, apply_lists) - for k, v in ob.items()} + return {k: map_nested_dicts(v, func, apply_lists) for k, v in ob.items()} elif apply_lists and isinstance(ob, (list, tuple)): return [map_nested_dicts(v, func, apply_lists) for v in ob] else: @@ -57,6 +70,7 @@ def _round(value): class BuilderEncoder(json.JSONEncoder): + def default(self, obj): try: return dict(obj) @@ -68,6 +82,4 @@ def default(self, obj): def display_json(builder, indent=2): """ pretty print a dictionary object in a Jupyter Notebook """ from IPython.display import display_markdown - return display_markdown( - "```json\n{}\n```".format( - json.dumps(builder, cls=BuilderEncoder, indent=indent)), raw=True) + return display_markdown('```json\n{}\n```'.format(json.dumps(builder, cls=BuilderEncoder, indent=indent)), raw=True) diff --git a/aiida_crystal17/common/kpoints.py b/aiida_crystal17/common/kpoints.py index bdb7c99..4304005 100644 --- a/aiida_crystal17/common/kpoints.py +++ b/aiida_crystal17/common/kpoints.py @@ -1,3 +1,20 @@ 
+#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. + + def create_kpoints_from_distance(structure, distance, force_parity=True): """ Generate a uniformly spaced kpoint mesh for a given structure where the spacing between kpoints in reciprocal diff --git a/aiida_crystal17/common/mapping.py b/aiida_crystal17/common/mapping.py index e5aebec..ead6543 100644 --- a/aiida_crystal17/common/mapping.py +++ b/aiida_crystal17/common/mapping.py @@ -1,4 +1,18 @@ +#!/usr/bin/env python # -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
from __future__ import absolute_import from collections import Mapping @@ -42,11 +56,12 @@ def update_mapping(original, source): source = source.get_dict() for key, value in source.items(): - if ( - key in original and - (isinstance(value, Mapping) or isinstance(value, Dict)) and - (isinstance(original[key], Mapping) or isinstance(original[key], Dict)) - ): + if key not in original: + original[key] = value + continue + mappable_value = (isinstance(value, Mapping) or isinstance(value, Dict)) + mappable_original = (isinstance(original[key], Mapping) or isinstance(original[key], Dict)) + if mappable_value and mappable_original: original[key] = update_mapping(original[key], value) else: original[key] = value diff --git a/aiida_crystal17/common/parsing.py b/aiida_crystal17/common/parsing.py index 3f1a198..8574236 100644 --- a/aiida_crystal17/common/parsing.py +++ b/aiida_crystal17/common/parsing.py @@ -1,10 +1,25 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
from decimal import Decimal import re -def convert_units(value, in_units, out_units, standard="codata2014"): +def convert_units(value, in_units, out_units, standard='codata2014'): # TODO use units.yaml - if in_units == "hartree" and out_units == "eV": + if in_units == 'hartree' and out_units == 'eV': return value * 27.21138602 @@ -38,11 +53,7 @@ def split_numbers(string, as_decimal=False): [0.001, -2.0] """ - _match_number = re.compile( - '-?\\ *[0-9]+\\.?[0-9]*(?:[Ee]\\ *[+-]?\\ *[0-9]+)?') - string = string.replace(" .", " 0.") - string = string.replace("-.", "-0.") - return [ - Decimal(s) if as_decimal else float(s) - for s in re.findall(_match_number, string) - ] + _match_number = re.compile('-?\\ *[0-9]+\\.?[0-9]*(?:[Ee]\\ *[+-]?\\ *[0-9]+)?') + string = string.replace(' .', ' 0.') + string = string.replace('-.', '-0.') + return [Decimal(s) if as_decimal else float(s) for s in re.findall(_match_number, string)] diff --git a/aiida_crystal17/data/__init__.py b/aiida_crystal17/data/__init__.py index 592b1f1..089c347 100644 --- a/aiida_crystal17/data/__init__.py +++ b/aiida_crystal17/data/__init__.py @@ -1,4 +1,18 @@ +#!/usr/bin/env python # -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
""" Data types provided by plugin diff --git a/aiida_crystal17/data/basis_set.py b/aiida_crystal17/data/basis_set.py index b73f542..224604b 100644 --- a/aiida_crystal17/data/basis_set.py +++ b/aiida_crystal17/data/basis_set.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ a data type to store CRYSTAL17 basis sets """ from __future__ import absolute_import @@ -11,8 +26,9 @@ from ruamel.yaml import YAML from aiida.common.utils import classproperty from aiida.orm import Data, Str -from aiida_crystal17.common import ( - flatten_dict, unflatten_dict, ATOMIC_NUM2SYMBOL) +from aiida_crystal17.common import flatten_dict, unflatten_dict +from aiida_crystal17.common.atoms import SYMBOLS_R +from aiida_crystal17.parsers.raw.parse_bases import parse_bsets_stdin BASISGROUP_TYPE = 'crystal17.basisset' @@ -38,122 +54,21 @@ def _retrieve_basis_sets(files, stop_if_existing): if existing_basis is None: # return the basis set data instances, not stored - basisset, created = BasisSetData.get_or_create( - f, use_first=True, store_basis=False) + basisset, created = BasisSetData.get_or_create(f, use_first=True, store_basis=False) # to check whether only one basis set per element exists # NOTE: actually, created has the meaning of "to_be_created" basis_and_created.append((basisset, created)) else: if stop_if_existing: - raise ValueError("A Basis Set with identical MD5 to " - " {} cannot be added with stop_if_existing" - "".format(f)) + raise ValueError('A Basis Set with 
identical MD5 to ' + ' {} cannot be added with stop_if_existing' + ''.format(f)) existing_basis = existing_basis[0] basis_and_created.append((existing_basis, False)) return basis_and_created -def _parse_first_line(line, fname): - """ parse the first line of the basis set - - :param line: the line string - :param fname: the filename string - :return: (atomic_number, basis_type, num_shells) - """ - from aiida.common.exceptions import ParsingError - - # first line should contain the atomic number as the first argument - first_line = line.strip().split() - if not len(first_line) == 2: - raise ParsingError( - "The first line should contain only two fields: '{}' for file {}". - format(line, fname)) - - atomic_number_str = first_line[0] - - if not atomic_number_str.isdigit(): - raise ParsingError( - "The first field should be the atomic number '{}' for file {}". - format(line, fname)) - anumber = int(atomic_number_str) - atomic_number = None - basis_type = None - if anumber < 99: - atomic_number = anumber - basis_type = "all-electron" - - elif 200 < anumber < 999: - raise NotImplementedError( - "valence electron basis sets not currently supported") - # TODO support valence electron basis sets not currently supported - # (ECP must also be defined) - # atomic_number = anumber % 100 - # basis_type = "valence-electron" - - elif anumber > 1000: - atomic_number = anumber % 100 - basis_type = "all-electron" - - if atomic_number is None: - raise ParsingError("Illegal atomic number {} for file {}".format( - anumber, fname)) - - num_shells_str = first_line[1] - if not num_shells_str.isdigit(): - raise ParsingError( - "The second field should be the number of shells {} for file {}". 
- format(line, fname)) - num_shells = int(num_shells_str) - - # we would deal with different numbering at .d12 creation time - newline = "{0} {1}\n".format( - atomic_number if basis_type == "all-electron" else 200 + atomic_number, - num_shells) - - return atomic_number, basis_type, num_shells, newline - - -def validate_basis_string(instr): - """ validate that only one basis set is present, - in a recognised format - - :param instr: content of basis set - :return: passed - """ - lines = instr.strip().splitlines() - indx = 0 - - try: - anum, nshells = lines[indx].strip().split( - ) # pylint: disable=unused-variable - anum, nshells = int(anum), int(nshells) - except ValueError: - raise ValueError("expected 'anum nshells': {}".format( - lines[indx].strip())) - for i in range(nshells): - indx += 1 - try: - btype, stype, nfuncs, charge, scale = lines[indx].strip().split() - btype, stype, nfuncs = [int(i) for i in [btype, stype, nfuncs]] - charge, scale = [float(i) for i in [charge, scale] - ] # pylint: disable=unused-variable - except ValueError: - raise ValueError( - "expected 'btype, stype, nfuncs, charge, scale': {}".format( - lines[indx].strip())) - if btype == 0: - for _ in range(nfuncs): - indx += 1 - - if len(lines) > indx + 1: - raise ValueError( - "the basis set string contains more than one basis set " - "or has trailing empty lines:\n{}".format(instr)) - - return True - - def parse_basis(fname): """get relevant information from the basis file @@ -185,9 +100,7 @@ def parse_basis(fname): in_yaml = False yaml_lines = [] - protected_keys = [ - "atomic_number", "num_shells", "element", "basis_type", "content" - ] + protected_keys = ['atomic_number', 'num_shells', 'element', 'basis_type', 'content'] parsing_data = False content = [] @@ -196,28 +109,25 @@ def parse_basis(fname): fname = fname.name except AttributeError: with io.open(fname, encoding='utf8') as f: - contentlines = f.readlines() + contentlines = f.read().splitlines() for line in contentlines: # ignore 
commented and blank lines - if line.strip().startswith("#") or not line.strip(): + if line.strip().startswith('#') or not line.strip(): continue - if line.strip() == "---" and not parsing_data: + if line.strip() == '---' and not parsing_data: if not in_yaml: in_yaml = True continue else: yaml = YAML(typ='safe') - head_data = yaml.load("".join(yaml_lines)) + head_data = yaml.load('\n'.join(yaml_lines)) head_data = {} if not head_data else head_data if not isinstance(head_data, dict): - raise ParsingError( - "the header data could not be read for file: {}". - format(fname)) + raise ParsingError('the header data could not be read for file: {}'.format(fname)) if set(head_data.keys()).intersection(protected_keys): - raise ParsingError( - "the header data contained a forbidden key(s) " - "{} for file: {}".format(protected_keys, fname)) + raise ParsingError('the header data contained a forbidden key(s) ' + '{} for file: {}'.format(protected_keys, fname)) meta_data = head_data in_yaml = False parsing_data = True @@ -228,24 +138,29 @@ def parse_basis(fname): parsing_data = True - if not content: - (atomic_number, basis_type, - num_shells, line) = _parse_first_line(line, fname) + content.append(line.strip()) - meta_data["atomic_number"] = atomic_number - meta_data["element"] = ATOMIC_NUM2SYMBOL[atomic_number] - meta_data["basis_type"] = basis_type - meta_data["num_shells"] = num_shells + data = parse_bsets_stdin('\n'.join(content), isolated=True) + if len(data) > 1: + raise ParsingError('the basis set string contains more than one basis set: {}'.format(list(data.keys()))) + atomic_symbol = list(data.keys())[0] - content.append(line) + meta_data['atomic_number'] = atomic_number = SYMBOLS_R[atomic_symbol] + meta_data['element'] = atomic_symbol + meta_data['basis_type'] = basis_type = data[atomic_symbol]['type'] + meta_data['num_shells'] = num_shells = len(data[atomic_symbol]['bs']) + meta_data['orbital_types'] = [o['type'] for o in data[atomic_symbol]['bs']] - if not content: 
+ # the input atomic number may be > 100, but we should standardise this in the stored file + first_line = content[0].strip().split() + if len(first_line) != 2 or first_line[1] != str(num_shells): raise ParsingError( - "The basis set file contains no content: {}".format(fname)) + "The first line should contain only the atomic id and num shells ({}): '{}' for file {}".format( + num_shells, line, fname)) + newline = '{0} {1}'.format(atomic_number if basis_type == 'all-electron' else 200 + atomic_number, num_shells) + content[0] = newline - validate_basis_string("".join(content)) - - return meta_data, "".join(content) + return meta_data, '\n'.join(content) def md5_from_string(string, encoding='utf-8'): @@ -312,9 +227,8 @@ def set_file(self, filepath): """ # to keep things simple, # we only allow one file to ever be set for one class instance - if "filename" in list(self.attributes_keys()): - raise ValueError( - "a file has already been set for this BasisSetData instance") + if 'filename' in list(self.attributes_keys()): + raise ValueError('a file has already been set for this BasisSetData instance') metadata, content = parse_basis(filepath) md5sum = md5_from_string(content) @@ -327,12 +241,11 @@ def set_file(self, filepath): # store the rest of the file content as a file in the file repository filename = os.path.basename(filepath) with tempfile.NamedTemporaryFile() as f: - with io.open(f.name, "w", encoding='utf8') as fobj: + with io.open(f.name, 'w', encoding='utf8') as fobj: fobj.writelines(content) super(BasisSetData, self).put_object_from_file( - path=f.name, key=filename, - mode='w', encoding='utf8', force=False) + path=f.name, key=filename, mode='w', encoding='utf8', force=False) self.set_attribute('filename', filename) @@ -381,6 +294,10 @@ def element(self): """return the element symbol associated with the basis set""" return self.get_attribute('element', None) + def get_data(self): + """ return the basis set content, parsed to a JSON format""" + return 
parse_bsets_stdin(self.content, isolated=True)[self.element] + @classmethod def get_or_create(cls, filepath, use_first=False, store_basis=True): """ @@ -399,9 +316,11 @@ def get_or_create(cls, filepath, use_first=False, store_basis=True): or False if the object was retrieved from the DB. """ if not os.path.isabs(filepath): - raise ValueError("filepath must be an absolute path") + raise ValueError('filepath must be an absolute path') _, content = parse_basis(filepath) + print() + print(content) md5sum = md5_from_string(content) basissets = cls.from_md5(md5sum) @@ -417,10 +336,9 @@ def get_or_create(cls, filepath, use_first=False, store_basis=True): if use_first: return (basissets[0], False) else: - raise ValueError("More than one copy of a basis set " - "with the same MD5 has been found in the " - "DB. pks={}".format(",".join( - [str(i.pk) for i in basissets]))) + raise ValueError('More than one copy of a basis set ' + 'with the same MD5 has been found in the ' + 'DB. pks={}'.format(','.join([str(i.pk) for i in basissets]))) return basissets[0], False def store(self, with_transaction=True, use_cache=None): @@ -447,7 +365,7 @@ def store(self, with_transaction=True, use_cache=None): return self if self.md5sum is None: - raise ValidationError("No valid Basis Set was passed!") + raise ValidationError('No valid Basis Set was passed!') with self.open('r') as handle: metadata, content = parse_basis(handle) @@ -457,8 +375,7 @@ def store(self, with_transaction=True, use_cache=None): self.set_attribute(key, val) self.set_attribute('md5', md5sum) - return super(BasisSetData, self).store( - with_transaction=with_transaction, use_cache=use_cache) + return super(BasisSetData, self).store(with_transaction=with_transaction, use_cache=use_cache) def _validate(self): from aiida.common.exceptions import ValidationError, ParsingError @@ -472,25 +389,21 @@ def _validate(self): objects = self.list_object_names() if [filename] != objects: - raise ValidationError("The list of files in the 
folder does not " + raise ValidationError('The list of files in the folder does not ' "match the 'filename' attribute. " - "_filename='{}', content: {}".format( - filename, self.list_object_names())) + "_filename='{}', content: {}".format(filename, self.list_object_names())) try: with self.open('r') as handle: metadata, content = parse_basis(handle) - except ParsingError: - raise ValidationError("The file '{}' could not be " - "parsed") + except (ParsingError, IOError, NotImplementedError) as err: + raise ValidationError("The file '{}' could not be " 'parsed: {}'.format(err)) md5 = md5_from_string(content) try: element = metadata['element'] except KeyError: - raise ValidationError( - "No 'element' could be parsed in the " - "BasisSet file") + raise ValidationError("No 'element' could be parsed in the " 'BasisSet file') try: attr_element = self.get_attribute('element') @@ -504,12 +417,10 @@ def _validate(self): if attr_element != element: raise ValidationError("Attribute 'element' says '{}' but '{}' was " - "parsed instead.".format( - attr_element, element)) + 'parsed instead.'.format(attr_element, element)) if attr_md5 != md5: - raise ValidationError("Attribute 'md5' says '{}' but '{}' was " - "parsed instead.".format(attr_md5, md5)) + raise ValidationError("Attribute 'md5' says '{}' but '{}' was " 'parsed instead.'.format(attr_md5, md5)) @classproperty def basisfamily_type_string(cls): @@ -521,10 +432,7 @@ def get_basis_family_names(self): """ from aiida.orm import Group - return [ - _.name for _ in Group.query( - nodes=self, type_string=self.basisfamily_type_string) - ] + return [_.name for _ in Group.query(nodes=self, type_string=self.basisfamily_type_string)] @classmethod def get_basis_group(cls, group_name): @@ -533,8 +441,7 @@ def get_basis_group(cls, group_name): """ from aiida.orm import Group - return Group.objects.get( - label=group_name, type_string=cls.basisfamily_type_string) + return Group.objects.get(label=group_name, 
type_string=cls.basisfamily_type_string) @classmethod def get_basis_group_map(cls, group_name): @@ -562,9 +469,8 @@ def get_basis_group_map(cls, group_name): for node in family.nodes: if isinstance(node, cls): if node.element in family_bases: - raise MultipleObjectsError( - "More than one BasisSetData for element {} found in " - "family {}".format(node.element, group_name)) + raise MultipleObjectsError('More than one BasisSetData for element {} found in ' + 'family {}'.format(node.element, group_name)) family_bases[node.element] = node return family_bases @@ -592,24 +498,21 @@ def get_basis_groups(cls, filter_elements=None, user=None): query.append(Group, filters=filters, tag='group', project='*') if user is not None: - query.append(User, filters={ - 'email': {'==': user}}, with_group='group') + query.append(User, filters={'email': {'==': user}}, with_group='group') if isinstance(filter_elements, six.string_types): filter_elements = [filter_elements] if filter_elements is not None: # actual_filter_elements = [_ for _ in filter_elements] - query.append(BasisSetData, filters={'attributes.element': { - 'in': filter_elements}}, with_group='group') + query.append(BasisSetData, filters={'attributes.element': {'in': filter_elements}}, with_group='group') query.order_by({Group: {'id': 'asc'}}) query.distinct() return [_[0] for _ in query.all()] @classmethod - def get_basissets_from_structure(cls, structure, family_name, - by_kind=False): + def get_basissets_from_structure(cls, structure, family_name, by_kind=False): """ Given a family name (a BasisSetFamily group in the DB) and an AiiDA structure, return a dictionary associating each element or kind name @@ -629,9 +532,7 @@ def get_basissets_from_structure(cls, structure, family_name, for kind in structure.kinds: symbol = kind.symbol if symbol not in family_bases: - raise NotExistent( - "No BasisSetData for element {} found in family {}".format( - symbol, family_name)) + raise NotExistent('No BasisSetData for element {} 
found in family {}'.format(symbol, family_name)) if by_kind: basis_list[kind.name] = family_bases[symbol] else: @@ -651,8 +552,7 @@ def get_basissets_by_kind(cls, structure, family_name): from collections import defaultdict # A dict {kind_name: basis_object} - kind_basis_dict = cls.get_basissets_from_structure( - structure, family_name, by_kind=True) + kind_basis_dict = cls.get_basissets_from_structure(structure, family_name, by_kind=True) # We have to group the species by basis, I use the basis PK # basis_dict will just map PK->basis_object @@ -704,10 +604,9 @@ def prepare_and_validate_inputs(cls, structure, basissets=None, basis_family=Non elements_required = set([kind.symbol for kind in structure.kinds]) if set(basissets.keys()) != elements_required: - err_msg = ( - "Mismatch between the defined basissets and the list of " - "elements of the structure. Basissets: {}; elements: {}". - format(set(basissets.keys()), elements_required)) + err_msg = ('Mismatch between the defined basissets and the list of ' + 'elements of the structure. Basissets: {}; elements: {}'.format( + set(basissets.keys()), elements_required)) raise ValueError(err_msg) return basissets @@ -719,7 +618,7 @@ def upload_basisset_family(cls, group_name, group_description, stop_if_existing=True, - extension=".basis", + extension='.basis', dry_run=False): """ Upload a set of Basis Set files in a given group. 
@@ -740,7 +639,7 @@ def upload_basisset_family(cls, from aiida.common.exceptions import UniquenessError if not os.path.isdir(folder): - raise ValueError("folder must be a directory") + raise ValueError('folder must be a directory') # only files, and only those ending with specified exension; # go to the real file if it is a symlink @@ -756,14 +655,12 @@ def upload_basisset_family(cls, automatic_user = User.objects.get_default() group, group_created = Group.objects.get_or_create( - label=group_name, type_string=BASISGROUP_TYPE, - user=automatic_user) + label=group_name, type_string=BASISGROUP_TYPE, user=automatic_user) if group.user.email != automatic_user.email: - raise UniquenessError( - "There is already a BasisFamily group with name {}" - ", but it belongs to user {}, therefore you " - "cannot modify it".format(group_name, group.user.email)) + raise UniquenessError('There is already a BasisFamily group with name {}' + ', but it belongs to user {}, therefore you ' + 'cannot modify it'.format(group_name, group.user.email)) # Always update description, even if the group already existed group.description = group_description @@ -782,18 +679,14 @@ def upload_basisset_family(cls, continue elements.append((aiida_n.element, aiida_n.md5sum)) - elements = set( - elements) # Discard elements with the same MD5, that would + elements = set(elements) # Discard elements with the same MD5, that would # not be stored twice elements_names = [e[0] for e in elements] if not len(elements_names) == len(set(elements_names)): - duplicates = set( - [x for x in elements_names if elements_names.count(x) > 1]) - duplicates_string = ", ".join(i for i in duplicates) - raise UniquenessError( - ("More than one Basis found for the elements: " - "{}").format(duplicates_string)) + duplicates = set([x for x in elements_names if elements_names.count(x) > 1]) + duplicates_string = ', '.join(i for i in duplicates) + raise UniquenessError(('More than one Basis found for the elements: ' 
'{}').format(duplicates_string)) # At this point, save the group, if still unstored if group_created and not dry_run: diff --git a/aiida_crystal17/data/input_params.py b/aiida_crystal17/data/input_params.py index 4b6fce4..ede1d62 100644 --- a/aiida_crystal17/data/input_params.py +++ b/aiida_crystal17/data/input_params.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. import copy from aiida.common.utils import classproperty @@ -16,7 +31,7 @@ def data_schema(cls): """ return the data schema, which is loaded from file the first time it is called""" if cls._data_schema is None: - cls._data_schema = load_schema("inputd12.schema.json") + cls._data_schema = load_schema('inputd12.schema.json') return copy.deepcopy(cls._data_schema) @classmethod diff --git a/aiida_crystal17/data/kinds.py b/aiida_crystal17/data/kinds.py index 37622f6..90e7a1e 100644 --- a/aiida_crystal17/data/kinds.py +++ b/aiida_crystal17/data/kinds.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU Lesser General Public License for more details. import copy import jsonschema @@ -12,27 +27,26 @@ class KindData(Data): """stores additional data for StructureData Kinds""" _data_schema = { - "$schema": "http://json-schema.org/draft-07/schema", - "title": "additional kind data", - "type": "object", - "required": [ - "kind_names" - ], - "additionalProperties": False, - "properties": { - "kind_names": { - "type": "array", - "minimum": 1, - "items": {"type": "string"}, - "uniqueItems": True, + '$schema': 'http://json-schema.org/draft-07/schema', + 'title': 'additional kind data', + 'type': 'object', + 'required': ['kind_names'], + 'additionalProperties': False, + 'properties': { + 'kind_names': { + 'type': 'array', + 'minimum': 1, + 'items': { + 'type': 'string' + }, + 'uniqueItems': True, } }, - "patternProperties": { - ".+": { - "type": "array" + 'patternProperties': { + '.+': { + 'type': 'array' } } - } def __init__(self, data=None, **kwargs): @@ -57,12 +71,10 @@ def _validate(self): except SchemeError as err: raise ValidationError(err) - kinds = self.data["kind_names"] + kinds = self.data['kind_names'] for key, value in self.get_dict().items(): if len(value) != len(kinds): - raise ValidationError( - "'{}' array not the same length as 'kind_names'" - "".format(key)) + raise ValidationError("'{}' array not the same length as 'kind_names'" ''.format(key)) def set_data(self, data): """ @@ -78,12 +90,10 @@ def set_data(self, data): except SchemeError as err: raise ValidationError(err) - kinds = data["kind_names"] + kinds = data['kind_names'] for key, value in data.items(): if len(value) != len(kinds): - raise ValidationError( - "'{}' array not the same length as 'kind_names'" - "".format(key)) + raise ValidationError("'{}' array not the same length as 'kind_names'" ''.format(key)) # store all but the symmetry operations as attributes backup_dict = copy.deepcopy(self.get_dict()) @@ -136,7 +146,7 @@ def kind_dict(self): Return an AttributeDict with nested 
keys . = value """ data = dict(self.attributes) - kind_names = data.pop("kind_names") + kind_names = data.pop('kind_names') dct = {k: {} for k in kind_names} for key, values in data.items(): for kind, value in zip(kind_names, values): @@ -149,7 +159,7 @@ def field_dict(self): Return an AttributeDict with nested keys . = value """ data = dict(self.attributes) - kind_names = data.pop("kind_names") + kind_names = data.pop('kind_names') dct = {} for key, values in data.items(): dct[key] = {} diff --git a/aiida_crystal17/data/symmetry.py b/aiida_crystal17/data/symmetry.py index c3ce4cc..beb34e5 100644 --- a/aiida_crystal17/data/symmetry.py +++ b/aiida_crystal17/data/symmetry.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
import copy import tempfile @@ -22,7 +37,7 @@ class SymmetryData(Data): are stored as attributes in the database """ - _ops_filename = "operations.npy" + _ops_filename = 'operations.npy' _data_schema = None @classproperty @@ -30,7 +45,7 @@ def data_schema(cls): """ return the data schema, which is loaded from file the first time it is called""" if cls._data_schema is None: - cls._data_schema = load_schema("symmetry.schema.json") + cls._data_schema = load_schema('symmetry.schema.json') return copy.deepcopy(cls._data_schema) def __init__(self, **kwargs): @@ -51,7 +66,7 @@ def _validate(self): fname = self._ops_filename if fname not in self.list_object_names(): - raise SchemeError("operations not set") + raise SchemeError('operations not set') validate_against_schema(self.get_dict(), self.data_schema) @@ -72,10 +87,8 @@ def set_data(self, data): try: # Clear existing attributes and set the new dictionary - self._update_attributes( - {k: v - for k, v in data.items() if k != "operations"}) - self.set_attribute("num_symops", len(data["operations"])) + self._update_attributes({k: v for k, v in data.items() if k != 'operations'}) + self.set_attribute('num_symops', len(data['operations'])) except ModificationNotAllowed: # pylint: disable=try-except-raise # I re-raise here to avoid to go in the generic 'except' below that # would raise the same exception again @@ -87,7 +100,7 @@ def set_data(self, data): raise # store the symmetry operations on file - self._set_operations(data["operations"]) + self._set_operations(data['operations']) def _update_attributes(self, data): """ @@ -116,14 +129,12 @@ def _set_operations(self, ops): # Write the numpy array to the repository, # keeping the byte representation - self.put_object_from_filelike(handle, fname, - mode='wb', encoding=None) + self.put_object_from_filelike(handle, fname, mode='wb', encoding=None) def _get_operations(self): filename = self._ops_filename if filename not in self.list_object_names(): - raise KeyError("symmetry 
operations not set for node pk={}".format( - self.pk)) + raise KeyError('symmetry operations not set for node pk={}'.format(self.pk)) # Open a handle in binary read mode as the arrays are written # as binary files as well @@ -138,37 +149,37 @@ def data(self): Return the data as an AttributeDict """ data = dict(self.attributes) - if "num_symops" in data: - data.pop("num_symops") - data["operations"] = self._get_operations() + if 'num_symops' in data: + data.pop('num_symops') + data['operations'] = self._get_operations() return AttributeDict(data) def get_dict(self): """get dictionary of data""" data = dict(self.attributes) - if "num_symops" in data: - data.pop("num_symops") - data["operations"] = self._get_operations() + if 'num_symops' in data: + data.pop('num_symops') + data['operations'] = self._get_operations() return data def get_description(self): """ return a short string description of the data """ desc = [] - hall_number = self.get_attribute("hall_number", None) - num_symops = self.get_attribute("num_symops", None) + hall_number = self.get_attribute('hall_number', None) + num_symops = self.get_attribute('num_symops', None) if hall_number is not None: - desc.append("hall_number: {}".format(hall_number)) + desc.append('hall_number: {}'.format(hall_number)) if num_symops is not None: - desc.append("symmops: {}".format(num_symops)) - return "\n".join(desc) + desc.append('symmops: {}'.format(num_symops)) + return '\n'.join(desc) @property def num_symops(self): - return self.get_attribute("num_symops", None) + return self.get_attribute('num_symops', None) @property def hall_number(self): - return self.get_attribute("hall_number", None) + return self.get_attribute('hall_number', None) @property def spacegroup_info(self): @@ -177,14 +188,13 @@ def spacegroup_info(self): """ info = spglib.get_spacegroup_type(self.hall_number) if info is None: - raise ValueError("the hall number could not be converted") + raise ValueError('the hall number could not be converted') 
return AttributeDict(info) def add_path(self, src_abs, dst_path): from aiida.common.exceptions import ModificationNotAllowed - raise ModificationNotAllowed( - "Cannot add files or directories to StructSettingsData object") + raise ModificationNotAllowed('Cannot add files or directories to StructSettingsData object') def compare_operations(self, ops, decimal=5): """compare operations against stored ones @@ -196,14 +206,13 @@ def compare_operations(self, ops, decimal=5): ops_orig = self._get_operations() # create a set for each - ops_orig = set( - [tuple([round(i, decimal) for i in op]) for op in ops_orig]) + ops_orig = set([tuple([round(i, decimal) for i in op]) for op in ops_orig]) ops_new = set([tuple([round(i, decimal) for i in op]) for op in ops]) differences = {} if ops_orig.difference(ops_new): - differences["missing"] = ops_orig.difference(ops_new) + differences['missing'] = ops_orig.difference(ops_new) if ops_new.difference(ops_orig): - differences["additional"] = ops_new.difference(ops_orig) + differences['additional'] = ops_new.difference(ops_orig) return differences diff --git a/aiida_crystal17/gulp/__init__.py b/aiida_crystal17/gulp/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/gulp/__init__.py +++ b/aiida_crystal17/gulp/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
diff --git a/aiida_crystal17/gulp/calculations/__init__.py b/aiida_crystal17/gulp/calculations/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/gulp/calculations/__init__.py +++ b/aiida_crystal17/gulp/calculations/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. diff --git a/aiida_crystal17/gulp/calculations/gulp_abstract.py b/aiida_crystal17/gulp/calculations/gulp_abstract.py index de94dfb..f2b04df 100644 --- a/aiida_crystal17/gulp/calculations/gulp_abstract.py +++ b/aiida_crystal17/gulp/calculations/gulp_abstract.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
""" Plugin to run GULP """ @@ -9,7 +24,7 @@ def potential_validator(potential): - assert not potential.has_fitting_flags, "fitting flags should not be set for the potential" + assert not potential.has_fitting_flags, 'fitting flags should not be set for the potential' class GulpAbstractCalculation(CalcJob): @@ -27,25 +42,26 @@ def define(cls, spec): super(GulpAbstractCalculation, cls).define(spec) - spec.input('metadata.options.input_file_name', - valid_type=six.string_types, default='main.gin') - spec.input('metadata.options.output_main_file_name', - valid_type=six.string_types, default='main.gout') - spec.input('metadata.options.output_stderr_file_name', - valid_type=six.string_types, default='main_stderr.txt') + spec.input('metadata.options.input_file_name', valid_type=six.string_types, default='main.gin') + spec.input('metadata.options.output_main_file_name', valid_type=six.string_types, default='main.gout') + spec.input('metadata.options.output_stderr_file_name', valid_type=six.string_types, default='main_stderr.txt') spec.input( - 'structure', valid_type=DataFactory('structure'), + 'structure', + valid_type=DataFactory('structure'), required=True, help=('atomic structure used to create the ' 'geometry section of .gin file content.')) spec.input( - 'potential', valid_type=DataFactory('gulp.potential'), - required=True, validator=potential_validator, + 'potential', + valid_type=DataFactory('gulp.potential'), + required=True, + validator=potential_validator, help=('parameters to create the ' 'potential section of the .gin file content.')) spec.input( - 'parameters', valid_type=DataFactory('dict'), + 'parameters', + valid_type=DataFactory('dict'), required=False, help=('additional input parameters ' 'to create the .gin file content.')) @@ -54,39 +70,36 @@ def define(cls, spec): # Unrecoverable errors: resources like the retrieved folder or its expected contents are missing spec.exit_code( - 200, 'ERROR_NO_RETRIEVED_FOLDER', - message='The retrieved folder data 
node could not be accessed.') - spec.exit_code( - 210, 'ERROR_OUTPUT_FILE_MISSING', - message='the main output file was not found') + 200, 'ERROR_NO_RETRIEVED_FOLDER', message='The retrieved folder data node could not be accessed.') + spec.exit_code(210, 'ERROR_OUTPUT_FILE_MISSING', message='the main output file was not found') # Unrecoverable errors: required retrieved files could not be read, parsed or are otherwise incomplete spec.exit_code( - 300, 'ERROR_PARSING_STDOUT', - message=('An error was flagged trying to parse the ' - 'main gulp output file')) - spec.exit_code( - 301, 'ERROR_STDOUT_EMPTY', - message=('The stdout file is empty')) + 300, 'ERROR_PARSING_STDOUT', message=('An error was flagged trying to parse the ' + 'main gulp output file')) + spec.exit_code(301, 'ERROR_STDOUT_EMPTY', message=('The stdout file is empty')) # Significant errors but calculation can be used to restart spec.exit_code( - 400, 'ERROR_GULP_UNHANDLED', - message='The main gulp output file flagged an error not handled elsewhere') + 400, 'ERROR_GULP_UNHANDLED', message='The main gulp output file flagged an error not handled elsewhere') spec.exit_code( - 410, 'ERROR_OPTIMISE_UNSUCCESFUL', + 410, + 'ERROR_OPTIMISE_UNSUCCESFUL', message='The main gulp output file did not signal that an expected optimisation completed') spec.exit_code( - 411, 'ERROR_OPTIMISE_MAX_ATTEMPTS', + 411, + 'ERROR_OPTIMISE_MAX_ATTEMPTS', message='The main gulp output file did not signal that an expected optimisation completed') spec.exit_code( - 412, 'ERROR_OPTIMISE_MAX_CALLS', + 412, + 'ERROR_OPTIMISE_MAX_CALLS', message='The main gulp output file did not signal that an expected optimisation completed') - spec.output(cls.link_output_results, - valid_type=DataFactory('dict'), - required=True, - help='the data extracted from the main output file') + spec.output( + cls.link_output_results, + valid_type=DataFactory('dict'), + required=True, + help='the data extracted from the main output file') 
spec.default_output_node = cls.link_output_results def prepare_for_submission(self, tempfolder): @@ -97,12 +110,8 @@ def prepare_for_submission(self, tempfolder): :param tempfolder: an aiida.common.folders.Folder subclass where the plugin should put all its files. """ - content = self.create_input( - self.inputs.structure, - self.inputs.potential, - self.inputs.get("parameters", None), - self.inputs.get("symmetry", None) - ) + content = self.create_input(self.inputs.structure, self.inputs.potential, self.inputs.get('parameters', None), + self.inputs.get('symmetry', None)) if not isinstance(content, six.text_type): content = six.u(content) @@ -130,15 +139,10 @@ def prepare_for_submission(self, tempfolder): return calcinfo - def create_input(self, - structure, potential, - parameters=None, symmetry=None): + def create_input(self, structure, potential, parameters=None, symmetry=None): """ should return the content for main.gin""" raise NotImplementedError def get_retrieve_list(self): """ should return the files to be retrieved """ - return [ - self.metadata.options.output_main_file_name, - self.metadata.options.output_stderr_file_name - ] + return [self.metadata.options.output_main_file_name, self.metadata.options.output_stderr_file_name] diff --git a/aiida_crystal17/gulp/calculations/gulp_fitting.py b/aiida_crystal17/gulp/calculations/gulp_fitting.py index fab2a97..aa35919 100644 --- a/aiida_crystal17/gulp/calculations/gulp_fitting.py +++ b/aiida_crystal17/gulp/calculations/gulp_fitting.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ a calculation plugin to perform fitting of potentials, given a set of structures and observables """ @@ -24,10 +39,10 @@ class GulpFittingCalculation(CalcJob): """ _settings_schema = None _observable_defaults = { - "weighting": 100.0, - "energy_units": "eV", - "energy_units_key": "energy_units", - "energy_key": "energy" + 'weighting': 100.0, + 'energy_units': 'eV', + 'energy_units_key': 'energy_units', + 'energy_key': 'energy' } @classproperty @@ -35,7 +50,7 @@ def settings_schema(cls): """ return the settings schema, which is loaded from file the first time it is called only""" if cls._settings_schema is None: - cls._settings_schema = load_schema("fitting_settings.schema.json") + cls._settings_schema = load_schema('fitting_settings.schema.json') return copy.deepcopy(cls._settings_schema) @classmethod @@ -51,116 +66,99 @@ def validate_settings(cls, dct): @classmethod def validate_potential(cls, potential): - assert potential.has_fitting_flags, "fitting flags should be set for the potential" + assert potential.has_fitting_flags, 'fitting flags should be set for the potential' @classmethod def define(cls, spec): """ define the process specification """ super(GulpFittingCalculation, cls).define(spec) - spec.input('metadata.options.input_file_name', - valid_type=six.string_types, default='main.gin') - spec.input('metadata.options.output_main_file_name', - valid_type=six.string_types, default='main.gout') - spec.input('metadata.options.output_stderr_file_name', - valid_type=six.string_types, default='main_stderr.txt') - spec.input('metadata.options.output_dump_file_name', - valid_type=six.string_types, default='fitting.grs') - spec.input('metadata.options.allow_create_potential_fail', - valid_type=bool, 
default=False) - spec.input('metadata.options.parser_name', - valid_type=six.string_types, default='gulp.fitting') + spec.input('metadata.options.input_file_name', valid_type=six.string_types, default='main.gin') + spec.input('metadata.options.output_main_file_name', valid_type=six.string_types, default='main.gout') + spec.input('metadata.options.output_stderr_file_name', valid_type=six.string_types, default='main_stderr.txt') + spec.input('metadata.options.output_dump_file_name', valid_type=six.string_types, default='fitting.grs') + spec.input('metadata.options.allow_create_potential_fail', valid_type=bool, default=False) + spec.input('metadata.options.parser_name', valid_type=six.string_types, default='gulp.fitting') spec.input( - "settings", valid_type=Dict, required=True, validator=cls.validate_settings, + 'settings', + valid_type=Dict, + required=True, + validator=cls.validate_settings, serializer=to_aiida_type, - help=("Settings for the fitting, " - "see `GulpFittingCalculation.settings_schema` for the accepted format") - ) + help=('Settings for the fitting, ' + 'see `GulpFittingCalculation.settings_schema` for the accepted format')) spec.input( - "potential", valid_type=DataFactory('gulp.potential'), required=True, - serializer=to_aiida_type, validator=cls.validate_potential, - help=("a dictionary defining the potential. " - "Note this should have been created with fitting flags initialised") - ) + 'potential', + valid_type=DataFactory('gulp.potential'), + required=True, + serializer=to_aiida_type, + validator=cls.validate_potential, + help=('a dictionary defining the potential. 
' + 'Note this should have been created with fitting flags initialised')) spec.input_namespace( - "structures", valid_type=StructureData, dynamic=True, - help="a dict of structures to fit the potential to" - ) + 'structures', valid_type=StructureData, dynamic=True, help='a dict of structures to fit the potential to') spec.input_namespace( - "observables", valid_type=Dict, dynamic=True, - help="a dictionary of observables for each structure" - ) + 'observables', valid_type=Dict, dynamic=True, help='a dictionary of observables for each structure') # TODO review aiidateam/aiida_core#2997, when closed, for exit code formalization # Unrecoverable errors: resources like the retrieved folder or its expected contents are missing spec.exit_code( - 200, 'ERROR_NO_RETRIEVED_FOLDER', - message='The retrieved folder data node could not be accessed.') - spec.exit_code( - 210, 'ERROR_OUTPUT_FILE_MISSING', - message='the main (stdout) output file was not found') - spec.exit_code( - 211, 'ERROR_TEMP_FOLDER_MISSING', - message='the temporary retrieved folder was not found') + 200, 'ERROR_NO_RETRIEVED_FOLDER', message='The retrieved folder data node could not be accessed.') + spec.exit_code(210, 'ERROR_OUTPUT_FILE_MISSING', message='the main (stdout) output file was not found') + spec.exit_code(211, 'ERROR_TEMP_FOLDER_MISSING', message='the temporary retrieved folder was not found') # Unrecoverable errors: required retrieved files could not be read, parsed or are otherwise incomplete spec.exit_code( - 300, 'ERROR_PARSING_STDOUT', - message=('An error was flagged trying to parse the ' - 'gulp exec stdout file')) + 300, 'ERROR_PARSING_STDOUT', message=('An error was flagged trying to parse the ' + 'gulp exec stdout file')) + spec.exit_code(301, 'ERROR_STDOUT_EMPTY', message=('The stdout file is empty')) spec.exit_code( - 301, 'ERROR_STDOUT_EMPTY', - message=('The stdout file is empty')) - spec.exit_code( - 310, 'ERROR_NOT_ENOUGH_OBSERVABLES', + 310, + 'ERROR_NOT_ENOUGH_OBSERVABLES', 
message=('The number of fitting variables exceeds the number of observables')) + spec.exit_code(311, 'ERROR_FIT_UNSUCCESFUL', message=('The fit was not successful')) spec.exit_code( - 311, 'ERROR_FIT_UNSUCCESFUL', - message=('The fit was not successful')) - spec.exit_code( - 312, 'ERROR_GULP_UNKNOWN', + 312, + 'ERROR_GULP_UNKNOWN', message=('An error was flagged by GULP, which is not accounted for in another exit code')) spec.exit_code( - 313, 'ERROR_CREATING_NEW_POTENTIAL', - message=('An error occurred trying to create the new potential')) + 313, 'ERROR_CREATING_NEW_POTENTIAL', message=('An error occurred trying to create the new potential')) # Significant errors but calculation can be used to restart - spec.output( - "results", valid_type=Dict, required=True, - help="the data extracted from the main output file" - ) - spec.default_output_node = "results" + spec.output('results', valid_type=Dict, required=True, help='the data extracted from the main output file') + spec.default_output_node = 'results' spec.output( - "potential", valid_type=DataFactory('gulp.potential'), required=False, - help=("a dictionary defining the fitted potential.") - ) + 'potential', + valid_type=DataFactory('gulp.potential'), + required=False, + help=('a dictionary defining the fitted potential.')) def create_observable_map(self, settings): - observables = settings["observables"] + observables = settings['observables'] observable_map = {} - if "energy" in observables: - units = observables["energy"].get("units", self._observable_defaults["energy_units"]) - units_key = observables["energy"].get("units_key", self._observable_defaults["energy_units_key"]) - energy_key = observables["energy"].get("energy_key", self._observable_defaults["energy_key"]) - weighting = observables["energy"].get("weighting", self._observable_defaults["weighting"]) - if units == "eV": - key = "energy ev" + if 'energy' in observables: + units = observables['energy'].get('units', 
self._observable_defaults['energy_units']) + units_key = observables['energy'].get('units_key', self._observable_defaults['energy_units_key']) + energy_key = observables['energy'].get('energy_key', self._observable_defaults['energy_key']) + weighting = observables['energy'].get('weighting', self._observable_defaults['weighting']) + if units == 'eV': + key = 'energy ev' else: - key = "energy " + units + key = 'energy ' + units def _get_energy(data): dct = data.get_dict() for key in [units_key, energy_key]: if key not in dct: - raise AssertionError( - "the observable data Pk={0} does not contain a '{1}' key".format(data.id, key)) + raise AssertionError("the observable data Pk={0} does not contain a '{1}' key".format( + data.id, key)) if dct[units_key] != units: # TODO units conversion raise AssertionError("'{}' != {}".format(units_key, units)) @@ -179,35 +177,32 @@ def prepare_for_submission(self, tempfolder): where the plugin should put all its files. """ settings = {} - if "settings" in self.inputs: + if 'settings' in self.inputs: settings = self.inputs.settings.get_dict() # validate that the structures and observables have the same keys struct_keys = set(self.inputs.structures.keys()) observe_keys = set(self.inputs.observables.keys()) if struct_keys != observe_keys: - raise InputValidationError( - "The structures and observables do not match: {} != {}".format(struct_keys, observe_keys)) + raise InputValidationError('The structures and observables do not match: {} != {}'.format( + struct_keys, observe_keys)) # validate number of fitting variables vs number of observables if len(observe_keys) < self.inputs.potential.number_of_variables: - raise InputValidationError( - "The number of observables supplied ({}) " - "is less than the number of variables required to be fit ({})".format( - len(observe_keys), self.inputs.potential.number_of_variables - )) + raise InputValidationError('The number of observables supplied ({}) ' + 'is less than the number of variables 
required to be fit ({})'.format( + len(observe_keys), self.inputs.potential.number_of_variables)) content_lines, snames = create_input_lines( self.inputs.potential, self.inputs.structures, self.inputs.observables, observables=self.create_observable_map(settings), - delta=settings.get("gradient_delta", None), - dump_file=self.metadata.options.output_dump_file_name - ) + delta=settings.get('gradient_delta', None), + dump_file=self.metadata.options.output_dump_file_name) with tempfolder.open(self.metadata.options.input_file_name, 'w') as f: - f.write(six.ensure_text("\n".join(content_lines))) + f.write(six.ensure_text('\n'.join(content_lines))) with tempfolder.open('structure_names.json', 'w') as handle: handle.write(six.ensure_text(json.dumps(snames))) @@ -229,8 +224,7 @@ def prepare_for_submission(self, tempfolder): calcinfo.local_copy_list = [] calcinfo.remote_copy_list = [] calcinfo.retrieve_list = [ - self.metadata.options.output_main_file_name, - self.metadata.options.output_stderr_file_name, + self.metadata.options.output_main_file_name, self.metadata.options.output_stderr_file_name, self.metadata.options.output_dump_file_name ] calcinfo.retrieve_temporary_list = [] diff --git a/aiida_crystal17/gulp/calculations/gulp_optimize.py b/aiida_crystal17/gulp/calculations/gulp_optimize.py index 835a19d..2cd5827 100644 --- a/aiida_crystal17/gulp/calculations/gulp_optimize.py +++ b/aiida_crystal17/gulp/calculations/gulp_optimize.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU Lesser General Public License for more details. import six from aiida.plugins import DataFactory from aiida_crystal17.gulp.calculations.gulp_abstract import GulpAbstractCalculation @@ -15,51 +30,50 @@ def define(cls, spec): super(GulpOptCalculation, cls).define(spec) - spec.input('metadata.options.parser_name', - valid_type=six.string_types, default='gulp.optimize') + spec.input('metadata.options.parser_name', valid_type=six.string_types, default='gulp.optimize') - spec.input('metadata.options.out_cif_file_name', - valid_type=six.string_types, default='output.cif', - help="name of the cif file to output with final geometry") - spec.input('metadata.options.use_input_kinds', - valid_type=bool, default=True, - help=( - "if True, use the atoms kinds from the input structure, " - "when creating the output structure")) + spec.input( + 'metadata.options.out_cif_file_name', + valid_type=six.string_types, + default='output.cif', + help='name of the cif file to output with final geometry') + spec.input( + 'metadata.options.use_input_kinds', + valid_type=bool, + default=True, + help=('if True, use the atoms kinds from the input structure, ' + 'when creating the output structure')) # spec.input('metadata.options.out_str_file_name', # valid_type=six.string_types, default='output.str', # help="name of the str file (i.e. 
a CRYSTAL98 .gui file)") spec.input( - 'symmetry', valid_type=DataFactory('crystal17.symmetry'), + 'symmetry', + valid_type=DataFactory('crystal17.symmetry'), required=False, help=('parameters to create the symmetry section of the ' '.gin file content (for constrained optimisation).')) + spec.exit_code(250, 'ERROR_CIF_FILE_MISSING', message='the output cif file was not found') spec.exit_code( - 250, 'ERROR_CIF_FILE_MISSING', - message='the output cif file was not found') - spec.exit_code( - 251, 'ERROR_MISSING_INPUT_STRUCTURE', + 251, + 'ERROR_MISSING_INPUT_STRUCTURE', message='an input structure is required to create the output structure of an optimisation') spec.exit_code( - 252, 'ERROR_CIF_INCONSISTENT', - message='the output cif file was not consistent with the input structure') + 252, 'ERROR_CIF_INCONSISTENT', message='the output cif file was not consistent with the input structure') spec.exit_code( - 253, 'ERROR_STRUCTURE_PARSING', + 253, + 'ERROR_STRUCTURE_PARSING', message='The final structure coordinates were not parsed from the output file') - spec.output(cls.link_output_structure, - valid_type=DataFactory('structure'), - required=True, - help='the optimized structure output from the calculation') + spec.output( + cls.link_output_structure, + valid_type=DataFactory('structure'), + required=True, + help='the optimized structure output from the calculation') - def create_input(self, - structure, potential, - parameters=None, symmetry=None): - input_creation = InputCreationOpt( - outputs={"cif": self.metadata.options.out_cif_file_name} - ) + def create_input(self, structure, potential, parameters=None, symmetry=None): + input_creation = InputCreationOpt(outputs={'cif': self.metadata.options.out_cif_file_name}) # TODO assert potential species contains at least one from structure input_creation.create_content(structure, potential.get_input_lines(), parameters, symmetry) return input_creation.get_content() @@ -67,7 +81,6 @@ def create_input(self, def 
get_retrieve_list(self): """ should return the files to be retrieved """ return [ - self.metadata.options.output_main_file_name, - self.metadata.options.output_stderr_file_name, + self.metadata.options.output_main_file_name, self.metadata.options.output_stderr_file_name, self.metadata.options.out_cif_file_name ] diff --git a/aiida_crystal17/gulp/calculations/gulp_single.py b/aiida_crystal17/gulp/calculations/gulp_single.py index 7f5d3e5..707471a 100644 --- a/aiida_crystal17/gulp/calculations/gulp_single.py +++ b/aiida_crystal17/gulp/calculations/gulp_single.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
import six from aiida_crystal17.gulp.calculations.gulp_abstract import GulpAbstractCalculation from aiida_crystal17.gulp.parsers.raw.write_input import InputCreationSingle @@ -14,12 +29,9 @@ def define(cls, spec): super(GulpSingleCalculation, cls).define(spec) - spec.input('metadata.options.parser_name', - valid_type=six.string_types, default='gulp.single') + spec.input('metadata.options.parser_name', valid_type=six.string_types, default='gulp.single') - def create_input(self, - structure, potential, - parameters=None, symmetry=None): + def create_input(self, structure, potential, parameters=None, symmetry=None): # TODO assert potential species contains at least one from structure input_creation = InputCreationSingle() input_creation.create_content(structure, potential.get_input_lines(), parameters, symmetry) diff --git a/aiida_crystal17/gulp/cmndline/__init__.py b/aiida_crystal17/gulp/cmndline/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/gulp/cmndline/__init__.py +++ b/aiida_crystal17/gulp/cmndline/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. diff --git a/aiida_crystal17/gulp/cmndline/potentials.py b/aiida_crystal17/gulp/cmndline/potentials.py index 700edcf..087d78a 100644 --- a/aiida_crystal17/gulp/cmndline/potentials.py +++ b/aiida_crystal17/gulp/cmndline/potentials.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. import click from jsonextended import edict from aiida.cmdline.commands.cmd_verdi import verdi @@ -13,11 +28,10 @@ def potentials(): @potentials.command('list') @click.argument('entry_point', type=click.STRING, required=False) -@click.option('-d', '--depth', 'schema_depth', default=2, - help="nested depth with which to print data schema") +@click.option('-d', '--depth', 'schema_depth', default=2, help='nested depth with which to print data schema') def potential_list(entry_point, schema_depth): """Display a list of all available plugins""" - entry_point_group = "gulp.potentials" + entry_point_group = 'gulp.potentials' if entry_point: try: plugin = load_entry_point(entry_point_group, entry_point) @@ -27,27 +41,21 @@ def potential_list(entry_point, schema_depth): try: echo.echo(str(plugin.get_description()), bold=True) except (AttributeError, TypeError): - echo.echo_error( - 'No description available for {}'.format(entry_point)) + echo.echo_error('No description available for {}'.format(entry_point)) try: schema = plugin.get_schema() - echo.echo("Data Schema:") - edict.pprint(schema, depth=schema_depth, print_func=echo.echo, - keycolor="blue") + echo.echo('Data Schema:') + edict.pprint(schema, depth=schema_depth, print_func=echo.echo, keycolor='blue') except (AttributeError, TypeError): - echo.echo_error( - 'No validation schema available for {}'.format(entry_point)) + echo.echo_error('No validation schema available for {}'.format(entry_point)) else: entry_points = get_entry_point_names(entry_point_group) if entry_points: - echo.echo('Registered 
entry points for {}:'.format( - entry_point_group)) + echo.echo('Registered entry points for {}:'.format(entry_point_group)) for registered_entry_point in entry_points: echo.echo('* {}'.format(registered_entry_point)) echo.echo('') - echo.echo_info( - 'Pass the entry point as an argument to display detailed information') + echo.echo_info('Pass the entry point as an argument to display detailed information') else: - echo.echo_error( - 'No plugins found for group {}'.format(entry_point_group)) + echo.echo_error('No plugins found for group {}'.format(entry_point_group)) diff --git a/aiida_crystal17/gulp/data/__init__.py b/aiida_crystal17/gulp/data/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/gulp/data/__init__.py +++ b/aiida_crystal17/gulp/data/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. diff --git a/aiida_crystal17/gulp/data/potential.py b/aiida_crystal17/gulp/data/potential.py index 5919f08..67cc177 100644 --- a/aiida_crystal17/gulp/data/potential.py +++ b/aiida_crystal17/gulp/data/potential.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. import copy from hashlib import md5 import json @@ -46,8 +61,7 @@ def set_data(self, pair_style, potential_data, fitting_data=None, additional_dat potential_writer = self.load_pair_style(pair_style)() description = potential_writer.get_description() - output = potential_writer.create_content( - potential_data, fitting_data=fitting_data) + output = potential_writer.create_content(potential_data, fitting_data=fitting_data) with self.open(self._default_potential_filename, 'w') as handle: handle.write(six.ensure_text(output.content)) @@ -63,7 +77,7 @@ def set_data(self, pair_style, potential_data, fitting_data=None, additional_dat 'pair_style': pair_style, 'description': description, 'species': potential_data['species'], - 'input_lines_md5': md5(output.content.encode("utf-8")).hexdigest(), + 'input_lines_md5': md5(output.content.encode('utf-8')).hexdigest(), 'fitting_flags': fitting_data is not None, 'total_flags': output.number_of_flags, 'number_flagged': output.number_flagged, @@ -72,7 +86,7 @@ def set_data(self, pair_style, potential_data, fitting_data=None, additional_dat 'fitting_json': self._default_fitting_json } if additional_data is not None: - dictionary["additional"] = additional_data + dictionary['additional'] = additional_data dictionary_backup = copy.deepcopy(self.attributes) @@ -106,8 +120,7 @@ def get_potential_dict(self): """ potential_json = self.get_attribute('potential_json') if potential_json not in self.list_object_names(): - raise KeyError("potential dict not set for node pk={}".format( - self.pk)) + raise KeyError('potential dict not set for node pk={}'.format(self.pk)) with self.open(potential_json, mode='r') as handle: data = json.load(handle) @@ -121,8 +134,7 @@ def get_fitting_dict(self): 
""" fitting_json = self.get_attribute('fitting_json') if fitting_json not in self.list_object_names(): - raise KeyError("fitting dict not set for node pk={}".format( - self.pk)) + raise KeyError('fitting dict not set for node pk={}'.format(self.pk)) with self.open(fitting_json, mode='r') as handle: data = json.load(handle) @@ -155,8 +167,7 @@ def get_description(self): def get_input_lines(self): potential_filename = self.get_attribute('potential_filename') if potential_filename not in self.list_object_names(): - raise KeyError("potential file not set for node pk={}".format( - self.pk)) + raise KeyError('potential file not set for node pk={}'.format(self.pk)) with self.open(potential_filename, mode='r') as handle: lines = handle.read() diff --git a/aiida_crystal17/gulp/parsers/__init__.py b/aiida_crystal17/gulp/parsers/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/gulp/parsers/__init__.py +++ b/aiida_crystal17/gulp/parsers/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. diff --git a/aiida_crystal17/gulp/parsers/parse_fitting.py b/aiida_crystal17/gulp/parsers/parse_fitting.py index 02014f8..9f95e2d 100644 --- a/aiida_crystal17/gulp/parsers/parse_fitting.py +++ b/aiida_crystal17/gulp/parsers/parse_fitting.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ A parser to read output from a standard CRYSTAL17 run """ @@ -27,41 +42,38 @@ def parse(self, **kwargs): except exceptions.NotExistent: return self.exit_codes.ERROR_NO_RETRIEVED_FOLDER - mainout_file = self.node.get_option("output_main_file_name") + mainout_file = self.node.get_option('output_main_file_name') if mainout_file not in output_folder.list_object_names(): return self.exit_codes.ERROR_OUTPUT_FILE_MISSING # parse the main output file and add nodes - self.logger.info("parsing main out file") + self.logger.info('parsing main out file') with output_folder.open(mainout_file) as handle: try: - result_dict, exit_code = parse_file( - handle, parser_class=self.__class__.__name__) + result_dict, exit_code = parse_file(handle, parser_class=self.__class__.__name__) except Exception: traceback.print_exc() return self.exit_codes.ERROR_PARSING_STDOUT - if result_dict["parser_errors"]: - self.logger.warning( - "the parser raised the following errors:\n{}".format( - "\n\t".join(result_dict["parser_errors"]))) - if result_dict["errors"]: - self.logger.warning( - "the calculation raised the following errors:\n{}".format( - "\n\t".join(result_dict["errors"]))) + if result_dict['parser_errors']: + self.logger.warning('the parser raised the following errors:\n{}'.format('\n\t'.join( + result_dict['parser_errors']))) + if result_dict['errors']: + self.logger.warning('the calculation raised the following errors:\n{}'.format('\n\t'.join( + result_dict['errors']))) if 'structure_names.json' in self.node.list_object_names(): - result_dict["config_names"] = 
json.loads(self.node.get_object_content('structure_names.json')) + result_dict['config_names'] = json.loads(self.node.get_object_content('structure_names.json')) # look a stderr for fortran warnings, etc, e.g. IEEE_INVALID_FLAG IEEE_OVERFLOW_FLAG IEEE_UNDERFLOW_FLAG - stderr_file = self.node.get_option("output_stderr_file_name") + stderr_file = self.node.get_option('output_stderr_file_name') if stderr_file in output_folder.list_object_names(): with output_folder.open(stderr_file) as handle: stderr_content = handle.read() if stderr_content: - self.logger.warning("the calculation stderr file was not empty:") + self.logger.warning('the calculation stderr file was not empty:') self.logger.warning(stderr_content) - result_dict["warnings"].append(stderr_content.strip()) + result_dict['warnings'].append(stderr_content.strip()) exit_code_dump = self.extract_from_dump(output_folder) @@ -80,41 +92,41 @@ def extract_from_dump(self, output_folder): from the corresponding `gulp.potentials` entry point class """ - dump_file = self.node.get_option("output_dump_file_name") + dump_file = self.node.get_option('output_dump_file_name') if dump_file not in output_folder.list_object_names(): - self.logger.error("dump file `{}` not present in retrieved folder".format(dump_file)) - return "ERROR_CREATING_NEW_POTENTIAL" + self.logger.error('dump file `{}` not present in retrieved folder'.format(dump_file)) + return 'ERROR_CREATING_NEW_POTENTIAL' with output_folder.open(dump_file) as handle: dump_content = handle.read() dump_lines = dump_content.splitlines() - if "potential" not in self.node.inputs: - self.logger.error("the node does not have a `potential` node input") - return "ERROR_CREATING_NEW_POTENTIAL" + if 'potential' not in self.node.inputs: + self.logger.error('the node does not have a `potential` node input') + return 'ERROR_CREATING_NEW_POTENTIAL' try: pair_style = self.node.inputs.potential.pair_style parser = self.node.inputs.potential.load_pair_style(pair_style) except 
Exception: - self.logger.error("could not load dump parser:") + self.logger.error('could not load dump parser:') traceback.print_exc() - return "ERROR_CREATING_NEW_POTENTIAL" + return 'ERROR_CREATING_NEW_POTENTIAL' try: pot_dict = parser().read_exising(dump_lines) except Exception: - self.logger.error("could not parse dump file:") + self.logger.error('could not parse dump file:') traceback.print_exc() - return "ERROR_CREATING_NEW_POTENTIAL" + return 'ERROR_CREATING_NEW_POTENTIAL' try: pot_data = EmpiricalPotential(pair_style, pot_dict) except Exception: - self.logger.error("could not create new potential:") + self.logger.error('could not create new potential:') traceback.print_exc() - return "ERROR_CREATING_NEW_POTENTIAL" + return 'ERROR_CREATING_NEW_POTENTIAL' - self.out("potential", pot_data) + self.out('potential', pot_data) return None diff --git a/aiida_crystal17/gulp/parsers/parse_opt.py b/aiida_crystal17/gulp/parsers/parse_opt.py index e369096..fbd0ae3 100644 --- a/aiida_crystal17/gulp/parsers/parse_opt.py +++ b/aiida_crystal17/gulp/parsers/parse_opt.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
""" A parser to read output from a standard CRYSTAL17 run """ @@ -28,39 +43,36 @@ def parse(self, **kwargs): except exceptions.NotExistent: return self.exit_codes.ERROR_NO_RETRIEVED_FOLDER - mainout_file = self.node.get_option("output_main_file_name") + mainout_file = self.node.get_option('output_main_file_name') if mainout_file not in output_folder.list_object_names(): return self.exit_codes.ERROR_OUTPUT_FILE_MISSING # parse the main output file and add nodes - self.logger.info("parsing main out file") + self.logger.info('parsing main out file') with output_folder.open(mainout_file) as handle: try: - result_dict, exit_code = parse_file( - handle, parser_class=self.__class__.__name__) + result_dict, exit_code = parse_file(handle, parser_class=self.__class__.__name__) except Exception: traceback.print_exc() return self.exit_codes.ERROR_PARSING_STDOUT - if result_dict["parser_errors"]: - self.logger.warning( - "the parser raised the following errors:\n{}".format( - "\n\t".join(result_dict["parser_errors"]))) - if result_dict["errors"]: - self.logger.warning( - "the calculation raised the following errors:\n{}".format( - "\n\t".join(result_dict["errors"]))) + if result_dict['parser_errors']: + self.logger.warning('the parser raised the following errors:\n{}'.format('\n\t'.join( + result_dict['parser_errors']))) + if result_dict['errors']: + self.logger.warning('the calculation raised the following errors:\n{}'.format('\n\t'.join( + result_dict['errors']))) # look a stderr for fortran warnings, etc, # e.g. 
IEEE_INVALID_FLAG IEEE_OVERFLOW_FLAG IEEE_UNDERFLOW_FLAG - stderr_file = self.node.get_option("output_stderr_file_name") + stderr_file = self.node.get_option('output_stderr_file_name') if stderr_file in output_folder.list_object_names(): with output_folder.open(stderr_file) as handle: stderr_content = handle.read() if stderr_content: - self.logger.warning("the calculation stderr file was not empty:") + self.logger.warning('the calculation stderr file was not empty:') self.logger.warning(stderr_content) - result_dict["warnings"].append(stderr_content.strip()) + result_dict['warnings'].append(stderr_content.strip()) self.out('results', Dict(dict=result_dict)) @@ -77,42 +89,42 @@ def parse(self, **kwargs): def create_structure(self, results_dict, output_folder): """ create the output structure """ - opt_type = results_dict.get("opt_type", None) - if opt_type == "polymer": - if "final_coords" not in results_dict: - return None, "ERROR_STRUCTURE_PARSING" - final_coords = results_dict.pop("final_coords") - if "structure" not in self.node.inputs: - self.logger.error("the input structure node is not set") - return None, "ERROR_MISSING_INPUT_STRUCTURE" - if not set(final_coords.keys()).issuperset(["id", "x", "y", "z", "label"]): + opt_type = results_dict.get('opt_type', None) + if opt_type == 'polymer': + if 'final_coords' not in results_dict: + return None, 'ERROR_STRUCTURE_PARSING' + final_coords = results_dict.pop('final_coords') + if 'structure' not in self.node.inputs: + self.logger.error('the input structure node is not set') + return None, 'ERROR_MISSING_INPUT_STRUCTURE' + if not set(final_coords.keys()).issuperset(['id', 'x', 'y', 'z', 'label']): self.logger.error('expected final_coords to contain ["id", "x", "y", "z", "label"]') - return None, "ERROR_STRUCTURE_PARSING" - if not final_coords["id"] == list(range(1, len(final_coords["id"]) + 1)): - self.logger.error("the final_coords ids were not ordered 1,2,3,...") - return None, "ERROR_STRUCTURE_PARSING" - if not 
final_coords["label"] == self.node.inputs.structure.get_ase().get_chemical_symbols(): - self.logger.error("the final_coords labels are != to the input structure symbols") - return None, "ERROR_STRUCTURE_PARSING" + return None, 'ERROR_STRUCTURE_PARSING' + if not final_coords['id'] == list(range(1, len(final_coords['id']) + 1)): + self.logger.error('the final_coords ids were not ordered 1,2,3,...') + return None, 'ERROR_STRUCTURE_PARSING' + if not final_coords['label'] == self.node.inputs.structure.get_ase().get_chemical_symbols(): + self.logger.error('the final_coords labels are != to the input structure symbols') + return None, 'ERROR_STRUCTURE_PARSING' try: validate_1d_geometry(self.node.inputs.structure) except Exception as err: self.logger.error(str(err)) - return None, "ERROR_STRUCTURE_PARSING" + return None, 'ERROR_STRUCTURE_PARSING' out_structure = self.node.inputs.structure.clone() positions = [] - for x, y, z in zip(final_coords["x"], final_coords["y"], final_coords["z"]): + for x, y, z in zip(final_coords['x'], final_coords['y'], final_coords['z']): # x are fractional and y,z are cartesian positions.append([x * out_structure.cell[0][0], y, z]) out_structure.reset_sites_positions(positions) - elif opt_type == "surface": - self.logger.error("creating output structures for surface optimisations has not yet been implemented") - return None, "ERROR_STRUCTURE_PARSING" + elif opt_type == 'surface': + self.logger.error('creating output structures for surface optimisations has not yet been implemented') + return None, 'ERROR_STRUCTURE_PARSING' else: - cif_file = self.node.get_option("out_cif_file_name") + cif_file = self.node.get_option('out_cif_file_name') if cif_file not in output_folder.list_object_names(): - self.logger.error("the output cif file is missing") - return None, "ERROR_CIF_FILE_MISSING" + self.logger.error('the output cif file is missing') + return None, 'ERROR_CIF_FILE_MISSING' # We do not use this method, since currently different kinds are set 
for each atom # see aiidateam/aiida_core#2942 @@ -125,24 +137,23 @@ def create_structure(self, results_dict, output_folder): # ase.io.read returns a warnings that can be ignored # UserWarning: crystal system 'triclinic' is not interpreted for space group 1. # This may result in wrong setting! - warnings.simplefilter("ignore", UserWarning) - with output_folder.open(cif_file, mode="r") as handle: - atoms = ase_read(handle, index=':', format="cif")[-1] + warnings.simplefilter('ignore', UserWarning) + with output_folder.open(cif_file, mode='r') as handle: + atoms = ase_read(handle, index=':', format='cif')[-1] atoms.set_tags(0) - if self.node.get_option("use_input_kinds"): + if self.node.get_option('use_input_kinds'): - if "structure" not in self.node.inputs: - self.logger.error("the input structure node is not set") - return None, "ERROR_MISSING_INPUT_STRUCTURE" + if 'structure' not in self.node.inputs: + self.logger.error('the input structure node is not set') + return None, 'ERROR_MISSING_INPUT_STRUCTURE' in_structure = self.node.inputs.structure in_atoms = in_structure.get_ase() if in_atoms.get_chemical_symbols() != atoms.get_chemical_symbols(): - self.logger.error( - "the input and cif structures have different atomic configurations") - return None, "ERROR_CIF_INCONSISTENT" + self.logger.error('the input and cif structures have different atomic configurations') + return None, 'ERROR_CIF_INCONSISTENT' out_structure = in_structure.clone() out_structure.set_cell(atoms.cell) diff --git a/aiida_crystal17/gulp/parsers/parse_single.py b/aiida_crystal17/gulp/parsers/parse_single.py index 9a184d4..f6add15 100644 --- a/aiida_crystal17/gulp/parsers/parse_single.py +++ b/aiida_crystal17/gulp/parsers/parse_single.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ A parser to read output from a standard CRYSTAL17 run """ @@ -25,40 +40,37 @@ def parse(self, **kwargs): except exceptions.NotExistent: return self.exit_codes.ERROR_NO_RETRIEVED_FOLDER - mainout_file = self.node.get_option("output_main_file_name") + mainout_file = self.node.get_option('output_main_file_name') if mainout_file not in output_folder.list_object_names(): return self.exit_codes.ERROR_OUTPUT_FILE_MISSING # parse the main output file and add nodes - self.logger.info("parsing main out file") + self.logger.info('parsing main out file') with output_folder.open(mainout_file) as handle: try: result_dict, exit_code = parse_file( - handle, parser_class=self.__class__.__name__, - single_point_only=True) + handle, parser_class=self.__class__.__name__, single_point_only=True) except Exception: traceback.print_exc() return self.exit_codes.ERROR_PARSING_STDOUT - if result_dict["parser_errors"]: - self.logger.warning( - "the parser raised the following errors:\n{}".format( - "\n\t".join(result_dict["parser_errors"]))) - if result_dict["errors"]: - self.logger.warning( - "the calculation raised the following errors:\n{}".format( - "\n\t".join(result_dict["errors"]))) + if result_dict['parser_errors']: + self.logger.warning('the parser raised the following errors:\n{}'.format('\n\t'.join( + result_dict['parser_errors']))) + if result_dict['errors']: + self.logger.warning('the calculation raised the following errors:\n{}'.format('\n\t'.join( + result_dict['errors']))) # look a stderr for fortran warnings, etc, # e.g. 
IEEE_INVALID_FLAG IEEE_OVERFLOW_FLAG IEEE_UNDERFLOW_FLAG - stderr_file = self.node.get_option("output_stderr_file_name") + stderr_file = self.node.get_option('output_stderr_file_name') if stderr_file in output_folder.list_object_names(): with output_folder.open(stderr_file) as handle: stderr_content = handle.read() if stderr_content: - self.logger.warning("the calculation stderr file was not empty:") + self.logger.warning('the calculation stderr file was not empty:') self.logger.warning(stderr_content) - result_dict["warnings"].append(stderr_content.strip()) + result_dict['warnings'].append(stderr_content.strip()) self.out('results', Dict(dict=result_dict)) diff --git a/aiida_crystal17/gulp/parsers/raw/__init__.py b/aiida_crystal17/gulp/parsers/raw/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/gulp/parsers/raw/__init__.py +++ b/aiida_crystal17/gulp/parsers/raw/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. diff --git a/aiida_crystal17/gulp/parsers/raw/parse_output_common.py b/aiida_crystal17/gulp/parsers/raw/parse_output_common.py index 32ef166..3dc3e01 100644 --- a/aiida_crystal17/gulp/parsers/raw/parse_output_common.py +++ b/aiida_crystal17/gulp/parsers/raw/parse_output_common.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """common parsing functions for GULP output files """ import re @@ -45,26 +60,26 @@ def read_gulp_table(lines, lineno, field_names, field_conversions, star_to_none= """ if not len(field_names) == len(field_conversions): - raise AssertionError("the length of field_names ({}) and field_conversions ({}) " - "are different".format(len(field_names), len(field_conversions))) + raise AssertionError('the length of field_names ({}) and field_conversions ({}) ' + 'are different'.format(len(field_names), len(field_conversions))) num_fields = len(field_conversions) start_lineno = lineno line = lines[lineno] - while not line.strip().startswith("---"): + while not line.strip().startswith('---'): lineno += 1 if lineno >= len(lines): - raise IOError("reached end of file trying to find start of table, " - "starting from line #{}".format(start_lineno)) + raise IOError('reached end of file trying to find start of table, ' + 'starting from line #{}'.format(start_lineno)) line = lines[lineno] lineno += 1 line = lines[lineno] - while not line.strip().startswith("---"): + while not line.strip().startswith('---'): lineno += 1 if lineno >= len(lines): - raise IOError("reached end of file trying to find end of table header, " - "starting from line #{}".format(start_lineno)) + raise IOError('reached end of file trying to find end of table header, ' + 'starting from line #{}'.format(start_lineno)) line = lines[lineno] lineno += 1 @@ -72,25 +87,24 @@ def read_gulp_table(lines, lineno, field_names, field_conversions, star_to_none= values = {f: [] for f in field_names} - while 
not line.strip().startswith("---"): + while not line.strip().startswith('---'): value_list = line.strip().split(None, num_fields - 1) if not len(value_list) == num_fields: - raise IOError("line #{} did not have at least the expected number of fields ({}): " - "{}".format(lineno, num_fields, value_list)) + raise IOError('line #{} did not have at least the expected number of fields ({}): ' + '{}'.format(lineno, num_fields, value_list)) try: for value, name, convert in zip(value_list, field_names, field_conversions): - if value.startswith("******") and star_to_none: + if value.startswith('******') and star_to_none: values[name].append(None) else: values[name].append(convert(value)) except Exception as err: - raise IOError("line #{} could not be converted to the required format: " - "{}".format(lineno, err)) + raise IOError('line #{} could not be converted to the required format: ' '{}'.format(lineno, err)) lineno += 1 if lineno >= len(lines): - raise IOError("reached end of file trying to find end of table, " - "starting from line #{}".format(start_lineno)) + raise IOError('reached end of file trying to find end of table, ' + 'starting from line #{}'.format(start_lineno)) line = lines[lineno] return lineno, values @@ -143,47 +157,47 @@ def read_energy_components(lines, lineno): """ start_lineno = lineno line = lines[lineno] - while not line.strip().startswith("---"): + while not line.strip().startswith('---'): lineno += 1 if lineno >= len(lines): - raise IOError("reached end of file trying to find start of energy components, " - "starting from line {}".format(start_lineno)) + raise IOError('reached end of file trying to find start of energy components, ' + 'starting from line {}'.format(start_lineno)) line = lines[lineno] lineno += 1 line = lines[lineno] - while not line.strip().startswith("---"): + while not line.strip().startswith('---'): # TODO parse this section lineno += 1 if lineno >= len(lines): - raise IOError("reached end of file trying to find start of total 
energy section, " - "starting from line {}".format(start_lineno)) + raise IOError('reached end of file trying to find start of total energy section, ' + 'starting from line {}'.format(start_lineno)) line = lines[lineno] lineno += 1 line = lines[lineno] - if "Total lattice energy" not in line: + if 'Total lattice energy' not in line: raise IOError("Expected line {} to contain 'Total lattice energy': {}".format(lineno, line)) - if "=" in line: + if '=' in line: # structure is primitive - energy_match = re.findall("Total lattice energy[\\s]*=[\\s]*([^\\s]+) eV", line) + energy_match = re.findall('Total lattice energy[\\s]*=[\\s]*([^\\s]+) eV', line) if not energy_match: raise IOError("Expected line {} to match 'Total lattice energy = () eV': {}".format(lineno, line)) energy = primitive_energy = float(energy_match[0]) - elif ":" in line: + elif ':' in line: # structure is non-primitive lineno += 1 line = lines[lineno] - energy_match = re.findall("Primitive unit cell[\\s]*=[\\s]*([^\\s]+) eV", line) + energy_match = re.findall('Primitive unit cell[\\s]*=[\\s]*([^\\s]+) eV', line) if not energy_match: raise IOError("Expected line {} to match 'Primitive unit cell = () eV': {}".format(lineno, line)) primitive_energy = float(energy_match[0]) lineno += 1 line = lines[lineno] - energy_match = re.findall("Non-primitive unit cell[\\s]*=[\\s]*([^\\s]+) eV", line) + energy_match = re.findall('Non-primitive unit cell[\\s]*=[\\s]*([^\\s]+) eV', line) if not energy_match: raise IOError("Expected line {} to match 'Non-primitive unit cell = () eV': {}".format(lineno, line)) energy = float(energy_match[0]) @@ -226,9 +240,9 @@ def read_reaxff_econtribs(lines, lineno): energies = {} - while "=" in line: + while '=' in line: - energy_match = re.findall("E\\((.+)\\)[\\s]*=[\\s]*([^\\s]+) eV", line) + energy_match = re.findall('E\\((.+)\\)[\\s]*=[\\s]*([^\\s]+) eV', line) if not energy_match: raise IOError("Expected line {} to start 'E\\((.+)\\)[\\s]*=[\\s]*([^\\s]+) eV': {}".format(lineno, 
line)) energies[energy_match[0][0]] = float(energy_match[0][1]) diff --git a/aiida_crystal17/gulp/parsers/raw/parse_output_fit.py b/aiida_crystal17/gulp/parsers/raw/parse_output_fit.py index ab95bfa..5f8d784 100644 --- a/aiida_crystal17/gulp/parsers/raw/parse_output_fit.py +++ b/aiida_crystal17/gulp/parsers/raw/parse_output_fit.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. import re from aiida_crystal17 import __version__ @@ -17,125 +32,122 @@ def parse_file(file_obj, parser_class=None): 'parser_class': parser_class, 'parser_errors': [], 'parser_warnings': [], - "warnings": [], - "errors": [] + 'warnings': [], + 'errors': [] } if not lines: - return output, "ERROR_STDOUT_EMPTY" + return output, 'ERROR_STDOUT_EMPTY' lineno = 0 - section = "heading" + section = 'heading' while lineno < lines_length: line = lines[lineno] lineno += 1 - if line.strip().startswith("!! ERROR"): - output["errors"].append(line.strip()) + if line.strip().startswith('!! ERROR'): + output['errors'].append(line.strip()) continue - if line.strip().startswith("!! WARNING"): - output["warnings"].append(line.strip()) + if line.strip().startswith('!! 
WARNING'): + output['warnings'].append(line.strip()) continue - if section == "heading": - version = re.findall("\\* Version = ([0-9]+\\.[0-9]+\\.[0-9]+) \\* Last modified", line) + if section == 'heading': + version = re.findall('\\* Version = ([0-9]+\\.[0-9]+\\.[0-9]+) \\* Last modified', line) if version: output['gulp_version'] = version[0] continue - configs = re.findall("Total number of configurations input =[\\s]+([0-9]+)", line) + configs = re.findall('Total number of configurations input =[\\s]+([0-9]+)', line) if configs: output['total_configurations'] = int(configs[0]) continue - if line.strip().startswith("* General input information"): - section = "pre_info" + if line.strip().startswith('* General input information'): + section = 'pre_info' continue - if line.strip().startswith("Start of fitting :"): - section = "fitting" + if line.strip().startswith('Start of fitting :'): + section = 'fitting' continue - if section == "fitting": - if line.strip().startswith("Cycle:"): - output["num_cycles"] = output.get("num_cycles", 0) + 1 + if section == 'fitting': + if line.strip().startswith('Cycle:'): + output['num_cycles'] = output.get('num_cycles', 0) + 1 - if line.strip().startswith("**** Fit completed successfully ****"): - output["fit_succeeded"] = True - section = "post_info" + if line.strip().startswith('**** Fit completed successfully ****'): + output['fit_succeeded'] = True + section = 'post_info' continue - if line.strip().startswith("**** No lower sum of squares could be found ****"): - output["errors"].append("**** No lower sum of squares could be found ****") - output["fit_succeeded"] = False - section = "post_info" + if line.strip().startswith('**** No lower sum of squares could be found ****'): + output['errors'].append('**** No lower sum of squares could be found ****') + output['fit_succeeded'] = False + section = 'post_info' continue - if line.strip().startswith("**** No. of variables exceeds no. 
of observables ****"): - output["errors"].append("**** No. of variables exceeds no. of observables ****") - output["fit_succeeded"] = False - section = "post_info" + if line.strip().startswith('**** No. of variables exceeds no. of observables ****'): + output['errors'].append('**** No. of variables exceeds no. of observables ****') + output['fit_succeeded'] = False + section = 'post_info' continue - if line.strip().startswith("Final sum of squares") and not output["fit_succeeded"]: - output["fit_succeeded"] = False - section = "post_info" + if line.strip().startswith('Final sum of squares') and not output['fit_succeeded']: + output['fit_succeeded'] = False + section = 'post_info' continue - if section == "post_info": + if section == 'post_info': try: - if line.strip().startswith("Final values of parameters"): - lineno, output["final_parameters"] = read_gulp_table( - lines, lineno, ["parameter", "original", "final", "type"], [int, float, float, assess_species]) + if line.strip().startswith('Final values of parameters'): + lineno, output['final_parameters'] = read_gulp_table( + lines, lineno, ['parameter', 'original', 'final', 'type'], [int, float, float, assess_species]) continue - if line.strip().startswith("Final values of numerical parameter gradients"): - lineno, output["final_gradients"] = read_gulp_table( - lines, lineno, ["parameter", "gradient", "type"], [int, float, assess_species]) + if line.strip().startswith('Final values of numerical parameter gradients'): + lineno, output['final_gradients'] = read_gulp_table( + lines, lineno, ['parameter', 'gradient', 'type'], [int, float, assess_species]) continue - if line.strip().startswith("Final values of residuals"): - lineno, output["final_residuals"] = read_gulp_table( - lines, lineno, - ["observable", "type", "value", "calculated", "residual", "error"], + if line.strip().startswith('Final values of residuals'): + lineno, output['final_residuals'] = read_gulp_table( + lines, lineno, ['observable', 'type', 
'value', 'calculated', 'residual', 'error'], [int, str, float, float, float, float]) continue - if line.strip().startswith("Comparison of initial and final observables"): - lineno, output["calculated_observables"] = read_gulp_table( - lines, lineno, - ["observable", "type", "value", "initial", "final"], + if line.strip().startswith('Comparison of initial and final observables'): + lineno, output['calculated_observables'] = read_gulp_table( + lines, lineno, ['observable', 'type', 'value', 'initial', 'final'], [int, str, float, float, float]) continue - if line.strip().startswith("Energy shifts for configurations"): - lineno, output["energy_shifts"] = read_gulp_table( - lines, lineno, ["configuration", "energy", "scale_factor"], [int, float, float]) + if line.strip().startswith('Energy shifts for configurations'): + lineno, output['energy_shifts'] = read_gulp_table( + lines, lineno, ['configuration', 'energy', 'scale_factor'], [int, float, float]) continue - if line.strip().startswith("Peak dynamic memory used"): + if line.strip().startswith('Peak dynamic memory used'): # 'Peak dynamic memory used = 0.56 MB' - mem_match = re.findall("Peak dynamic memory used[\\s]*=[\\s]*([+-]?[0-9]*[.]?[0-9]+) MB", line) + mem_match = re.findall('Peak dynamic memory used[\\s]*=[\\s]*([+-]?[0-9]*[.]?[0-9]+) MB', line) if mem_match: - output["peak_dynamic_memory_mb"] = float(mem_match[0]) + output['peak_dynamic_memory_mb'] = float(mem_match[0]) continue - if line.strip().startswith("Total CPU time"): + if line.strip().startswith('Total CPU time'): # 'Total CPU time 0.0187' - mem_match = re.findall("Total CPU time[\\s]*([+-]?[0-9]*[.]?[0-9]+)", line) + mem_match = re.findall('Total CPU time[\\s]*([+-]?[0-9]*[.]?[0-9]+)', line) if mem_match: - output["total_time_second"] = float(mem_match[0]) + output['total_time_second'] = float(mem_match[0]) continue except IOError as err: - output["parser_errors"].append(str(err)) + output['parser_errors'].append(str(err)) continue - return output, 
assign_exit_code( - output.get("fit_succeeded", False), output["errors"], output["parser_errors"]) + return output, assign_exit_code(output.get('fit_succeeded', False), output['errors'], output['parser_errors']) def assess_species(value): @@ -148,21 +160,21 @@ def assess_species(value): """ # TODO assess_species - if value.startswith("Energy shift"): + if value.startswith('Energy shift'): return value[:12] return value def assign_exit_code(fit_succeeded, gulp_errors, parser_errors): """ given the error messages, assign an exit code """ - if "**** No. of variables exceeds no. of observables ****" in gulp_errors: - return "ERROR_NOT_ENOUGH_OBSERVABLES" - elif "**** No lower sum of squares could be found ****" in gulp_errors: - return "ERROR_FIT_UNSUCCESFUL" + if '**** No. of variables exceeds no. of observables ****' in gulp_errors: + return 'ERROR_NOT_ENOUGH_OBSERVABLES' + elif '**** No lower sum of squares could be found ****' in gulp_errors: + return 'ERROR_FIT_UNSUCCESFUL' elif not fit_succeeded: - return "ERROR_FIT_UNSUCCESFUL" + return 'ERROR_FIT_UNSUCCESFUL' elif gulp_errors: - return "ERROR_GULP_UNKNOWN" + return 'ERROR_GULP_UNKNOWN' elif parser_errors: - return "ERROR_PARSING_STDOUT" + return 'ERROR_PARSING_STDOUT' return None diff --git a/aiida_crystal17/gulp/parsers/raw/parse_output_std.py b/aiida_crystal17/gulp/parsers/raw/parse_output_std.py index 035931a..e4f661d 100644 --- a/aiida_crystal17/gulp/parsers/raw/parse_output_std.py +++ b/aiida_crystal17/gulp/parsers/raw/parse_output_std.py @@ -1,11 +1,26 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ parse the main.gout file of a GULP run and create the required output nodes """ import re from aiida_crystal17 import __version__ -from aiida_crystal17.gulp.parsers.raw.parse_output_common import ( - read_gulp_table, read_energy_components, read_reaxff_econtribs) +from aiida_crystal17.gulp.parsers.raw.parse_output_common import (read_gulp_table, read_energy_components, + read_reaxff_econtribs) def parse_file(file_obj, parser_class=None, single_point_only=False): @@ -22,117 +37,115 @@ def parse_file(file_obj, parser_class=None, single_point_only=False): 'parser_class': parser_class, 'parser_errors': [], 'parser_warnings': [], - "warnings": [], - "errors": [], - "energy_units": "eV" + 'warnings': [], + 'errors': [], + 'energy_units': 'eV' } if not lines: - return output, "ERROR_STDOUT_EMPTY" + return output, 'ERROR_STDOUT_EMPTY' lineno = 0 - section = "heading" + section = 'heading' while lineno < lines_length: line = lines[lineno] lineno += 1 - if line.strip().startswith("!! ERROR"): - output["errors"].append(line.strip()) + if line.strip().startswith('!! ERROR'): + output['errors'].append(line.strip()) continue - if line.strip().startswith("!! WARNING"): - output["warnings"].append(line.strip()) + if line.strip().startswith('!! 
WARNING'): + output['warnings'].append(line.strip()) continue - if section == "heading": + if section == 'heading': - version = re.findall("\\* Version = ([0-9]+\\.[0-9]+\\.[0-9]+) \\* Last modified", line) + version = re.findall('\\* Version = ([0-9]+\\.[0-9]+\\.[0-9]+) \\* Last modified', line) if version: output['gulp_version'] = version[0] continue - if line.strip().startswith("* Output for configuration"): - section = "output" + if line.strip().startswith('* Output for configuration'): + section = 'output' continue if lineno >= lines_length: - output['parser_errors'].append("Reached end of file before finding output section") + output['parser_errors'].append('Reached end of file before finding output section') continue - if section == "output": + if section == 'output': - optimise_start = re.findall("Start of (bulk|surface|polymer) optimisation", line) + optimise_start = re.findall('Start of (bulk|surface|polymer) optimisation', line) if optimise_start: - output["opt_type"] = optimise_start[0] - section = "optimisation" + output['opt_type'] = optimise_start[0] + section = 'optimisation' continue - if section == "optimisation": + if section == 'optimisation': - if line.strip().startswith("**** Optimisation achieved ****"): - output["opt_succeeded"] = True - section = "post_opt" + if line.strip().startswith('**** Optimisation achieved ****'): + output['opt_succeeded'] = True + section = 'post_opt' continue - if "Conditions for a minimum have not been satisfied. However" in line: - output["opt_succeeded"] = True - section = "post_opt" - output['warnings'].append( - ("Conditions for a minimum have not been satisfied. " - "However no lower point can be found - treat results with caution")) + if 'Conditions for a minimum have not been satisfied. However' in line: + output['opt_succeeded'] = True + section = 'post_opt' + output['warnings'].append(('Conditions for a minimum have not been satisfied. 
' + 'However no lower point can be found - treat results with caution')) continue - if "No variables to optimise - single point performed" in line: - output["opt_succeeded"] = True - section = "post_opt" - output['warnings'].append("No variables to optimise - single point performed") + if 'No variables to optimise - single point performed' in line: + output['opt_succeeded'] = True + section = 'post_opt' + output['warnings'].append('No variables to optimise - single point performed') continue - if "**** Too many failed attempts to optimise ****" in line: - output["opt_succeeded"] = False - section = "post_opt" - output['errors'].append("**** Too many failed attempts to optimise ****") + if '**** Too many failed attempts to optimise ****' in line: + output['opt_succeeded'] = False + section = 'post_opt' + output['errors'].append('**** Too many failed attempts to optimise ****') continue - if "**** Maximum number of function calls has been reached ****" in line: - output["opt_succeeded"] = False - section = "post_opt" - output['errors'].append("**** Maximum number of function calls has been reached ****") + if '**** Maximum number of function calls has been reached ****' in line: + output['opt_succeeded'] = False + section = 'post_opt' + output['errors'].append('**** Maximum number of function calls has been reached ****') continue - if line.strip().startswith("Final energy"): - output["opt_succeeded"] = False - section = "post_opt" + if line.strip().startswith('Final energy'): + output['opt_succeeded'] = False + section = 'post_opt' output['parser_errors'].append("Reached final energy, before finding 'Optimisation achieved'") continue - if section == "output": + if section == 'output': - if line.strip().startswith("Components of energy :"): - energy, penergy = (("energy", "primitive_energy") if single_point_only - else ("initial_energy", "initial_primitive_energy")) + if line.strip().startswith('Components of energy :'): + energy, penergy = (('energy', 
'primitive_energy') if single_point_only else + ('initial_energy', 'initial_primitive_energy')) try: - output[energy], output[penergy], lineno = read_energy_components( - lines, lineno) + output[energy], output[penergy], lineno = read_energy_components(lines, lineno) except (IOError, ValueError) as err: - output["parser_errors"].append(str(err)) + output['parser_errors'].append(str(err)) continue # TODO convert this to energy if single-point calculation - if section == "post_opt": + if section == 'post_opt': - if line.strip().startswith("Components of energy :"): + if line.strip().startswith('Components of energy :'): try: - output["final_energy"], output["final_primitive_energy"], lineno = read_energy_components( + output['final_energy'], output['final_primitive_energy'], lineno = read_energy_components( lines, lineno) except (IOError, ValueError) as err: - output["parser_errors"].append(str(err)) + output['parser_errors'].append(str(err)) continue - if section == "output" or section == "optimisation": + if section == 'output' or section == 'optimisation': # will be in 'output' if single energy calculation if line.strip().startswith('ReaxFF : Energy contributions:'): @@ -140,71 +153,68 @@ def parse_file(file_obj, parser_class=None, single_point_only=False): try: output['energy_contributions'], lineno = read_reaxff_econtribs(lines, lineno) except (IOError, ValueError) as err: - output["parser_errors"].append(str(err)) + output['parser_errors'].append(str(err)) continue - if section == "output" or section == "post_opt": + if section == 'output' or section == 'post_opt': # will be in 'output' if single energy calculation # if line.strip().startswith("Final energy ="): # # this should be the same as the (primitive energy from the components section) # continue - if line.strip().startswith("Final fractional/Cartesian coordinates of atoms"): + if line.strip().startswith('Final fractional/Cartesian coordinates of atoms'): # output for surfaces and polymers try: - lineno, 
output['final_coords'] = read_gulp_table( - lines, lineno, - ["id", "label", "type", "x", "y", "z", "radius"], - [int, str, str, float, float, float, float]) + lineno, output['final_coords'] = read_gulp_table(lines, lineno, + ['id', 'label', 'type', 'x', 'y', 'z', 'radius'], + [int, str, str, float, float, float, float]) except (IOError, ValueError) as err: - output["parser_errors"].append(str(err)) + output['parser_errors'].append(str(err)) continue - if line.strip().startswith("Final charges from ReaxFF"): - lineno, output["reaxff_charges"] = read_gulp_table( - lines, lineno, ["index", "atomic_number", "charge"], [int, int, float]) + if line.strip().startswith('Final charges from ReaxFF'): + lineno, output['reaxff_charges'] = read_gulp_table(lines, lineno, ['index', 'atomic_number', 'charge'], + [int, int, float]) continue - if line.strip().startswith("Time to end of optimisation"): + if line.strip().startswith('Time to end of optimisation'): # 'Time to end of optimisation = 0.0899 seconds' - time_match = re.findall("Time to end of optimisation[\\s]*=[\\s]*([+-]?[0-9]*[.]?[0-9]+) seconds", line) + time_match = re.findall('Time to end of optimisation[\\s]*=[\\s]*([+-]?[0-9]*[.]?[0-9]+) seconds', line) if time_match: - output["opt_time_second"] = float(time_match[0]) + output['opt_time_second'] = float(time_match[0]) continue - if line.strip().startswith("Peak dynamic memory used"): + if line.strip().startswith('Peak dynamic memory used'): # 'Peak dynamic memory used = 0.56 MB' - mem_match = re.findall("Peak dynamic memory used[\\s]*=[\\s]*([+-]?[0-9]*[.]?[0-9]+) MB", line) + mem_match = re.findall('Peak dynamic memory used[\\s]*=[\\s]*([+-]?[0-9]*[.]?[0-9]+) MB', line) if mem_match: - output["peak_dynamic_memory_mb"] = float(mem_match[0]) + output['peak_dynamic_memory_mb'] = float(mem_match[0]) continue - if line.strip().startswith("Total CPU time"): + if line.strip().startswith('Total CPU time'): # 'Total CPU time 0.0187' - mem_match = re.findall("Total CPU 
time[\\s]*([+-]?[0-9]*[.]?[0-9]+)", line) + mem_match = re.findall('Total CPU time[\\s]*([+-]?[0-9]*[.]?[0-9]+)', line) if mem_match: - output["total_time_second"] = float(mem_match[0]) + output['total_time_second'] = float(mem_match[0]) continue return output, assign_exit_code( - output.get("opt_succeeded", None), - output["errors"], output["parser_errors"], - single_point_only) + output.get('opt_succeeded', None), output['errors'], output['parser_errors'], single_point_only) def assign_exit_code(opt_succeeded, gulp_errors, parser_errors, single_point_only): """ given the error messages, assign an exit code """ - if "**** Too many failed attempts to optimise ****" in gulp_errors: - return "ERROR_OPTIMISE_MAX_ATTEMPTS" - elif "**** Maximum number of function calls has been reached ****" in gulp_errors: - return "ERROR_OPTIMISE_MAX_CALLS" + if '**** Too many failed attempts to optimise ****' in gulp_errors: + return 'ERROR_OPTIMISE_MAX_ATTEMPTS' + elif '**** Maximum number of function calls has been reached ****' in gulp_errors: + return 'ERROR_OPTIMISE_MAX_CALLS' elif opt_succeeded is False and not single_point_only: - return "ERROR_OPTIMISE_UNSUCCESFUL" + return 'ERROR_OPTIMISE_UNSUCCESFUL' elif gulp_errors: - return "ERROR_GULP_UNHANDLED" + return 'ERROR_GULP_UNHANDLED' elif parser_errors: - return "ERROR_PARSING_STDOUT" + return 'ERROR_PARSING_STDOUT' elif opt_succeeded is None and not single_point_only: - return "ERROR_GULP_UNHANDLED" + return 'ERROR_GULP_UNHANDLED' return None diff --git a/aiida_crystal17/gulp/parsers/raw/write_geometry.py b/aiida_crystal17/gulp/parsers/raw/write_geometry.py index 56b621c..5d6f68b 100644 --- a/aiida_crystal17/gulp/parsers/raw/write_geometry.py +++ b/aiida_crystal17/gulp/parsers/raw/write_geometry.py @@ -1,11 +1,26 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. import numpy as np from aiida_crystal17.symmetry import convert_structure, operation_cart_to_frac -from aiida_crystal17.parsers.raw.gui_parse import get_crystal_type_name +from aiida_crystal17.parsers.raw.parse_fort34 import get_crystal_type_name from aiida_crystal17.validation import validate_against_schema -def create_geometry_lines(structure_data, symmetry_data=None, name="main-geometry"): +def create_geometry_lines(structure_data, symmetry_data=None, name='main-geometry'): """ create list of lines for geometry section of .gin Parameters @@ -24,11 +39,11 @@ def create_geometry_lines(structure_data, symmetry_data=None, name="main-geometr """ lines = ['name {}'.format(name)] - atoms = convert_structure(structure_data, "ase") + atoms = convert_structure(structure_data, 'ase') if sum(atoms.get_pbc()) == 1: if symmetry_data is not None: - raise NotImplementedError("cannot set symmetry data for 1D structures") + raise NotImplementedError('cannot set symmetry data for 1D structures') return create_1d_geometry(lines, atoms) if not all(atoms.get_pbc()): @@ -40,12 +55,12 @@ def create_geometry_lines(structure_data, symmetry_data=None, name="main-geometr pass # symmetry_data = structure_to_symmetry(structure_data) else: - validate_against_schema(symmetry_data, "symmetry.schema.json") + validate_against_schema(symmetry_data, 'symmetry.schema.json') # add cell vectors lines.append('vectors') for vector in atoms.cell: - lines.append("{0:.6f} {1:.6f} {2:.6f}".format(*vector)) + lines.append('{0:.6f} {1:.6f} {2:.6f}'.format(*vector)) # add atomic sites 
lines.append('cartesian') @@ -53,21 +68,19 @@ def create_geometry_lines(structure_data, symmetry_data=None, name="main-geometr if symmetry_data is not None: # if symmetry operations are specified, # then only symmetry inequivalent sites should be added - if "equivalent_sites" not in symmetry_data: + if 'equivalent_sites' not in symmetry_data: raise KeyError("symmetry data does not contain the 'equivalent_sites' key") - equivalent = symmetry_data["equivalent_sites"] + equivalent = symmetry_data['equivalent_sites'] if atoms.get_number_of_atoms() != len(equivalent): - raise ValueError("number of atomic sites != number of symmetry equivalent sites") + raise ValueError('number of atomic sites != number of symmetry equivalent sites') used_equivalents = [] for site, eq in zip(atoms, equivalent): if eq not in used_equivalents: - lines.append("{0} core {1:.6f} {2:.6f} {3:.6f}".format( - site.symbol, *site.position)) + lines.append('{0} core {1:.6f} {2:.6f} {3:.6f}'.format(site.symbol, *site.position)) used_equivalents.append(eq) else: for site in atoms: - lines.append("{0} core {1:.6f} {2:.6f} {3:.6f}".format( - site.symbol, *site.position)) + lines.append('{0} core {1:.6f} {2:.6f} {3:.6f}'.format(site.symbol, *site.position)) # TODO creating shell models @@ -80,30 +93,27 @@ def create_geometry_lines(structure_data, symmetry_data=None, name="main-geometr if crystal_type_name is None and hall_number is not None: crystal_type_name = get_crystal_type_name(hall_number) if crystal_type_name is not None: + if crystal_type_name in ['trigonal', 'rhombohedral']: + crystal_type_name = 'hexagonal' assert crystal_type_name in [ - "triclinic", "monoclinic", "orthorhombic", "tetragonal", - "hexagonal", "rhombohedral", "cubic" - ] - lines.append("symmetry_cell {}".format(crystal_type_name)) + 'triclinic', 'monoclinic', 'orthorhombic', 'tetragonal', 'hexagonal', 'rhombohedral', 'cubic' + ], crystal_type_name + lines.append('symmetry_cell {}'.format(crystal_type_name)) # add symmetry 
operations if symmetry_data is not None: - operations = symmetry_data["operations"] - if operations and symmetry_data["basis"] == "cartesian": - operations = operation_cart_to_frac( - operations, atoms.cell) + operations = symmetry_data['operations'] + if operations and symmetry_data['basis'] == 'cartesian': + operations = operation_cart_to_frac(operations, atoms.cell) for op in operations: if np.allclose(op, [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0]): # identity matrix is not required continue lines.append('symmetry_operator') - lines.append("{0:8.5f} {1:8.5f} {2:8.5f} {3:8.5f}".format( - op[0], op[3], op[6], op[9])) - lines.append("{0:8.5f} {1:8.5f} {2:8.5f} {3:8.5f}".format( - op[1], op[4], op[7], op[10])) - lines.append("{0:8.5f} {1:8.5f} {2:8.5f} {3:8.5f}".format( - op[2], op[5], op[8], op[11])) + lines.append('{0:8.5f} {1:8.5f} {2:8.5f} {3:8.5f}'.format(op[0], op[3], op[6], op[9])) + lines.append('{0:8.5f} {1:8.5f} {2:8.5f} {3:8.5f}'.format(op[1], op[4], op[7], op[10])) + lines.append('{0:8.5f} {1:8.5f} {2:8.5f} {3:8.5f}'.format(op[2], op[5], op[8], op[11])) return lines @@ -113,25 +123,23 @@ def create_1d_geometry(lines, atoms): # TODO creating shell models validate_1d_geometry(atoms) lines.append('pcell') - lines.append("{0:.6f}".format(atoms.cell[0][0])) + lines.append('{0:.6f}'.format(atoms.cell[0][0])) lines.append('pfractional') symbols = atoms.get_chemical_symbols() fcoords = atoms.get_scaled_positions() ccoords = atoms.positions for symbol, fcoords, ccoords in zip(symbols, fcoords, ccoords): - lines.append("{0} core {1:.6f} {2:.6f} {3:.6f}".format( - symbol, fcoords[0], ccoords[1], ccoords[2])) + lines.append('{0} core {1:.6f} {2:.6f} {3:.6f}'.format(symbol, fcoords[0], ccoords[1], ccoords[2])) return lines def validate_1d_geometry(structure): """ validate a 1-d structure """ if not list(structure.pbc) == [True, False, False]: - raise NotImplementedError("a 1-D structure can only be periodic in the x-direction") + raise NotImplementedError('a 1-D structure 
can only be periodic in the x-direction') expected_cell = np.eye(3) for i in range(3): expected_cell[i][i] = structure.cell[i][i] if not np.allclose(structure.cell, expected_cell): - raise NotImplementedError( - "a 1-D structure cell must be of the form " - "[[x, 0, 0], [0, y, 0], [0, 0, z]]: {}".format(structure.cell)) + raise NotImplementedError('a 1-D structure cell must be of the form ' + '[[x, 0, 0], [0, y, 0], [0, 0, z]]: {}'.format(structure.cell)) diff --git a/aiida_crystal17/gulp/parsers/raw/write_input.py b/aiida_crystal17/gulp/parsers/raw/write_input.py index 5fdd7b1..7d7dbae 100644 --- a/aiida_crystal17/gulp/parsers/raw/write_input.py +++ b/aiida_crystal17/gulp/parsers/raw/write_input.py @@ -1,4 +1,18 @@ - +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
import hashlib import io @@ -27,16 +41,16 @@ def __init__(self, outputs=None): """ self._outputs = {} if outputs is None else outputs self._content_lines = None - self._encoding = "utf-8" + self._encoding = 'utf-8' def get_content(self): if self._content_lines is None: - raise ValueError("content has not been set") - return "\n".join(self._content_lines) + raise ValueError('content has not been set') + return '\n'.join(self._content_lines) def get_content_lines(self): if self._content_lines is None: - raise ValueError("content has not been set") + raise ValueError('content has not been set') return self._content_lines[:] def get_content_hash(self): @@ -50,7 +64,7 @@ def write_content(self, file_like): """ content = six.u(self.get_content()) if isinstance(file_like, six.string_types): - with io.open(file_like, "w", encoding=self._encoding) as handle: + with io.open(file_like, 'w', encoding=self._encoding) as handle: handle.write(content) else: file_like.write(content) @@ -108,8 +122,7 @@ def get_other_option_lines(self, parameters): """ return [] - def create_content(self, structure, potential_lines, - parameters=None, symmetry=None): + def create_content(self, structure, potential_lines, parameters=None, symmetry=None): """create main input content for gulp.in Parameters @@ -132,10 +145,10 @@ def create_content(self, structure, potential_lines, if parameters is None: parameters = {} else: - if hasattr(parameters, "get_dict"): + if hasattr(parameters, 'get_dict'): parameters = parameters.get_dict() if symmetry is not None: - if hasattr(symmetry, "get_dict"): + if hasattr(symmetry, 'get_dict'): symmetry = symmetry.get_dict() # validation @@ -144,40 +157,40 @@ def create_content(self, structure, potential_lines, content = [] # keywords - content.append(" ".join(self.get_input_keywords(parameters))) - content.append("") + content.append(' '.join(self.get_input_keywords(parameters))) + content.append('') # TITLE if 'title' in parameters: - content.append("title") - 
content.append("{}".format(parameters["title"])) - content.append("end") - content.append("") + content.append('title') + content.append('{}'.format(parameters['title'])) + content.append('end') + content.append('') # GEOMETRY - content.append("# Geometry") + content.append('# Geometry') content.extend(self.get_geometry_lines(structure, symmetry)) - content.append("") + content.append('') # TODO kind specific inputs (e.g. initial charge)? # FORCE FIELD - content.append("# Force Field") + content.append('# Force Field') content.extend(potential_lines) - content.append("") + content.append('') # OTHER OPTIONS other_opts = self.get_other_option_lines(parameters) if other_opts: - content.append("# Other Options") + content.append('# Other Options') content.extend(other_opts) - content.append("") + content.append('') # EXTERNAL OUTPUT OPTIONS if self._outputs: - content.append("# External Outputs") + content.append('# External Outputs') for out_type, fname in self._outputs.items(): - content.append("output {0} {1}".format(out_type, fname)) - content.append("") + content.append('output {0} {1}'.format(out_type, fname)) + content.append('') self._content_lines = content return content @@ -209,7 +222,7 @@ class InputCreationSingle(InputCreationBase): class InputCreationOpt(InputCreationBase): def validate_parameters(self, parameters): - validate_against_schema(parameters, "gulp_optimize.schema.json") + validate_against_schema(parameters, 'gulp_optimize.schema.json') def get_input_keywords(self, parameters): keywords = ['optimise', 'verb', parameters['relax']['type']] @@ -231,15 +244,13 @@ def get_other_option_lines(self, parameters): lines = [] if parameters['relax'].get('pressure', False): - pressure, punits = get_pressure(parameters['relax']['pressure'], - parameters['units']) + pressure, punits = get_pressure(parameters['relax']['pressure'], parameters['units']) lines.append('pressure {0:.4f} {1}'.format(pressure, punits)) # NB: Causes energy to be replaced by enthalpy 
in calculations. # maximum number of optimisation steps (default 1000) if 'max_iterations' in parameters['minimize']: - lines.append('maxcyc opt {}'.format( - parameters['minimize']['max_iterations'])) + lines.append('maxcyc opt {}'.format(parameters['minimize']['max_iterations'])) # TODO how do these compare to tolerances from LAMMPS? # maximum parameter tolerance (default 0.00001) diff --git a/aiida_crystal17/gulp/parsers/raw/write_input_fitting.py b/aiida_crystal17/gulp/parsers/raw/write_input_fitting.py index ce05355..c61fdec 100644 --- a/aiida_crystal17/gulp/parsers/raw/write_input_fitting.py +++ b/aiida_crystal17/gulp/parsers/raw/write_input_fitting.py @@ -1,9 +1,29 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
from aiida_crystal17.gulp.parsers.raw.write_geometry import create_geometry_lines -def create_input_lines(potential, structures, observable_datas, - observables, delta=None, - dump_file="fitting.grs",): +def create_input_lines( + potential, + structures, + observable_datas, + observables, + delta=None, + dump_file='fitting.grs', +): """create the input file for a potential fitting Parameters @@ -34,44 +54,44 @@ def create_input_lines(potential, structures, observable_datas, snames = [] # intial key words - lines.append("fit noflags") - lines.append("") + lines.append('fit noflags') + lines.append('') if delta is not None: - lines.append("delta") - lines.append("{0:.8f}".format(delta)) - lines.append("") + lines.append('delta') + lines.append('{0:.8f}'.format(delta)) + lines.append('') # The following command makes a uniform shift # to the energies of all structures to remove # the constant offset => we are only fitting # the local curvature. - lines.extend(["shift", str(1.0)]) - lines.append("") + lines.extend(['shift', str(1.0)]) + lines.append('') for name in sorted(structures.keys()): snames.append(name) lines.extend(create_geometry_lines(structures[name], name=name)) - lines.append("") - lines.append("observables") + lines.append('') + lines.append('observables') for oname in sorted(observables.keys()): lines.append(oname) value, weighting = observables[oname](observable_datas[name]) - lines.append("{0:.8f} {1:.8f}".format(value, weighting)) + lines.append('{0:.8f} {1:.8f}'.format(value, weighting)) - lines.append("end") - lines.append("") + lines.append('end') + lines.append('') # Tell the program to fit the overall shift - lines.extend(["vary", "shift", "end"]) - lines.append("") + lines.extend(['vary', 'shift', 'end']) + lines.append('') # Force Field lines.extend(potential.get_input_lines()) - lines.append("") - lines.append("dump {}".format(dump_file)) + lines.append('') + lines.append('dump {}'.format(dump_file)) # NOTE can also dump every interval 
('noover' will output to separate files) return lines, snames diff --git a/aiida_crystal17/gulp/potentials/__init__.py b/aiida_crystal17/gulp/potentials/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/gulp/potentials/__init__.py +++ b/aiida_crystal17/gulp/potentials/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. diff --git a/aiida_crystal17/gulp/potentials/base.py b/aiida_crystal17/gulp/potentials/base.py index 0c98318..dacd06f 100644 --- a/aiida_crystal17/gulp/potentials/base.py +++ b/aiida_crystal17/gulp/potentials/base.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
from collections import namedtuple import copy import re @@ -5,8 +20,7 @@ from aiida_crystal17.validation import validate_against_schema from aiida_crystal17.gulp.potentials.common import filter_by_species -PotentialContent = namedtuple('PotentialContent', - ['content', 'number_of_flags', "number_flagged"]) +PotentialContent = namedtuple('PotentialContent', ['content', 'number_of_flags', 'number_flagged']) """used for returning the content creation for a potential Parameters @@ -20,97 +34,66 @@ """ -RE_SYMBOL = "([A-Z][a-z]?)" -RE_SYMBOL_TYPE = "([A-Z][a-z]?)\\s+(\\bc\\b|\\bcore\\b|\\bs\\b|\\bshell\\b)" +RE_SYMBOL = '([A-Z][a-z]?)' +RE_SYMBOL_TYPE = '([A-Z][a-z]?)\\s+(\\bc\\b|\\bcore\\b|\\bs\\b|\\bshell\\b)' # take from version 4.5.3 OPTION_TERMS = ( - '3coulomb', 'absdipolemoment', 'absolute_coordinates', 'accelerations', - 'accuracy', 'ala_cutoff', 'ala_disp', 'ala_processors', 'ala_shrink', - 'and', 'anisotropic_pressure', 'ashift', 'atomab', 'aver', - 'axilrod-teller', 'bacoscross', 'bacross', 'bagcross', 'balcross', - 'baskes', 'bbar', 'bcoscross', 'bcross', 'becke_johnson_c6', 'best', - 'blocksize', 'boattractive', 'bocharge', 'bocnswitch', 'bocntolerance', - 'bocoordination', 'bondtype', 'borepulsive', 'bornq', 'boselfenergy', - 'both', 'botwobody', 'box', 'brenner', 'bsm', 'bspline', 'buck4', - 'buckingham', 'buffered_lj', 'bulk_modulus', 'cartesian', 'catomic_stress', - 'caver', 'cell', 'cellstrain', 'centre', 'cfaver', 'cfm_fermi', - 'cfm_gaussian', 'cfm_harmonic', 'cfm_power', 'charge', 'chemshell_mode', - 'cmm', 'configurations', 'connect', 'constrain', 'contents', 'coordno', - 'cosh-spring', 'cosmoframe', 'cosmoshape', 'coulomb_subtract', 'covalent', - 'covexp', 'crossover', 'current_time', 'cutd', 'cutmany', 'cutp', 'cuts', - 'cv', 'cvec', 'cwolf', 'damped_dispersion', 'default_weight', 'deflist', - 'delay_field', 'delay_force', 'delf', 'delta', 'dhkl', 'discrete', - 'dispersion', 'ditto', 'dmaximum', 'dminimum', 'dump', 'eam_alloy', - 'eam_density', 
'eam_functional', 'eam_potential_shift', 'edip_accuracy', - 'edip_coordination', 'edip_threebody', 'edip_twobody', 'edip_zmax', - 'einstein', 'elastic', 'electronegativity', 'element', 'end_field', - 'end_force', 'energy', 'ensemble', 'entropy', 'epsilon/sigma', - 'equatorial', 'equilibration', 'erferfc', 'erfpot', 'erongi', - 'ewaldrealradius', 'exp2', 'exponential_three_body', 'exppowers', - 'external_force', 'external_potential', 'extracutoff', 'factor', 'fangle', - 'fbond', 'fc_supercell', 'fcartesian', 'fcell', 'fenergy', 'fermi-dirac', - 'ffractional', 'field', 'finite', 'fix_atom', 'forceconstant', - 'fractional', 'frequency', 'frqtol', 'ftol', 'fvectors', 'g3coulomb', - 'gamma_angular_steps', 'gamma_direction_of_approach', 'gastdamping', - 'gastiter', 'gastparam', 'gasttol', 'gcmcexistingmolecules', - 'gcmcmolecule', 'gcmcspecies', 'gcoulomb', 'gdcrit', 'general', 'genetic', - 'gexp', 'ghost_supercell', 'gmax', 'gradients', 'grid', 'grimme_c6', - 'gtol', 'harmonic', 'hfdlc', 'hfrefractive_index', 'high-fq', - 'hydrogen-bond', 'igauss', 'ignore', 'impurity', 'include', 'index_k', - 'initial_coordinates', 'intconserved', 'integrator', 'inter', - 'interstitial', 'intra', 'inversion', 'ionic', 'iterations', 'keyword', - 'kim_model', 'kpoints', 'lbfgs_order', 'lennard', 'library', 'lin3', - 'line', 'ljbuffered', 'lorentzian_tolerance', 'lowest_mode', 'manybody', - 'marvin', 'mass', 'maths', 'matrix_format', 'maxcyc', 'maximise', - 'maximum', 'mcchemicalpotential', 'mccreate', 'mcdestroy', 'mclowest', - 'mcmaxdisplacement', 'mcmaxrotation', 'mcmaxstrain', 'mcmeans', 'mcmove', - 'mcoutfreq', 'mcrotate', 'mcsample', 'mcstep', 'mcstrain', 'mcswap', - 'mctrial', 'mcvolume', 'mdarchive', 'mdmaxtemp', 'mdmaxvolume', - 'meam_density', 'meam_functional', 'meam_rhotype', 'meam_screening', - 'mei-davenport', 'mincell', 'minimum', 'mm3angle', 'mm3buck', 'mm3stretch', - 'mode', 'mode2a', 'momentum_correct', 'monopoleq', 'morse', 'move_2a_to_1', - 'murrell-mottram', 
'mutation', 'name', 'nebiterations', 'nebrandom', - 'nebreplica', 'nebspring', 'nebtangent', 'nebtolerance', 'nmr', 'nobond', - 'observables', 'odirection', 'omega', 'omega_af', 'omega_damping', - 'origin', 'outofplane', 'output', 'p_flexible', 'p_isotropic', 'parallel', - 'pcell', 'pdf', 'pfinite', 'pfractional', 'piezoelectric', 'plane_lj', - 'plumed_input', 'plumed_log', 'pointsperatom', 'poisson_ratio', - 'polarisability', 'polynomial', 'potential', 'potential_interpolation', - 'potgrid', 'potsites', 'pressure', 'print', 'production', 'project_dos', - 'pvector', 'qelectronegativity', 'qeqiter', 'qeqradius', 'qeqtol', 'qerfc', - 'qgrid', 'qincrement', 'qiterations', 'qmmm', 'qonsas', 'qoverr2', - 'qreaxff', 'qsolver', 'qtaper', 'qwolf', 'radial_force', 'random', - 'rangeforsmooth', 'rbins', 'rcartesian', 'rcell', 'rcspatial', - 'rdirection', 'reaction', 'reaxff0_bond', 'reaxff0_lonepair', - 'reaxff0_over', 'reaxff0_penalty', 'reaxff0_torsion', 'reaxff0_valence', - 'reaxff0_vdw', 'reaxff1_angle', 'reaxff1_include_under', - 'reaxff1_lonepair', 'reaxff1_morse', 'reaxff1_over', 'reaxff1_radii', - 'reaxff1_under', 'reaxff1_valence', 'reaxff2_bo', 'reaxff2_bond', - 'reaxff2_morse', 'reaxff2_over', 'reaxff2_pen', 'reaxff3_angle', - 'reaxff3_conjugation', 'reaxff3_hbond', 'reaxff3_pen', 'reaxff4_torsion', - 'reaxff_chi', 'reaxff_gamma', 'reaxff_mu', 'reaxff_q0', 'reaxff_qshell', - 'reaxff_r12', 'reaxfftol', 'region_1', 'reldef', 'reperfc', 'resetvectors', - 'rfractional', 'rmax', 'rspeed', 'rtol', 'ryckaert', 'rydberg', 'sample', - 'sasexclude', 'sasparticles', 'sbulkenergy', 'scale', 'scan_cell', 'scell', - 'scmaxsearch', 'sdlc', 'seed', 'segmentsperatom', 'sfinite', 'sfractional', - 'shear_modulus', 'shellmass', 'shift', 'shrink', 'siginc', 'size', - 'slater', 'slower', 'smelectronegativity', 'solventepsilon', - 'solventradius', 'solventrmax', 'spacegroup', 'species', 'spline', 'split', - 'spring', 'sqomega', 'squaredharmonic', 'srefractive_index', 'sregion2', - 
'srglue', 'sshift', 'start', 'static', 'stepmx', 'stop', - 'strain_derivative', 'stress', 'supercell', 'svectors', 'sw2', 'sw2jb', - 'sw3', 'sw3jb', 'switch_minimiser', 'switch_stepmx', 'symbol', - 'symmetry_cell', 'symmetry_number', 'symmetry_operator', 'synciterations', - 'syncsteps', 'synctolerance', 'tau_barostat', 'tau_thermostat', - 'td_external_force', 'td_field', 'temperature', 'terse', 'tether', - 'three-body', 'threshold', 'time', 'timestep', 'title', 'torangle', - 'torcosangle', 'torexp', 'torharm', 'torsion', 'tortaper', 'totalenergy', - 'tournament', 'tpxo', 'translate', 'tscale', 'tsuneyuki', 'ttol', 'twist', - 'uff1', 'uff3', 'uff4', 'uff_bondorder', 'uffoop', 'unfreeze', 'unique', - 'units', 'update', 'urey-bradley', 'vacancy', 'variables', 'vbo_twobody', - 'vdw', 'vectors', 'velocities', 'volume', 'weight', 'wmax', 'wmin', - 'write', 'xangleangle', 'xcosangleangle', 'xoutofplane', 'xtol', + '3coulomb', 'absdipolemoment', 'absolute_coordinates', 'accelerations', 'accuracy', 'ala_cutoff', 'ala_disp', + 'ala_processors', 'ala_shrink', 'and', 'anisotropic_pressure', 'ashift', 'atomab', 'aver', 'axilrod-teller', + 'bacoscross', 'bacross', 'bagcross', 'balcross', 'baskes', 'bbar', 'bcoscross', 'bcross', 'becke_johnson_c6', + 'best', 'blocksize', 'boattractive', 'bocharge', 'bocnswitch', 'bocntolerance', 'bocoordination', 'bondtype', + 'borepulsive', 'bornq', 'boselfenergy', 'both', 'botwobody', 'box', 'brenner', 'bsm', 'bspline', 'buck4', + 'buckingham', 'buffered_lj', 'bulk_modulus', 'cartesian', 'catomic_stress', 'caver', 'cell', 'cellstrain', 'centre', + 'cfaver', 'cfm_fermi', 'cfm_gaussian', 'cfm_harmonic', 'cfm_power', 'charge', 'chemshell_mode', 'cmm', + 'configurations', 'connect', 'constrain', 'contents', 'coordno', 'cosh-spring', 'cosmoframe', 'cosmoshape', + 'coulomb_subtract', 'covalent', 'covexp', 'crossover', 'current_time', 'cutd', 'cutmany', 'cutp', 'cuts', 'cv', + 'cvec', 'cwolf', 'damped_dispersion', 'default_weight', 'deflist', 
'delay_field', 'delay_force', 'delf', 'delta', + 'dhkl', 'discrete', 'dispersion', 'ditto', 'dmaximum', 'dminimum', 'dump', 'eam_alloy', 'eam_density', + 'eam_functional', 'eam_potential_shift', 'edip_accuracy', 'edip_coordination', 'edip_threebody', 'edip_twobody', + 'edip_zmax', 'einstein', 'elastic', 'electronegativity', 'element', 'end_field', 'end_force', 'energy', 'ensemble', + 'entropy', 'epsilon/sigma', 'equatorial', 'equilibration', 'erferfc', 'erfpot', 'erongi', 'ewaldrealradius', 'exp2', + 'exponential_three_body', 'exppowers', 'external_force', 'external_potential', 'extracutoff', 'factor', 'fangle', + 'fbond', 'fc_supercell', 'fcartesian', 'fcell', 'fenergy', 'fermi-dirac', 'ffractional', 'field', + 'finite', 'fix_atom', 'forceconstant', 'fractional', 'frequency', 'frqtol', 'ftol', 'fvectors', 'g3coulomb', + 'gamma_angular_steps', 'gamma_direction_of_approach', 'gastdamping', 'gastiter', 'gastparam', 'gasttol', + 'gcmcexistingmolecules', 'gcmcmolecule', 'gcmcspecies', 'gcoulomb', 'gdcrit', 'general', 'genetic', 'gexp', + 'ghost_supercell', 'gmax', 'gradients', 'grid', 'grimme_c6', 'gtol', 'harmonic', 'hfdlc', 'hfrefractive_index', + 'high-fq', 'hydrogen-bond', 'igauss', 'ignore', 'impurity', 'include', 'index_k', 'initial_coordinates', + 'intconserved', 'integrator', 'inter', 'interstitial', 'intra', 'inversion', 'ionic', 'iterations', 'keyword', + 'kim_model', 'kpoints', 'lbfgs_order', 'lennard', 'library', 'lin3', 'line', 'ljbuffered', 'lorentzian_tolerance', + 'lowest_mode', 'manybody', 'marvin', 'mass', 'maths', 'matrix_format', 'maxcyc', 'maximise', 'maximum', + 'mcchemicalpotential', 'mccreate', 'mcdestroy', 'mclowest', 'mcmaxdisplacement', 'mcmaxrotation', 'mcmaxstrain', + 'mcmeans', 'mcmove', 'mcoutfreq', 'mcrotate', 'mcsample', 'mcstep', 'mcstrain', 'mcswap', 'mctrial', 'mcvolume', + 'mdarchive', 'mdmaxtemp', 'mdmaxvolume', 'meam_density', 'meam_functional', 'meam_rhotype', 'meam_screening', + 'mei-davenport', 'mincell', 'minimum', 'mm3angle', 
'mm3buck', 'mm3stretch', 'mode', 'mode2a', 'momentum_correct', + 'monopoleq', 'morse', 'move_2a_to_1', 'murrell-mottram', 'mutation', 'name', 'nebiterations', 'nebrandom', + 'nebreplica', 'nebspring', 'nebtangent', 'nebtolerance', 'nmr', 'nobond', 'observables', 'odirection', 'omega', + 'omega_af', 'omega_damping', 'origin', 'outofplane', 'output', 'p_flexible', 'p_isotropic', 'parallel', 'pcell', + 'pdf', 'pfinite', 'pfractional', 'piezoelectric', 'plane_lj', 'plumed_input', 'plumed_log', 'pointsperatom', + 'poisson_ratio', 'polarisability', 'polynomial', 'potential', 'potential_interpolation', 'potgrid', 'potsites', + 'pressure', 'print', 'production', 'project_dos', 'pvector', 'qelectronegativity', 'qeqiter', 'qeqradius', 'qeqtol', + 'qerfc', 'qgrid', 'qincrement', 'qiterations', 'qmmm', 'qonsas', 'qoverr2', 'qreaxff', 'qsolver', 'qtaper', 'qwolf', + 'radial_force', 'random', 'rangeforsmooth', 'rbins', 'rcartesian', 'rcell', 'rcspatial', 'rdirection', 'reaction', + 'reaxff0_bond', 'reaxff0_lonepair', 'reaxff0_over', 'reaxff0_penalty', 'reaxff0_torsion', 'reaxff0_valence', + 'reaxff0_vdw', 'reaxff1_angle', 'reaxff1_include_under', 'reaxff1_lonepair', 'reaxff1_morse', 'reaxff1_over', + 'reaxff1_radii', 'reaxff1_under', 'reaxff1_valence', 'reaxff2_bo', 'reaxff2_bond', 'reaxff2_morse', 'reaxff2_over', + 'reaxff2_pen', 'reaxff3_angle', 'reaxff3_conjugation', 'reaxff3_hbond', 'reaxff3_pen', 'reaxff4_torsion', + 'reaxff_chi', 'reaxff_gamma', 'reaxff_mu', 'reaxff_q0', 'reaxff_qshell', 'reaxff_r12', 'reaxfftol', 'region_1', + 'reldef', 'reperfc', 'resetvectors', 'rfractional', 'rmax', 'rspeed', 'rtol', 'ryckaert', 'rydberg', 'sample', + 'sasexclude', 'sasparticles', 'sbulkenergy', 'scale', 'scan_cell', 'scell', 'scmaxsearch', 'sdlc', 'seed', + 'segmentsperatom', 'sfinite', 'sfractional', 'shear_modulus', 'shellmass', 'shift', 'shrink', 'siginc', 'size', + 'slater', 'slower', 'smelectronegativity', 'solventepsilon', 'solventradius', 'solventrmax', 'spacegroup', + 
'species', 'spline', 'split', 'spring', 'sqomega', 'squaredharmonic', 'srefractive_index', 'sregion2', 'srglue', + 'sshift', 'start', 'static', 'stepmx', 'stop', 'strain_derivative', 'stress', 'supercell', 'svectors', 'sw2', + 'sw2jb', 'sw3', 'sw3jb', 'switch_minimiser', 'switch_stepmx', 'symbol', 'symmetry_cell', 'symmetry_number', + 'symmetry_operator', 'synciterations', 'syncsteps', 'synctolerance', 'tau_barostat', 'tau_thermostat', + 'td_external_force', 'td_field', 'temperature', 'terse', 'tether', 'three-body', 'threshold', 'time', 'timestep', + 'title', 'torangle', 'torcosangle', 'torexp', 'torharm', 'torsion', 'tortaper', 'totalenergy', 'tournament', 'tpxo', + 'translate', 'tscale', 'tsuneyuki', 'ttol', 'twist', 'uff1', 'uff3', 'uff4', 'uff_bondorder', 'uffoop', 'unfreeze', + 'unique', 'units', 'update', 'urey-bradley', 'vacancy', 'variables', 'vbo_twobody', 'vdw', 'vectors', 'velocities', + 'volume', 'weight', 'wmax', 'wmin', 'write', 'xangleangle', 'xcosangleangle', 'xoutofplane', 'xtol', 'youngs_modulus', 'zbl') + # Note: 'static' should actually be 'static dielectric', and 'high-fq' 'high-fq dielectric' @@ -128,7 +111,7 @@ class PotentialWriterAbstract(object): @classmethod def get_description(cls): """return description of the potential type""" - return "" + return '' @classmethod def get_schema(cls): @@ -222,24 +205,21 @@ def create_content(self, data, species_filter=None, fitting_data=None): schema = self.get_schema() validate_against_schema(data, schema) # test that e.g. 
'1-2' and '2-1' aren't present - if "2body" in data: + if '2body' in data: bonds = [] - for indices in data["2body"]: - index_set = set(indices.split("-")) + for indices in data['2body']: + index_set = set(indices.split('-')) if index_set in bonds: - raise AssertionError( - "both {0}-{1} and {1}-{0} 2body keys exist in the data" - .format(*index_set)) + raise AssertionError('both {0}-{1} and {1}-{0} 2body keys exist in the data'.format(*index_set)) bonds.append(index_set) # test that e.g. '1-2-3' and '3-2-1' aren't present (2 is the pivot atom) - if "3body" in data: + if '3body' in data: angles = [] - for indices in data["3body"]: - i1, i2, i3 = indices.split("-") + for indices in data['3body']: + i1, i2, i3 = indices.split('-') if (i1, i2, i3) in angles: - raise AssertionError( - "both {0}-{1}-{2} and {2}-{1}-{0} 3body keys exist in the data" - .format(i1, i2, i3)) + raise AssertionError('both {0}-{1}-{2} and {2}-{1}-{0} 3body keys exist in the data'.format( + i1, i2, i3)) angles.append((i1, i2, i3)) angles.append((i3, i2, i1)) @@ -252,10 +232,9 @@ def create_content(self, data, species_filter=None, fitting_data=None): validate_against_schema(fitting_data, fit_schema) if species_filter is not None: fitting_data = filter_by_species(fitting_data, species_filter) - if fitting_data["species"] != data["species"]: - raise AssertionError( - "the fitting data species ({}) must be equal to the data species ({})" - .format(fitting_data["species"], data["species"])) + if fitting_data['species'] != data['species']: + raise AssertionError('the fitting data species ({}) must be equal to the data species ({})'.format( + fitting_data['species'], data['species'])) # TODO same checks as main data and possibly switch 2body/3body indices to line up with those for main data return self._make_string(data, fitting_data=fitting_data) @@ -283,10 +262,7 @@ def read_exising(self, lines): raise NotImplementedError @staticmethod - def read_atom_section(lines, - lineno, - number_atoms, - 
global_args=None): + def read_atom_section(lines, lineno, number_atoms, global_args=None): """read a section of a potential file, e.g. :: @@ -327,7 +303,7 @@ def read_atom_section(lines, line = lines[lineno] first_term = line.strip().split()[0] # ignore comment lines - if first_term == "#": + if first_term == '#': lineno += 1 continue # break if we find the next section @@ -337,48 +313,38 @@ def read_atom_section(lines, # TODO ignore comments at end of line # check for breaking lines - if line.strip().endswith(" &"): + if line.strip().endswith(' &'): lineno += 1 - line = line.strip()[:-2] + " " + lines[lineno].strip() + line = line.strip()[:-2] + ' ' + lines[lineno].strip() # check for lines containing both atom symbols and types (core/shell) match_sym_type = re.findall( - "^{}\\s+(.+)\\s*$".format("\\s+".join( - [RE_SYMBOL_TYPE for _ in range(number_atoms)])), - line.strip()) + '^{}\\s+(.+)\\s*$'.format('\\s+'.join([RE_SYMBOL_TYPE for _ in range(number_atoms)])), line.strip()) # check for lines containing only atom symbols (assume types to be core) - match_sym = re.findall( - "^{}\\s+(.+)\\s*$".format("\\s+".join( - [RE_SYMBOL for _ in range(number_atoms)])), line.strip()) + match_sym = re.findall('^{}\\s+(.+)\\s*$'.format('\\s+'.join([RE_SYMBOL for _ in range(number_atoms)])), + line.strip()) # TODO also match atomic numbers (and mixed type / no type) if match_sym_type: result = list(match_sym_type[0]) index = [] for _ in range(number_atoms): symbol = result[0] - stype = {"c": "core", "s": "shell"}[result[1][0]] - index.append("{} {}".format(symbol, stype)) + stype = {'c': 'core', 's': 'shell'}[result[1][0]] + index.append('{} {}'.format(symbol, stype)) result = result[2:] - results[tuple(index)] = { - "values": result[0], - "global": global_args - } + results[tuple(index)] = {'values': result[0], 'global': global_args} symbol_set.update(index) elif match_sym: result = list(match_sym[0]) index = [] for _ in range(number_atoms): symbol = result[0] - 
index.append("{} {}".format(symbol, "core")) + index.append('{} {}'.format(symbol, 'core')) result = result[1:] - results[tuple(index)] = { - "values": result[0], - "global": global_args - } + results[tuple(index)] = {'values': result[0], 'global': global_args} symbol_set.update(index) else: - raise IOError( - "expected line to be of form " - "'symbol1 symbol2 ... variables': {}".format(line)) + raise IOError('expected line to be of form ' + "'symbol1 symbol2 ... variables': {}".format(line)) lineno += 1 return lineno - 1, symbol_set, results diff --git a/aiida_crystal17/gulp/potentials/common.py b/aiida_crystal17/gulp/potentials/common.py index 633e5a0..57cf217 100644 --- a/aiida_crystal17/gulp/potentials/common.py +++ b/aiida_crystal17/gulp/potentials/common.py @@ -1,6 +1,21 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
import copy -INDEX_SEP = "-" +INDEX_SEP = '-' def filter_by_species(data, species): @@ -28,22 +43,20 @@ def filter_by_species(data, species): """ species = sorted(list(set(species))) - if not set(species).issubset(data["species"]): - raise AssertionError( - "the filter set ({}) is not a subset of the available species ({})".format( - set(species), set(data["species"]) - )) + if not set(species).issubset(data['species']): + raise AssertionError('the filter set ({}) is not a subset of the available species ({})'.format( + set(species), set(data['species']))) data = copy.deepcopy(data) - indices = set([str(i) for i, s in enumerate(data["species"]) if s in species]) + indices = set([str(i) for i, s in enumerate(data['species']) if s in species]) def convert_indices(key): - return INDEX_SEP.join([str(species.index(data["species"][int(k)])) for k in key.split(INDEX_SEP)]) + return INDEX_SEP.join([str(species.index(data['species'][int(k)])) for k in key.split(INDEX_SEP)]) for key in ['1body', '2body', '3body', '4body']: if key not in data: continue data[key] = {convert_indices(k): v for k, v in data[key].items() if indices.issuperset(k.split(INDEX_SEP))} - data["species"] = species + data['species'] = species return data diff --git a/aiida_crystal17/gulp/potentials/lj.py b/aiida_crystal17/gulp/potentials/lj.py index f2fd2ea..23c4f93 100644 --- a/aiida_crystal17/gulp/potentials/lj.py +++ b/aiida_crystal17/gulp/potentials/lj.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU Lesser General Public License for more details. from aiida_crystal17.gulp.potentials.base import PotentialWriterAbstract, PotentialContent from aiida_crystal17.gulp.potentials.common import INDEX_SEP from aiida_crystal17.validation import load_schema @@ -10,15 +25,15 @@ class PotentialWriterLJ(PotentialWriterAbstract): @classmethod def get_description(cls): - return "Lennard-Jones potential, of the form; E = A/r**m - B/r**n" + return 'Lennard-Jones potential, of the form; E = A/r**m - B/r**n' @classmethod def _get_schema(cls): - return load_schema("potential.lj.schema.json") + return load_schema('potential.lj.schema.json') @classmethod def _get_fitting_schema(cls): - return load_schema("fitting.lj.schema.json") + return load_schema('fitting.lj.schema.json') def _make_string(self, data, fitting_data=None): """write reaxff data in GULP input format @@ -42,35 +57,29 @@ def _make_string(self, data, fitting_data=None): total_flags = 0 num_fit = 0 - for indices in sorted(data["2body"]): - species = [ - "{:7s}".format(data["species"][int(i)]) - for i in indices.split(INDEX_SEP) - ] - values = data["2body"][indices] - lines.append("lennard {lj_m} {lj_n}".format( - lj_m=values.get("lj_m", 12), lj_n=values.get("lj_n", 6))) - if "lj_rmin" in values: - values_string = "{lj_A:.8E} {lj_B:.8E} {lj_rmin:8.5f} {lj_rmax:8.5f}".format( - **values) + for indices in sorted(data['2body']): + species = ['{:7s}'.format(data['species'][int(i)]) for i in indices.split(INDEX_SEP)] + values = data['2body'][indices] + lines.append('lennard {lj_m} {lj_n}'.format(lj_m=values.get('lj_m', 12), lj_n=values.get('lj_n', 6))) + if 'lj_rmin' in values: + values_string = '{lj_A:.8E} {lj_B:.8E} {lj_rmin:8.5f} {lj_rmax:8.5f}'.format(**values) else: - values_string = "{lj_A:.8E} {lj_B:.8E} {lj_rmax:8.5f}".format( - **values) + values_string = '{lj_A:.8E} {lj_B:.8E} {lj_rmax:8.5f}'.format(**values) total_flags += 2 if fitting_data is not None: flag_a = flag_b = 0 - if "lj_A" in 
fitting_data.get("2body", {}).get(indices, []): + if 'lj_A' in fitting_data.get('2body', {}).get(indices, []): flag_a = 1 - if "lj_B" in fitting_data.get("2body", {}).get(indices, []): + if 'lj_B' in fitting_data.get('2body', {}).get(indices, []): flag_b = 1 num_fit += flag_a + flag_b - values_string += " {} {}".format(flag_a, flag_b) + values_string += ' {} {}'.format(flag_a, flag_b) - lines.append(" ".join(species) + " " + values_string) + lines.append(' '.join(species) + ' ' + values_string) - return PotentialContent("\n".join(lines), total_flags, num_fit) + return PotentialContent('\n'.join(lines), total_flags, num_fit) def read_exising(self, lines): """read an existing potential file @@ -96,54 +105,46 @@ def read_exising(self, lines): while lineno < len(lines): line = lines[lineno] - if line.strip().startswith("lennard"): + if line.strip().startswith('lennard'): meta_values = line.strip().split() if len(meta_values) != 3: - raise IOError( - "expected `lennard` option to have only m & n variables: {}" - .format(line)) + raise IOError('expected `lennard` option to have only m & n variables: {}'.format(line)) try: lj_m = int(meta_values[1]) lj_n = int(meta_values[2]) except ValueError: - raise IOError( - "expected `lennard` option to have only (integer) m & n variables: {}" - .format(line)) + raise IOError('expected `lennard` option to have only (integer) m & n variables: {}'.format(line)) lineno, sset, results = self.read_atom_section( - lines, lineno + 1, - number_atoms=2, - global_args={ - "lj_m": lj_m, - "lj_n": lj_n + lines, lineno + 1, number_atoms=2, global_args={ + 'lj_m': lj_m, + 'lj_n': lj_n }) symbol_set.update(sset) terms.update(results) lineno += 1 - pot_data = {"species": sorted(symbol_set), "2body": {}} + pot_data = {'species': sorted(symbol_set), '2body': {}} for key, value in terms.items(): - indices = "-".join( - [str(pot_data["species"].index(term)) for term in key]) - variables = value["values"].split() + indices = 
'-'.join([str(pot_data['species'].index(term)) for term in key]) + variables = value['values'].split() if len(variables) in [3, 5]: - pot_data["2body"][indices] = { - "lj_m": value["global"]["lj_m"], - "lj_n": value["global"]["lj_n"], - "lj_A": float(variables[0]), - "lj_B": float(variables[1]), - "lj_rmax": float(variables[2]) + pot_data['2body'][indices] = { + 'lj_m': value['global']['lj_m'], + 'lj_n': value['global']['lj_n'], + 'lj_A': float(variables[0]), + 'lj_B': float(variables[1]), + 'lj_rmax': float(variables[2]) } elif len(variables) in [4, 6]: - pot_data["2body"][indices] = { - "lj_m": value["global"]["lj_m"], - "lj_n": value["global"]["lj_n"], - "lj_A": float(variables[0]), - "lj_B": float(variables[1]), - "lj_rmin": float(variables[2]), - "lj_rmax": float(variables[3]) + pot_data['2body'][indices] = { + 'lj_m': value['global']['lj_m'], + 'lj_n': value['global']['lj_n'], + 'lj_A': float(variables[0]), + 'lj_B': float(variables[1]), + 'lj_rmin': float(variables[2]), + 'lj_rmax': float(variables[3]) } else: - raise IOError( - "expected 3, 4, 5 or 6 variables: {}".format(value)) + raise IOError('expected 3, 4, 5 or 6 variables: {}'.format(value)) return pot_data diff --git a/aiida_crystal17/gulp/potentials/raw_reaxff.py b/aiida_crystal17/gulp/potentials/raw_reaxff.py index 07f1ded..cb10c2b 100644 --- a/aiida_crystal17/gulp/potentials/raw_reaxff.py +++ b/aiida_crystal17/gulp/potentials/raw_reaxff.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
from collections import OrderedDict import textwrap @@ -5,73 +20,47 @@ from aiida_crystal17.validation import validate_against_schema from aiida_crystal17.gulp.potentials.common import INDEX_SEP - -KEYS_GLOBAL = ( - 'reaxff0_boc1', 'reaxff0_boc2', 'reaxff3_coa2', - 'Triple bond stabilisation 1', 'Triple bond stabilisation 2', - 'C2-correction', 'reaxff0_ovun6', 'Triple bond stabilisation', - 'reaxff0_ovun7', 'reaxff0_ovun8', - 'Triple bond stabilization energy', 'Lower Taper-radius', - 'Upper Taper-radius', 'reaxff2_pen2', 'reaxff0_val7', - 'reaxff0_lp1', 'reaxff0_val9', 'reaxff0_val10', - 'Not used 2', 'reaxff0_pen2', 'reaxff0_pen3', - 'reaxff0_pen4', 'Not used 3', 'reaxff0_tor2', - 'reaxff0_tor3', 'reaxff0_tor4', 'Not used 4', - 'reaxff0_cot2', 'reaxff0_vdw1', 'bond order cutoff', - 'reaxff3_coa4', 'reaxff0_ovun4', - 'reaxff0_ovun3', 'reaxff0_val8', 'Not used 5', - 'Not used 6', 'Not used 7', 'Not used 8', 'reaxff3_coa3' -) +KEYS_GLOBAL = ('reaxff0_boc1', 'reaxff0_boc2', 'reaxff3_coa2', 'Triple bond stabilisation 1', + 'Triple bond stabilisation 2', 'C2-correction', 'reaxff0_ovun6', 'Triple bond stabilisation', + 'reaxff0_ovun7', 'reaxff0_ovun8', 'Triple bond stabilization energy', 'Lower Taper-radius', + 'Upper Taper-radius', 'reaxff2_pen2', 'reaxff0_val7', 'reaxff0_lp1', 'reaxff0_val9', 'reaxff0_val10', + 'Not used 2', 'reaxff0_pen2', 'reaxff0_pen3', 'reaxff0_pen4', 'Not used 3', 'reaxff0_tor2', + 'reaxff0_tor3', 'reaxff0_tor4', 'Not used 4', 'reaxff0_cot2', 'reaxff0_vdw1', 'bond order cutoff', + 'reaxff3_coa4', 'reaxff0_ovun4', 'reaxff0_ovun3', 'reaxff0_val8', 'Not used 5', 'Not used 6', + 'Not used 7', 'Not used 8', 'reaxff3_coa3') # TODO some variables lammps sets as global are actually species dependant in GULP, how to handle these? 
-KEYS_1BODY = ( - 'reaxff1_radii1', 'reaxff1_valence1', 'mass', - 'reaxff1_morse3', 'reaxff1_morse2', 'reaxff_gamma', 'reaxff1_radii2', - 'reaxff1_valence3', 'reaxff1_morse1', 'reaxff1_morse4', 'reaxff1_valence4', - 'reaxff1_under', 'dummy1', 'reaxff_chi', 'reaxff_mu', 'dummy2', - 'reaxff1_radii3', 'reaxff1_lonepair2', 'dummy3', 'reaxff1_over2', - 'reaxff1_over1', 'reaxff1_over3', 'dummy4', 'dummy5', 'reaxff1_over4', - 'reaxff1_angle1', 'dummy11', 'reaxff1_valence2', 'reaxff1_angle2', - 'dummy6', 'dummy7', 'dummy8' -) - -KEYS_2BODY_BONDS = ( - 'reaxff2_bond1', 'reaxff2_bond2', 'reaxff2_bond3', - 'reaxff2_bond4', 'reaxff2_bo5', 'reaxff2_bo7', 'reaxff2_bo6', - 'reaxff2_over', 'reaxff2_bond5', 'reaxff2_bo3', 'reaxff2_bo4', 'dummy1', - 'reaxff2_bo1', 'reaxff2_bo2', 'reaxff2_bo8', 'reaxff2_pen1' -) +KEYS_1BODY = ('reaxff1_radii1', 'reaxff1_valence1', 'mass', 'reaxff1_morse3', 'reaxff1_morse2', 'reaxff_gamma', + 'reaxff1_radii2', 'reaxff1_valence3', 'reaxff1_morse1', 'reaxff1_morse4', 'reaxff1_valence4', + 'reaxff1_under', 'dummy1', 'reaxff_chi', 'reaxff_mu', 'dummy2', 'reaxff1_radii3', 'reaxff1_lonepair2', + 'dummy3', 'reaxff1_over2', 'reaxff1_over1', 'reaxff1_over3', 'dummy4', 'dummy5', 'reaxff1_over4', + 'reaxff1_angle1', 'dummy11', 'reaxff1_valence2', 'reaxff1_angle2', 'dummy6', 'dummy7', 'dummy8') + +KEYS_2BODY_BONDS = ('reaxff2_bond1', 'reaxff2_bond2', 'reaxff2_bond3', 'reaxff2_bond4', 'reaxff2_bo5', 'reaxff2_bo7', + 'reaxff2_bo6', 'reaxff2_over', 'reaxff2_bond5', 'reaxff2_bo3', 'reaxff2_bo4', 'dummy1', + 'reaxff2_bo1', 'reaxff2_bo2', 'reaxff2_bo8', 'reaxff2_pen1') KEYS_2BODY_OFFDIAG = [ - 'reaxff2_morse1', 'reaxff2_morse3', 'reaxff2_morse2', - 'reaxff2_morse4', 'reaxff2_morse5', 'reaxff2_morse6' + 'reaxff2_morse1', 'reaxff2_morse3', 'reaxff2_morse2', 'reaxff2_morse4', 'reaxff2_morse5', 'reaxff2_morse6' ] -KEYS_3BODY_ANGLES = ( - 'reaxff3_angle1', 'reaxff3_angle2', - 'reaxff3_angle3', 'reaxff3_coa1', 'reaxff3_angle5', 'reaxff3_penalty', - 'reaxff3_angle4' -) 
+KEYS_3BODY_ANGLES = ('reaxff3_angle1', 'reaxff3_angle2', 'reaxff3_angle3', 'reaxff3_coa1', 'reaxff3_angle5', + 'reaxff3_penalty', 'reaxff3_angle4') -KEYS_3BODY_HBOND = ( - 'reaxff3_hbond1', 'reaxff3_hbond2', - 'reaxff3_hbond3', 'reaxff3_hbond4' -) +KEYS_3BODY_HBOND = ('reaxff3_hbond1', 'reaxff3_hbond2', 'reaxff3_hbond3', 'reaxff3_hbond4') -KEYS_4BODY_TORSION = ( - 'reaxff4_torsion1', 'reaxff4_torsion2', - 'reaxff4_torsion3', 'reaxff4_torsion4', 'reaxff4_torsion5', 'dummy1', - 'dummy2' -) +KEYS_4BODY_TORSION = ('reaxff4_torsion1', 'reaxff4_torsion2', 'reaxff4_torsion3', 'reaxff4_torsion4', + 'reaxff4_torsion5', 'dummy1', 'dummy2') DEFAULT_TOLERANCES = { - "anglemin": 0.001, - "angleprod": 0.001, # Hard coded to 0.001 in original code. - "hbondmin": 0.01, # Hard coded to 0.01 in original code. - "hbonddist": 7.5, # Hard coded to 7.5 Ang in original code. - "torsionprod": 0.00001 + 'anglemin': 0.001, + 'angleprod': 0.001, # Hard coded to 0.001 in original code. + 'hbondmin': 0.01, # Hard coded to 0.01 in original code. + 'hbonddist': 7.5, # Hard coded to 7.5 Ang in original code. 
+ 'torsionprod': 0.00001 } + # NOTE: torsionprod needs to be lower (0.001), to get comparable energy to lammps, # but then won't optimize (reaches maximum steps) @@ -79,13 +68,13 @@ def read_lammps_format(lines): """ read a reaxff file, in lammps format, to a standardised potential dictionary """ output = { - "description": lines[0], - "global": {}, - "species": ["X core"], # X is always first - "1body": {}, - "2body": {}, - "3body": {}, - "4body": {} + 'description': lines[0], + 'global': {}, + 'species': ['X core'], # X is always first + '1body': {}, + '2body': {}, + '3body': {}, + '4body': {} } lineno = 1 @@ -96,7 +85,7 @@ def read_lammps_format(lines): for key in KEYS_GLOBAL: lineno += 1 - output["global"][key] = float(lines[lineno].split()[0]) + output['global'][key] = float(lines[lineno].split()[0]) output['global']['reaxff2_pen3'] = 1.0 # this is not provided by lammps, but is used by GULP @@ -108,26 +97,25 @@ def read_lammps_format(lines): for i in range(num_species): lineno += 1 symbol, values = lines[lineno].split(None, 1) - if symbol == "X": + if symbol == 'X': species_idx = 0 # the X symbol is always assigned index 0 else: species_idx = idx idx += 1 - output["species"].append(symbol + " core") + output['species'].append(symbol + ' core') values = split_numbers(values) for _ in range(3): lineno += 1 values.extend(split_numbers(lines[lineno])) if len(values) != len(KEYS_1BODY): - raise Exception( - 'number of values different than expected for species {0}, ' - '{1} != {2}'.format(symbol, len(values), len(KEYS_1BODY))) + raise Exception('number of values different than expected for species {0}, ' + '{1} != {2}'.format(symbol, len(values), len(KEYS_1BODY))) key_map = {k: v for k, v in zip(KEYS_1BODY, values)} - key_map['reaxff1_lonepair1'] = 0.5 * (key_map["reaxff1_valence3"] - key_map["reaxff1_valence1"]) + key_map['reaxff1_lonepair1'] = 0.5 * (key_map['reaxff1_valence3'] - key_map['reaxff1_valence1']) - output["1body"][str(species_idx)] = key_map + 
output['1body'][str(species_idx)] = key_map # two-body bond parameters lineno += 1 @@ -137,15 +125,14 @@ def read_lammps_format(lines): values = split_numbers(lines[lineno]) + split_numbers(lines[lineno + 1]) species_idx1 = int(values.pop(0)) species_idx2 = int(values.pop(0)) - key_name = "{}-{}".format(species_idx1, species_idx2) + key_name = '{}-{}'.format(species_idx1, species_idx2) lineno += 2 if len(values) != len(KEYS_2BODY_BONDS): - raise Exception( - 'number of bond values different than expected for key {0}, ' - '{1} != {2}'.format(key_name, len(values), len(KEYS_2BODY_BONDS))) + raise Exception('number of bond values different than expected for key {0}, ' + '{1} != {2}'.format(key_name, len(values), len(KEYS_2BODY_BONDS))) - output["2body"][key_name] = {k: v for k, v in zip(KEYS_2BODY_BONDS, values)} + output['2body'][key_name] = {k: v for k, v in zip(KEYS_2BODY_BONDS, values)} # two-body off-diagonal parameters num_lines = int(lines[lineno].split()[0]) @@ -154,15 +141,14 @@ def read_lammps_format(lines): values = split_numbers(lines[lineno]) species_idx1 = int(values.pop(0)) species_idx2 = int(values.pop(0)) - key_name = "{}-{}".format(species_idx1, species_idx2) + key_name = '{}-{}'.format(species_idx1, species_idx2) lineno += 1 if len(values) != len(KEYS_2BODY_OFFDIAG): - raise Exception( - 'number of off-diagonal values different than expected for key {0} (line {1}), ' - '{2} != {3}'.format(key_name, lineno-1, len(values), len(KEYS_2BODY_OFFDIAG))) + raise Exception('number of off-diagonal values different than expected for key {0} (line {1}), ' + '{2} != {3}'.format(key_name, lineno - 1, len(values), len(KEYS_2BODY_OFFDIAG))) - output["2body"].setdefault(key_name, {}).update({k: v for k, v in zip(KEYS_2BODY_OFFDIAG, values)}) + output['2body'].setdefault(key_name, {}).update({k: v for k, v in zip(KEYS_2BODY_OFFDIAG, values)}) # three-body angle parameters num_lines = int(lines[lineno].split()[0]) @@ -172,15 +158,14 @@ def read_lammps_format(lines): 
species_idx1 = int(values.pop(0)) species_idx2 = int(values.pop(0)) species_idx3 = int(values.pop(0)) - key_name = "{}-{}-{}".format(species_idx1, species_idx2, species_idx3) + key_name = '{}-{}-{}'.format(species_idx1, species_idx2, species_idx3) lineno += 1 if len(values) != len(KEYS_3BODY_ANGLES): - raise Exception( - 'number of angle values different than expected for key {0} (line {1}), ' - '{2} != {3}'.format(key_name, lineno-1, len(values), len(KEYS_3BODY_ANGLES))) + raise Exception('number of angle values different than expected for key {0} (line {1}), ' + '{2} != {3}'.format(key_name, lineno - 1, len(values), len(KEYS_3BODY_ANGLES))) - output["3body"].setdefault(key_name, {}).update({k: v for k, v in zip(KEYS_3BODY_ANGLES, values)}) + output['3body'].setdefault(key_name, {}).update({k: v for k, v in zip(KEYS_3BODY_ANGLES, values)}) # four-body torsion parameters num_lines = int(lines[lineno].split()[0]) @@ -191,15 +176,14 @@ def read_lammps_format(lines): species_idx2 = int(values.pop(0)) species_idx3 = int(values.pop(0)) species_idx4 = int(values.pop(0)) - key_name = "{}-{}-{}-{}".format(species_idx1, species_idx2, species_idx3, species_idx4) + key_name = '{}-{}-{}-{}'.format(species_idx1, species_idx2, species_idx3, species_idx4) lineno += 1 if len(values) != len(KEYS_4BODY_TORSION): - raise Exception( - 'number of torsion values different than expected for key {0} (line {1}), ' - '{2} != {3}'.format(key_name, lineno-1, len(values), len(KEYS_4BODY_TORSION))) + raise Exception('number of torsion values different than expected for key {0} (line {1}), ' + '{2} != {3}'.format(key_name, lineno - 1, len(values), len(KEYS_4BODY_TORSION))) - output["4body"].setdefault(key_name, {}).update({k: v for k, v in zip(KEYS_4BODY_TORSION, values)}) + output['4body'].setdefault(key_name, {}).update({k: v for k, v in zip(KEYS_4BODY_TORSION, values)}) # three-body h-bond parameters num_lines = int(lines[lineno].split()[0]) @@ -209,139 +193,126 @@ def 
read_lammps_format(lines): species_idx1 = int(values.pop(0)) species_idx2 = int(values.pop(0)) species_idx3 = int(values.pop(0)) - key_name = "{}-{}-{}".format(species_idx1, species_idx2, species_idx3) + key_name = '{}-{}-{}'.format(species_idx1, species_idx2, species_idx3) lineno += 1 if len(values) != len(KEYS_3BODY_HBOND): - raise Exception( - 'number of h-bond values different than expected for key {0} (line {1}), ' - '{2} != {3}'.format(key_name, lineno-1, len(values), len(KEYS_3BODY_HBOND))) + raise Exception('number of h-bond values different than expected for key {0} (line {1}), ' + '{2} != {3}'.format(key_name, lineno - 1, len(values), len(KEYS_3BODY_HBOND))) - output["3body"].setdefault(key_name, {}).update({k: v for k, v in zip(KEYS_3BODY_HBOND, values)}) + output['3body'].setdefault(key_name, {}).update({k: v for k, v in zip(KEYS_3BODY_HBOND, values)}) return output def format_lammps_value(value): - return "{:.4f}".format(value) + return '{:.4f}'.format(value) def write_lammps_format(data): """ write a reaxff file, in lammps format, from a standardised potential dictionary """ # validate dictionary - validate_against_schema(data, "potential.reaxff.schema.json") + validate_against_schema(data, 'potential.reaxff.schema.json') - output = [ - data["description"] - ] + output = [data['description']] # Global parameters - output.append("{} ! Number of general parameters".format(len(KEYS_GLOBAL))) + output.append('{} ! Number of general parameters'.format(len(KEYS_GLOBAL))) for key in KEYS_GLOBAL: - output.append("{0:.4f} ! {1}".format(data["global"][key], key)) + output.append('{0:.4f} ! {1}'.format(data['global'][key], key)) # one-body parameters output.extend([ - '{0} ! Nr of atoms; cov.r; valency;a.m;Rvdw;Evdw;gammaEEM;cov.r2;#'.format(len(data["species"])), - 'alfa;gammavdW;valency;Eunder;Eover;chiEEM;etaEEM;n.u.', - 'cov r3;Elp;Heat inc.;n.u.;n.u.;n.u.;n.u.', + '{0} ! 
Nr of atoms; cov.r; valency;a.m;Rvdw;Evdw;gammaEEM;cov.r2;#'.format(len(data['species'])), + 'alfa;gammavdW;valency;Eunder;Eover;chiEEM;etaEEM;n.u.', 'cov r3;Elp;Heat inc.;n.u.;n.u.;n.u.;n.u.', 'ov/un;val1;n.u.;val3,vval4' ]) - for i, species in enumerate(data["species"]): - if species.endswith("shell"): - raise ValueError("only core species can be used for reaxff, not shell: {}".format(species)) + for i, species in enumerate(data['species']): + if species.endswith('shell'): + raise ValueError('only core species can be used for reaxff, not shell: {}'.format(species)) species = species[:-5] output.extend([ - species + " " + " ".join([format_lammps_value(data["1body"][str(i)][k]) for k in KEYS_1BODY[:8]]), - " ".join([format_lammps_value(data["1body"][str(i)][k]) for k in KEYS_1BODY[8:16]]), - " ".join([format_lammps_value(data["1body"][str(i)][k]) for k in KEYS_1BODY[16:24]]), - " ".join([format_lammps_value(data["1body"][str(i)][k]) for k in KEYS_1BODY[24:32]]) + species + ' ' + ' '.join([format_lammps_value(data['1body'][str(i)][k]) for k in KEYS_1BODY[:8]]), + ' '.join([format_lammps_value(data['1body'][str(i)][k]) for k in KEYS_1BODY[8:16]]), ' '.join([ + format_lammps_value(data['1body'][str(i)][k]) for k in KEYS_1BODY[16:24] + ]), ' '.join([format_lammps_value(data['1body'][str(i)][k]) for k in KEYS_1BODY[24:32]]) ]) # two-body angle parameters suboutout = [] - for key in sorted(data["2body"]): - subdata = data["2body"][key] + for key in sorted(data['2body']): + subdata = data['2body'][key] if not set(subdata.keys()).issuperset(KEYS_2BODY_BONDS): continue suboutout.extend([ - " ".join(key.split(INDEX_SEP)) + " " + " ".join( + ' '.join(key.split(INDEX_SEP)) + ' ' + ' '.join( [format_lammps_value(subdata[k]) for k in KEYS_2BODY_BONDS[:8]]), - " ".join([format_lammps_value(subdata[k]) for k in KEYS_2BODY_BONDS[8:16]]) + ' '.join([format_lammps_value(subdata[k]) for k in KEYS_2BODY_BONDS[8:16]]) ]) output.extend([ - '{0} ! 
Nr of bonds; Edis1;LPpen;n.u.;pbe1;pbo5;13corr;pbo6'.format(int(len(suboutout)/2)), + '{0} ! Nr of bonds; Edis1;LPpen;n.u.;pbe1;pbo5;13corr;pbo6'.format(int(len(suboutout) / 2)), 'pbe2;pbo3;pbo4;n.u.;pbo1;pbo2;ovcorr' ] + suboutout) # two-body off-diagonal parameters suboutout = [] - for key in sorted(data["2body"]): - subdata = data["2body"][key] + for key in sorted(data['2body']): + subdata = data['2body'][key] if not set(subdata.keys()).issuperset(KEYS_2BODY_OFFDIAG): continue suboutout.extend([ - " ".join(key.split(INDEX_SEP)) + " " + " ".join( + ' '.join(key.split(INDEX_SEP)) + ' ' + ' '.join( [format_lammps_value(subdata[k]) for k in KEYS_2BODY_OFFDIAG]), ]) - output.extend([ - '{0} ! Nr of off-diagonal terms; Ediss;Ro;gamma;rsigma;rpi;rpi2'.format(len(suboutout)) - ] + suboutout) + output.extend(['{0} ! Nr of off-diagonal terms; Ediss;Ro;gamma;rsigma;rpi;rpi2'.format(len(suboutout))] + suboutout) # three-body angle parameters suboutout = [] - for key in sorted(data["3body"]): - subdata = data["3body"][key] + for key in sorted(data['3body']): + subdata = data['3body'][key] if not set(subdata.keys()).issuperset(KEYS_3BODY_ANGLES): continue suboutout.extend([ - " ".join(key.split(INDEX_SEP)) + " " + " ".join( + ' '.join(key.split(INDEX_SEP)) + ' ' + ' '.join( [format_lammps_value(subdata[k]) for k in KEYS_3BODY_ANGLES]), ]) - output.extend([ - '{0} ! Nr of angles;at1;at2;at3;Thetao,o;ka;kb;pv1;pv2'.format(len(suboutout)) - ] + suboutout) + output.extend(['{0} ! 
Nr of angles;at1;at2;at3;Thetao,o;ka;kb;pv1;pv2'.format(len(suboutout))] + suboutout) # four-body torsion parameters suboutout = [] - for key in sorted(data["4body"]): - subdata = data["4body"][key] + for key in sorted(data['4body']): + subdata = data['4body'][key] if not set(subdata.keys()).issuperset(KEYS_4BODY_TORSION): continue suboutout.extend([ - " ".join(key.split(INDEX_SEP)) + " " + " ".join( + ' '.join(key.split(INDEX_SEP)) + ' ' + ' '.join( [format_lammps_value(subdata[k]) for k in KEYS_4BODY_TORSION]), ]) - output.extend([ - '{0} ! Nr of torsions;at1;at2;at3;at4;;V1;V2;V3;V2(BO);vconj;n.u;n'.format(len(suboutout)) - ] + suboutout) + output.extend(['{0} ! Nr of torsions;at1;at2;at3;at4;;V1;V2;V3;V2(BO);vconj;n.u;n'.format(len(suboutout))] + + suboutout) # three-body h-bond parameters suboutout = [] - for key in sorted(data["3body"]): - subdata = data["3body"][key] + for key in sorted(data['3body']): + subdata = data['3body'][key] if not set(subdata.keys()).issuperset(KEYS_3BODY_HBOND): continue suboutout.extend([ - " ".join(key.split(INDEX_SEP)) + " " + " ".join( - [format_lammps_value(subdata[k]) for k in KEYS_3BODY_HBOND]), + ' '.join(key.split(INDEX_SEP)) + ' ' + ' '.join([format_lammps_value(subdata[k]) for k in KEYS_3BODY_HBOND]) ]) - output.extend([ - '{0} ! Nr of hydrogen bonds;at1;at2;at3;Rhb;Dehb;vhb1'.format(len(suboutout)) - ] + suboutout) + output.extend(['{0} ! 
Nr of hydrogen bonds;at1;at2;at3;Rhb;Dehb;vhb1'.format(len(suboutout))] + suboutout) - output.append("") + output.append('') - return "\n".join(output) + return '\n'.join(output) -def write_gulp_format(data, fitting_data=None, - global_val_fmt="{:.5E}", species_val_fmt="{:.5E}"): +def write_gulp_format(data, fitting_data=None, global_val_fmt='{:.5E}', species_val_fmt='{:.5E}'): """ write a reaxff file, in GULP format, from a standardised potential dictionary NOTE: GULP only read a line up to ~80 characters, @@ -351,15 +322,15 @@ def write_gulp_format(data, fitting_data=None, energies should be supplied in kcal (the default of the lammps file format) """ # validate dictionary - validate_against_schema(data, "potential.reaxff.schema.json") + validate_against_schema(data, 'potential.reaxff.schema.json') if fitting_data is not None: - validate_against_schema(fitting_data, "fitting.reaxff.schema.json") + validate_against_schema(fitting_data, 'fitting.reaxff.schema.json') - for species in data["species"]: - if species.endswith("shell"): + for species in data['species']: + if species.endswith('shell'): # TODO is this true? 
- raise ValueError("only core species can be used for reaxff, not shell: {}".format(species)) + raise ValueError('only core species can be used for reaxff, not shell: {}'.format(species)) species = species[:-5] total_flags = 0 # total number of variables with a flag @@ -377,62 +348,56 @@ def write_gulp_format(data, fitting_data=None, '#', '# Parameters description:', '#', - '# {}'.format(data["description"]), + '# {}'.format(data['description']), '#', '# Cutoffs for VDW & Coulomb terms', '#', - 'reaxFFvdwcutoff {:14.6E}'.format(data["global"]['Upper Taper-radius']), - 'reaxFFqcutoff {:14.6E}'.format(data["global"]['Upper Taper-radius']), + 'reaxFFvdwcutoff {:14.6E}'.format(data['global']['Upper Taper-radius']), + 'reaxFFqcutoff {:14.6E}'.format(data['global']['Upper Taper-radius']), '#', '# Bond order threshold - check anglemin as this is cutof2 given in control file', '#', 'reaxFFtol {:.6E} {:.6E} {:.6E} &'.format( - data["global"]['bond order cutoff'] * 0.01, - *[data["global"].get(k, DEFAULT_TOLERANCES[k]) - for k in "anglemin angleprod".split()] - ), + data['global']['bond order cutoff'] * 0.01, + *[data['global'].get(k, DEFAULT_TOLERANCES[k]) for k in 'anglemin angleprod'.split()]), ' {:.6E} {:.6E} {:.6E}'.format( - *[data["global"].get(k, DEFAULT_TOLERANCES[k]) - for k in "hbondmin hbonddist torsionprod".split()] - ), + *[data['global'].get(k, DEFAULT_TOLERANCES[k]) for k in 'hbondmin hbonddist torsionprod'.split()]), '#', ] # global parameters - output.append("# Species independent parameters") - output.append("#") - - fields = OrderedDict([ - ("reaxff0_bond", ['reaxff0_boc1', 'reaxff0_boc2']), - ("reaxff0_over", ['reaxff0_ovun3', 'reaxff0_ovun4', 'reaxff0_ovun6', 'reaxff0_ovun7', 'reaxff0_ovun8']), - ("reaxff0_valence", ['reaxff0_val7', 'reaxff0_val8', 'reaxff0_val9', 'reaxff0_val10']), - ("reaxff0_penalty", ['reaxff0_pen2', 'reaxff0_pen3', 'reaxff0_pen4']), - ("reaxff0_torsion", ['reaxff0_tor2', 'reaxff0_tor3', 'reaxff0_tor4', 'reaxff0_cot2']), - 
("reaxff0_vdw", ['reaxff0_vdw1']), - ("reaxff0_lonepair", ['reaxff0_lp1']) - ]) + output.append('# Species independent parameters') + output.append('#') + + fields = OrderedDict([('reaxff0_bond', ['reaxff0_boc1', 'reaxff0_boc2']), + ('reaxff0_over', + ['reaxff0_ovun3', 'reaxff0_ovun4', 'reaxff0_ovun6', 'reaxff0_ovun7', 'reaxff0_ovun8']), + ('reaxff0_valence', ['reaxff0_val7', 'reaxff0_val8', 'reaxff0_val9', 'reaxff0_val10']), + ('reaxff0_penalty', ['reaxff0_pen2', 'reaxff0_pen3', 'reaxff0_pen4']), + ('reaxff0_torsion', ['reaxff0_tor2', 'reaxff0_tor3', 'reaxff0_tor4', 'reaxff0_cot2']), + ('reaxff0_vdw', ['reaxff0_vdw1']), ('reaxff0_lonepair', ['reaxff0_lp1'])]) for field, variables in fields.items(): total_flags += len(variables) - string = "{:17}".format(field) + " ".join([global_val_fmt.format(data["global"][v]) for v in variables]) + string = '{:17}'.format(field) + ' '.join([global_val_fmt.format(data['global'][v]) for v in variables]) if fitting_data is not None: - fitting_flags += sum([1 if v in fitting_data.get("global", []) else 0 for v in variables]) - string += " " + " ".join(["1" if v in fitting_data.get("global", []) else "0" for v in variables]) + fitting_flags += sum([1 if v in fitting_data.get('global', []) else 0 for v in variables]) + string += ' ' + ' '.join(['1' if v in fitting_data.get('global', []) else '0' for v in variables]) lines = textwrap.wrap(string, 78) if len(lines) > 2: - raise IOError("the line cannot be coerced to fit within the 80 character limit: {}".format(string)) + raise IOError('the line cannot be coerced to fit within the 80 character limit: {}'.format(string)) elif len(lines) > 1: - output.append("{} &".format(lines[0])) - output.append(" {}".format(lines[1])) + output.append('{} &'.format(lines[0])) + output.append(' {}'.format(lines[1])) else: output.append(string) # one-body parameters - output.append("#") - output.append("# One-Body Parameters") - output.append("#") + output.append('#') + output.append('# One-Body 
Parameters') + output.append('#') fields = { 'reaxff1_radii': ['reaxff1_radii1', 'reaxff1_radii2', 'reaxff1_radii3'], @@ -447,35 +412,26 @@ def write_gulp_format(data, fitting_data=None, 'reaxff_gamma': ['reaxff_gamma'] } - arguments = { - 'reaxff1_under': ['kcal'], - 'reaxff1_lonepair': ['kcal'], - 'reaxff1_morse': ['kcal'] - } + arguments = {'reaxff1_under': ['kcal'], 'reaxff1_lonepair': ['kcal'], 'reaxff1_morse': ['kcal']} - field_lines, num_vars, num_fit = create_gulp_fields(data, "1body", fields, species_val_fmt, - arguments=arguments, fitting_data=fitting_data) + field_lines, num_vars, num_fit = create_gulp_fields( + data, '1body', fields, species_val_fmt, arguments=arguments, fitting_data=fitting_data) total_flags += num_vars fitting_flags += num_fit output.extend(field_lines) # two-body bond parameters - output.append("#") - output.append("# Two-Body Parameters") - output.append("#") + output.append('#') + output.append('# Two-Body Parameters') + output.append('#') fields = { - 'reaxff2_bo': [ - 'reaxff2_bo1', 'reaxff2_bo2', 'reaxff2_bo3', - 'reaxff2_bo4', 'reaxff2_bo5', 'reaxff2_bo6'], - 'reaxff2_bond': [ - 'reaxff2_bond1', 'reaxff2_bond2', 'reaxff2_bond3', - 'reaxff2_bond4', 'reaxff2_bond5'], + 'reaxff2_bo': ['reaxff2_bo1', 'reaxff2_bo2', 'reaxff2_bo3', 'reaxff2_bo4', 'reaxff2_bo5', 'reaxff2_bo6'], + 'reaxff2_bond': ['reaxff2_bond1', 'reaxff2_bond2', 'reaxff2_bond3', 'reaxff2_bond4', 'reaxff2_bond5'], 'reaxff2_over': ['reaxff2_over'], 'reaxff2_pen': ['reaxff2_pen1', 'global.reaxff2_pen2', 'global.reaxff2_pen'], - 'reaxff2_morse': [ - 'reaxff2_morse1', 'reaxff2_morse2', 'reaxff2_morse3', - 'reaxff2_morse4', 'reaxff2_morse5', 'reaxff2_morse6'] + 'reaxff2_morse': + ['reaxff2_morse1', 'reaxff2_morse2', 'reaxff2_morse3', 'reaxff2_morse4', 'reaxff2_morse5', 'reaxff2_morse6'] } def reaxff2_bo_args(bodata): @@ -498,28 +454,25 @@ def reaxff2_bo_args(bodata): conditions = {'reaxff2_pen': lambda s: s['reaxff2_pen1'] > 0.0} - field_lines, num_vars, num_fit = 
create_gulp_fields(data, "2body", fields, species_val_fmt, - conditions, arguments=arguments, fitting_data=fitting_data) + field_lines, num_vars, num_fit = create_gulp_fields( + data, '2body', fields, species_val_fmt, conditions, arguments=arguments, fitting_data=fitting_data) total_flags += num_vars fitting_flags += num_fit output.extend(field_lines) # three-body parameters - output.append("#") - output.append("# Three-Body Parameters") - output.append("#") + output.append('#') + output.append('# Three-Body Parameters') + output.append('#') fields = { - 'reaxff3_angle': [ - 'reaxff3_angle1', 'reaxff3_angle2', 'reaxff3_angle3', - 'reaxff3_angle4', 'reaxff3_angle5', 'reaxff3_angle6'], + 'reaxff3_angle': + ['reaxff3_angle1', 'reaxff3_angle2', 'reaxff3_angle3', 'reaxff3_angle4', 'reaxff3_angle5', 'reaxff3_angle6'], # TODO reaxff3_angle6 is taken from a global value, if not present, # need to find out what this value is, so it can be set in the input data 'reaxff3_penalty': ['reaxff3_penalty'], 'reaxff3_conjugation': ['reaxff3_coa1', 'global.reaxff3_coa2', 'global.reaxff3_coa3', 'global.reaxff3_coa4'], - 'reaxff3_hbond': [ - 'reaxff3_hbond1', 'reaxff3_hbond2', - 'reaxff3_hbond3', 'reaxff3_hbond4'] + 'reaxff3_hbond': ['reaxff3_hbond1', 'reaxff3_hbond2', 'reaxff3_hbond3', 'reaxff3_hbond4'] } arguments = { @@ -535,8 +488,8 @@ def reaxff2_bo_args(bodata): 'reaxff3_conjugation': lambda s: abs(s['reaxff3_coa1']) > 1.0E-4 } - field_lines, num_vars, num_fit = create_gulp_fields(data, "3body", fields, species_val_fmt, - conditions, arguments=arguments, fitting_data=fitting_data) + field_lines, num_vars, num_fit = create_gulp_fields( + data, '3body', fields, species_val_fmt, conditions, arguments=arguments, fitting_data=fitting_data) total_flags += num_vars fitting_flags += num_fit output.extend(field_lines) @@ -544,31 +497,29 @@ def reaxff2_bo_args(bodata): # four-body parameters # TODO there seems to be an issue when flagging more than one torsion variable for fitting # the 
dump file just shows the first flagged variable as 1, then subsequent as 0 - output.append("#") - output.append("# Four-Body Parameters") - output.append("#") + output.append('#') + output.append('# Four-Body Parameters') + output.append('#') fields = { - 'reaxff4_torsion': [ - 'reaxff4_torsion1', 'reaxff4_torsion2', 'reaxff4_torsion3', - 'reaxff4_torsion4', 'reaxff4_torsion5'], + 'reaxff4_torsion': + ['reaxff4_torsion1', 'reaxff4_torsion2', 'reaxff4_torsion3', 'reaxff4_torsion4', 'reaxff4_torsion5'], } arguments = {'reaxff4_torsion': ['kcal']} - field_lines, num_vars, num_fit = create_gulp_fields(data, "4body", fields, species_val_fmt, - arguments=arguments, fitting_data=fitting_data) + field_lines, num_vars, num_fit = create_gulp_fields( + data, '4body', fields, species_val_fmt, arguments=arguments, fitting_data=fitting_data) total_flags += num_vars fitting_flags += num_fit output.extend(field_lines) - output.append("") + output.append('') - return "\n".join(output), total_flags, fitting_flags + return '\n'.join(output), total_flags, fitting_flags -def create_gulp_fields(data, data_type, fields, species_val_fmt, - conditions=None, arguments=None, fitting_data=None): +def create_gulp_fields(data, data_type, fields, species_val_fmt, conditions=None, arguments=None, fitting_data=None): """ create a subsection of the gulp output file""" if conditions is None: conditions = {} @@ -584,8 +535,8 @@ def create_gulp_fields(data, data_type, fields, species_val_fmt, subdata = {} for indices in sorted(data[data_type]): num_of_variable += len(keys) - if not set(data[data_type][indices].keys()).issuperset( - [k for k in keys if not k.startswith("global.") and k != 'reaxff3_angle6']): + local_keys = [k for k in keys if not k.startswith('global.') and k != 'reaxff3_angle6'] + if not set(data[data_type][indices].keys()).issuperset(local_keys): continue if field in conditions: try: @@ -594,39 +545,40 @@ def create_gulp_fields(data, data_type, fields, species_val_fmt, continue if 
not satisfied: continue - species = ["{:7s}".format(data["species"][int(i)]) for i in indices.split(INDEX_SEP)] + species = ['{:7s}'.format(data['species'][int(i)]) for i in indices.split(INDEX_SEP)] if len(species) == 3: # NOTE Here species1 is the pivot atom of the three-body like term. # This is different to LAMMPS, where the pivot atom is the central one! species = [species[1], species[0], species[2]] - values = [format_gulp_value(data, data_type, indices, k, species_val_fmt) - for k in keys if k != "reaxff3_angle6"] + values = [ + format_gulp_value(data, data_type, indices, k, species_val_fmt) for k in keys if k != 'reaxff3_angle6' + ] if fitting_data is not None: num_fit += sum([1 if v in fitting_data.get(data_type, {}).get(indices, []) else 0 for v in keys]) - values += ["1" if v in fitting_data.get(data_type, {}).get(indices, []) else "0" for v in keys] + values += ['1' if v in fitting_data.get(data_type, {}).get(indices, []) else '0' for v in keys] if field in arguments and isinstance(arguments[field], list): - args = " ".join(arguments[field]) + args = ' '.join(arguments[field]) elif field in arguments: args = arguments[field](data[data_type][indices]) else: - args = "" + args = '' - line = " ".join(species + values) + line = ' '.join(species + values) lines = textwrap.wrap(line, 78) if len(lines) > 2: - raise IOError("the line cannot be coerced to fit within the 80 character limit: {}".format(line)) + raise IOError('the line cannot be coerced to fit within the 80 character limit: {}'.format(line)) elif len(lines) > 1: - subdata.setdefault(args, []).append("{} &".format(lines[0])) - subdata.setdefault(args, []).append(" {}".format(lines[1])) + subdata.setdefault(args, []).append('{} &'.format(lines[0])) + subdata.setdefault(args, []).append(' {}'.format(lines[1])) else: subdata.setdefault(args, []).append(line) for args in sorted(subdata.keys()): - output.append(field + " " + args if args else field) + output.append(field + ' ' + args if args else field) 
output.extend(subdata[args]) return output, num_of_variable, num_fit @@ -635,13 +587,13 @@ def create_gulp_fields(data, data_type, fields, species_val_fmt, def format_gulp_value(data, data_type, indices, key, species_val_fmt): """ some GULP specific conversions """ - if key.startswith("global."): - data_type, key = key.split(".") + if key.startswith('global.'): + data_type, key = key.split('.') value = data[data_type][key] else: value = data[data_type][indices][key] - if key == "reaxff2_bo3": + if key == 'reaxff2_bo3': # If reaxff2_bo3 = 1 needs to be set to 0 for GULP since this is a dummy value value = 0.0 if abs(value - 1) < 1e-12 else value diff --git a/aiida_crystal17/gulp/potentials/reaxff.py b/aiida_crystal17/gulp/potentials/reaxff.py index 9568844..c936a92 100644 --- a/aiida_crystal17/gulp/potentials/reaxff.py +++ b/aiida_crystal17/gulp/potentials/reaxff.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
from aiida_crystal17.gulp.potentials.base import PotentialWriterAbstract, PotentialContent from aiida_crystal17.validation import load_schema from aiida_crystal17.gulp.potentials.raw_reaxff import write_gulp_format @@ -10,15 +25,15 @@ class PotentialWriterReaxff(PotentialWriterAbstract): @classmethod def get_description(cls): - return "ReaxFF potential" + return 'ReaxFF potential' @classmethod def get_schema(cls): - return load_schema("potential.reaxff.schema.json") + return load_schema('potential.reaxff.schema.json') @classmethod def _get_fitting_schema(cls): - return load_schema("fitting.reaxff.schema.json") + return load_schema('fitting.reaxff.schema.json') # pylint: disable=too-many-locals def _make_string(self, data, fitting_data=None): diff --git a/aiida_crystal17/gulp/unit_styles.py b/aiida_crystal17/gulp/unit_styles.py index 81a86e9..1b583e9 100644 --- a/aiida_crystal17/gulp/unit_styles.py +++ b/aiida_crystal17/gulp/unit_styles.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
""" set unit styles that have a compatibility between LAMMPS """ @@ -67,7 +82,6 @@ def get_style_map(style): 'density': 'kilograms/meter^dim', }, 'cgs': { - 'mass': 'grams', 'distance': 'centimeters', 'time': 'seconds', @@ -84,7 +98,6 @@ def get_style_map(style): 'density': 'grams/cm^dim', }, 'electron': { - 'mass': 'amu', 'distance': 'Bohr', 'time': 'femtoseconds', @@ -98,7 +111,6 @@ def get_style_map(style): 'electric_field': 'volts/cm', }, 'micro': { - 'mass': 'picograms', 'distance': 'micrometers', 'time': 'microseconds', @@ -115,7 +127,6 @@ def get_style_map(style): 'density': 'picograms/micrometer^dim', }, 'nano': { - 'mass': 'attograms', 'distance': 'nanometers', 'time': 'nanoseconds', @@ -173,9 +184,9 @@ def get_units_dict(style, quantities): out_dict = {} for quantity in quantities: units = get_style_map(style)[quantity] - if units == "bar": + if units == 'bar': units = 'kbar' if quantity == 'energy': units = 'eV' - out_dict[quantity + "_units"] = units + out_dict[quantity + '_units'] = units return out_dict diff --git a/aiida_crystal17/immigration/create_calcjob.py b/aiida_crystal17/immigration/create_calcjob.py index baa67b1..3b35ff3 100644 --- a/aiida_crystal17/immigration/create_calcjob.py +++ b/aiida_crystal17/immigration/create_calcjob.py @@ -1,4 +1,18 @@ +#!/usr/bin/env python # -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ Immigrate a CalcJob that was not run using AiiDa. 
""" @@ -44,8 +58,7 @@ def immigrate_existing(builder, remote_data, seal=True): # link remote folder to calc_node if not remote_data.is_stored: remote_data.store() - remote_data.add_incoming( - calc_node, link_type=LinkType.CREATE, link_label='remote_folder') + remote_data.add_incoming(calc_node, link_type=LinkType.CREATE, link_label='remote_folder') calc_node.set_remote_workdir(remote_data.get_remote_path()) transport = remote_data.computer.get_transport() diff --git a/aiida_crystal17/immigration/create_inputs.py b/aiida_crystal17/immigration/create_inputs.py index a5a28ea..712dec7 100644 --- a/aiida_crystal17/immigration/create_inputs.py +++ b/aiida_crystal17/immigration/create_inputs.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ module to create inputs from existing CRYSTAL17 runs """ @@ -51,43 +66,35 @@ def populate_builder(remote_data, code=None, metadata=None): remote_files = remote_data.listdir() if in_file_name not in remote_files: - raise IOError( - "The input file '{}' is not contained in the remote_data folder. " - "If it has a different name, change " - "metadata['options]['input_file_name']".format(in_file_name)) + raise IOError("The input file '{}' is not contained in the remote_data folder. " + 'If it has a different name, change ' + "metadata['options]['input_file_name']".format(in_file_name)) if out_file_name not in remote_files: - raise IOError( - "The output file '{}' is not contained in the remote_data folder. 
" - "If it has a different name, change " - "metadata['options]['output_main_file_name']".format(out_file_name)) + raise IOError("The output file '{}' is not contained in the remote_data folder. " + 'If it has a different name, change ' + "metadata['options]['output_main_file_name']".format(out_file_name)) with SandboxFolder() as folder: - remote_data.getfile(in_file_name, - os.path.join(folder.abspath, in_file_name)) + remote_data.getfile(in_file_name, os.path.join(folder.abspath, in_file_name)) with folder.open(in_file_name, mode='r') as handle: param_dict, basis_sets, atom_props = extract_data(handle.read()) - remote_data.getfile(out_file_name, - os.path.join(folder.abspath, out_file_name)) + remote_data.getfile(out_file_name, os.path.join(folder.abspath, out_file_name)) with folder.open(out_file_name, mode='r') as handle: try: data = crystal_stdout.read_crystal_stdout(handle.read()) except IOError as err: - raise OutputParsingError( - "Error in CRYSTAL 17 run output: {}".format(err)) + raise OutputParsingError('Error in CRYSTAL 17 run output: {}'.format(err)) # we retrieve the initial primitive geometry and symmetry - atoms = _create_atoms(data, "initial_geometry") + atoms = _create_atoms(data, 'initial_geometry') # convert fragment (i.e. 
unfixed) to fixed - if "fragment" in atom_props: - frag = atom_props.pop("fragment") - atom_props["fixed"] = [ - i + 1 for i in range(atoms.get_number_of_atoms()) - if i + 1 not in frag - ] + if 'fragment' in atom_props: + frag = atom_props.pop('fragment') + atom_props['fixed'] = [i + 1 for i in range(atoms.get_number_of_atoms()) if i + 1 not in frag] atoms.set_tags(_create_tags(atom_props, atoms)) @@ -95,19 +102,18 @@ def populate_builder(remote_data, code=None, metadata=None): if atom_props: kind_names = structure.get_kind_names() - kinds_dict = {"kind_names": kind_names} + kinds_dict = {'kind_names': kind_names} for key, atom_indexes in atom_props.items(): - kv_map = {kn: i + 1 in atom_indexes - for i, kn in enumerate(structure.get_site_kindnames())} + kv_map = {kn: i + 1 in atom_indexes for i, kn in enumerate(structure.get_site_kindnames())} kinds_dict[key] = [kv_map[kn] for kn in kind_names] kinds = kind_cls(data=kinds_dict) else: kinds = None symmetry = symmetry_cls(data={ - "operations": data["initial_geometry"]["primitive_symmops"], - "basis": "fractional", - "hall_number": None + 'operations': data['initial_geometry']['primitive_symmops'], + 'basis': 'fractional', + 'hall_number': None }) bases = {} @@ -115,10 +121,9 @@ def populate_builder(remote_data, code=None, metadata=None): bfile = tempfile.NamedTemporaryFile(delete=False) try: - with open(bfile.name, "w") as f: + with open(bfile.name, 'w') as f: f.write(bset) - bdata, _ = basis_cls.get_or_create( - bfile.name, use_first=False, store_basis=False) + bdata, _ = basis_cls.get_or_create(bfile.name, use_first=False, store_basis=False) # TODO report if bases created or retrieved finally: os.remove(bfile.name) @@ -126,24 +131,19 @@ def populate_builder(remote_data, code=None, metadata=None): bases[bdata.element] = bdata builder = calc_cls.create_builder( - param_dict, structure, bases, - symmetry=symmetry, kinds=kinds, code=code, metadata=metadata) + param_dict, structure, bases, symmetry=symmetry, 
kinds=kinds, code=code, metadata=metadata) return builder def _create_atoms(data, section): """create ase.Atoms from stdout parsed data""" - cell_data = data[section]["primitive_cell"] + cell_data = data[section]['primitive_cell'] cell_vectors = [] - for n in "a b c".split(): - cell_vectors.append(cell_data["cell_vectors"][n]) - ccoords = cell_data["ccoords"] - atoms = ase.Atoms( - cell=cell_vectors, - pbc=cell_data["pbc"], - symbols=cell_data["symbols"], - positions=ccoords) + for n in 'a b c'.split(): + cell_vectors.append(cell_data['cell_vectors'][n]) + ccoords = cell_data['ccoords'] + atoms = ase.Atoms(cell=cell_vectors, pbc=cell_data['pbc'], symbols=cell_data['symbols'], positions=ccoords) return atoms @@ -156,7 +156,7 @@ def _create_tags(atom_props, atoms): for key, val in atom_props.items(): if i + 1 in val: signature.append(key) - signature = ".".join(signature) + signature = '.'.join(signature) kinds[symbol][signature] = kinds[symbol].get(signature, []) + [i + 1] tags = [] for i, symbol in enumerate(atoms.get_chemical_symbols()): diff --git a/aiida_crystal17/parsers/cry_doss.py b/aiida_crystal17/parsers/cry_doss.py index 0382959..e78fabf 100644 --- a/aiida_crystal17/parsers/cry_doss.py +++ b/aiida_crystal17/parsers/cry_doss.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
""" A parser to read output from a CRYSTAL17 DOSS run """ @@ -18,6 +33,7 @@ class CryDossParser(Parser): """ Parser class for parsing (stdout) output of a standard CRYSTAL17 run """ + def parse(self, **kwargs): """ Parse outputs, store results in database. @@ -27,37 +43,32 @@ def parse(self, **kwargs): except exceptions.NotExistent: return self.exit_codes.ERROR_NO_RETRIEVED_FOLDER - sterr_file = self.node.get_option("scheduler_stderr") + sterr_file = self.node.get_option('scheduler_stderr') if sterr_file in output_folder.list_object_names(): with output_folder.open(sterr_file) as fileobj: pbs_error = parse_pbs_stderr(fileobj) if pbs_error is not None: return self.exit_codes[pbs_error] - output_isovalue_fname = self.node.get_option("output_isovalue_fname") + output_isovalue_fname = self.node.get_option('output_isovalue_fname') if output_isovalue_fname not in output_folder.list_object_names(): return self.exit_codes.ERROR_ISOVALUE_FILE_MISSING - self.logger.info("parsing file: {}".format(output_isovalue_fname)) + self.logger.info('parsing file: {}'.format(output_isovalue_fname)) try: with output_folder.open(output_isovalue_fname) as handle: - data, arrays = parse_crystal_fort25_aiida( - handle, self.__class__.__name__) + data, arrays = parse_crystal_fort25_aiida(handle, self.__class__.__name__) except Exception: traceback.print_exc() return self.exit_codes.ERROR_PARSING_STDOUT - errors = data.get("errors", []) - parser_errors = data.get("parser_errors", []) + errors = data.get('errors', []) + parser_errors = data.get('parser_errors', []) if parser_errors: - self.logger.warning( - "the parser raised the following errors:\n{}".format( - "\n\t".join(parser_errors))) + self.logger.warning('the parser raised the following errors:\n{}'.format('\n\t'.join(parser_errors))) if errors: - self.logger.warning( - "the calculation raised the following errors:\n{}".format( - "\n\t".join(errors))) + self.logger.warning('the calculation raised the following 
errors:\n{}'.format('\n\t'.join(errors))) self.out('results', Dict(dict=data)) if arrays is not None: diff --git a/aiida_crystal17/parsers/cry_fermi.py b/aiida_crystal17/parsers/cry_fermi.py index 44536e2..1357e5a 100644 --- a/aiida_crystal17/parsers/cry_fermi.py +++ b/aiida_crystal17/parsers/cry_fermi.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ A parser to read output from a CRYSTAL17 DOSS run """ @@ -16,6 +31,7 @@ class CryFermiParser(Parser): """ Parser class for parsing (stdout) output of a standard CRYSTAL17 run """ + def parse(self, **kwargs): """ Parse outputs, store results in database. 
@@ -25,18 +41,18 @@ def parse(self, **kwargs): except exceptions.NotExistent: return self.exit_codes.ERROR_NO_RETRIEVED_FOLDER - sterr_file = self.node.get_option("scheduler_stderr") + sterr_file = self.node.get_option('scheduler_stderr') if sterr_file in output_folder.list_object_names(): with output_folder.open(sterr_file) as fileobj: pbs_error = parse_pbs_stderr(fileobj) if pbs_error is not None: return self.exit_codes[pbs_error] - output_main_file_name = self.node.get_option("output_main_file_name") + output_main_file_name = self.node.get_option('output_main_file_name') if output_main_file_name not in output_folder.list_object_names(): return self.exit_codes.ERROR_OUTPUT_FILE_MISSING - self.logger.info("parsing file: {}".format(output_main_file_name)) + self.logger.info('parsing file: {}'.format(output_main_file_name)) try: with output_folder.open(output_main_file_name) as handle: @@ -45,21 +61,17 @@ def parse(self, **kwargs): traceback.print_exc() return self.exit_codes.ERROR_PARSING_STDOUT - errors = data.get("errors", []) - parser_errors = data.get("parser_errors", []) + errors = data.get('errors', []) + parser_errors = data.get('parser_errors', []) if parser_errors: - self.logger.warning( - "the parser raised the following errors:\n{}".format( - "\n\t".join(parser_errors))) + self.logger.warning('the parser raised the following errors:\n{}'.format('\n\t'.join(parser_errors))) if errors: - self.logger.warning( - "the calculation raised the following errors:\n{}".format( - "\n\t".join(errors))) + self.logger.warning('the calculation raised the following errors:\n{}'.format('\n\t'.join(errors))) self.out('results', Dict(dict=data)) - if "fermi_energy" in data: - self.out('fermi_energy', Float(data["fermi_energy"])) + if 'fermi_energy' in data: + self.out('fermi_energy', Float(data['fermi_energy'])) if parser_errors: return self.exit_codes.ERROR_PARSING_STDOUT diff --git a/aiida_crystal17/parsers/cry_main.py b/aiida_crystal17/parsers/cry_main.py index 
b077971..e70b9dc 100644 --- a/aiida_crystal17/parsers/cry_main.py +++ b/aiida_crystal17/parsers/cry_main.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ A parser to read output from a standard CRYSTAL17 run """ @@ -12,7 +27,7 @@ from aiida_crystal17.parsers.raw.main_out import parse_main_out from aiida_crystal17.parsers.raw.pbs import parse_pbs_stderr -from aiida_crystal17.parsers.raw.gui_parse import gui_file_read +from aiida_crystal17.parsers.raw.parse_fort34 import parse_fort34 from aiida_crystal17.symmetry import convert_structure @@ -40,7 +55,7 @@ def parse(self, retrieved_temporary_folder=None, **kwargs): # parser scheduler's stderr scheduler_exit_code = None - sterr_file = self.node.get_option("scheduler_stderr") + sterr_file = self.node.get_option('scheduler_stderr') if sterr_file in self.retrieved.list_object_names(): with self.retrieved.open(sterr_file) as fileobj: pbs_error = parse_pbs_stderr(fileobj) @@ -48,15 +63,14 @@ def parse(self, retrieved_temporary_folder=None, **kwargs): scheduler_exit_code = self.exit_codes[pbs_error] # parse temporary folder - temp_folder_exit_code = self.parse_temporary_folder( - retrieved_temporary_folder) + temp_folder_exit_code = self.parse_temporary_folder(retrieved_temporary_folder) # parse the stdout file - stdout_fname = self.node.get_option("output_main_file_name") + stdout_fname = self.node.get_option('output_main_file_name') if stdout_fname not in self.retrieved.list_object_names(): stdout_exit_code 
= self.exit_codes.ERROR_OUTPUT_FILE_MISSING else: - self.logger.info("parsing stdout file") + self.logger.info('parsing stdout file') stdout_exit_code = self.parse_stdout(stdout_fname) if scheduler_exit_code is not None: @@ -72,23 +86,18 @@ def parse_stdout(self, file_name): """parse the main stdout file """ init_struct = None init_settings = None - if "structure" in self.node.inputs: + if 'structure' in self.node.inputs: init_struct = self.node.inputs.structure - if "symmetry" in self.node.inputs: + if 'symmetry' in self.node.inputs: init_settings = self.node.inputs.symmetry with self.retrieved.open(file_name) as fileobj: parser_result = parse_main_out( - fileobj, - parser_class=self.__class__.__name__, - init_struct=init_struct, - init_settings=init_settings) + fileobj, parser_class=self.__class__.__name__, init_struct=init_struct, init_settings=init_settings) - for etype in ["errors", "parser_errors", "parser_exceptions"]: + for etype in ['errors', 'parser_errors', 'parser_exceptions']: errors = parser_result.nodes.results.get_attribute(etype) if errors: - self.logger.warning( - "the calculation raised the following {0}:\n{1}".format( - etype, "\n\t".join(errors))) + self.logger.warning('the calculation raised the following {0}:\n{1}'.format(etype, '\n\t'.join(errors))) # add output nodes self.out('results', parser_result.nodes.results) @@ -108,37 +117,35 @@ def parse_temporary_folder(self, retrieved_temporary_folder): return self.exit_codes.ERROR_TEMP_FOLDER_MISSING # parse optimisation steps - if "structure" in self.node.inputs: + if 'structure' in self.node.inputs: in_symbols = self.node.inputs.structure.get_ase().get_chemical_symbols() structures = {} - for path in glob.iglob(os.path.join(retrieved_temporary_folder, "opt[ac][0-9][0-9][0-9]")): + for path in glob.iglob(os.path.join(retrieved_temporary_folder, 'opt[ac][0-9][0-9][0-9]')): opt_step = int(path[-3:]) try: with open(path) as handle: - struct_dict, sym = gui_file_read(handle.readlines()) + 
struct_dict, sym = parse_fort34(handle.readlines()) # TODO could also get energy from this file structure = convert_structure(struct_dict, 'aiida') - if "structure" in self.node.inputs: + if 'structure' in self.node.inputs: out_symbols = structure.get_ase().get_chemical_symbols() if out_symbols != in_symbols: - raise AssertionError( - "structure symbols are not compatible: " - "{} != {}".format(out_symbols, in_symbols)) + raise AssertionError('structure symbols are not compatible: ' + '{} != {}'.format(out_symbols, in_symbols)) new_structure = self.node.inputs.structure.clone() new_structure.reset_cell(structure.cell) - new_structure.reset_sites_positions( - [s.position for s in structure.sites]) + new_structure.reset_sites_positions([s.position for s in structure.sites]) structure = new_structure structures[opt_step] = structure except Exception: - self.logger.error("error parsing: {}".format(path)) + self.logger.error('error parsing: {}'.format(path)) traceback.print_exc() return self.exit_codes.ERROR_PARSING_OPTIMISATION_GEOMTRIES if not structures: return None sorted_steps = sorted(structures.keys()) - self.logger.debug("optimisations steps found: {}".format(sorted_steps)) + self.logger.debug('optimisations steps found: {}'.format(sorted_steps)) if sorted_steps != list(range(1, len(sorted_steps) + 1)): # this can occur when a step is rejected # (e.g. 
due to an energy change > 0), so shouldn't be raised as error @@ -147,8 +154,7 @@ def parse_temporary_folder(self, retrieved_temporary_folder): try: traj_data.set_structurelist([structures[s] for s in sorted_steps]) except Exception: - self.logger.error( - "an error occurred setting the optimisation trajectory") + self.logger.error('an error occurred setting the optimisation trajectory') traceback.print_exc() return self.exit_codes.ERROR_PARSING_OPTIMISATION_GEOMTRIES self.out('optimisation', traj_data) diff --git a/aiida_crystal17/parsers/raw/__init__.py b/aiida_crystal17/parsers/raw/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/parsers/raw/__init__.py +++ b/aiida_crystal17/parsers/raw/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. diff --git a/aiida_crystal17/parsers/raw/crystal_fort25.py b/aiida_crystal17/parsers/raw/crystal_fort25.py index a3becc7..f63c81d 100644 --- a/aiida_crystal17/parsers/raw/crystal_fort25.py +++ b/aiida_crystal17/parsers/raw/crystal_fort25.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. import traceback import numpy as np @@ -63,11 +78,11 @@ def parse_crystal_fort25(content): if system_type is None: system_type = line[3] elif not system_type == line[3]: - raise IOError("projection {0} has different system type ({1}) to previous ({2})".format( + raise IOError('projection {0} has different system type ({1}) to previous ({2})'.format( proj_number, line[3], system_type)) - if not line[4:8] == "DOSS": - raise IOError("projection {0} is not of type DOSS".format(proj_number)) + if not line[4:8] == 'DOSS': + raise IOError('projection {0} is not of type DOSS'.format(proj_number)) nrows, ncols, _, denergy, fermi = split_numbers(line[8:]) # nrows, ncols = (int(nrows), int(ncols)) @@ -75,12 +90,12 @@ def parse_crystal_fort25(content): if energy_delta is None: energy_delta = denergy elif not energy_delta == denergy: - raise IOError("projection {0} has different delta energy ({1}) to previous ({2})".format( + raise IOError('projection {0} has different delta energy ({1}) to previous ({2})'.format( proj_number, denergy, energy_delta)) if fermi_energy is None: fermi_energy = fermi elif not fermi_energy == fermi: - raise IOError("projection {0} has different fermi energy ({1}) to previous ({2})".format( + raise IOError('projection {0} has different fermi energy ({1}) to previous ({2})'.format( proj_number, fermi, fermi_energy)) lineno += 1 @@ -91,7 +106,7 @@ def parse_crystal_fort25(content): if initial_energy is None: initial_energy = ienergy elif not initial_energy == ienergy: - raise IOError("projection {0} has different initial energy ({1}) to previous ({2})".format( + raise IOError('projection {0} has different initial energy ({1}) to previous ({2})'.format( proj_number, ienergy, initial_energy)) lineno += 1 
@@ -113,23 +128,23 @@ def parse_crystal_fort25(content): if len_dos is None: len_dos = len(dos) elif not len_dos == len(dos): - raise IOError("projection {0} has different dos value lengths ({1}) to previous ({2})".format( + raise IOError('projection {0} has different dos value lengths ({1}) to previous ({2})'.format( proj_number, len(dos), len_dos)) if projid not in alpha_projections: - alpha_projections[projid] = {"id": projid, "norbitals": norbitals, "dos": dos} + alpha_projections[projid] = {'id': projid, 'norbitals': norbitals, 'dos': dos} elif projid in beta_projections: - raise IOError("three data sets with same projid ({0}) were found".format(projid)) + raise IOError('three data sets with same projid ({0}) were found'.format(projid)) else: - beta_projections[projid] = {"id": projid, "norbitals": norbitals, "dos": dos} + beta_projections[projid] = {'id': projid, 'norbitals': norbitals, 'dos': dos} else: lineno += 1 system_type = IHFERM_MAP[int(system_type)] - fermi_energy = convert_units(float(fermi_energy), "hartree", "eV") + fermi_energy = convert_units(float(fermi_energy), 'hartree', 'eV') - energy_delta = convert_units(float(energy_delta), "hartree", "eV") - initial_energy = convert_units(float(initial_energy), "hartree", "eV") + energy_delta = convert_units(float(energy_delta), 'hartree', 'eV') + initial_energy = convert_units(float(initial_energy), 'hartree', 'eV') len_dos = int(len_dos) energies = np.linspace(initial_energy, initial_energy + len_dos * energy_delta, len_dos).tolist() @@ -141,77 +156,77 @@ def parse_crystal_fort25(content): total_beta = beta_projections.pop(max(beta_projections.keys())) return { - "units": { - "conversion": "CODATA2014", - "energy": "eV" + 'units': { + 'conversion': 'CODATA2014', + 'energy': 'eV' }, - "energy": energies, - "system_type": system_type, - "fermi_energy": fermi_energy, - "total_alpha": total_alpha, - "total_beta": total_beta, - "projections_alpha": list(alpha_projections.values()) if alpha_projections else 
None, - "projections_beta": list(beta_projections.values()) if beta_projections else None, + 'energy': energies, + 'system_type': system_type, + 'fermi_energy': fermi_energy, + 'total_alpha': total_alpha, + 'total_beta': total_beta, + 'projections_alpha': list(alpha_projections.values()) if alpha_projections else None, + 'projections_beta': list(beta_projections.values()) if beta_projections else None, } def parse_crystal_fort25_aiida(fileobj, parser_class): """ takes the result from `parse_crystal_fort25` and prepares it for AiiDA output""" results_data = { - "parser_version": str(__version__), - "parser_class": str(parser_class), - "parser_errors": [], - "parser_warnings": [], - "errors": [], - "warnings": [] + 'parser_version': str(__version__), + 'parser_class': str(parser_class), + 'parser_errors': [], + 'parser_warnings': [], + 'errors': [], + 'warnings': [] } try: read_data = parse_crystal_fort25(fileobj.read()) except IOError as err: traceback.print_exc() - results_data["parser_errors"].append("Error parsing CRYSTAL 17 main output: {0}".format(err)) + results_data['parser_errors'].append('Error parsing CRYSTAL 17 main output: {0}'.format(err)) return results_data, None - results_data["fermi_energy"] = read_data["fermi_energy"] - results_data["energy_units"] = read_data["units"]["energy"] - results_data["units_conversion"] = read_data["units"]["conversion"] - results_data["system_type"] = read_data["system_type"] + results_data['fermi_energy'] = read_data['fermi_energy'] + results_data['energy_units'] = read_data['units']['energy'] + results_data['units_conversion'] = read_data['units']['conversion'] + results_data['system_type'] = read_data['system_type'] array_data = {} - array_data["energies"] = read_data["energy"] - results_data["npts"] = len(array_data["energies"]) - results_data["energy_max"] = max(array_data["energies"]) - results_data["energy_min"] = min(array_data["energies"]) + array_data['energies'] = read_data['energy'] + results_data['npts'] = 
len(array_data['energies']) + results_data['energy_max'] = max(array_data['energies']) + results_data['energy_min'] = min(array_data['energies']) - total_alpha = read_data["total_alpha"]["dos"] - results_data["norbitals_total"] = read_data["total_alpha"]["norbitals"] - if read_data["total_beta"] is not None: - results_data["spin"] = True - total_beta = read_data["total_beta"]["dos"] + total_alpha = read_data['total_alpha']['dos'] + results_data['norbitals_total'] = read_data['total_alpha']['norbitals'] + if read_data['total_beta'] is not None: + results_data['spin'] = True + total_beta = read_data['total_beta']['dos'] assert len(total_alpha) == len(total_beta) else: - results_data["spin"] = False + results_data['spin'] = False - if read_data["projections_alpha"] is not None: - results_data["norbitals_projections"] = [p["norbitals"] for p in read_data["projections_alpha"]] - projected_alpha = [p["dos"] for p in read_data["projections_alpha"]] - if read_data["projections_beta"] is not None: - projected_beta = [p["dos"] for p in read_data["projections_beta"]] + if read_data['projections_alpha'] is not None: + results_data['norbitals_projections'] = [p['norbitals'] for p in read_data['projections_alpha']] + projected_alpha = [p['dos'] for p in read_data['projections_alpha']] + if read_data['projections_beta'] is not None: + projected_beta = [p['dos'] for p in read_data['projections_beta']] assert len(projected_alpha) == len(projected_beta) - if read_data["total_beta"] is None: - array_data["total"] = total_alpha + if read_data['total_beta'] is None: + array_data['total'] = total_alpha else: - array_data["total_alpha"] = total_alpha - array_data["total_beta"] = total_beta + array_data['total_alpha'] = total_alpha + array_data['total_beta'] = total_beta - if read_data["projections_alpha"] is not None: - if read_data["projections_beta"] is not None: - array_data["projections_alpha"] = total_alpha - array_data["projections_beta"] = total_beta + if 
read_data['projections_alpha'] is not None: + if read_data['projections_beta'] is not None: + array_data['projections_alpha'] = total_alpha + array_data['projections_beta'] = total_beta else: - array_data["projections"] = total_alpha + array_data['projections'] = total_alpha return results_data, array_data diff --git a/aiida_crystal17/parsers/raw/crystal_stdout.py b/aiida_crystal17/parsers/raw/crystal_stdout.py index 2daeb4f..600f95b 100644 --- a/aiida_crystal17/parsers/raw/crystal_stdout.py +++ b/aiida_crystal17/parsers/raw/crystal_stdout.py @@ -1,4 +1,18 @@ +#!/usr/bin/env python # -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
""" Basic outline of parsing sections: @@ -44,33 +58,33 @@ # a mapping of known error messages to exit codes, in order of importance KNOWN_ERRORS = ( - ("END OF DATA IN INPUT DECK", "ERROR_CRYSTAL_INPUT"), - ("FORMAT ERROR IN INPUT DECK", "ERROR_CRYSTAL_INPUT"), - ("GEOMETRY DATA FILE NOT FOUND", "ERROR_CRYSTAL_INPUT"), - ("Wavefunction file can not be found", "ERROR_WAVEFUNCTION_NOT_FOUND"), # restart error - ("SCF ENDED - TOO MANY CYCLES", "UNCONVERGED_SCF"), - ("SCF FAILED", "UNCONVERGED_SCF"), # usually found after: SCF ENDED - TOO MANY CYCLES - ("GEOMETRY OPTIMIZATION FAILED", "UNCONVERGED_GEOMETRY"), # usually because run out of steps - ("CONVERGENCE TESTS UNSATISFIED", "UNCONVERGED_GEOMETRY"), # usually found after: OPT END - FAILED - ("OPT END - FAILED", "UNCONVERGED_GEOMETRY"), - ("BASIS SET LINEARLY DEPENDENT", "BASIS_SET_LINEARLY_DEPENDENT"), # occurs during geometry optimisations - ("SCF abnormal end", "ERROR_SCF_ABNORMAL_END"), # catch all error - ("MPI_Abort", "ERROR_MPI_ABORT")) + ('END OF DATA IN INPUT DECK', 'ERROR_CRYSTAL_INPUT'), + ('FORMAT ERROR IN INPUT DECK', 'ERROR_CRYSTAL_INPUT'), + ('GEOMETRY DATA FILE NOT FOUND', 'ERROR_CRYSTAL_INPUT'), + ('Wavefunction file can not be found', 'ERROR_WAVEFUNCTION_NOT_FOUND'), # restart error + ('SCF ENDED - TOO MANY CYCLES', 'UNCONVERGED_SCF'), + ('SCF FAILED', 'UNCONVERGED_SCF'), # usually found after: SCF ENDED - TOO MANY CYCLES + ('GEOMETRY OPTIMIZATION FAILED', 'UNCONVERGED_GEOMETRY'), # usually because run out of steps + ('CONVERGENCE TESTS UNSATISFIED', 'UNCONVERGED_GEOMETRY'), # usually found after: OPT END - FAILED + ('OPT END - FAILED', 'UNCONVERGED_GEOMETRY'), + ('BASIS SET LINEARLY DEPENDENT', 'BASIS_SET_LINEARLY_DEPENDENT'), # occurs during geometry optimisations + ('SCF abnormal end', 'ERROR_SCF_ABNORMAL_END'), # catch all error + ('MPI_Abort', 'ERROR_MPI_ABORT')) def read_crystal_stdout(content): output = { - "units": { - "conversion": "CODATA2014", - "energy": "eV", - "length": "angstrom", - 
"angle": "degrees" + 'units': { + 'conversion': 'CODATA2014', + 'energy': 'eV', + 'length': 'angstrom', + 'angle': 'degrees' }, - "errors": [], - "warnings": [], - "parser_errors": [], - "parser_exceptions": [] + 'errors': [], + 'warnings': [], + 'parser_errors': [], + 'parser_exceptions': [] } # remove MPI statuses, @@ -81,33 +95,33 @@ def read_crystal_stdout(content): lines = content.splitlines() if not lines: - output["parser_errors"] += ["the file is empty"] + output['parser_errors'] += ['the file is empty'] return assign_exit_code(output) # make an initial parse to find all errors/warnings and start lines for sections errors, run_warnings, parser_errors, telapse_seconds, start_lines = initial_parse(lines) - output["errors"] += errors - output["warnings"] += run_warnings - output["parser_errors"] += errors + output['errors'] += errors + output['warnings'] += run_warnings + output['parser_errors'] += errors if telapse_seconds is not None: - output["execution_time_seconds"] = telapse_seconds + output['execution_time_seconds'] = telapse_seconds lineno = 0 # parse until the program header - outcome = parse_section(parse_pre_header, lines, lineno, output, "non_program") + outcome = parse_section(parse_pre_header, lines, lineno, output, 'non_program') if outcome is None or outcome.parser_error is not None: return assign_exit_code(output) lineno = outcome.next_lineno # parse the program header section - outcome = parse_section(parse_calculation_header, lines, lineno, output, "header") + outcome = parse_section(parse_calculation_header, lines, lineno, output, 'header') if outcome is None or outcome.parser_error is not None: return assign_exit_code(output) lineno = outcome.next_lineno # parse the initial geometry input - outcome = parse_section(parse_geometry_input, lines, lineno, output, "geometry_input") + outcome = parse_section(parse_geometry_input, lines, lineno, output, 'geometry_input') if outcome is None or outcome.parser_error is not None: return 
assign_exit_code(output) lineno = outcome.next_lineno @@ -119,13 +133,13 @@ def read_crystal_stdout(content): lineno = outcome.next_lineno # parse the initial SCF run - outcome = parse_section(parse_scf_section, lines, lineno, output, ("initial_scf", "cycles")) + outcome = parse_section(parse_scf_section, lines, lineno, output, ('initial_scf', 'cycles')) if outcome is None or outcome.parser_error is not None: return assign_exit_code(output) lineno = outcome.next_lineno # parse the final energy of the scf run - outcome = parse_section(parse_scf_final_energy, lines, lineno, output, ("initial_scf", "final_energy")) + outcome = parse_section(parse_scf_final_energy, lines, lineno, output, ('initial_scf', 'final_energy')) # Note: we don't abort on error if outcome is not None: lineno = outcome.next_lineno @@ -135,29 +149,29 @@ def read_crystal_stdout(content): # Note: in a few runs I observed, scf maxcycle was reached, but then the scf started again! # parse the optimisation (if present) - if "optimization" in start_lines: + if 'optimization' in start_lines: - outcome = parse_section(parse_optimisation, lines, start_lines["optimization"], output, "optimisation") + outcome = parse_section(parse_optimisation, lines, start_lines['optimization'], output, 'optimisation') # Note: we don't abort on error if outcome is not None: lineno = outcome.next_lineno if outcome is not None and outcome.parser_error is None: # TODO do band gaps only com after optimisation? 
- outcome = parse_section(parse_band_gaps, lines, lineno, output, "band_gaps") + outcome = parse_section(parse_band_gaps, lines, lineno, output, 'band_gaps') # Note: we don't abort on error if outcome is not None: lineno = outcome.next_lineno # parse the final optimized geometry (if present) - if "final_geometry" in start_lines: - outcome = parse_section(parse_final_geometry, lines, start_lines["final_geometry"], output, "final_geometry") + if 'final_geometry' in start_lines: + outcome = parse_section(parse_final_geometry, lines, start_lines['final_geometry'], output, 'final_geometry') # Note: we don't abort on error if outcome is not None: lineno = outcome.next_lineno - if "mulliken" in start_lines: - outcome = parse_section(parse_mulliken_analysis, lines, start_lines["mulliken"], output, "mulliken") + if 'mulliken' in start_lines: + outcome = parse_section(parse_mulliken_analysis, lines, start_lines['mulliken'], output, 'mulliken') # Note: we don't abort on error if outcome is not None: lineno = outcome.next_lineno @@ -188,7 +202,7 @@ def parse_section(func, lines, initial_lineno, output, key_name): outcome = func(lines, initial_lineno) except Exception as err: traceback.print_exc() - output["parser_exceptions"].append(str(err)) + output['parser_exceptions'].append(str(err)) return None if outcome.data: if key_name is None: @@ -201,9 +215,9 @@ def parse_section(func, lines, initial_lineno, output, key_name): key_name = key_name[-1] suboutput[key_name] = outcome.data if outcome.non_terminating_error is not None: - output["errors"].append(outcome.non_terminating_error) + output['errors'].append(outcome.non_terminating_error) if outcome.parser_error is not None: - output["parser_errors"].append(outcome.parser_error) + output['parser_errors'].append(outcome.parser_error) return outcome @@ -212,26 +226,26 @@ def assign_exit_code(output): exit_code = 0 - if output["errors"]: - exit_code = "ERROR_CRYSTAL_RUN" + if output['errors']: + exit_code = 'ERROR_CRYSTAL_RUN' for 
known_error_msg, code_name in KNOWN_ERRORS: found = False - for error_msg in output["errors"]: + for error_msg in output['errors']: if known_error_msg in error_msg: found = True break if found: exit_code = code_name break - elif output["parser_errors"]: - if any(["TESTGEOM DIRECTIVE" in msg for msg in output["warnings"]]): - exit_code = "TESTGEOM_DIRECTIVE" + elif output['parser_errors']: + if any(['TESTGEOM DIRECTIVE' in msg for msg in output['warnings']]): + exit_code = 'TESTGEOM_DIRECTIVE' else: - exit_code = "ERROR_PARSING_STDOUT" - elif output["parser_exceptions"]: - exit_code = "ERROR_PARSING_STDOUT" + exit_code = 'ERROR_PARSING_STDOUT' + elif output['parser_exceptions']: + exit_code = 'ERROR_PARSING_STDOUT' - output["exit_code"] = exit_code + output['exit_code'] = exit_code return output @@ -255,57 +269,57 @@ def initial_parse(lines): for lineno, line in enumerate(lines): - if "WARNING" in line.upper(): + if 'WARNING' in line.upper(): warnings.append(line.strip()) - elif "ERROR" in line: + elif 'ERROR' in line: errors.append(line.strip()) - elif "SCF abnormal end" in line: # only present when run using runcry + elif 'SCF abnormal end' in line: # only present when run using runcry errors.append(line.strip()) - elif "MPI_Abort" in line: + elif 'MPI_Abort' in line: # only record one mpi_abort event (to not clutter output) if not mpi_abort: errors.append(line.strip()) mpi_abort = True - elif "CONVERGENCE TESTS UNSATISFIED" in line.upper(): + elif 'CONVERGENCE TESTS UNSATISFIED' in line.upper(): errors.append(line.strip()) - elif "Note: The following floating-point exceptions are signalling:" in line: + elif 'Note: The following floating-point exceptions are signalling:' in line: warnings.append(line.strip()) - elif "TELAPSE" in line: + elif 'TELAPSE' in line: telapse_line = lineno # search for an optimisation - elif "OPTOPTOPTOPT" in line: - if "optimization" in start_lines: + elif 'OPTOPTOPTOPT' in line: + if 'optimization' in start_lines: if second_opt_line: - 
parser_errors.append("found two lines starting optimization section: " - "{0} and {1}".format(start_lines["optimization"], lineno)) + parser_errors.append('found two lines starting optimization section: ' + '{0} and {1}'.format(start_lines['optimization'], lineno)) else: second_opt_line = True - start_lines["optimization"] = lineno - elif "CONVERGENCE ON GRADIENTS SATISFIED AFTER THE FIRST OPTIMIZATION CYCLE" in line: - if "optimization" in start_lines: + start_lines['optimization'] = lineno + elif 'CONVERGENCE ON GRADIENTS SATISFIED AFTER THE FIRST OPTIMIZATION CYCLE' in line: + if 'optimization' in start_lines: if second_opt_line: - parser_errors.append("found two lines starting optimization section: " - "{0} and {1}".format(start_lines["optimization"], lineno)) + parser_errors.append('found two lines starting optimization section: ' + '{0} and {1}'.format(start_lines['optimization'], lineno)) else: second_opt_line = True - start_lines["optimization"] = lineno + start_lines['optimization'] = lineno # search for mulliken analysis - elif line.strip().startswith("MULLIKEN POPULATION ANALYSIS"): + elif line.strip().startswith('MULLIKEN POPULATION ANALYSIS'): # can have ALPHA+BETA ELECTRONS and ALPHA-BETA ELECTRONS (denoted in line above mulliken_starts) - start_lines.setdefault("mulliken", []).append(lineno) + start_lines.setdefault('mulliken', []).append(lineno) # search for final geometry - elif "FINAL OPTIMIZED GEOMETRY" in line: - if "final_geometry" in start_lines: + elif 'FINAL OPTIMIZED GEOMETRY' in line: + if 'final_geometry' in start_lines: parser_errors.append("found two lines starting 'FINAL OPTIMIZED GEOMETRY':" - " {0} and {1}".format(start_lines["final_geometry"], lineno)) - start_lines["final_geometry"] = lineno + ' {0} and {1}'.format(start_lines['final_geometry'], lineno)) + start_lines['final_geometry'] = lineno total_seconds = None if telapse_line: - total_seconds = int(split_numbers(lines[telapse_line].split("TELAPSE")[1])[0]) + total_seconds = 
int(split_numbers(lines[telapse_line].split('TELAPSE')[1])[0]) # m, s = divmod(total_seconds, 60) # h, m = divmod(m, 60) # elapsed_time = "%d:%02d:%02d" % (h, m, s) @@ -333,15 +347,15 @@ def parse_pre_header(lines, initial_lineno=0): num_lines = len(lines) line = lines[lineno] for i, line in enumerate(lines[initial_lineno:]): - if "************************" in line: + if '************************' in line: # found start of crystal binary stdout return ParsedSection(lineno, meta_data, None) - elif fnmatch(line, "date:*"): - meta_data["date"] = line.replace("date:", "").strip() + elif fnmatch(line, 'date:*'): + meta_data['date'] = line.replace('date:', '').strip() - elif fnmatch(line, "resources_used.ncpus =*"): - meta_data["nprocs"] = int(line.replace("resources_used.ncpus =", "")) + elif fnmatch(line, 'resources_used.ncpus =*'): + meta_data['nprocs'] = int(line.replace('resources_used.ncpus =', '')) lineno += 1 if lineno + 1 >= num_lines: @@ -366,12 +380,12 @@ def parse_calculation_header(lines, initial_lineno): """ data = {} for i, line in enumerate(lines[initial_lineno:]): - if line.strip().startswith("****************") and i != 0: + if line.strip().startswith('****************') and i != 0: return ParsedSection(initial_lineno + i, data, None) - if re.findall(r"\s\s\s\s\sCRYSTAL\d{2}(.*)\*", line): - data["crystal_version"] = int(re.findall(r"\s\s\s\s\sCRYSTAL(\d{2})", line)[0]) + if re.findall(r'\s\s\s\s\sCRYSTAL\d{2}(.*)\*', line): + data['crystal_version'] = int(re.findall(r'\s\s\s\s\sCRYSTAL(\d{2})', line)[0]) if re.findall('public\\s\\:\\s(.+)\\s\\-', line): - data["crystal_subversion"] = re.findall('public\\s\\:\\s(.+)\\s\\-', line)[0] + data['crystal_subversion'] = re.findall('public\\s\\:\\s(.+)\\s\\-', line)[0] return ParsedSection(initial_lineno + i, data, "couldn't find end of program header") @@ -391,7 +405,7 @@ def parse_geometry_input(lines, initial_lineno): lineno = initial_lineno data = {} for i, line in enumerate(lines[initial_lineno:]): - if 
line.strip().startswith("* GEOMETRY EDITING"): + if line.strip().startswith('* GEOMETRY EDITING'): return ParsedSection(lineno + i, data, None) # TODO parse relevant data return ParsedSection(lineno + i, data, "couldn't find end of geometry input (denoted * GEOMETRY EDITING)") @@ -410,47 +424,47 @@ def parse_calculation_setup(lines, initial_lineno): ParsedSection """ - data = {"calculation": {"spin": False}, "initial_geometry": {}} + data = {'calculation': {'spin': False}, 'initial_geometry': {}} end_lineno = None for i, line in enumerate(lines[initial_lineno:]): curr_lineno = initial_lineno + i line = line.strip() - if line.startswith("CRYSTAL - SCF - TYPE OF CALCULATION :"): + if line.startswith('CRYSTAL - SCF - TYPE OF CALCULATION :'): end_lineno = curr_lineno break - elif line.startswith("TYPE OF CALCULATION :"): - data["calculation"]["type"] = line.replace("TYPE OF CALCULATION :", "").strip().lower() - if "HAMILTONIAN" in lines[curr_lineno + 1]: - regex = r"\(EXCHANGE\)\[CORRELATION\] FUNCTIONAL:\((.*)\)\[(.*)\]" + elif line.startswith('TYPE OF CALCULATION :'): + data['calculation']['type'] = line.replace('TYPE OF CALCULATION :', '').strip().lower() + if 'HAMILTONIAN' in lines[curr_lineno + 1]: + regex = r'\(EXCHANGE\)\[CORRELATION\] FUNCTIONAL:\((.*)\)\[(.*)\]' string = lines[curr_lineno + 3].strip() if re.match(regex, string): - data["calculation"]["functional"] = { - "exchange": re.search(regex, string).group(1), - "correlation": re.search(regex, string).group(2) + data['calculation']['functional'] = { + 'exchange': re.search(regex, string).group(1), + 'correlation': re.search(regex, string).group(2) } - elif "SPIN POLARIZ" in line: - data["calculation"]["spin"] = True + elif 'SPIN POLARIZ' in line: + data['calculation']['spin'] = True - parse_geometry_section(data["initial_geometry"], curr_lineno, line, lines) - parse_symmetry_section(data["initial_geometry"], curr_lineno, line, lines) + parse_geometry_section(data['initial_geometry'], curr_lineno, line, 
lines) + parse_symmetry_section(data['initial_geometry'], curr_lineno, line, lines) if end_lineno is None: return ParsedSection(curr_lineno, data, "couldn't find start of initial scf calculation") regexes = { - 'n_atoms': re.compile(r"\sN. OF ATOMS PER CELL\s*(\d*)", re.DOTALL), - 'n_shells': re.compile(r"\sNUMBER OF SHELLS\s*(\d*)", re.DOTALL), - 'n_ao': re.compile(r"\sNUMBER OF AO\s*(\d*)", re.DOTALL), - 'n_electrons': re.compile(r"\sN. OF ELECTRONS PER CELL\s*(\d*)", re.DOTALL), - 'n_core_el': re.compile(r"\sCORE ELECTRONS PER CELL\s*(\d*)", re.DOTALL), - 'n_symops': re.compile(r"\sN. OF SYMMETRY OPERATORS\s*(\d*)", re.DOTALL), - 'n_kpoints_ibz': re.compile(r"\sNUMBER OF K POINTS IN THE IBZ\s*(\d*)", re.DOTALL), - 'n_kpoints_gilat': re.compile(r"\s NUMBER OF K POINTS\(GILAT NET\)\s*(\d*)", re.DOTALL), + 'n_atoms': re.compile(r'\sN. OF ATOMS PER CELL\s*(\d*)', re.DOTALL), + 'n_shells': re.compile(r'\sNUMBER OF SHELLS\s*(\d*)', re.DOTALL), + 'n_ao': re.compile(r'\sNUMBER OF AO\s*(\d*)', re.DOTALL), + 'n_electrons': re.compile(r'\sN. OF ELECTRONS PER CELL\s*(\d*)', re.DOTALL), + 'n_core_el': re.compile(r'\sCORE ELECTRONS PER CELL\s*(\d*)', re.DOTALL), + 'n_symops': re.compile(r'\sN. 
OF SYMMETRY OPERATORS\s*(\d*)', re.DOTALL), + 'n_kpoints_ibz': re.compile(r'\sNUMBER OF K POINTS IN THE IBZ\s*(\d*)', re.DOTALL), + 'n_kpoints_gilat': re.compile(r'\s NUMBER OF K POINTS\(GILAT NET\)\s*(\d*)', re.DOTALL), } - content = "\n".join(lines[initial_lineno:end_lineno]) + content = '\n'.join(lines[initial_lineno:end_lineno]) for name, regex in regexes.items(): num = regex.search(content) if num is not None: @@ -519,53 +533,53 @@ def parse_geometry_section(data, initial_lineno, line, lines): """ # check that units are correct (probably not needed) - if fnmatch(line, "LATTICE PARAMETERS*(*)"): - if not ("ANGSTROM" in line and "DEGREES" in line): - raise IOError("was expecting lattice parameters in angstroms and degrees on line:" - " {0}, got: {1}".format(initial_lineno, line)) + if fnmatch(line, 'LATTICE PARAMETERS*(*)'): + if not ('ANGSTROM' in line and 'DEGREES' in line): + raise IOError('was expecting lattice parameters in angstroms and degrees on line:' + ' {0}, got: {1}'.format(initial_lineno, line)) return - for pattern, field, pattern2 in [('PRIMITIVE*CELL*', "primitive_cell", "ATOMS IN THE ASYMMETRIC UNIT*"), - ('CRYSTALLOGRAPHIC*CELL*', "crystallographic_cell", - "COORDINATES IN THE CRYSTALLOGRAPHIC CELL")]: + for pattern, field, pattern2 in [('PRIMITIVE*CELL*', 'primitive_cell', 'ATOMS IN THE ASYMMETRIC UNIT*'), + ('CRYSTALLOGRAPHIC*CELL*', 'crystallographic_cell', + 'COORDINATES IN THE CRYSTALLOGRAPHIC CELL')]: if fnmatch(line, pattern): - if not fnmatch(lines[initial_lineno + 1].strip(), "A*B*C*ALPHA*BETA*GAMMA"): - raise IOError("was expecting A B C ALPHA BETA GAMMA on line:" - " {0}, got: {1}".format(initial_lineno + 1, lines[initial_lineno + 1])) + if not fnmatch(lines[initial_lineno + 1].strip(), 'A*B*C*ALPHA*BETA*GAMMA'): + raise IOError('was expecting A B C ALPHA BETA GAMMA on line:' + ' {0}, got: {1}'.format(initial_lineno + 1, lines[initial_lineno + 1])) data[field] = edict.merge([ data.get(field, {}), { - "cell_parameters": + 
'cell_parameters': dict(zip(['a', 'b', 'c', 'alpha', 'beta', 'gamma'], split_numbers(lines[initial_lineno + 2]))) } ]) elif fnmatch(line, pattern2): periodic = [True, True, True] - if not fnmatch(lines[initial_lineno + 1].strip(), "ATOM*X/A*Y/B*Z/C"): + if not fnmatch(lines[initial_lineno + 1].strip(), 'ATOM*X/A*Y/B*Z/C'): # for 2d (slab) can get z in angstrom (and similar for 1d) - if fnmatch(lines[initial_lineno + 1].strip(), "ATOM*X/A*Y/B*Z(ANGSTROM)*"): + if fnmatch(lines[initial_lineno + 1].strip(), 'ATOM*X/A*Y/B*Z(ANGSTROM)*'): periodic = [True, True, False] - elif fnmatch(lines[initial_lineno + 1].strip(), "ATOM*X/A*Y(ANGSTROM)*Z(ANGSTROM)*"): + elif fnmatch(lines[initial_lineno + 1].strip(), 'ATOM*X/A*Y(ANGSTROM)*Z(ANGSTROM)*'): periodic = [True, False, False] - elif fnmatch(lines[initial_lineno + 1].strip(), "ATOM*X(ANGSTROM)*Y(ANGSTROM)*Z(ANGSTROM)*"): + elif fnmatch(lines[initial_lineno + 1].strip(), 'ATOM*X(ANGSTROM)*Y(ANGSTROM)*Z(ANGSTROM)*'): periodic = [False, False, False] cell_params = dict( zip(['a', 'b', 'c', 'alpha', 'beta', 'gamma'], [500., 500., 500., 90., 90., 90.])) - data[field] = edict.merge([data.get(field, {}), {"cell_parameters": cell_params}]) + data[field] = edict.merge([data.get(field, {}), {'cell_parameters': cell_params}]) else: - raise IOError("was expecting ATOM X Y Z (in units of ANGSTROM or fractional) on line:" - " {0}, got: {1}".format(initial_lineno + 1, lines[initial_lineno + 1])) - if not all(periodic) and "cell_parameters" not in data.get(field, {}): - raise IOError("require cell parameters to have been set for non-periodic directions in line" - " #{0} : {1}".format(initial_lineno + 1, lines[initial_lineno + 1])) + raise IOError('was expecting ATOM X Y Z (in units of ANGSTROM or fractional) on line:' + ' {0}, got: {1}'.format(initial_lineno + 1, lines[initial_lineno + 1])) + if not all(periodic) and 'cell_parameters' not in data.get(field, {}): + raise IOError('require cell parameters to have been set for non-periodic 
directions in line' + ' #{0} : {1}'.format(initial_lineno + 1, lines[initial_lineno + 1])) a, b, c, alpha, beta, gamma = [None] * 6 if not all(periodic): - cell = data[field]["cell_parameters"] - a, b, c, alpha, beta, gamma = [cell[p] for p in ["a", "b", "c", "alpha", "beta", "gamma"]] + cell = data[field]['cell_parameters'] + a, b, c, alpha, beta, gamma = [cell[p] for p in ['a', 'b', 'c', 'alpha', 'beta', 'gamma']] curr_lineno = initial_lineno + 3 - atom_data = {'ids': [], 'assymetric': [], 'atomic_numbers': [], 'symbols': [], "fcoords": []} - atom_data["pbc"] = periodic + atom_data = {'ids': [], 'assymetric': [], 'atomic_numbers': [], 'symbols': [], 'fcoords': []} + atom_data['pbc'] = periodic while lines[curr_lineno].strip() and not lines[curr_lineno].strip()[0].isalpha(): fields = lines[curr_lineno].strip().split() atom_data['ids'].append(fields[0]) @@ -579,19 +593,19 @@ def parse_geometry_section(data, initial_lineno, line, lines): # TODO other periodic types (1D, 0D) curr_lineno += 1 - if not atom_data["fcoords"]: - atom_data.pop("fcoords") + if not atom_data['fcoords']: + atom_data.pop('fcoords') data[field] = edict.merge([data.get(field, {}), atom_data]) # TODO These coordinates are present in initial and final optimized sections, # but DON'T work with lattice parameters - if fnmatch(line, "CARTESIAN COORDINATES - PRIMITIVE CELL*"): - if not fnmatch(lines[initial_lineno + 2].strip(), "*ATOM*X(ANGSTROM)*Y(ANGSTROM)*Z(ANGSTROM)"): - raise IOError("was expecting ATOM X(ANGSTROM) Y(ANGSTROM) Z(ANGSTROM) on line:" - " {0}, got: {1}".format(initial_lineno + 2, lines[initial_lineno + 2])) + if fnmatch(line, 'CARTESIAN COORDINATES - PRIMITIVE CELL*'): + if not fnmatch(lines[initial_lineno + 2].strip(), '*ATOM*X(ANGSTROM)*Y(ANGSTROM)*Z(ANGSTROM)'): + raise IOError('was expecting ATOM X(ANGSTROM) Y(ANGSTROM) Z(ANGSTROM) on line:' + ' {0}, got: {1}'.format(initial_lineno + 2, lines[initial_lineno + 2])) curr_lineno = initial_lineno + 4 - atom_data = {'ids': [], 
'atomic_numbers': [], 'symbols': [], "ccoords": []} + atom_data = {'ids': [], 'atomic_numbers': [], 'symbols': [], 'ccoords': []} while lines[curr_lineno].strip() and not lines[curr_lineno].strip()[0].isalpha(): fields = lines[curr_lineno].strip().split() atom_data['ids'].append(fields[0]) @@ -599,27 +613,27 @@ def parse_geometry_section(data, initial_lineno, line, lines): atom_data['symbols'].append(fields[2].lower().capitalize()) atom_data['ccoords'].append([float(fields[3]), float(fields[4]), float(fields[5])]) curr_lineno += 1 - data["primitive_cell"] = edict.merge([data.get("primitive_cell", {}), atom_data]) - - elif fnmatch(line, "DIRECT LATTICE VECTORS CARTESIAN COMPONENTS*"): - if "ANGSTROM" not in line: - raise IOError("was expecting lattice vectors in angstroms on line:" - " {0}, got: {1}".format(initial_lineno, line)) - if not fnmatch(lines[initial_lineno + 1].strip(), "X*Y*Z"): - raise IOError("was expecting X Y Z on line:" - " {0}, got: {1}".format(initial_lineno + 1, lines[initial_lineno + 1])) - if "crystallographic_cell" not in data: - data["crystallographic_cell"] = {} - if "cell_vectors" in data["crystallographic_cell"]: - raise IOError("found multiple cell vectors on line:" - " {0}, got: {1}".format(initial_lineno + 1, lines[initial_lineno + 1])) + data['primitive_cell'] = edict.merge([data.get('primitive_cell', {}), atom_data]) + + elif fnmatch(line, 'DIRECT LATTICE VECTORS CARTESIAN COMPONENTS*'): + if 'ANGSTROM' not in line: + raise IOError('was expecting lattice vectors in angstroms on line:' + ' {0}, got: {1}'.format(initial_lineno, line)) + if not fnmatch(lines[initial_lineno + 1].strip(), 'X*Y*Z'): + raise IOError('was expecting X Y Z on line:' + ' {0}, got: {1}'.format(initial_lineno + 1, lines[initial_lineno + 1])) + if 'crystallographic_cell' not in data: + data['crystallographic_cell'] = {} + if 'cell_vectors' in data['crystallographic_cell']: + raise IOError('found multiple cell vectors on line:' + ' {0}, got: 
{1}'.format(initial_lineno + 1, lines[initial_lineno + 1])) vectors = { - "a": split_numbers(lines[initial_lineno + 2]), - "b": split_numbers(lines[initial_lineno + 3]), - "c": split_numbers(lines[initial_lineno + 4]) + 'a': split_numbers(lines[initial_lineno + 2]), + 'b': split_numbers(lines[initial_lineno + 3]), + 'c': split_numbers(lines[initial_lineno + 4]) } - data["primitive_cell"]["cell_vectors"] = vectors + data['primitive_cell']['cell_vectors'] = vectors def parse_symmetry_section(data, initial_lineno, line, lines): @@ -635,26 +649,26 @@ def parse_symmetry_section(data, initial_lineno, line, lines): lines: list[str] """ - if fnmatch(line, "*SYMMOPS - TRANSLATORS IN FRACTIONAL UNITS*"): + if fnmatch(line, '*SYMMOPS - TRANSLATORS IN FRACTIONAL UNITS*'): nums = split_numbers(line) if not len(nums) == 1: - raise IOError("was expecting a single number, representing the number of symmops, on this line:" - " {0}, got: {1}".format(initial_lineno, line)) + raise IOError('was expecting a single number, representing the number of symmops, on this line:' + ' {0}, got: {1}'.format(initial_lineno, line)) nsymmops = int(nums[0]) - if not fnmatch(lines[initial_lineno + 1], "*MATRICES AND TRANSLATORS IN THE CRYSTALLOGRAPHIC REFERENCE FRAME*"): - raise IOError("was expecting CRYSTALLOGRAPHIC REFERENCE FRAME on this line" - " {0}, got: {1}".format(initial_lineno + 1, lines[initial_lineno + 1].strip())) - if not fnmatch(lines[initial_lineno + 2], "*V*INV*ROTATION MATRICES*TRANSLATORS*"): - raise IOError("was expecting symmetry headers on this line" - " {0}, got: {1}".format(initial_lineno + 2, lines[initial_lineno + 2].strip())) + if not fnmatch(lines[initial_lineno + 1], '*MATRICES AND TRANSLATORS IN THE CRYSTALLOGRAPHIC REFERENCE FRAME*'): + raise IOError('was expecting CRYSTALLOGRAPHIC REFERENCE FRAME on this line' + ' {0}, got: {1}'.format(initial_lineno + 1, lines[initial_lineno + 1].strip())) + if not fnmatch(lines[initial_lineno + 2], '*V*INV*ROTATION 
MATRICES*TRANSLATORS*'): + raise IOError('was expecting symmetry headers on this line' + ' {0}, got: {1}'.format(initial_lineno + 2, lines[initial_lineno + 2].strip())) symmops = [] for j in range(nsymmops): values = split_numbers(lines[initial_lineno + 3 + j]) if not len(values) == 14: - raise IOError("was expecting 14 values for symmetry data on this line" - " {0}, got: {1}".format(initial_lineno + 3 + j, lines[initial_lineno + 3 + j].strip())) + raise IOError('was expecting 14 values for symmetry data on this line' + ' {0}, got: {1}'.format(initial_lineno + 3 + j, lines[initial_lineno + 3 + j].strip())) symmops.append(values[2:14]) - data["primitive_symmops"] = symmops + data['primitive_symmops'] = symmops def parse_scf_section(lines, initial_lineno, final_lineno=None): @@ -677,18 +691,18 @@ def parse_scf_section(lines, initial_lineno, final_lineno=None): for k, line in enumerate(lines[initial_lineno:]): curr_lineno = k + initial_lineno - if "SCF ENDED" in line or (final_lineno is not None and curr_lineno == final_lineno): + if 'SCF ENDED' in line or (final_lineno is not None and curr_lineno == final_lineno): # add last scf cycle if scf_cyc: scf.append(scf_cyc) - if "CONVERGE" not in line: + if 'CONVERGE' not in line: return ParsedSection(curr_lineno, scf, None, line.strip()) else: return ParsedSection(curr_lineno, scf, None) line = line.strip() - if fnmatch(line, "CYC*"): + if fnmatch(line, 'CYC*'): # start new cycle if scf_cyc is not None: @@ -700,17 +714,17 @@ def parse_scf_section(lines, initial_lineno, final_lineno=None): if last_cyc_num is not None: if cur_cyc_num != last_cyc_num + 1: return ParsedSection( - curr_lineno, scf, "was expecting the SCF cyle number to be {0} in line {1}: {2}".format( + curr_lineno, scf, 'was expecting the SCF cyle number to be {0} in line {1}: {2}'.format( int(last_cyc_num + 1), curr_lineno, line)) last_cyc_num = cur_cyc_num - if fnmatch(line, "*ETOT*"): - if not fnmatch(line, "*ETOT(AU)*"): - raise IOError("was expecting units 
in a.u. on line {0}, " "got: {1}".format(curr_lineno, line)) + if fnmatch(line, '*ETOT*'): + if not fnmatch(line, '*ETOT(AU)*'): + raise IOError('was expecting units in a.u. on line {0}, ' 'got: {1}'.format(curr_lineno, line)) # this is the initial energy of the configuration and so actually the energy of the previous run if scf: - scf[-1]["energy"] = scf[-1].get("energy", {}) - scf[-1]["energy"]["total"] = convert_units(split_numbers(line)[1], "hartree", "eV") + scf[-1]['energy'] = scf[-1].get('energy', {}) + scf[-1]['energy']['total'] = convert_units(split_numbers(line)[1], 'hartree', 'eV') elif scf_cyc is None: continue @@ -724,31 +738,31 @@ def parse_scf_section(lines, initial_lineno, final_lineno=None): # In a simple ferromagnetic material they should be equal (except possibly for an overall sign). # In simple antiferromagnets (like FeO) MT is zero and MA is twice the magnetization of each of the two atoms. - if line.startswith("CHARGE NORMALIZATION FACTOR"): - scf_cyc["CHARGE NORMALIZATION FACTOR".lower().replace(" ", "_")] = split_numbers(line)[0] - if line.startswith("SUMMED SPIN DENSITY"): - scf_cyc["spin_density_total"] = split_numbers(line)[0] + if line.startswith('CHARGE NORMALIZATION FACTOR'): + scf_cyc['CHARGE NORMALIZATION FACTOR'.lower().replace(' ', '_')] = split_numbers(line)[0] + if line.startswith('SUMMED SPIN DENSITY'): + scf_cyc['spin_density_total'] = split_numbers(line)[0] - if line.startswith("TOTAL ATOMIC CHARGES"): - scf_cyc["atomic_charges_peratom"] = [] + if line.startswith('TOTAL ATOMIC CHARGES'): + scf_cyc['atomic_charges_peratom'] = [] j = curr_lineno + 1 while len(lines[j].strip().split()) == len(split_numbers(lines[j])): - scf_cyc["atomic_charges_peratom"] += split_numbers(lines[j]) + scf_cyc['atomic_charges_peratom'] += split_numbers(lines[j]) j += 1 - if line.startswith("TOTAL ATOMIC SPINS"): - scf_cyc["spin_density_peratom"] = [] + if line.startswith('TOTAL ATOMIC SPINS'): + scf_cyc['spin_density_peratom'] = [] j = curr_lineno + 
1 while len(lines[j].strip().split()) == len(split_numbers(lines[j])): - scf_cyc["spin_density_peratom"] += split_numbers(lines[j]) + scf_cyc['spin_density_peratom'] += split_numbers(lines[j]) j += 1 - scf_cyc["spin_density_absolute"] = sum([abs(s) for s in split_numbers(lines[curr_lineno + 1])]) + scf_cyc['spin_density_absolute'] = sum([abs(s) for s in split_numbers(lines[curr_lineno + 1])]) # add last scf cycle if scf_cyc: scf.append(scf_cyc) return ParsedSection(curr_lineno, scf, - "Did not find end of SCF section (starting on line {})".format(initial_lineno)) + 'Did not find end of SCF section (starting on line {})'.format(initial_lineno)) def parse_scf_final_energy(lines, initial_lineno, final_lineno=None): @@ -767,18 +781,18 @@ def parse_scf_final_energy(lines, initial_lineno, final_lineno=None): for i, line in enumerate(lines[initial_lineno:]): if final_lineno is not None and i + initial_lineno == final_lineno: return ParsedSection(final_lineno, scf_energy) - if line.strip().startswith("TTTTTTT") or line.strip().startswith("******"): + if line.strip().startswith('TTTTTTT') or line.strip().startswith('******'): return ParsedSection(final_lineno, scf_energy) - if fnmatch(line.strip(), "TOTAL ENERGY*DE*"): - if not fnmatch(line.strip(), "TOTAL ENERGY*AU*DE*"): - raise IOError("was expecting units in a.u. on line:" " {0}, got: {1}".format(initial_lineno + i, line)) - if "total_corrected" in scf_energy: - raise IOError("total corrected energy found twice, on line:" - " {0}, got: {1}".format(initial_lineno + i, line)) - scf_energy["total_corrected"] = convert_units(split_numbers(line)[1], "hartree", "eV") + if fnmatch(line.strip(), 'TOTAL ENERGY*DE*'): + if not fnmatch(line.strip(), 'TOTAL ENERGY*AU*DE*'): + raise IOError('was expecting units in a.u. 
on line:' ' {0}, got: {1}'.format(initial_lineno + i, line)) + if 'total_corrected' in scf_energy: + raise IOError('total corrected energy found twice, on line:' + ' {0}, got: {1}'.format(initial_lineno + i, line)) + scf_energy['total_corrected'] = convert_units(split_numbers(line)[1], 'hartree', 'eV') return ParsedSection(final_lineno, scf_energy, - "Did not find end of Post SCF section (starting on line {})".format(initial_lineno)) + 'Did not find end of Post SCF section (starting on line {})'.format(initial_lineno)) def parse_optimisation(lines, initial_lineno): @@ -794,16 +808,16 @@ def parse_optimisation(lines, initial_lineno): ParsedSection """ - if "CONVERGENCE ON GRADIENTS SATISFIED AFTER THE FIRST OPTIMIZATION CYCLE" in lines[initial_lineno]: + if 'CONVERGENCE ON GRADIENTS SATISFIED AFTER THE FIRST OPTIMIZATION CYCLE' in lines[initial_lineno]: for k, line in enumerate(lines[initial_lineno:]): curr_lineno = initial_lineno + k line = line.strip() - if "OPT END -" in line: + if 'OPT END -' in line: - if not fnmatch(line, "*E(AU)*"): - raise IOError("was expecting units in a.u. on line:" " {0}, got: {1}".format(curr_lineno, line)) - data = [{"energy": {"total_corrected": convert_units(split_numbers(lines[-1])[0], "hartree", "eV")}}] + if not fnmatch(line, '*E(AU)*'): + raise IOError('was expecting units in a.u. 
on line:' ' {0}, got: {1}'.format(curr_lineno, line)) + data = [{'energy': {'total_corrected': convert_units(split_numbers(lines[-1])[0], 'hartree', 'eV')}}] return ParsedSection(curr_lineno, data) @@ -819,12 +833,12 @@ def parse_optimisation(lines, initial_lineno): curr_lineno = initial_lineno + k line = line.strip() - if "OPT END -" in line: + if 'OPT END -' in line: if opt_cyc and not failed_opt_step: opt_cycles.append(opt_cyc) return ParsedSection(curr_lineno, opt_cycles) - if fnmatch(line, "*OPTIMIZATION*POINT*"): + if fnmatch(line, '*OPTIMIZATION*POINT*'): if opt_cyc is not None and not failed_opt_step: opt_cycles.append(opt_cyc) opt_cyc = {} @@ -834,37 +848,37 @@ def parse_optimisation(lines, initial_lineno): continue # when using ONELOG optimisation key word - if "CRYSTAL - SCF - TYPE OF CALCULATION :" in line: + if 'CRYSTAL - SCF - TYPE OF CALCULATION :' in line: if scf_start_no is not None: return ParsedSection( curr_lineno, opt_cycles, "found two lines starting scf ('CRYSTAL - SCF - ') in opt step {0}:".format( - len(opt_cycles)) + " {0} and {1}".format(scf_start_no, curr_lineno)) + len(opt_cycles)) + ' {0} and {1}'.format(scf_start_no, curr_lineno)) scf_start_no = curr_lineno - elif "SCF ENDED" in line: - if "CONVERGE" not in line: + elif 'SCF ENDED' in line: + if 'CONVERGE' not in line: pass # errors.append(line.strip()) outcome = parse_scf_section(lines, scf_start_no + 1, curr_lineno + 1) # TODO test if error - opt_cyc["scf"] = outcome.data + opt_cyc['scf'] = outcome.data parse_geometry_section(opt_cyc, curr_lineno, line, lines) # TODO move to read_post_scf? - if fnmatch(line, "TOTAL ENERGY*DE*"): - if not fnmatch(line, "TOTAL ENERGY*AU*DE*AU*"): - return ParsedSection(curr_lineno, opt_cycles, "was expecting units in a.u. 
on line:" - " {0}, got: {1}".format(curr_lineno, line)) - opt_cyc["energy"] = opt_cyc.get("energy", {}) - opt_cyc["energy"]["total_corrected"] = convert_units(split_numbers(line)[1], "hartree", "eV") - - for param in ["MAX GRADIENT", "RMS GRADIENT", "MAX DISPLAC", "RMS DISPLAC"]: - if fnmatch(line, "{}*CONVERGED*".format(param)): - if "convergence" not in opt_cyc: - opt_cyc["convergence"] = {} - opt_cyc["convergence"][param.lower().replace(" ", "_")] = bool(strtobool(line.split()[-1])) - - if fnmatch(line, "*SCF DID NOT CONVERGE. RETRYING WITH A SMALLER OPT STEP*"): + if fnmatch(line, 'TOTAL ENERGY*DE*'): + if not fnmatch(line, 'TOTAL ENERGY*AU*DE*AU*'): + return ParsedSection(curr_lineno, opt_cycles, 'was expecting units in a.u. on line:' + ' {0}, got: {1}'.format(curr_lineno, line)) + opt_cyc['energy'] = opt_cyc.get('energy', {}) + opt_cyc['energy']['total_corrected'] = convert_units(split_numbers(line)[1], 'hartree', 'eV') + + for param in ['MAX GRADIENT', 'RMS GRADIENT', 'MAX DISPLAC', 'RMS DISPLAC']: + if fnmatch(line, '{}*CONVERGED*'.format(param)): + if 'convergence' not in opt_cyc: + opt_cyc['convergence'] = {} + opt_cyc['convergence'][param.lower().replace(' ', '_')] = bool(strtobool(line.split()[-1])) + + if fnmatch(line, '*SCF DID NOT CONVERGE. RETRYING WITH A SMALLER OPT STEP*'): # TODO add failed optimisation steps with dummy energy and extra parameter? 
# for now discard this optimisation step failed_opt_step = True @@ -924,22 +938,22 @@ def parse_band_gaps(lines, initial_lineno): # TODO use regex: # re.compile(r"(DIRECT|INDIRECT) ENERGY BAND GAP:\s*([.\d]*)", # re.DOTALL), - if "BAND GAP" in line: - if fnmatch(line.strip(), "ALPHA BAND GAP:*eV"): + if 'BAND GAP' in line: + if fnmatch(line.strip(), 'ALPHA BAND GAP:*eV'): bgvalue = split_numbers(line)[0] - bgtype = "alpha" - elif fnmatch(line.strip(), "BETA BAND GAP:*eV"): + bgtype = 'alpha' + elif fnmatch(line.strip(), 'BETA BAND GAP:*eV'): bgvalue = split_numbers(line)[0] - bgtype = "beta" - elif fnmatch(line.strip(), "BAND GAP:*eV"): + bgtype = 'beta' + elif fnmatch(line.strip(), 'BAND GAP:*eV'): bgvalue = split_numbers(line)[0] - bgtype = "all" + bgtype = 'all' else: return ParsedSection(initial_lineno, band_gaps, - "found a band gap of unknown format at line {0}: {1}".format(curr_lineno, line)) + 'found a band gap of unknown format at line {0}: {1}'.format(curr_lineno, line)) if bgtype in band_gaps: return ParsedSection( - initial_lineno, band_gaps, "band gap data already contains {0} value before line {1}: {2}".format( + initial_lineno, band_gaps, 'band gap data already contains {0} value before line {1}: {2}'.format( bgtype, curr_lineno, line)) band_gaps[bgtype] = bgvalue @@ -963,12 +977,12 @@ def parse_mulliken_analysis(lines, mulliken_indices): for i, indx in enumerate(mulliken_indices): name = lines[indx - 1].strip().lower() - if not (name == "ALPHA+BETA ELECTRONS".lower() or name == "ALPHA-BETA ELECTRONS".lower()): + if not (name == 'ALPHA+BETA ELECTRONS'.lower() or name == 'ALPHA-BETA ELECTRONS'.lower()): return ParsedSection( - mulliken_indices[0], mulliken, "was expecting mulliken to be alpha+beta or alpha-beta on line:" - " {0}, got: {1}".format(indx - 1, lines[indx - 1])) + mulliken_indices[0], mulliken, 'was expecting mulliken to be alpha+beta or alpha-beta on line:' + ' {0}, got: {1}'.format(indx - 1, lines[indx - 1])) - mulliken[name.replace(" ", 
"_")] = {"ids": [], "symbols": [], "atomic_numbers": [], "charges": []} + mulliken[name.replace(' ', '_')] = {'ids': [], 'symbols': [], 'atomic_numbers': [], 'charges': []} if len(mulliken_indices) > i + 1: searchlines = lines[indx + 1:mulliken_indices[i + 1]] @@ -976,7 +990,7 @@ def parse_mulliken_analysis(lines, mulliken_indices): searchlines = lines[indx + 1:] charge_line = None for j, line in enumerate(searchlines): - if fnmatch(line.strip(), "*ATOM*Z*CHARGE*SHELL*POPULATION*"): + if fnmatch(line.strip(), '*ATOM*Z*CHARGE*SHELL*POPULATION*'): charge_line = j + 2 break if charge_line is None: @@ -986,10 +1000,10 @@ def parse_mulliken_analysis(lines, mulliken_indices): fields = searchlines[charge_line].strip().split() # shell population can wrap multiple lines, the one we want has the label in it if len(fields) != len(split_numbers(searchlines[charge_line])): - mulliken[name.replace(" ", "_")]["ids"].append(int(fields[0])) - mulliken[name.replace(" ", "_")]["symbols"].append(fields[1].lower().capitalize()) - mulliken[name.replace(" ", "_")]["atomic_numbers"].append(int(fields[2])) - mulliken[name.replace(" ", "_")]["charges"].append(float(fields[3])) + mulliken[name.replace(' ', '_')]['ids'].append(int(fields[0])) + mulliken[name.replace(' ', '_')]['symbols'].append(fields[1].lower().capitalize()) + mulliken[name.replace(' ', '_')]['atomic_numbers'].append(int(fields[2])) + mulliken[name.replace(' ', '_')]['charges'].append(float(fields[3])) charge_line += 1 @@ -1002,37 +1016,37 @@ def extract_final_info(parsed_data): (depending if it was an optimisation or not) """ data = {} - if "final_geometry" in parsed_data: - data = parsed_data["final_geometry"] - - if "primitive_cell" not in data: - if "optimisation" in parsed_data: - data["primitive_cell"] = copy.deepcopy(parsed_data["optimisation"][-1].get("primitive_cell", None)) - elif "initial_geometry" in parsed_data: - data["primitive_cell"] = copy.deepcopy(parsed_data["initial_geometry"].get("primitive_cell", None)) 
+ if 'final_geometry' in parsed_data: + data = parsed_data['final_geometry'] + + if 'primitive_cell' not in data: + if 'optimisation' in parsed_data: + data['primitive_cell'] = copy.deepcopy(parsed_data['optimisation'][-1].get('primitive_cell', None)) + elif 'initial_geometry' in parsed_data: + data['primitive_cell'] = copy.deepcopy(parsed_data['initial_geometry'].get('primitive_cell', None)) else: - raise ValueError("no primitive_cell available in parsed data") - - if "energy" not in data: - if "optimisation" in parsed_data: - energies = parsed_data["optimisation"][-1].get("energy", {}) - if "total_corrected" not in energies: - raise ValueError("no optimised energy available in parsed data") - data["energy"] = energies["total_corrected"] - elif "initial_scf" in parsed_data: - energies = parsed_data["initial_scf"].get("final_energy", {}) - if "total_corrected" not in energies: - raise ValueError("no scf energy available in parsed data") - data["energy"] = energies["total_corrected"] + raise ValueError('no primitive_cell available in parsed data') + + if 'energy' not in data: + if 'optimisation' in parsed_data: + energies = parsed_data['optimisation'][-1].get('energy', {}) + if 'total_corrected' not in energies: + raise ValueError('no optimised energy available in parsed data') + data['energy'] = energies['total_corrected'] + elif 'initial_scf' in parsed_data: + energies = parsed_data['initial_scf'].get('final_energy', {}) + if 'total_corrected' not in energies: + raise ValueError('no scf energy available in parsed data') + data['energy'] = energies['total_corrected'] else: - raise ValueError("no energy available in parsed data") + raise ValueError('no energy available in parsed data') - if "primitive_symmops" not in data: - if "optimisation" in parsed_data: - raise ValueError("optimisation, but no primitive_symops specified in final_geometry") - if "initial_geometry" in parsed_data and "primitive_symmops" in parsed_data["initial_geometry"]: - 
data["primitive_symmops"] = copy.deepcopy(parsed_data["initial_geometry"]["primitive_symmops"]) + if 'primitive_symmops' not in data: + if 'optimisation' in parsed_data: + raise ValueError('optimisation, but no primitive_symops specified in final_geometry') + if 'initial_geometry' in parsed_data and 'primitive_symmops' in parsed_data['initial_geometry']: + data['primitive_symmops'] = copy.deepcopy(parsed_data['initial_geometry']['primitive_symmops']) else: - raise ValueError("no primitive_symops available in parsed data") + raise ValueError('no primitive_symops available in parsed data') return data diff --git a/aiida_crystal17/parsers/raw/doss_input.py b/aiida_crystal17/parsers/raw/doss_input.py index 4b96127..b31adc6 100644 --- a/aiida_crystal17/parsers/raw/doss_input.py +++ b/aiida_crystal17/parsers/raw/doss_input.py @@ -1,4 +1,18 @@ - +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
from aiida_crystal17.validation import validate_against_schema @@ -6,43 +20,43 @@ def read_doss_contents(content): """ read the contents of a doss.d3 input file """ lines = content.splitlines() params = {} - assert lines[0].rstrip() == "NEWK" - params["shrink_is"] = int(lines[1].split()[0]) - params["shrink_isp"] = int(lines[1].split()[1]) - assert lines[2].rstrip() == "1 0" - assert lines[3].rstrip() == "DOSS" + assert lines[0].rstrip() == 'NEWK' + params['shrink_is'] = int(lines[1].split()[0]) + params['shrink_isp'] = int(lines[1].split()[1]) + assert lines[2].rstrip() == '1 0' + assert lines[3].rstrip() == 'DOSS' settings = lines[4].split() assert len(settings) >= 7 npro = int(settings[0]) - params["npoints"] = int(settings[1]) + params['npoints'] = int(settings[1]) band_first = int(settings[2]) band_last = int(settings[3]) iplo = int(settings[4]) # noqa: F841 - params["npoly"] = int(settings[5]) + params['npoly'] = int(settings[5]) npr = int(settings[6]) # noqa: F841 if band_first >= 0 and band_last >= 0: - params["band_minimum"] = band_first - params["band_maximum"] = band_last - params["band_units"] = "bands" + params['band_minimum'] = band_first + params['band_maximum'] = band_last + params['band_units'] = 'bands' proj_index = 5 else: - params["band_minimum"] = float(lines[5].split()[0]) - params["band_maximum"] = float(lines[5].split()[1]) - params["band_units"] = "hartree" + params['band_minimum'] = float(lines[5].split()[0]) + params['band_maximum'] = float(lines[5].split()[1]) + params['band_units'] = 'hartree' proj_index = 6 - params["atomic_projections"] = [] - params["orbital_projections"] = [] + params['atomic_projections'] = [] + params['orbital_projections'] = [] for line in lines[proj_index:proj_index + npro]: values = [int(i) for i in line.split()] if values[0] > 0: - params["orbital_projections"].append(values[1:]) + params['orbital_projections'].append(values[1:]) else: - params["atomic_projections"].append(values[1:]) - assert lines[proj_index 
+ npro].rstrip() == "END" + params['atomic_projections'].append(values[1:]) + assert lines[proj_index + npro].rstrip() == 'END' - validate_against_schema(params, "doss_input.schema.json") + validate_against_schema(params, 'doss_input.schema.json') return params @@ -72,62 +86,63 @@ def create_doss_content(params): Unit of measurement: energy: hartree; DOSS: state/hartree/cell. """ - validate_against_schema(params, "doss_input.schema.json") + validate_against_schema(params, 'doss_input.schema.json') - lines = ["NEWK"] - if not params["shrink_isp"] >= 2 * params["shrink_is"]: - raise AssertionError( - "ISP<2*IS, low values of the ratio ISP/IS can lead to numerical instabilities.") - lines.append("{} {}".format(params["shrink_is"], params["shrink_isp"])) - lines.append("1 0") - lines.append("DOSS") + lines = ['NEWK'] + if not params['shrink_isp'] >= 2 * params['shrink_is']: + raise AssertionError('ISP<2*IS, low values of the ratio ISP/IS can lead to numerical instabilities.') + lines.append('{} {}'.format(params['shrink_is'], params['shrink_isp'])) + lines.append('1 0') + lines.append('DOSS') proj_atoms = [] proj_orbitals = [] - if params.get("atomic_projections", None) is not None: - proj_atoms = params["atomic_projections"] - if params.get("orbital_projections", None) is not None: - proj_orbitals = params["orbital_projections"] + if params.get('atomic_projections', None) is not None: + proj_atoms = params['atomic_projections'] + if params.get('orbital_projections', None) is not None: + proj_orbitals = params['orbital_projections'] npro = len(proj_atoms) + len(proj_orbitals) - units = params["band_units"] + units = params['band_units'] - if units == "bands": - inzb = int(params["band_minimum"]) - ifnb = int(params["band_maximum"]) + if units == 'bands': + inzb = int(params['band_minimum']) + ifnb = int(params['band_maximum']) assert inzb >= 0 and ifnb >= 0 erange = None - elif units == "hartree": + elif units == 'hartree': inzb = ifnb = -1 - bmin = 
params["band_minimum"] - bmax = params["band_maximum"] - erange = "{} {}" .format(bmin, bmax) - elif units == "eV": + bmin = params['band_minimum'] + bmax = params['band_maximum'] + erange = '{} {}'.format(bmin, bmax) + elif units == 'eV': inzb = ifnb = -1 - bmin = params["band_minimum"] / 27.21138602 - bmax = params["band_maximum"] / 27.21138602 - erange = "{0:.8f} {1:.8f}" .format(bmin, bmax) + bmin = params['band_minimum'] / 27.21138602 + bmax = params['band_maximum'] / 27.21138602 + erange = '{0:.8f} {1:.8f}'.format(bmin, bmax) else: - raise ValueError("band_units not recognised: {}".format(units)) + raise ValueError('band_units not recognised: {}'.format(units)) - lines.append("{npro} {npt} {inzb} {ifnb} {iplo} {npol} {npr}".format( + lines.append('{npro} {npt} {inzb} {ifnb} {iplo} {npol} {npr}'.format( npro=npro, - npt=params.get("npoints", 1000), - inzb=inzb, ifnb=ifnb, + npt=params.get('npoints', 1000), + inzb=inzb, + ifnb=ifnb, iplo=1, # output type (1=fort.25, 2=DOSS.DAT) - npol=params.get("npoly", 14), + npol=params.get('npoly', 14), npr=0, # number of printing options )) if erange is not None: lines.append(erange) + if len(proj_atoms) + len(proj_orbitals) > 15: + raise AssertionError('only 15 projections are allowed per calculation') + for atoms in proj_atoms: - lines.append("{} {}".format( - -1 * len(atoms), " ".join([str(a) for a in atoms]))) + lines.append('{} {}'.format(-1 * len(atoms), ' '.join([str(a) for a in atoms]))) for orbitals in proj_orbitals: - lines.append("{} {}".format( - len(orbitals), " ".join([str(o) for o in orbitals]))) + lines.append('{} {}'.format(len(orbitals), ' '.join([str(o) for o in orbitals]))) - lines.append("END") + lines.append('END') return lines diff --git a/aiida_crystal17/parsers/raw/inputd12_read.py b/aiida_crystal17/parsers/raw/inputd12_read.py index b0c2e84..08e0594 100644 --- a/aiida_crystal17/parsers/raw/inputd12_read.py +++ b/aiida_crystal17/parsers/raw/inputd12_read.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python 
+# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ module for reading main.d12 (for immigration) """ @@ -31,7 +46,7 @@ def _split_line(line): val = float(val) out.append(val) if not out: - raise ValueError("blank line") + raise ValueError('blank line') if len(out) == 1: out = out[0] return out @@ -43,21 +58,20 @@ def _get_atom_prop(lines, ptype): natoms = int(line) line = _pop_line(lines) vals = [int(i) for i in line.split()] - if ptype in ["ghosts", "fragment"]: + if ptype in ['ghosts', 'fragment']: while len(vals) != natoms: line = _pop_line(lines) vals.extend([int(j) for j in line.split()]) return vals, line - elif ptype == "atomspin": + elif ptype == 'atomspin': while len(vals) / 2 != natoms: line = _pop_line(lines) vals.extend([int(j) for j in line.split()]) vals = np.reshape(vals, (natoms, 2)) - return (vals[vals[:, 1] == 1][:, 0].tolist(), - vals[vals[:, 1] == -1][:, 0].tolist()), line + return (vals[vals[:, 1] == 1][:, 0].tolist(), vals[vals[:, 1] == -1][:, 0].tolist()), line else: - raise ValueError("ptype: {}".format(ptype)) + raise ValueError('ptype: {}'.format(ptype)) def extract_data(input_string): @@ -76,25 +90,24 @@ def extract_data(input_string): """ lines = input_string.splitlines() - schema = load_schema("inputd12.schema.json") + schema = load_schema('inputd12.schema.json') output_dict = {} basis_sets = [] atom_props = {} - output_dict["title"] = _pop_line(lines) + output_dict['title'] = _pop_line(lines) _read_geom_block(lines, output_dict, schema) line = _pop_line(lines) - 
if line == "OPTGEOM": - line = _read_geomopt_block(atom_props, line, lines, output_dict, - schema) + if line == 'OPTGEOM': + line = _read_geomopt_block(atom_props, line, lines, output_dict, schema) - if line == "BASISSET": - raise NotImplementedError("key word basis set input (BASISSET)") - if not line == "END": - raise IOError("expecting end of geom block: {}".format(line)) + if line == 'BASISSET': + raise NotImplementedError('key word basis set input (BASISSET)') + if not line == 'END': + raise IOError('expecting end of geom block: {}'.format(line)) _read_basis_block(atom_props, basis_sets, lines, output_dict, schema) @@ -103,130 +116,102 @@ def extract_data(input_string): _read_hamiltonian_block(atom_props, lines, output_dict, schema) output_dict = unflatten_dict(output_dict) - validate_against_schema(output_dict, "inputd12.schema.json") + validate_against_schema(output_dict, 'inputd12.schema.json') return output_dict, basis_sets, atom_props def _read_hamiltonian_block(atom_props, lines, output_dict, schema): - sblock = ["properties", "scf", "properties"] + sblock = ['properties', 'scf', 'properties'] - while lines[0].strip() != "END": + while lines[0].strip() != 'END': line = _pop_line(lines) - if line == "DFT": + if line == 'DFT': _read_dft_block(lines, output_dict, schema) line = _pop_line(lines) - elif line == "SHRINK": + elif line == 'SHRINK': line = _pop_line(lines) try: kis, kisp = line.split() kis = int(kis) kisp = int(kisp) except ValueError: - raise IOError( - "expecting SHRINK in form 'is isp': {}".format(line)) - output_dict["scf.k_points"] = (kis, kisp) - elif line in get_keys( - schema, sblock + ["single", "enum"], raise_error=True): - output_dict["scf.single"] = line - elif line in get_keys( - schema, sblock + ["numerical", "properties"], - raise_error=True).keys(): + raise IOError("expecting SHRINK in form 'is isp': {}".format(line)) + output_dict['scf.k_points'] = (kis, kisp) + elif line in get_keys(schema, sblock + ['single', 'enum'], 
raise_error=True): + output_dict['scf.single'] = line + elif line in get_keys(schema, sblock + ['numerical', 'properties'], raise_error=True).keys(): key = line - if get_keys( - schema, - sblock + ["numerical", "properties", key, "type"], - raise_error=True) == "boolean": - output_dict["scf.numerical.{}".format(key)] = True + if get_keys(schema, sblock + ['numerical', 'properties', key, 'type'], raise_error=True) == 'boolean': + output_dict['scf.numerical.{}'.format(key)] = True else: line = _pop_line(lines) - output_dict["scf.numerical.{}".format(key)] = _split_line(line) - elif line in get_keys( - schema, sblock + ["post_scf", "items", "enum"], - raise_error=True): - _append_key(output_dict, "scf.post_scf", line) - elif line in get_keys( - schema, sblock + ["spinlock", "properties"], - raise_error=True).keys(): + output_dict['scf.numerical.{}'.format(key)] = _split_line(line) + elif line in get_keys(schema, sblock + ['post_scf', 'items', 'enum'], raise_error=True): + _append_key(output_dict, 'scf.post_scf', line) + elif line in get_keys(schema, sblock + ['spinlock', 'properties'], raise_error=True).keys(): key = line line = _pop_line(lines) - output_dict["scf.spinlock.{}".format(key)] = _split_line(line) - elif line in get_keys( - schema, - sblock + ["fock_mixing", "oneOf", 0, "enum"], - raise_error=True): - output_dict["scf.fock_mixing"] = line - elif line == "BROYDEN": + output_dict['scf.spinlock.{}'.format(key)] = _split_line(line) + elif line in get_keys(schema, sblock + ['fock_mixing', 'oneOf', 0, 'enum'], raise_error=True): + output_dict['scf.fock_mixing'] = line + elif line == 'BROYDEN': line = _pop_line(lines) - output_dict["scf.fock_mixing.BROYDEN"] = _split_line(line) - elif line == "ATOMSPIN": - val, line = _get_atom_prop(lines, "atomspin") - atom_props["spin_alpha"] = val[0] - atom_props["spin_beta"] = val[1] + output_dict['scf.fock_mixing.BROYDEN'] = _split_line(line) + elif line == 'ATOMSPIN': + val, line = _get_atom_prop(lines, 'atomspin') + 
atom_props['spin_alpha'] = val[0] + atom_props['spin_beta'] = val[1] else: - raise NotImplementedError("Hamiltonian Block: {}".format(line)) + raise NotImplementedError('Hamiltonian Block: {}'.format(line)) def _read_dft_block(lines, output_dict, schema): correlat = None exchange = None - while lines[0].strip() != "END": + while lines[0].strip() != 'END': line = _pop_line(lines) - if line == "SPIN": - output_dict["scf.dft.SPIN"] = True + if line == 'SPIN': + output_dict['scf.dft.SPIN'] = True elif line in get_keys( - schema, [ - "properties", "scf", "properties", "dft", "properties", - "xc", "oneOf", 1, "enum" - ], + schema, ['properties', 'scf', 'properties', 'dft', 'properties', 'xc', 'oneOf', 1, 'enum'], raise_error=True): - output_dict["scf.dft.xc"] = line - elif line == "CORRELAT": + output_dict['scf.dft.xc'] = line + elif line == 'CORRELAT': line = _pop_line(lines) correlat = line - elif line == "EXCHANGE": + elif line == 'EXCHANGE': line = _pop_line(lines) exchange = line - elif line == "LSRSH-PBE": + elif line == 'LSRSH-PBE': line = _pop_line(lines) - output_dict["scf.dft.xc.LSRSH-PBE"] = _split_line(line) + output_dict['scf.dft.xc.LSRSH-PBE'] = _split_line(line) elif line in get_keys( - schema, [ - "properties", "scf", "properties", "dft", "properties", - "grid", "enum" - ], - raise_error=True): - output_dict["scf.dft.grid"] = line + schema, ['properties', 'scf', 'properties', 'dft', 'properties', 'grid', 'enum'], raise_error=True): + output_dict['scf.dft.grid'] = line elif line in get_keys( - schema, [ - "properties", "scf", "properties", "dft", "properties", - "grid_weights", "enum" - ], + schema, ['properties', 'scf', 'properties', 'dft', 'properties', 'grid_weights', 'enum'], raise_error=True): - output_dict["scf.dft.grid_weights"] = line + output_dict['scf.dft.grid_weights'] = line elif line in get_keys( - schema, [ - "properties", "scf", "properties", "dft", "properties", - "numerical", "properties" - ], + schema, ['properties', 'scf', 'properties', 
'dft', 'properties', 'numerical', 'properties'], raise_error=True).keys(): key = line line = _pop_line(lines) - output_dict["scf.dft.numerical.{}".format(key)] = _split_line(line) + output_dict['scf.dft.numerical.{}'.format(key)] = _split_line(line) else: - raise NotImplementedError("DFT Block: {}".format(line)) + raise NotImplementedError('DFT Block: {}'.format(line)) if (correlat, exchange) != (None, None): if None in (correlat, exchange): - raise IOError("found only one of CORRELAT EXCHANGE: {} {}".format( - correlat, exchange)) - output_dict["scf.dft.xc"] = (exchange, correlat) + raise IOError('found only one of CORRELAT EXCHANGE: {} {}'.format(correlat, exchange)) + output_dict['scf.dft.xc'] = (exchange, correlat) def _read_basis_block(atom_props, basis_sets, lines, output_dict, schema): basis_lines = [] - while not lines[0].startswith("99 "): + while not lines[0].startswith('99 '): line = _pop_line(lines) basis_lines.append(line) try: @@ -242,108 +227,72 @@ def _read_basis_block(atom_props, basis_sets, lines, output_dict, schema): btype, stype, nfuncs = [int(i) for i in [btype, stype, nfuncs]] # charge, scale = [float(i) for i in [charge, scale]] except ValueError: - raise IOError( - "expected 'btype, stype, nfuncs, charge, scale': {}". 
- format(line)) + raise IOError("expected 'btype, stype, nfuncs, charge, scale': {}".format(line)) if btype == 0: for _ in range(nfuncs): line = _pop_line(lines) basis_lines.append(line) - basis_sets.append("\n".join(basis_lines)) + basis_sets.append('\n'.join(basis_lines)) basis_lines = [] line = _pop_line(lines) - while lines[0].strip() != "END": + while lines[0].strip() != 'END': line = _pop_line(lines) - if line in get_keys( - schema, ["properties", "basis_set", "properties"], - raise_error=True).keys(): - output_dict["basis_set.{}".format(line)] = True - elif line == "GHOSTS": - val, line = _get_atom_prop(lines, "ghosts") - atom_props["ghosts"] = val + if line in get_keys(schema, ['properties', 'basis_set', 'properties'], raise_error=True).keys(): + output_dict['basis_set.{}'.format(line)] = True + elif line == 'GHOSTS': + val, line = _get_atom_prop(lines, 'ghosts') + atom_props['ghosts'] = val else: - raise NotImplementedError("Basis Set Block: {}".format(line)) + raise NotImplementedError('Basis Set Block: {}'.format(line)) def _read_geomopt_block(atom_props, line, lines, output_dict, schema): - if lines[0].strip().startswith("END"): - output_dict["geometry.optimise"] = True - while not lines[0].strip().startswith("END"): + if lines[0].strip().startswith('END'): + output_dict['geometry.optimise'] = True + while not lines[0].strip().startswith('END'): line = _pop_line(lines) - if line in ["EXTPRESS"]: - raise NotImplementedError("GeomOpt Block: {}".format(line)) - elif line in get_keys( - schema, [ - "properties", "geometry", "properties", "optimise", - "properties", "type", "enum" - ], - raise_error=True): - output_dict["geometry.optimise.type"] = line - elif line in get_keys( - schema, [ - "properties", "geometry", "properties", "optimise", - "properties", "hessian", "enum" - ], - raise_error=True): - output_dict["geometry.optimise.hessian"] = line - elif line in get_keys( - schema, [ - "properties", "geometry", "properties", "optimise", - "properties", 
"gradient", "enum" - ], - raise_error=True): - output_dict["geometry.optimise.gradient"] = line - elif line in get_keys( - schema, [ - "properties", "geometry", "properties", "optimise", - "properties", "info_print", "items", "enum" - ], - raise_error=True): - _append_key(output_dict, "geometry.optimise.info_print", line) - elif line in get_keys( - schema, [ - "properties", "geometry", "properties", "optimise", - "properties", "convergence", "properties" - ], - raise_error=True).keys(): - key = "geometry.optimise.convergence.{}".format(line) + opt_keys = ['properties', 'geometry', 'properties', 'optimise', 'properties'] + + if line in ['EXTPRESS']: + raise NotImplementedError('GeomOpt Block: {}'.format(line)) + elif line in get_keys(schema, opt_keys + ['type', 'enum'], raise_error=True): + output_dict['geometry.optimise.type'] = line + elif line in get_keys(schema, opt_keys + ['hessian', 'enum'], raise_error=True): + output_dict['geometry.optimise.hessian'] = line + elif line in get_keys(schema, opt_keys + ['gradient', 'enum'], raise_error=True): + output_dict['geometry.optimise.gradient'] = line + elif line in get_keys(schema, opt_keys + ['info_print', 'items', 'enum'], raise_error=True): + _append_key(output_dict, 'geometry.optimise.info_print', line) + elif line in get_keys(schema, opt_keys + ['convergence', 'properties'], raise_error=True).keys(): + key = 'geometry.optimise.convergence.{}'.format(line) line = _pop_line(lines) try: output_dict[key] = int(line) except ValueError: output_dict[key] = float(line) - elif line == "FRAGMENT": - val, line = _get_atom_prop(lines, "fragment") - atom_props["fragment"] = val + elif line == 'FRAGMENT': + val, line = _get_atom_prop(lines, 'fragment') + atom_props['fragment'] = val else: - raise NotImplementedError("OPTGEOM block: {}".format(line)) + raise NotImplementedError('OPTGEOM block: {}'.format(line)) line = _pop_line(lines, 2) return line def _read_geom_block(lines, output_dict, schema): - while lines[0].strip() not 
in ["OPTGEOM", "END"]: + while lines[0].strip() not in ['OPTGEOM', 'END']: line = _pop_line(lines) if line in [ - "FIELD", "FIELDCON", "CPHF", "ELASTCON", "EOS", "FREQCALC", - "ANHARM", "CONFCNT", "CONFRAND", "RUNCONFS", "MOLEBSSE", - "ATOMBSSE" + 'FIELD', 'FIELDCON', 'CPHF', 'ELASTCON', 'EOS', 'FREQCALC', 'ANHARM', 'CONFCNT', 'CONFRAND', 'RUNCONFS', + 'MOLEBSSE', 'ATOMBSSE' ]: - raise NotImplementedError("Geometry Block: {}".format(line)) + raise NotImplementedError('Geometry Block: {}'.format(line)) elif line in get_keys( - schema, [ - "properties", "geometry", "properties", "info_print", - "items", "enum" - ], - raise_error=True): - _append_key(output_dict, "geometry.info_print", line) + schema, ['properties', 'geometry', 'properties', 'info_print', 'items', 'enum'], raise_error=True): + _append_key(output_dict, 'geometry.info_print', line) elif line in get_keys( - schema, [ - "properties", "geometry", "properties", "info_external", - "items", "enum" - ], - raise_error=True): - _append_key(output_dict, "geometry.info_external", line) + schema, ['properties', 'geometry', 'properties', 'info_external', 'items', 'enum'], raise_error=True): + _append_key(output_dict, 'geometry.info_external', line) diff --git a/aiida_crystal17/parsers/raw/inputd12_write.py b/aiida_crystal17/parsers/raw/inputd12_write.py index 20afe3a..811c396 100644 --- a/aiida_crystal17/parsers/raw/inputd12_write.py +++ b/aiida_crystal17/parsers/raw/inputd12_write.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU Lesser General Public License for more details. """ module to write CRYSTAL17 .d12 files """ @@ -24,22 +39,21 @@ def format_value(dct, keys): """return the value + a new line, or empty string if keys not found""" value = get_keys(dct, keys, None) if value is None: - return "" + return '' if isinstance(value, dict): - outstr = "" + outstr = '' for keyword in sorted(value.keys()): args = value[keyword] if isinstance(args, bool): if args: - outstr += "{}\n".format(keyword) + outstr += '{}\n'.format(keyword) elif isinstance(args, (list, tuple)): - outstr += "{0}\n{1}\n".format(keyword, - " ".join([str(a) for a in args])) + outstr += '{0}\n{1}\n'.format(keyword, ' '.join([str(a) for a in args])) else: - outstr += "{0}\n{1}\n".format(keyword, args) + outstr += '{0}\n{1}\n'.format(keyword, args) return outstr - return "{}\n".format(value) + return '{}\n'.format(value) def write_input(indict, basis_sets, atom_props=None): @@ -61,34 +75,25 @@ def write_input(indict, basis_sets, atom_props=None): """ # validation - validate_against_schema(indict, "inputd12.schema.json") + validate_against_schema(indict, 'inputd12.schema.json') if not basis_sets: - raise ValueError("there must be at least one basis set") - elif not all([isinstance(b, six.string_types) or hasattr(b, "content") - for b in basis_sets]): - raise ValueError("basis_sets must be either all strings" - "or all objects with a `content` property") + raise ValueError('there must be at least one basis set') + elif not all([isinstance(b, six.string_types) or hasattr(b, 'content') for b in basis_sets]): + raise ValueError('basis_sets must be either all strings' 'or all objects with a `content` property') if atom_props is None: atom_props = {} - if not set(atom_props.keys()).issubset( - ["spin_alpha", "spin_beta", "unfixed", "ghosts"]): - raise ValueError( - "atom_props should only contain: " - "'spin_alpha', 'spin_beta', 'unfixed', 'ghosts'" - ) + if not set(atom_props.keys()).issubset(['spin_alpha', 
'spin_beta', 'unfixed', 'ghosts']): + raise ValueError('atom_props should only contain: ' "'spin_alpha', 'spin_beta', 'unfixed', 'ghosts'") # validate that a index isn't in both spin_alpha and spin_beta - allspin = atom_props.get("spin_alpha", []) + atom_props.get( - "spin_beta", []) + allspin = atom_props.get('spin_alpha', []) + atom_props.get('spin_beta', []) if len(set(allspin)) != len(allspin): - raise ValueError( - "a kind cannot be in both spin_alpha and spin_beta: {}".format( - allspin)) + raise ValueError('a kind cannot be in both spin_alpha and spin_beta: {}'.format(allspin)) - outstr = "" + outstr = '' # Title - title = get_keys(indict, ["title"], "CRYSTAL run") - outstr += "{}\n".format(" ".join(title.splitlines())) # must be one line + title = get_keys(indict, ['title'], 'CRYSTAL run') + outstr += '{}\n'.format(' '.join(title.splitlines())) # must be one line outstr = _geometry_block(outstr, indict, atom_props) @@ -101,114 +106,112 @@ def write_input(indict, basis_sets, atom_props=None): def _hamiltonian_block(outstr, indict, atom_props): # Hamiltonian Optional Keywords - outstr += format_value(indict, ["scf", "single"]) + outstr += format_value(indict, ['scf', 'single']) # DFT Optional Block - if get_keys(indict, ["scf", "dft"], False): + if get_keys(indict, ['scf', 'dft'], False): - outstr += "DFT\n" + outstr += 'DFT\n' - xc = get_keys(indict, ["scf", "dft", "xc"], raise_error=True) + xc = get_keys(indict, ['scf', 'dft', 'xc'], raise_error=True) if isinstance(xc, (tuple, list)): if len(xc) == 2: - outstr += "CORRELAT\n" - outstr += "{}\n".format(xc[0]) - outstr += "EXCHANGE\n" - outstr += "{}\n".format(xc[1]) + outstr += 'CORRELAT\n' + outstr += '{}\n'.format(xc[0]) + outstr += 'EXCHANGE\n' + outstr += '{}\n'.format(xc[1]) else: - outstr += format_value(indict, ["scf", "dft", "xc"]) + outstr += format_value(indict, ['scf', 'dft', 'xc']) - if get_keys(indict, ["scf", "dft", "SPIN"], False): - outstr += "SPIN\n" + if get_keys(indict, ['scf', 'dft', 
'SPIN'], False): + outstr += 'SPIN\n' - outstr += format_value(indict, ["scf", "dft", "grid"]) - outstr += format_value(indict, ["scf", "dft", "grid_weights"]) - outstr += format_value(indict, ["scf", "dft", "numerical"]) + outstr += format_value(indict, ['scf', 'dft', 'grid']) + outstr += format_value(indict, ['scf', 'dft', 'grid_weights']) + outstr += format_value(indict, ['scf', 'dft', 'numerical']) - outstr += "END\n" + outstr += 'END\n' # # K-POINTS (SHRINK\nPMN Gilat) - k_is, k_isp = get_keys(indict, ["scf", "k_points"], raise_error=True) - outstr += "SHRINK\n" + k_is, k_isp = get_keys(indict, ['scf', 'k_points'], raise_error=True) + outstr += 'SHRINK\n' if isinstance(k_is, int): - outstr += "{0} {1}\n".format(k_is, k_isp) + outstr += '{0} {1}\n'.format(k_is, k_isp) else: - outstr += "0 {0}\n".format(k_isp) - outstr += "{0} {1} {2}\n".format(k_is[0], k_is[1], k_is[2]) + outstr += '0 {0}\n'.format(k_isp) + outstr += '{0} {1} {2}\n'.format(k_is[0], k_is[1], k_is[2]) # RESTART - if get_keys(indict, ["scf", "GUESSP"], False): - outstr += "GUESSP\n" + if get_keys(indict, ['scf', 'GUESSP'], False): + outstr += 'GUESSP\n' # ATOMSPIN spins = [] - for anum in atom_props.get("spin_alpha", []): + for anum in atom_props.get('spin_alpha', []): spins.append((anum, 1)) - for anum in atom_props.get("spin_beta", []): + for anum in atom_props.get('spin_beta', []): spins.append((anum, -1)) if spins: - outstr += "ATOMSPIN\n" - outstr += "{}\n".format(len(spins)) + outstr += 'ATOMSPIN\n' + outstr += '{}\n'.format(len(spins)) for anum, spin in sorted(spins): - outstr += "{0} {1}\n".format(anum, spin) + outstr += '{0} {1}\n'.format(anum, spin) # SCF/Other Optional Keywords - outstr += format_value(indict, ["scf", "numerical"]) - outstr += format_value(indict, ["scf", "fock_mixing"]) - outstr += format_value(indict, ["scf", "spinlock"]) - for keyword in sorted(get_keys(indict, ["scf", "post_scf"], [])): - outstr += "{}\n".format(keyword) + outstr += format_value(indict, ['scf', 
'numerical']) + outstr += format_value(indict, ['scf', 'fock_mixing']) + outstr += format_value(indict, ['scf', 'spinlock']) + for keyword in sorted(get_keys(indict, ['scf', 'post_scf'], [])): + outstr += '{}\n'.format(keyword) # Hamiltonian and SCF End - outstr += "END\n" + outstr += 'END\n' return outstr def _geometry_block(outstr, indict, atom_props): # Geometry - outstr += "EXTERNAL\n" # we assume external geometry + outstr += 'EXTERNAL\n' # we assume external geometry # Geometry Optional Keywords (including optimisation) - for keyword in get_keys(indict, ["geometry", "info_print"], []): - outstr += "{}\n".format(keyword) - for keyword in get_keys(indict, ["geometry", "info_external"], []): - outstr += "{}\n".format(keyword) - if indict.get("geometry", {}).get("optimise", False): - outstr += "OPTGEOM\n" - outstr += format_value(indict, ["geometry", "optimise", "type"]) - unfixed = atom_props.get("unfixed", []) + for keyword in get_keys(indict, ['geometry', 'info_print'], []): + outstr += '{}\n'.format(keyword) + for keyword in get_keys(indict, ['geometry', 'info_external'], []): + outstr += '{}\n'.format(keyword) + if indict.get('geometry', {}).get('optimise', False): + outstr += 'OPTGEOM\n' + outstr += format_value(indict, ['geometry', 'optimise', 'type']) + unfixed = atom_props.get('unfixed', []) if unfixed: - outstr += "FRAGMENT\n" - outstr += "{}\n".format(len(unfixed)) - outstr += " ".join([str(a) for a in sorted(unfixed)]) + "\n" - outstr += format_value(indict, ["geometry", "optimise", "hessian"]) - outstr += format_value(indict, ["geometry", "optimise", "gradient"]) - for keyword in sorted(get_keys( - indict, ["geometry", "optimise", "info_print"], [])): - outstr += "{}\n".format(keyword) - outstr += format_value(indict, ["geometry", "optimise", "convergence"]) - outstr += "ENDOPT\n" + outstr += 'FRAGMENT\n' + outstr += '{}\n'.format(len(unfixed)) + outstr += ' '.join([str(a) for a in sorted(unfixed)]) + '\n' + outstr += format_value(indict, 
['geometry', 'optimise', 'hessian']) + outstr += format_value(indict, ['geometry', 'optimise', 'gradient']) + for keyword in sorted(get_keys(indict, ['geometry', 'optimise', 'info_print'], [])): + outstr += '{}\n'.format(keyword) + outstr += format_value(indict, ['geometry', 'optimise', 'convergence']) + outstr += 'ENDOPT\n' # Geometry End - outstr += "END\n" + outstr += 'END\n' return outstr def _basis_set_block(outstr, indict, basis_sets, atom_props): # Basis Sets if isinstance(basis_sets[0], six.string_types): - outstr += "\n".join([basis_set.strip() for basis_set in basis_sets]) + outstr += '\n'.join([basis_set.strip() for basis_set in basis_sets]) else: - outstr += "\n".join( - [basis_set.content.strip() for basis_set in basis_sets]) - outstr += "\n99 0\n" + outstr += '\n'.join([basis_set.content.strip() for basis_set in basis_sets]) + outstr += '\n99 0\n' # GHOSTS - ghosts = atom_props.get("ghosts", []) + ghosts = atom_props.get('ghosts', []) if ghosts: - outstr += "GHOSTS\n" - outstr += "{}\n".format(len(ghosts)) - outstr += " ".join([str(a) for a in sorted(ghosts)]) + "\n" + outstr += 'GHOSTS\n' + outstr += '{}\n'.format(len(ghosts)) + outstr += ' '.join([str(a) for a in sorted(ghosts)]) + '\n' # Basis Sets Optional Keywords - outstr += format_value(indict, ["basis_set"]) + outstr += format_value(indict, ['basis_set']) # Basis Sets End - outstr += "END\n" + outstr += 'END\n' return outstr @@ -222,42 +225,30 @@ def create_atom_properties(structure, kinds_data=None): """ if kinds_data is None: - return { - "spin_alpha": [], - "spin_beta": [], - "ghosts": [] - } + return {'spin_alpha': [], 'spin_beta': [], 'ghosts': []} if set(kinds_data.data.kind_names) != set(structure.get_kind_names()): - raise AssertionError( - "kind names are different for structure data and kind data: " - "{0} != {1}".format(set(structure.get_kind_names()), - set(kinds_data.data.kind_names))) - - atom_props = { - "spin_alpha": [], - "spin_beta": [], - "fixed": [], - "unfixed": [], - 
"ghosts": [] - } + raise AssertionError('kind names are different for structure data and kind data: ' + '{0} != {1}'.format(set(structure.get_kind_names()), set(kinds_data.data.kind_names))) + + atom_props = {'spin_alpha': [], 'spin_beta': [], 'fixed': [], 'unfixed': [], 'ghosts': []} kind_dict = kinds_data.kind_dict for i, kind_name in enumerate(structure.get_site_kindnames()): - if kind_dict[kind_name].get("spin_alpha", False): - atom_props["spin_alpha"].append(i + 1) - if kind_dict[kind_name].get("spin_beta", False): - atom_props["spin_beta"].append(i + 1) - if kind_dict[kind_name].get("ghost", False): - atom_props["ghost"].append(i + 1) - if kind_dict[kind_name].get("fixed", False): - atom_props["fixed"].append(i + 1) - if not kind_dict[kind_name].get("fixed", False): - atom_props["unfixed"].append(i + 1) + if kind_dict[kind_name].get('spin_alpha', False): + atom_props['spin_alpha'].append(i + 1) + if kind_dict[kind_name].get('spin_beta', False): + atom_props['spin_beta'].append(i + 1) + if kind_dict[kind_name].get('ghost', False): + atom_props['ghost'].append(i + 1) + if kind_dict[kind_name].get('fixed', False): + atom_props['fixed'].append(i + 1) + if not kind_dict[kind_name].get('fixed', False): + atom_props['unfixed'].append(i + 1) # we only need unfixed if there are fixed - if not atom_props.pop("fixed"): - atom_props.pop("unfixed") + if not atom_props.pop('fixed'): + atom_props.pop('unfixed') return atom_props diff --git a/aiida_crystal17/parsers/raw/main_out.py b/aiida_crystal17/parsers/raw/main_out.py index 2e2932a..9d7fed3 100644 --- a/aiida_crystal17/parsers/raw/main_out.py +++ b/aiida_crystal17/parsers/raw/main_out.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ parse the main output file and create the required output nodes """ @@ -16,36 +31,32 @@ class OutputNodes(Mapping): """ def __init__(self): - self._dict = { - "results": None, - "structure": None, - "symmetry": None - } + self._dict = {'results': None, 'structure': None, 'symmetry': None} def _get_results(self): - return self._dict["results"] + return self._dict['results'] def _set_results(self, value): assert isinstance(value, DataFactory('dict')) - self._dict["results"] = value + self._dict['results'] = value results = property(_get_results, _set_results) def _get_structure(self): - return self._dict["structure"] + return self._dict['structure'] def _set_structure(self, value): assert isinstance(value, DataFactory('structure')) - self._dict["structure"] = value + self._dict['structure'] = value structure = property(_get_structure, _set_structure) def _get_symmetry(self): - return self._dict["symmetry"] + return self._dict['symmetry'] def _set_symmetry(self, value): assert isinstance(value, DataFactory('crystal17.symmetry')) - self._dict["symmetry"] = value + self._dict['symmetry'] = value symmetry = property(_get_symmetry, _set_symmetry) @@ -65,15 +76,14 @@ def __len__(self): class ParserResult(object): + def __init__(self): self.exit_code = None self.nodes = OutputNodes() # pylint: disable=too-many-locals,too-many-statements -def parse_main_out(fileobj, parser_class, - init_struct=None, - init_settings=None): +def parse_main_out(fileobj, parser_class, init_struct=None, init_settings=None): """ parse the main output file and create the required output nodes :param fileobj: handle to main output file @@ -88,13 +98,13 @@ def parse_main_out(fileobj, parser_class, exit_codes = CryMainCalculation.exit_codes 
results_data = { - "parser_version": str(__version__), - "parser_class": str(parser_class), - "parser_errors": [], - "parser_warnings": [], - "parser_exceptions": [], - "errors": [], - "warnings": [] + 'parser_version': str(__version__), + 'parser_class': str(parser_class), + 'parser_errors': [], + 'parser_warnings': [], + 'parser_exceptions': [], + 'errors': [], + 'warnings': [] } try: @@ -103,9 +113,8 @@ def parse_main_out(fileobj, parser_class, # should never happen traceback.print_exc() parser_result.exit_code = exit_codes.ERROR_PARSING_STDOUT - results_data["parser_exceptions"].append( - "Error parsing CRYSTAL 17 main output: {0}".format(err)) - parser_result.nodes.results = DataFactory("dict")(dict=results_data) + results_data['parser_exceptions'].append('Error parsing CRYSTAL 17 main output: {0}'.format(err)) + parser_result.nodes.results = DataFactory('dict')(dict=results_data) return parser_result # TODO could also read .gui file for definitive final (primitive) geometry, @@ -124,36 +133,35 @@ def parse_main_out(fileobj, parser_class, traceback.print_exc() final_info = {} - results_data.pop("initial_geometry", None) - initial_scf = results_data.pop("initial_scf", None) - optimisation = results_data.pop("optimisation", None) - results_data.pop("final_geometry", None) - mulliken_analysis = results_data.pop("mulliken", None) - stdout_exit_code = results_data.pop("exit_code") + results_data.pop('initial_geometry', None) + initial_scf = results_data.pop('initial_scf', None) + optimisation = results_data.pop('optimisation', None) + results_data.pop('final_geometry', None) + mulliken_analysis = results_data.pop('mulliken', None) + stdout_exit_code = results_data.pop('exit_code') if initial_scf is not None: - results_data["scf_iterations"] = len(initial_scf.get("cycles", [])) + results_data['scf_iterations'] = len(initial_scf.get('cycles', [])) if optimisation is not None: # the first optimisation step is the initial scf - results_data["opt_iterations"] = 
len(optimisation) + 1 + results_data['opt_iterations'] = len(optimisation) + 1 # TODO read separate energy contributions - results_data["energy"] = final_info.get("energy", None) + results_data['energy'] = final_info.get('energy', None) # we include this for back compatibility - results_data["energy_units"] = results_data.get("units", {}).get("energy", "eV") + results_data['energy_units'] = results_data.get('units', {}).get('energy', 'eV') # TODO read from fort.34 (initial and final) file and check consistency of final cell/symmops structure = _extract_structure(final_info, init_struct, results_data, parser_result, exit_codes) if structure is not None and (optimisation is not None or not init_struct): parser_result.nodes.structure = structure - _extract_symmetry( - final_info, init_settings, results_data, parser_result, exit_codes) + _extract_symmetry(final_info, init_settings, results_data, parser_result, exit_codes) if mulliken_analysis is not None: _extract_mulliken(mulliken_analysis, results_data) - parser_result.nodes.results = DataFactory("dict")(dict=results_data) + parser_result.nodes.results = DataFactory('dict')(dict=results_data) if stdout_exit_code: parser_result.exit_code = exit_codes[stdout_exit_code] @@ -161,20 +169,17 @@ def parse_main_out(fileobj, parser_class, return parser_result -def _extract_symmetry(final_data, init_settings, param_data, - parser_result, exit_codes): +def _extract_symmetry(final_data, init_settings, param_data, parser_result, exit_codes): """extract symmetry operations""" - if "primitive_symmops" not in final_data: - param_data["parser_errors"].append( - "primitive symmops were not found in the output file") + if 'primitive_symmops' not in final_data: + param_data['parser_errors'].append('primitive symmops were not found in the output file') parser_result.exit_code = exit_codes.ERROR_SYMMETRY_NOT_FOUND return if init_settings: - if init_settings.num_symops != len(final_data["primitive_symmops"]): - 
param_data["parser_errors"].append( - "number of symops different") + if init_settings.num_symops != len(final_data['primitive_symmops']): + param_data['parser_errors'].append('number of symops different') parser_result.exit_code = exit_codes.ERROR_SYMMETRY_INCONSISTENCY # differences = init_settings.compare_operations( # final_data["primitive_symmops"]) @@ -186,61 +191,50 @@ def _extract_symmetry(final_data, init_settings, param_data, else: from aiida.plugins import DataFactory symmetry_data_cls = DataFactory('crystal17.symmetry') - data_dict = { - "operations": final_data["primitive_symmops"], - "basis": "fractional", - "hall_number": None - } + data_dict = {'operations': final_data['primitive_symmops'], 'basis': 'fractional', 'hall_number': None} parser_result.nodes.symmetry = symmetry_data_cls(data=data_dict) def _extract_structure(final_data, init_struct, results_data, parser_result, exit_codes): """create a StructureData object of the final configuration""" - if "primitive_cell" not in final_data: - results_data["parser_errors"].append( - "final primitive cell was not found in the output file") + if 'primitive_cell' not in final_data: + results_data['parser_errors'].append('final primitive cell was not found in the output file') parser_result.exit_code = exit_codes.ERROR_PARSING_STDOUT return None - cell_data = final_data["primitive_cell"] + cell_data = final_data['primitive_cell'] - results_data["number_of_atoms"] = len(cell_data["atomic_numbers"]) - results_data["number_of_assymetric"] = sum(cell_data["assymetric"]) + results_data['number_of_atoms'] = len(cell_data['atomic_numbers']) + results_data['number_of_assymetric'] = sum(cell_data['assymetric']) cell_vectors = [] - for n in "a b c".split(): - cell_vectors.append(cell_data["cell_vectors"][n]) + for n in 'a b c'.split(): + cell_vectors.append(cell_data['cell_vectors'][n]) # we want to reuse the kinds from the input structure, if available if not init_struct: - results_data["parser_warnings"].append( - 
"no initial structure available, creating new kinds for atoms") + results_data['parser_warnings'].append('no initial structure available, creating new kinds for atoms') kinds = None else: - kinds = [ - init_struct.get_kind(n) for n in init_struct.get_site_kindnames() - ] + kinds = [init_struct.get_kind(n) for n in init_struct.get_site_kindnames()] structure = convert_structure({ - "lattice": cell_vectors, - "pbc": cell_data["pbc"], - "symbols": cell_data["symbols"], - "ccoords": cell_data["ccoords"], - "kinds": kinds - }, "aiida") - results_data["volume"] = structure.get_cell_volume() + 'lattice': cell_vectors, + 'pbc': cell_data['pbc'], + 'symbols': cell_data['symbols'], + 'ccoords': cell_data['ccoords'], + 'kinds': kinds + }, 'aiida') + results_data['volume'] = structure.get_cell_volume() return structure def _extract_mulliken(data, param_data): """extract mulliken electronic charge partition data""" - if "alpha+beta_electrons" in data: - electrons = data["alpha+beta_electrons"]["charges"] - anum = data["alpha+beta_electrons"]["atomic_numbers"] - param_data["mulliken_electrons"] = electrons - param_data["mulliken_charges"] = [ - a - e for a, e in zip(anum, electrons) - ] - if "alpha-beta_electrons" in data: - param_data["mulliken_spins"] = data["alpha-beta_electrons"]["charges"] - param_data["mulliken_spin_total"] = sum( - param_data["mulliken_spins"]) + if 'alpha+beta_electrons' in data: + electrons = data['alpha+beta_electrons']['charges'] + anum = data['alpha+beta_electrons']['atomic_numbers'] + param_data['mulliken_electrons'] = electrons + param_data['mulliken_charges'] = [a - e for a, e in zip(anum, electrons)] + if 'alpha-beta_electrons' in data: + param_data['mulliken_spins'] = data['alpha-beta_electrons']['charges'] + param_data['mulliken_spin_total'] = sum(param_data['mulliken_spins']) diff --git a/aiida_crystal17/parsers/raw/newk_output.py b/aiida_crystal17/parsers/raw/newk_output.py index 76cef10..a6aadae 100644 --- 
a/aiida_crystal17/parsers/raw/newk_output.py +++ b/aiida_crystal17/parsers/raw/newk_output.py @@ -1,20 +1,35 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. from aiida_crystal17 import __version__ def read_newk_content(fileobj, parser_class): results_data = { - "parser_version": str(__version__), - "parser_class": str(parser_class), - "parser_errors": [], - "parser_warnings": [], - "errors": [], - "warnings": [] + 'parser_version': str(__version__), + 'parser_class': str(parser_class), + 'parser_errors': [], + 'parser_warnings': [], + 'errors': [], + 'warnings': [] } fermi = None for line in fileobj: - if "FERMI ENERGY" in line: + if 'FERMI ENERGY' in line: # if fermi is not None: # results_data["parser_errors"].append( # "found multiple instances of 'FERMI ENERGY'") @@ -22,22 +37,21 @@ def read_newk_content(fileobj, parser_class): elements = line.split() indx = None for i, element in enumerate(elements): - if element == "ENERGY": + if element == 'ENERGY': indx = i + 1 break try: fermi = float(elements[indx]) except Exception: - results_data["parser_errors"].append( - "Could not extract fermi energy from line: {}".format(line)) + results_data['parser_errors'].append('Could not extract fermi energy from line: {}'.format(line)) break if fermi is None: - results_data["parser_errors"].append("could not find 'FERMI ENERGY'") + results_data['parser_errors'].append("could not find 'FERMI ENERGY'") else: - results_data["fermi_energy"] = fermi * 27.21138602 - 
results_data["energy_units"] = "eV" + results_data['fermi_energy'] = fermi * 27.21138602 + results_data['energy_units'] = 'eV' # TODO read more data diff --git a/aiida_crystal17/parsers/raw/parse_bases.py b/aiida_crystal17/parsers/raw/parse_bases.py new file mode 100644 index 0000000..e13db22 --- /dev/null +++ b/aiida_crystal17/parsers/raw/parse_bases.py @@ -0,0 +1,286 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +import copy + +from collections import namedtuple +from aiida_crystal17.common.atoms import (GAUSSIAN_ORBITALS, ELECTRON_CONFIGURATIONS, SYMBOLS, SYMBOLS_R) + + +def parse_bsets_stdin(content, allow_comments=False, isolated=False): + """parse basis sets from a crystal intput file + + Parameters + ---------- + content : str + file content to parse + allow_comments : bool + if True, comments will be stripped before parsing + isolated : bool + if the basis sets are not within the crystal input file + + Returns + ------- + dict + {'bs': {: [{'type': , 'functions': [...]}, ...]}, + 'ecp': {: [[...], ...]}} + + Raises + ------ + IOError + if an error occurs during the parsing + NotImplementedError + if more than 2 basis sets / pseudopotentials are set for one atom type + + Notes + ----- + + Standard Basis Set:: + + a_number_id n_shells + # for each shell + type shell_type n_functions charge scale_factor + # if type=0, for n_functions + exponent contraction_coefficient + + The atomic number Z is given by the remainder of the division of the + conventional atomic 
number by 100 (2 max per species in positions not symmetry-related): + + - a_number_id < 200 > 1000: all electron basis set + - a_number_id > 200 < 1000: valence electron basis set + + Valence-electron only calculations can be performed with the aid of + effective core pseudo-potentials (ECP). + The ECP input must be inserted into the basis set input of the atoms with + conventional atomic number>200. + + Effective core pseudo-potentials (ECP) section (p. 75):: + + INPUT / HAYWLC / HAYWSC / BARTHE / DURAND + # if INPUT insert + effective_core_charge M M0 M1 M2 M3 M4 + # insert M+M0+M1+M2+M3+M4 records + a_kl C_kl n_kl + + """ + gbasis = {} + + if not content: + raise IOError('content is empty') + + comment_signals = '#/* OrbitalResult + """compute data for all atomic orbitals in a structure, + given elemental representations by crystal basis sets + + Parameters + ---------- + atoms : list[str] or list[int] + list of atomic numbers or symbols which the structure comprises of + basis_sets : dict[str, dict] + basis set data, in the format returned from ``parse_bsets_stdin`` + + Returns + ------- + OrbitalResult + + """ + total_electrons = total_core_electrons = total_aos = 0 + aos_indices = {} + orbital_types = [] + + for atom_index, atom in enumerate(atoms): + try: + electrons = int(atom) + symbol = SYMBOLS[int(atom)] + except (TypeError, ValueError): + symbol = atom + electrons = SYMBOLS_R[atom] + if basis_sets[symbol]['type'] == 'valence-electron': + raise NotImplementedError('computing for bases with core pseudopotentials') + outer_electrons = sum([i for n, i in ELECTRON_CONFIGURATIONS[electrons]['outer']]) + total_electrons += electrons + total_core_electrons += electrons - outer_electrons + type_count = {} + for orbital in basis_sets[symbol]['bs']: + type_count.setdefault(orbital['type'], 0) + type_count[orbital['type']] += 1 + for i in range(GAUSSIAN_ORBITALS[orbital['type']]): + total_aos += 1 + if (symbol, orbital['type'], type_count[orbital['type']]) not in 
orbital_types: + orbital_types.append((symbol, orbital['type'], type_count[orbital['type']])) + aos_indices[total_aos] = { + 'atom': atom_index, + 'element': symbol, + 'type': orbital['type'], + 'index': type_count[orbital['type']] + } + + return OrbitalResult(total_electrons, total_core_electrons, total_aos, orbital_types, aos_indices) diff --git a/aiida_crystal17/parsers/raw/gui_parse.py b/aiida_crystal17/parsers/raw/parse_fort34.py similarity index 93% rename from aiida_crystal17/parsers/raw/gui_parse.py rename to aiida_crystal17/parsers/raw/parse_fort34.py index 3d8db61..090e175 100644 --- a/aiida_crystal17/parsers/raw/gui_parse.py +++ b/aiida_crystal17/parsers/raw/parse_fort34.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
""" This module deals with reading/creating .gui files for use with the EXTERNAL keyword @@ -62,13 +77,16 @@ } -def gui_file_read(lines): - """read CRYSTAL geometry (.gui) file +def parse_fort34(lines, check_final_line=False): + """read CRYSTAL geometry fort.34 (aka .gui) file Parameters ---------- lines: list[str] list of lines in the file + check_final_line: bool + the final line should contain ' ', + but may also be '0 0' if for example generated by ``EXTPRT`` Returns ------- @@ -122,7 +140,7 @@ def gui_file_read(lines): final_line = lines[6 + nsymops * 4 + natoms].split() symmetry['space_group'] = int(final_line[0]) num_operations = int(final_line[1]) - if num_operations != nsymops: + if check_final_line and num_operations != nsymops: raise AssertionError('the number of symmetry operations, ' 'specified in the operation section ({0}) and at the bottom of ' 'the file ({1}), are inconsistent'.format(nsymops, num_operations)) diff --git a/aiida_crystal17/parsers/raw/pbs.py b/aiida_crystal17/parsers/raw/pbs.py index 0b4dc43..26fbeb2 100644 --- a/aiida_crystal17/parsers/raw/pbs.py +++ b/aiida_crystal17/parsers/raw/pbs.py @@ -1,11 +1,28 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
+ + def parse_pbs_stderr(file_handle): """look for errors originating from PBS pro std error messages""" for line in file_handle.readlines(): - if "PBS: job killed: mem" in line: - return "ERROR_OUT_OF_MEMORY" - if "PBS: job killed: vmem" in line: - return "ERROR_OUT_OF_VMEMORY" - if "PBS: job killed: walltime" in line: - return "ERROR_OUT_OF_WALLTIME" + if 'PBS: job killed: mem' in line: + return 'ERROR_OUT_OF_MEMORY' + if 'PBS: job killed: vmem' in line: + return 'ERROR_OUT_OF_VMEMORY' + if 'PBS: job killed: walltime' in line: + return 'ERROR_OUT_OF_WALLTIME' return None diff --git a/aiida_crystal17/symmetry/__init__.py b/aiida_crystal17/symmetry/__init__.py index 324257e..41d11d0 100644 --- a/aiida_crystal17/symmetry/__init__.py +++ b/aiida_crystal17/symmetry/__init__.py @@ -1 +1,16 @@ -from aiida_crystal17.symmetry.symmetry import * # noqa \ No newline at end of file +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +from aiida_crystal17.symmetry.symmetry import * # noqa diff --git a/aiida_crystal17/symmetry/symmetry.py b/aiida_crystal17/symmetry/symmetry.py index 66853d9..cbc7798 100644 --- a/aiida_crystal17/symmetry/symmetry.py +++ b/aiida_crystal17/symmetry/symmetry.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """ A module for computing the symmetry of an AiiDA StructureData object. @@ -37,8 +52,7 @@ def structure_info(structure, max_srows=None, round_dp=4): """ a, b, c = structure.cell_lengths l, m, n = structure.cell_angles - cell = [item for sublist in np.round(structure.cell, round_dp) - for item in sublist] + cell = [item for sublist in np.round(structure.cell, round_dp) for item in sublist] pa, pb, pc = structure.pbc header = dedent("""\ StructureData Summary @@ -52,23 +66,19 @@ def structure_info(structure, max_srows=None, round_dp=4): C : {16:5.4} {17:5.4} {18:5.4} Kind Symbols Position ---- ------- -------- - """.format(a, b, c, l, m, n, - structure.get_cell_volume(), - pa, pb, pc, *cell)) + """.format(a, b, c, l, m, n, structure.get_cell_volume(), pa, pb, pc, *cell)) slines = [] for site in structure.sites: name = site.kind_name kind = structure.get_kind(name) - slines.append( - "{0:5} {1:7} {2:<7.4} {3:<7.4} {4:<7.4}".format( - name, kind.get_symbols_string(), - *np.round(site.position, round_dp))) + slines.append('{0:5} {1:7} {2:<7.4} {3:<7.4} {4:<7.4}'.format(name, kind.get_symbols_string(), + *np.round(site.position, round_dp))) if max_srows is not None: if len(slines) > max_srows: - slines = slines[:max_srows] + ["..."] + slines = slines[:max_srows] + ['...'] - return header + "\n".join(slines) + return header + '\n'.join(slines) def print_structure(structure, max_srows=None, round_dp=4): @@ -109,7 +119,7 @@ def reset_kind_names(structure, kind_names): """ from aiida.orm.nodes.data.structure import Kind, Site if len(structure.sites) != 
len(kind_names): - raise AssertionError("lengths of sites & names not equal") + raise AssertionError('lengths of sites & names not equal') sites = structure.sites kinds = {k.name: k for k in structure.kinds} structure = structure.clone() @@ -120,16 +130,14 @@ def reset_kind_names(structure, kind_names): for site, name in zip(sites, kind_names): if name not in new_kinds: kind_dict = kinds[site.kind_name].get_raw() - kind_dict["name"] = name + kind_dict['name'] = name new_kind = Kind(raw=kind_dict) structure.append_kind(new_kind) new_kinds[name] = new_kind old_symbols = kinds[site.kind_name].symbols new_symbols = new_kinds[name].symbols if old_symbols != new_symbols: - raise AssertionError( - "inconsistent symbols: {} != {}".format( - old_symbols, new_symbols)) + raise AssertionError('inconsistent symbols: {} != {}'.format(old_symbols, new_symbols)) new_site = Site(kind_name=name, position=site.position) structure.append_site(new_site) @@ -152,7 +160,7 @@ def frac_to_cartesian(lattice, fcoords): Nx3 array of cartesian coordinate """ - return np.einsum("ij, jk -> ik", fcoords, lattice).tolist() + return np.einsum('ij, jk -> ik', fcoords, lattice).tolist() def cartesian_to_frac(lattice, ccoords): @@ -190,16 +198,14 @@ def prepare_for_spglib(structure): maps integer values in inequivalent list to AiiDa Kind objects """ - structure = convert_structure(structure, "aiida") + structure = convert_structure(structure, 'aiida') lattice = structure.cell ccoords = [s.position for s in structure.sites] fcoords = cartesian_to_frac(lattice, ccoords) - kind2int_map = {name: i - for i, name in enumerate(structure.get_kind_names())} + kind2int_map = {name: i for i, name in enumerate(structure.get_kind_names())} int2kind_map = {i: name for name, i in kind2int_map.items()} - inequivalent = [kind2int_map[name] - for name in structure.get_site_kindnames()] + inequivalent = [kind2int_map[name] for name in structure.get_site_kindnames()] return (lattice, fcoords, inequivalent), int2kind_map 
@@ -230,8 +236,7 @@ def compute_symmetry_dataset(structure, symprec, angle_tolerance): cell, int2kind_map = prepare_for_spglib(structure) dataset = spglib.get_symmetry_dataset( - cell, symprec=symprec, - angle_tolerance=-1 if angle_tolerance is None else angle_tolerance) + cell, symprec=symprec, angle_tolerance=-1 if angle_tolerance is None else angle_tolerance) return dataset @@ -262,32 +267,30 @@ def compute_symmetry_dict(structure, symprec, angle_tolerance): cell, int2kind_map = prepare_for_spglib(structure) dataset = spglib.get_symmetry_dataset( - cell, symprec=symprec, - angle_tolerance=-1 if angle_tolerance is None else angle_tolerance) + cell, symprec=symprec, angle_tolerance=-1 if angle_tolerance is None else angle_tolerance) operations = [] - for rotation, trans in zip(dataset["rotations"], dataset["translations"]): + for rotation, trans in zip(dataset['rotations'], dataset['translations']): operations.append(rotation.flatten().tolist() + trans.tolist()) data = { - "hall_number": dataset["hall_number"], - "basis": "fractional", - "operations": operations, - "equivalent_sites": dataset["equivalent_atoms"].tolist(), - "computation": { - "symmetry_program": "spglib", - "symmetry_version": spglib.__version__, - "computation_class": __name__, - "computation_version": __version__, - "symprec": symprec, - "angle_tolerance": angle_tolerance + 'hall_number': dataset['hall_number'], + 'basis': 'fractional', + 'operations': operations, + 'equivalent_sites': dataset['equivalent_atoms'].tolist(), + 'computation': { + 'symmetry_program': 'spglib', + 'symmetry_version': spglib.__version__, + 'computation_class': __name__, + 'computation_version': __version__, + 'symprec': symprec, + 'angle_tolerance': angle_tolerance } } return data -def get_hall_number_from_symmetry(operations, basis="fractional", - lattice=None, symprec=1e-5): +def get_hall_number_from_symmetry(operations, basis='fractional', lattice=None, symprec=1e-5): """obtain the Hall number from the symmetry 
operations Parameters @@ -302,14 +305,13 @@ def get_hall_number_from_symmetry(operations, basis="fractional", int """ - if basis == "cartesian": + if basis == 'cartesian': operations = operations_cart_to_frac(operations, lattice) - elif basis != "fractional": - raise ValueError("basis should be cartesian or fractional") + elif basis != 'fractional': + raise ValueError('basis should be cartesian or fractional') rotations = [[o[0:3], o[3:6], o[6:9]] for o in operations] translations = [o[9:12] for o in operations] - return spglib.get_hall_number_from_symmetry( - rotations, translations, symprec=symprec) + return spglib.get_hall_number_from_symmetry(rotations, translations, symprec=symprec) def find_primitive(structure, symprec, angle_tolerance): @@ -334,29 +336,26 @@ def find_primitive(structure, symprec, angle_tolerance): """ from aiida.orm.nodes.data.structure import Site - structure = convert_structure(structure, "aiida") + structure = convert_structure(structure, 'aiida') cell, int2kind_map = prepare_for_spglib(structure) new_cell = spglib.find_primitive( - cell, symprec=symprec, - angle_tolerance=-1 if angle_tolerance is None else angle_tolerance) + cell, symprec=symprec, angle_tolerance=-1 if angle_tolerance is None else angle_tolerance) if new_cell is None: - raise ValueError("standardization of cell failed") + raise ValueError('standardization of cell failed') new_structure = structure.clone() new_structure.clear_sites() new_structure.cell = new_cell[0].tolist() positions = frac_to_cartesian(new_structure.cell, new_cell[1]) for position, eid in zip(positions, new_cell[2].tolist()): - new_structure.append_site( - Site(kind_name=int2kind_map[eid], position=position)) + new_structure.append_site(Site(kind_name=int2kind_map[eid], position=position)) return new_structure -def standardize_cell(structure, symprec, angle_tolerance, - to_primitive=False, no_idealize=False): +def standardize_cell(structure, symprec, angle_tolerance, to_primitive=False, 
no_idealize=False): """ compute the standardised cell for an AiiDA structure When computing symmetry, atomic sites with the same **Kind** are treated as @@ -383,24 +382,25 @@ def standardize_cell(structure, symprec, angle_tolerance, """ from aiida.orm.nodes.data.structure import Site - structure = convert_structure(structure, "aiida") + structure = convert_structure(structure, 'aiida') cell, int2kind_map = prepare_for_spglib(structure) new_cell = spglib.standardize_cell( - cell, to_primitive=to_primitive, no_idealize=no_idealize, + cell, + to_primitive=to_primitive, + no_idealize=no_idealize, symprec=symprec, angle_tolerance=-1 if angle_tolerance is None else angle_tolerance) if new_cell is None: - raise ValueError("standardization of cell failed") + raise ValueError('standardization of cell failed') new_structure = structure.clone() new_structure.clear_sites() new_structure.cell = new_cell[0].tolist() positions = frac_to_cartesian(new_structure.cell, new_cell[1]) for position, eid in zip(positions, new_cell[2].tolist()): - new_structure.append_site( - Site(kind_name=int2kind_map[eid], position=position)) + new_structure.append_site(Site(kind_name=int2kind_map[eid], position=position)) return new_structure @@ -423,13 +423,13 @@ def in_range(i, j): return i <= sg_number <= j cs = { - "triclinic": (1, 2), - "monoclinic": (3, 15), - "orthorhombic": (16, 74), - "tetragonal": (75, 142), - "trigonal": (143, 167), - "hexagonal": (168, 194), - "cubic": (195, 230) + 'triclinic': (1, 2), + 'monoclinic': (3, 15), + 'orthorhombic': (16, 74), + 'tetragonal': (75, 142), + 'trigonal': (143, 167), + 'hexagonal': (168, 194), + 'cubic': (195, 230) } crystal_system = None @@ -440,9 +440,7 @@ def in_range(i, j): break if crystal_system is None: - raise ValueError( - "could not find crystal system of space group number: {}".format( - sg_number)) + raise ValueError('could not find crystal system of space group number: {}'.format(sg_number)) return crystal_system @@ -466,9 +464,9 @@ def 
get_lattice_type_name(sg_number): """ system = get_crystal_system_name(sg_number) if sg_number in [146, 148, 155, 160, 161, 166, 167]: - return "rhombohedral" - elif system == "trigonal": - return "hexagonal" + return 'rhombohedral' + elif system == 'trigonal': + return 'hexagonal' return system @@ -597,10 +595,9 @@ def operation_to_affine(operation): """ if not len(operation) == 12: - raise ValueError("operation should be of length 12") + raise ValueError('operation should be of length 12') affine_matrix = np.eye(4) - affine_matrix[0:3][:, 0:3] = [ - operation[0:3], operation[3:6], operation[6:9]] + affine_matrix[0:3][:, 0:3] = [operation[0:3], operation[3:6], operation[6:9]] affine_matrix[0:3][:, 3] = operation[9:12] return affine_matrix @@ -642,70 +639,66 @@ def convert_structure(structure, out_type): structure_data_cls = DataFactory('structure') if isinstance(structure, dict): - if "symbols" in structure and "atomic_numbers" not in structure: - structure["atomic_numbers"] = symbols2numbers(structure["symbols"]) - if ("fcoords" in structure and "lattice" in structure and "ccoords" not in structure): - structure["ccoords"] = frac_to_cartesian( - structure["lattice"], structure["fcoords"]) - required_keys = ["pbc", "lattice", "ccoords", "atomic_numbers"] + if 'symbols' in structure and 'atomic_numbers' not in structure: + structure['atomic_numbers'] = symbols2numbers(structure['symbols']) + if ('fcoords' in structure and 'lattice' in structure and 'ccoords' not in structure): + structure['ccoords'] = frac_to_cartesian(structure['lattice'], structure['fcoords']) + required_keys = ['pbc', 'lattice', 'ccoords', 'atomic_numbers'] if not set(structure.keys()).issuperset(required_keys): - raise AssertionError( - "dict keys are not a superset of: {}".format(required_keys)) + raise AssertionError('dict keys are not a superset of: {}'.format(required_keys)) - if out_type == "dict": + if out_type == 'dict': if isinstance(structure, dict): return structure if 
isinstance(structure, structure_data_cls): return structure_to_dict(structure) if isinstance(structure, Atoms): return { - "pbc": structure.pbc.tolist(), - "atomic_numbers": structure.get_atomic_numbers().tolist(), - "ccoords": structure.positions.tolist(), - "lattice": structure.cell.tolist(), - "equivalent": structure.get_tags().tolist() + 'pbc': structure.pbc.tolist(), + 'atomic_numbers': structure.get_atomic_numbers().tolist(), + 'ccoords': structure.positions.tolist(), + 'lattice': structure.cell.tolist(), + 'equivalent': structure.get_tags().tolist() } - raise TypeError("structure: {}".format(structure)) - elif out_type == "ase": + raise TypeError('structure: {}'.format(structure)) + elif out_type == 'ase': if isinstance(structure, Atoms): return structure if isinstance(structure, structure_data_cls): return structure.get_ase() if isinstance(structure, dict): return Atoms( - numbers=structure["atomic_numbers"], - cell=structure["lattice"], - positions=structure["ccoords"], - pbc=structure["pbc"], - tags=structure.get("equivalent", None)) - raise TypeError("structure: {}".format(structure)) - elif out_type == "aiida": + numbers=structure['atomic_numbers'], + cell=structure['lattice'], + positions=structure['ccoords'], + pbc=structure['pbc'], + tags=structure.get('equivalent', None)) + raise TypeError('structure: {}'.format(structure)) + elif out_type == 'aiida': if isinstance(structure, structure_data_cls): return structure if isinstance(structure, Atoms): return structure_data_cls(ase=structure) if isinstance(structure, dict): - if structure.get("kinds") is not None: + if structure.get('kinds') is not None: struct = structure_data_cls(cell=structure['lattice']) - struct.set_pbc(structure["pbc"]) - for kind, ccoord in zip(structure["kinds"], - structure['ccoords']): + struct.set_pbc(structure['pbc']) + for kind, ccoord in zip(structure['kinds'], structure['ccoords']): if not isinstance(kind, Kind): kind = Kind(raw=kind) if kind.name not in 
struct.get_site_kindnames(): struct.append_kind(kind) - struct.append_site(Site( - position=ccoord, kind_name=kind.name)) + struct.append_site(Site(position=ccoord, kind_name=kind.name)) return struct else: atoms = Atoms( - numbers=structure["atomic_numbers"], - cell=structure["lattice"], - positions=structure["ccoords"], - pbc=structure["pbc"], - tags=structure.get("equivalent", None)) + numbers=structure['atomic_numbers'], + cell=structure['lattice'], + positions=structure['ccoords'], + pbc=structure['pbc'], + tags=structure.get('equivalent', None)) return structure_data_cls(ase=atoms) - raise ValueError("out_type: {}".format(out_type)) + raise ValueError('out_type: {}'.format(out_type)) def structure_to_dict(structure): @@ -727,18 +720,13 @@ def structure_to_dict(structure): for kind in structure.kinds: if kind.is_alloy: - raise InputValidationError( - "Kind '{}' is an alloy. This is not allowed for CRYSTAL input structures." - "".format(kind.name)) + raise InputValidationError("Kind '{}' is an alloy. This is not allowed for CRYSTAL input structures." + ''.format(kind.name)) if kind.has_vacancies: - raise InputValidationError( - "Kind '{}' has vacancies. This is not allowed for CRYSTAL input structures." - "".format(kind.name)) + raise InputValidationError("Kind '{}' has vacancies. This is not allowed for CRYSTAL input structures." 
+ ''.format(kind.name)) - kindname_symbol_map = { - kind.name: kind.symbols[0] - for kind in structure.kinds - } + kindname_symbol_map = {kind.name: kind.symbols[0] for kind in structure.kinds} kindname_id_map = {kind.name: i for i, kind in enumerate(structure.kinds)} id_kind_map = {i: kind for i, kind in enumerate(structure.kinds)} kind_names = [site.kind_name for site in structure.sites] @@ -747,12 +735,12 @@ def structure_to_dict(structure): kinds = [id_kind_map[e] for e in equivalent] sdata = { - "lattice": structure.cell, - "atomic_numbers": symbols2numbers(symbols), - "ccoords": [site.position for site in structure.sites], - "pbc": structure.pbc, - "equivalent": equivalent, - "kinds": kinds, + 'lattice': structure.cell, + 'atomic_numbers': symbols2numbers(symbols), + 'ccoords': [site.position for site in structure.sites], + 'pbc': structure.pbc, + 'equivalent': equivalent, + 'kinds': kinds, } return sdata diff --git a/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/3-21g-star.basis b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/3-21g-star.basis new file mode 100644 index 0000000..090ddf3 --- /dev/null +++ b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/3-21g-star.basis @@ -0,0 +1,6 @@ +14 5 +2 0 3 2. 1. +2 1 3 8. 1. +2 1 2 4. 1. +2 1 1 0. 1. +2 3 1 0. 1. diff --git a/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/3-21g.basis b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/3-21g.basis new file mode 100644 index 0000000..1369a59 --- /dev/null +++ b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/3-21g.basis @@ -0,0 +1,5 @@ +14 4 +2 0 3 2. 1. +2 1 3 8. 1. +2 1 2 4. 1. +2 1 1 0. 1. 
diff --git a/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/barthe.basis b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/barthe.basis new file mode 100644 index 0000000..fc5d266 --- /dev/null +++ b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/barthe.basis @@ -0,0 +1,14 @@ +228 4 +BARTHE +0 1 2 2. 1. +1.55 .24985 1. +1.24 -.41636 1. +0 1 1 0. 1. +0.0818 1.0 1. +0 3 4 8. 1. +4.3842E+01 .03337 +1.2069E+01 .17443 +3.9173E+00 .42273 +1.1997E+00 .48809 +0 3 1 0. 1. +0.333 1. diff --git a/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/free.basis b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/free.basis new file mode 100644 index 0000000..1dfa66d --- /dev/null +++ b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/free.basis @@ -0,0 +1,13 @@ +14 3 +0 0 6 2. 1. +16115.9 0.00195948 +2425.58 0.0149288 +553.867 0.0728478 +156.340 0.24613 +50.0683 0.485914 +17.0178 0.325002 +0 1 2 4. 1. +1.07913 -0.376108 0.0671030 +0.302422 1.25165 0.956883 +0 1 1 0. 1. +0.123 1. 1. diff --git a/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/free_ecp.basis b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/free_ecp.basis new file mode 100644 index 0000000..95cbb05 --- /dev/null +++ b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/free_ecp.basis @@ -0,0 +1,32 @@ +228 5 +INPUT +10. 5 4 5 2 0 0 +344.84100 -18.00000 -1 +64.82281 -117.95937 0 +14.28477 -29.43970 0 +3.82101 -10.38626 0 +1.16976 -0.89249 0 +18.64238 3.00000 -2 +4.89161 19.24490 -1 +1.16606 23.93060 0 +0.95239 -9.35414 0 +30.60070 5.00000 -2 +14.30081 19.81155 -1 +15.03304 54.33856 0 +4.64601 54.08782 0 +0.98106 7.31027 0 +4.56008 0.26292 0 +0.67647 -0.43862 0 +0 1 1 2. 1. +1.257 1. 1. +0 1 1 0. 1. +1.052 1. 1. +0 1 1 0. 1. +0.0790 1.0 1. +0 3 4 8. 1. +4.3580E+01 .03204 +1.1997E+01 .17577 +3.8938E+00 .41461 +1.271 .46122 +0 3 1 0. 1. +0.385 1. 
diff --git a/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/haywlc.basis b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/haywlc.basis new file mode 100644 index 0000000..8b0814b --- /dev/null +++ b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/haywlc.basis @@ -0,0 +1,14 @@ +228 4 +HAYWLC +0 1 2 2. 1. +1.257 1.1300E-01 2.6760E-02 +1.052 -1.7420E-01 -1.9610E-02 +0 1 1 0. 1. +0.0790 1.0 1. +0 3 4 8. 1. +4.3580E+01 .03204 +1.1997E+01 .17577 +3.8938E+00 .41461 +1.271 .46122 +0 3 1 0. 1. +0.385 1. diff --git a/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/tzvp.basis b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/tzvp.basis new file mode 100644 index 0000000..a61c6f5 --- /dev/null +++ b/aiida_crystal17/tests/raw_files/basis_sets/manual_examples/tzvp.basis @@ -0,0 +1,50 @@ +28 14 +0 0 8 2.0 1.0 + 351535.72935 0.00022529386884 + 52695.809283 0.00174686162230 + 11992.468293 0.00908499921360 + 3394.5776689 0.03694074844700 + 1105.3594585 0.12032819950000 + 397.14677769 0.28596715057000 + 154.27542974 0.40983020196000 + 61.018723780 0.21620642851000 +0 0 4 2.0 1.0 + 384.45559739 -0.02465127926800 + 119.04879199 -0.11658505277000 + 19.137012223 0.54864126676000 + 8.1526718562 0.52640051122000 +0 0 2 2.0 1.0 + 12.579408642 -0.22797884293000 + 2.0870866081 0.70703738215000 +0 0 1 2.0 1.0 + 2.9480296700 1.00000000000000 +0 0 1 0.0 1.0 + 0.87308901 1.00000000000000 +0 0 1 0.0 1.0 + 0.16787354 1.00000000000000 +0 2 6 6.0 1.0 + 1883.0907486 0.00237482584430 + 445.95155320 0.01928945717200 + 143.08430815 0.09071821150700 + 53.372920722 0.26181414117000 + 21.321919357 0.42309149832000 + 8.6643561994 0.24641686015000 +0 2 3 6.0 1.0 + 34.144255211 -0.02967712916300 + 4.7122455921 0.55616824096000 + 1.8709231845 0.96357766460000 +0 2 1 0.0 1 + 1.0453650800 1.00000000000000 +0 2 1 0.0 1.0 + 0.4276468400 1.00000000000000 +0 3 4 8.0 1 + 74.591603465 0.01207745467200 + 21.590632752 0.07463726215400 + 7.6246142580 
0.23236775502000 + 2.8632206762 0.39042651680000 +0 3 1 0.0 1 + 1.0075942000 1.00000000000000 +0 3 1 0.0 1 + 0.3167209900 1.00000000000000 +0 4 1 0.0 1.0 + 2.1740000000 1.00000000000000 diff --git a/aiida_crystal17/tests/test_calculations/test_cry_doss/test_run_mgo_scf.yml b/aiida_crystal17/tests/test_calculations/test_cry_doss/test_run_mgo_scf.yml index d0ff5c9..cc78a14 100644 --- a/aiida_crystal17/tests/test_calculations/test_cry_doss/test_run_mgo_scf.yml +++ b/aiida_crystal17/tests/test_calculations/test_cry_doss/test_run_mgo_scf.yml @@ -35,7 +35,7 @@ calc: symlink_wf: false version: core: 1.0.0b5 - plugin: 0.9.1b5 + plugin: 0.9.2b5 withmpi: false results: energy_max: 10.4040617 @@ -47,7 +47,7 @@ results: npts: 102 parser_class: CryDossParser parser_errors: [] - parser_version: 0.9.1b5 + parser_version: 0.9.2b5 parser_warnings: [] spin: false system_type: closed shell, insulating system diff --git a/aiida_crystal17/tests/test_calculations/test_cry_fermi/test_run_mgo_scf.yml b/aiida_crystal17/tests/test_calculations/test_cry_fermi/test_run_mgo_scf.yml index 20d1702..52a813e 100644 --- a/aiida_crystal17/tests/test_calculations/test_cry_fermi/test_run_mgo_scf.yml +++ b/aiida_crystal17/tests/test_calculations/test_cry_fermi/test_run_mgo_scf.yml @@ -28,7 +28,7 @@ calc: symlink_wf: false version: core: 1.0.0b5 - plugin: 0.9.1b5 + plugin: 0.9.2b5 withmpi: false fermi: -4.0000737 results: @@ -37,6 +37,6 @@ results: fermi_energy: -4.0000737 parser_class: CryFermiParser parser_errors: [] - parser_version: 0.9.1b5 + parser_version: 0.9.2b5 parser_warnings: [] warnings: [] diff --git a/aiida_crystal17/tests/test_cmndline/test_data.py b/aiida_crystal17/tests/test_cmndline/test_data.py index 1e4e8ec..b1f1df2 100644 --- a/aiida_crystal17/tests/test_cmndline/test_data.py +++ b/aiida_crystal17/tests/test_cmndline/test_data.py @@ -54,8 +54,9 @@ def test_basis_show(db_test_app): class: sto3g element: O filename: sto3g_O.basis - md5: 73a9c7315dc6edf6ab8bd4427a66f31c + md5: 
1ca6e23f7f1b1f5517117bec1d581ca2 num_shells: 2 + orbital_types: [S, SP] year: 1999 """ diff --git a/aiida_crystal17/tests/test_cmndline/test_parser.py b/aiida_crystal17/tests/test_cmndline/test_parser.py index ffc6858..c1048a7 100644 --- a/aiida_crystal17/tests/test_cmndline/test_parser.py +++ b/aiida_crystal17/tests/test_cmndline/test_parser.py @@ -1,28 +1,34 @@ import os from click.testing import CliRunner -from aiida_crystal17.cmndline.cmd_parser import parse +from aiida_crystal17.cmndline.cmd_parser import stdin, stdout, doss_f25 from aiida_crystal17.tests import TEST_FILES def test_parse_stdin_fail(): runner = CliRunner() - result = runner.invoke(parse, ['stdin', '']) + result = runner.invoke(stdin, ['']) assert result.exit_code != 0, result.stdout def test_parse_stdin(): runner = CliRunner() - result = runner.invoke(parse, ['stdin', os.path.join(TEST_FILES, 'crystal', 'mgo_sto3g_scf', 'INPUT')]) + result = runner.invoke(stdin, [os.path.join(TEST_FILES, 'crystal', 'mgo_sto3g_scf', 'INPUT')]) assert result.exit_code == 0, result.stdout def test_parse_stdout_fail(): runner = CliRunner() - result = runner.invoke(parse, ['stdout', '']) + result = runner.invoke(stdout, ['']) assert result.exit_code != 0, result.stdout def test_parse_stdout(): runner = CliRunner() - result = runner.invoke(parse, ['stdout', os.path.join(TEST_FILES, 'crystal', 'mgo_sto3g_scf', 'main.out')]) + result = runner.invoke(stdout, [os.path.join(TEST_FILES, 'crystal', 'mgo_sto3g_scf', 'main.out')]) + assert result.exit_code == 0, result.stdout + + +def test_parse_doss_f25(): + runner = CliRunner() + result = runner.invoke(doss_f25, [os.path.join(TEST_FILES, 'doss', 'mgo_sto3g_scf', 'fort.25')]) assert result.exit_code == 0, result.stdout diff --git a/aiida_crystal17/tests/test_data/test_basis_set.py b/aiida_crystal17/tests/test_data/test_basis_set.py index 438160b..be8375f 100644 --- a/aiida_crystal17/tests/test_data/test_basis_set.py +++ b/aiida_crystal17/tests/test_data/test_basis_set.py @@ 
-12,11 +12,9 @@ def test_create_single(db_test_app): db_test_app.get_or_create_computer() - basisset_data_cls = DataFactory("crystal17.basisset") + basisset_data_cls = DataFactory('crystal17.basisset') - basis = basisset_data_cls( - filepath=os.path.join( - TEST_FILES, "basis_sets", "sto3g", 'sto3g_Mg.basis')) + basis = basisset_data_cls(filepath=os.path.join(TEST_FILES, 'basis_sets', 'sto3g', 'sto3g_Mg.basis')) print(basis.filename) @@ -29,12 +27,14 @@ def test_create_single(db_test_app): 'year': 1999, 'basis_type': 'all-electron', 'class': 'sto3g', - 'md5': '0731ecc3339d2b8736e61add113d0c6f' + 'md5': '0731ecc3339d2b8736e61add113d0c6f', + 'orbital_types': ['S', 'SP', 'SP'] } assert basis.metadata == expected_meta - expected_content = """12 3 + expected_content = """\ +12 3 1 0 3 2. 0. 1 1 3 8. 0. 1 1 3 2. 0.""" @@ -44,72 +44,67 @@ def test_create_single(db_test_app): # try retrieving a pre-existing (stored) basis basis, created = basisset_data_cls.get_or_create( - filepath=os.path.join(TEST_FILES, "basis_sets", "sto3g", - 'sto3g_Mg.basis')) + filepath=os.path.join(TEST_FILES, 'basis_sets', 'sto3g', 'sto3g_Mg.basis')) assert not created def test_create_group(db_test_app): db_test_app.get_or_create_computer() - basisset_data_cls = DataFactory("crystal17.basisset") + basisset_data_cls = DataFactory('crystal17.basisset') upload_basisset_family = basisset_data_cls.upload_basisset_family nfiles, nuploaded = upload_basisset_family( - os.path.join(TEST_FILES, "basis_sets", "sto3g"), "sto3g", - "group of sto3g basis sets") + os.path.join(TEST_FILES, 'basis_sets', 'sto3g'), 'sto3g', 'group of sto3g basis sets') assert (nfiles, nuploaded) == (3, 3) - group = basisset_data_cls.get_basis_group("sto3g") + group = basisset_data_cls.get_basis_group('sto3g') - assert group.description == "group of sto3g basis sets" + assert group.description == 'group of sto3g basis sets' - groups = basisset_data_cls.get_basis_groups(filter_elements="O") + groups = 
basisset_data_cls.get_basis_groups(filter_elements='O') # print(groups) assert len(groups) == 1 # try uploading the files to a second group with pytest.raises(ValueError): upload_basisset_family( - os.path.join(TEST_FILES, "basis_sets", "sto3g"), - "another_sto3g", - "another group of sto3g basis sets", + os.path.join(TEST_FILES, 'basis_sets', 'sto3g'), + 'another_sto3g', + 'another group of sto3g basis sets', stop_if_existing=True) nfiles, nuploaded = upload_basisset_family( - os.path.join(TEST_FILES, "basis_sets", "sto3g"), - "another_sto3g", - "another group of sto3g basis sets", + os.path.join(TEST_FILES, 'basis_sets', 'sto3g'), + 'another_sto3g', + 'another group of sto3g basis sets', stop_if_existing=False) assert (nfiles, nuploaded) == (3, 0) def test_bases_from_struct(db_test_app): db_test_app.get_or_create_computer() - basisset_data_cls = DataFactory("crystal17.basisset") + basisset_data_cls = DataFactory('crystal17.basisset') upload_basisset_family = basisset_data_cls.upload_basisset_family nfiles, nuploaded = upload_basisset_family( - os.path.join(TEST_FILES, "basis_sets", "sto3g"), "sto3g", - "group of sto3g basis sets") + os.path.join(TEST_FILES, 'basis_sets', 'sto3g'), 'sto3g', 'group of sto3g basis sets') # MgO import ase # noqa: F401 from ase.spacegroup import crystal atoms = crystal( - symbols=[12, 8], - basis=[[0, 0, 0], [0.5, 0.5, 0.5]], - spacegroup=225, - cellpar=[4.21, 4.21, 4.21, 90, 90, 90]) # type: ase.Atoms + symbols=[12, 8], basis=[[0, 0, 0], [0.5, 0.5, 0.5]], spacegroup=225, cellpar=[4.21, 4.21, 4.21, 90, 90, + 90]) # type: ase.Atoms # atoms[0].tag = 1 # atoms[1].tag = 1 atoms.set_tags([1, 1, 0, 0, 0, 0, 0, 0]) - structure_data_cls = DataFactory("structure") + structure_data_cls = DataFactory('structure') struct = structure_data_cls(ase=atoms) - bases_dict = basisset_data_cls.get_basissets_by_kind(struct, "sto3g") + bases_dict = basisset_data_cls.get_basissets_by_kind(struct, 'sto3g') # print(bases_dict) - assert set(bases_dict.keys()) 
== set(["Mg", "Mg1", "O"]) + assert set(bases_dict.keys()) == set(['Mg', 'Mg1', 'O']) diff --git a/aiida_crystal17/tests/test_gulp/test_parsers/test_parse_output_fit/test_parse_file_lj.yml b/aiida_crystal17/tests/test_gulp/test_parsers/test_parse_output_fit/test_parse_file_lj.yml index 566271f..3a483c7 100644 --- a/aiida_crystal17/tests/test_gulp/test_parsers/test_parse_output_fit/test_parse_file_lj.yml +++ b/aiida_crystal17/tests/test_gulp/test_parsers/test_parse_output_fit/test_parse_file_lj.yml @@ -93,7 +93,7 @@ gulp_version: 4.5.3 num_cycles: 6 parser_class: null parser_errors: [] -parser_version: 0.9.1b5 +parser_version: 0.9.2b5 parser_warnings: [] peak_dynamic_memory_mb: 0.52 total_configurations: 3 diff --git a/aiida_crystal17/tests/test_gulp/test_parsers/test_parse_output_fit/test_parse_file_reaxff.yml b/aiida_crystal17/tests/test_gulp/test_parsers/test_parse_output_fit/test_parse_file_reaxff.yml index 2cafdee..aaa2118 100644 --- a/aiida_crystal17/tests/test_gulp/test_parsers/test_parse_output_fit/test_parse_file_reaxff.yml +++ b/aiida_crystal17/tests/test_gulp/test_parsers/test_parse_output_fit/test_parse_file_reaxff.yml @@ -93,7 +93,7 @@ gulp_version: 4.5.3 num_cycles: 1 parser_class: null parser_errors: [] -parser_version: 0.9.1b5 +parser_version: 0.9.2b5 parser_warnings: [] peak_dynamic_memory_mb: 0.52 total_configurations: 3 diff --git a/aiida_crystal17/tests/test_immigration/test_immigrate/test_full_mgo_opt.yml b/aiida_crystal17/tests/test_immigration/test_immigrate/test_full_mgo_opt.yml index a51b24c..d6f24bd 100644 --- a/aiida_crystal17/tests/test_immigration/test_immigrate/test_full_mgo_opt.yml +++ b/aiida_crystal17/tests/test_immigration/test_immigrate/test_full_mgo_opt.yml @@ -28,5 +28,5 @@ scheduler_stdout: _scheduler-stdout.txt sealed: true version: core: 1.0.0b5 - plugin: 0.9.1b5 + plugin: 0.9.2b5 withmpi: false diff --git a/aiida_crystal17/tests/test_immigration/test_immigrate/test_full_nio_afm.yml 
b/aiida_crystal17/tests/test_immigration/test_immigrate/test_full_nio_afm.yml index badc7c8..1a199e0 100644 --- a/aiida_crystal17/tests/test_immigration/test_immigrate/test_full_nio_afm.yml +++ b/aiida_crystal17/tests/test_immigration/test_immigrate/test_full_nio_afm.yml @@ -28,5 +28,5 @@ scheduler_stdout: _scheduler-stdout.txt sealed: true version: core: 1.0.0b5 - plugin: 0.9.1b5 + plugin: 0.9.2b5 withmpi: false diff --git a/aiida_crystal17/tests/test_parsers/test_cry_doss/test_success_crystal17_doss_.yml b/aiida_crystal17/tests/test_parsers/test_cry_doss/test_success_crystal17_doss_.yml index ec030e8..8543122 100644 --- a/aiida_crystal17/tests/test_parsers/test_cry_doss/test_success_crystal17_doss_.yml +++ b/aiida_crystal17/tests/test_parsers/test_cry_doss/test_success_crystal17_doss_.yml @@ -27,7 +27,7 @@ results: npts: 1002 parser_class: CryDossParser parser_errors: [] - parser_version: 0.9.1b5 + parser_version: 0.9.2b5 parser_warnings: [] spin: true system_type: open shell, conducting system diff --git a/aiida_crystal17/tests/test_parsers/test_cry_newk/test_success_crystal17_fermi_.yml b/aiida_crystal17/tests/test_parsers/test_cry_newk/test_success_crystal17_fermi_.yml index f42f6f9..bd1219a 100644 --- a/aiida_crystal17/tests/test_parsers/test_cry_newk/test_success_crystal17_fermi_.yml +++ b/aiida_crystal17/tests/test_parsers/test_cry_newk/test_success_crystal17_fermi_.yml @@ -5,6 +5,6 @@ results: fermi_energy: -3.4014233 parser_class: CryFermiParser parser_errors: [] - parser_version: 0.9.1b5 + parser_version: 0.9.2b5 parser_warnings: [] warnings: [] diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_crystal_fermi/test_read_newk_out_file.yml b/aiida_crystal17/tests/test_parsers/test_raw/test_crystal_fermi/test_read_newk_out_file.yml index a8082b9..8de7690 100644 --- a/aiida_crystal17/tests/test_parsers/test_raw/test_crystal_fermi/test_read_newk_out_file.yml +++ 
b/aiida_crystal17/tests/test_parsers/test_raw/test_crystal_fermi/test_read_newk_out_file.yml @@ -3,6 +3,6 @@ errors: [] fermi_energy: -3.4014233 parser_class: dummy_parser_class parser_errors: [] -parser_version: 0.9.1b5 +parser_version: 0.9.2b5 parser_warnings: [] warnings: [] diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_crystal_fort25/test_read_crystal_fort25.yml b/aiida_crystal17/tests/test_parsers/test_raw/test_crystal_fort25/test_read_crystal_fort25.yml index 0441075..21ae753 100644 --- a/aiida_crystal17/tests/test_parsers/test_raw/test_crystal_fort25/test_read_crystal_fort25.yml +++ b/aiida_crystal17/tests/test_parsers/test_raw/test_crystal_fort25/test_read_crystal_fort25.yml @@ -5032,7 +5032,7 @@ results: npts: 1002 parser_class: dummy_parser_class parser_errors: [] - parser_version: 0.9.1b5 + parser_version: 0.9.2b5 parser_warnings: [] spin: true system_type: open shell, conducting system diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_gui_parse.py b/aiida_crystal17/tests/test_parsers/test_raw/test_gui_parse.py deleted file mode 100644 index a529cf4..0000000 --- a/aiida_crystal17/tests/test_parsers/test_raw/test_gui_parse.py +++ /dev/null @@ -1,148 +0,0 @@ -import os -import numpy as np -import pytest - -from aiida_crystal17.tests import TEST_FILES -from aiida_crystal17.parsers.raw.gui_parse import ( - gui_file_read, gui_file_write, get_centering_code, - get_crystal_type_code, structure_to_symmetry) - - -@pytest.mark.parametrize( - "gui_filename,num_symops,space_group", - ( - ('cubic-rocksalt.crystal.gui', 48, 225), - ('cubic-zincblende.crystal.gui', 24, 216), - ('greigite.crystal.gui', 48, 227), - ('mackinawite.crystal.gui', 16, 129), - ('marcasite.crystal.gui', 8, 58), - ('pyrite.crystal.gui', 24, 205), - ('pyrrhotite-4c-monoclinic.crystal.gui', 4, 15), - ('troilite-hex-p63mc.crystal.gui', 12, 186), - ('troilite-hex-p63mmc.crystal.gui', 24, 194), - ('troilite-hexagonal.crystal.gui', 12, 190), - ('troilite-mnp.crystal.gui', 
8, 62) - ) -) -def test_gui_file_read(gui_filename, num_symops, space_group): - path = os.path.join(TEST_FILES, "gui", "out", gui_filename) - with open(path) as handle: - lines = handle.read().splitlines() - structdata, symmdata = gui_file_read(lines) - assert len(symmdata["operations"]) == num_symops - assert symmdata["space_group"] == space_group - - -@pytest.mark.parametrize( - "hall_number,centering_code,crystal_code", - [ - (90, 4, 2), # pyrrhotite-4c 15, 'C2/c' - (501, 1, 6), # pyrite 205, 'Pa3' - (275, 1, 3), # marcasite 58, 'Pnnm' - (484, 1, 5), # troilite 190 'P-62c' - (409, 1, 4), # mackinawite 129, 'P4/nmm' - (526, 5, 6) # greigite 227, 'Fd3m' - ]) -def test_symmetry_codes(hall_number, centering_code, crystal_code): - assert get_crystal_type_code(hall_number=hall_number) == crystal_code - assert get_centering_code(hall_number) == centering_code - - -@pytest.mark.parametrize( - "gui_filename,num_symops,space_group", - ( - ('cubic-rocksalt.crystal.gui', 48, 225), - ('cubic-zincblende.crystal.gui', 24, 216), - ('greigite.crystal.gui', 48, 227), - ('mackinawite.crystal.gui', 16, 129), - ('marcasite.crystal.gui', 8, 58), - ('pyrite.crystal.gui', 24, 205), - ('pyrrhotite-4c-monoclinic.crystal.gui', 4, 15), - ('troilite-hex-p63mc.crystal.gui', 12, 186), - ('troilite-hex-p63mmc.crystal.gui', 24, 194), - ('troilite-hexagonal.crystal.gui', 12, 190), - ('troilite-mnp.crystal.gui', 8, 62) - ) -) -def test_structure_to_symmetry(db_test_app, gui_filename, - num_symops, space_group): - """ we test that we can go round trip, - reading a gui file and comparing the parsed symmetry to the computed one - """ - path = os.path.join(TEST_FILES, "gui", "out", gui_filename) - with open(path) as handle: - lines = handle.read().splitlines() - structdata, symmdata = gui_file_read(lines) - - symmdata2 = structure_to_symmetry(structdata) - assert len(symmdata["operations"]) == len(symmdata2["operations"]) - assert symmdata["space_group"] == symmdata2["space_group"] - assert 
symmdata["crystal_type_code"] == symmdata2["crystal_type_code"] - assert symmdata["centring_code"] == symmdata2["centring_code"] - - -@pytest.mark.parametrize( - "gui_filename,num_symops,space_group", - ( - ('cubic-rocksalt.crystal.gui', 48, 225), - ('cubic-zincblende.crystal.gui', 24, 216), - ('greigite.crystal.gui', 48, 227), - ('mackinawite.crystal.gui', 16, 129), - ('marcasite.crystal.gui', 8, 58), - ('pyrite.crystal.gui', 24, 205), - ('pyrrhotite-4c-monoclinic.crystal.gui', 4, 15), - ('troilite-hex-p63mc.crystal.gui', 12, 186), - ('troilite-hex-p63mmc.crystal.gui', 24, 194), - ('troilite-hexagonal.crystal.gui', 12, 190), - ('troilite-mnp.crystal.gui', 8, 62) - ) -) -def test_structure_to_symmetry_operations( - db_test_app, gui_filename, num_symops, space_group): - """ we test that we can go round trip, - reading a gui file and comparing the parsed symmetry to the computed one - """ - path = os.path.join(TEST_FILES, "gui", "out", gui_filename) - with open(path) as handle: - lines = handle.read().splitlines() - structdata, symmdata = gui_file_read(lines) - - symmdata2 = structure_to_symmetry(structdata, as_cartesian=True) - assert len(symmdata["operations"]) == len(symmdata2["operations"]) - ops1 = np.sort(symmdata["operations"], axis=0) - ops2 = np.sort(symmdata2["operations"], axis=0) - assert np.allclose(ops1, ops2) - - -def test_gui_file_write(): - structure_data = { - "lattice": [[1, 0, 0], [0, 1, 0], [0, 0, 1]], - "ccoords": [[0, 0, 0]], - "atomic_numbers": [1], - "pbc": [True, True, True] - } - symmetry_data = { - "operations": [[1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0]], - "basis": "cartesian", - "space_group": 4, - "crystal_type_code": 5, - "centring_code": 6 - } - - outstr = gui_file_write(structure_data, symmetry_data) - - print(outstr) - - expected = ['3 6 5', - ' 1.000000000E+00 0.000000000E+00 0.000000000E+00', - ' 0.000000000E+00 1.000000000E+00 0.000000000E+00', - ' 0.000000000E+00 0.000000000E+00 1.000000000E+00', - '1', - ' 1.000000000E+00 
0.000000000E+00 0.000000000E+00', - ' 0.000000000E+00 1.000000000E+00 0.000000000E+00', - ' 0.000000000E+00 0.000000000E+00 1.000000000E+00', - ' 0.000000000E+00 0.000000000E+00 0.000000000E+00', - '1', - ' 1 0.000000000E+00 0.000000000E+00 0.000000000E+00', - '4 1', ''] - assert outstr == expected diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases.py b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases.py new file mode 100644 index 0000000..7e54a9b --- /dev/null +++ b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases.py @@ -0,0 +1,42 @@ +import os +import pytest + +from aiida_crystal17.parsers.raw import parse_bases +from aiida_crystal17.tests import TEST_FILES +from aiida_crystal17.common import recursive_round + + +@pytest.mark.parametrize('filename', ('3-21g', '3-21g-star', 'tzvp', 'free', 'barthe', 'free_ecp', 'haywlc')) +def test_single_bases(filename, data_regression): + path = os.path.join(TEST_FILES, 'basis_sets', 'manual_examples', filename + '.basis') + with open(path) as handle: + content = handle.read() + output = parse_bases.parse_bsets_stdin(content, isolated=True) + output = recursive_round(output, 12) + data_regression.check(output) + + +@pytest.mark.parametrize('name,filepath', ( + ('mgo_sto3g', ('crystal', 'mgo_sto3g_scf', 'INPUT')), + ('nio_sto3g', ('crystal', 'nio_sto3g_afm_scf', 'INPUT')), +)) +def test_full_files(name, filepath, data_regression): + path = os.path.join(TEST_FILES, *filepath) + with open(path) as handle: + content = handle.read() + output = parse_bases.parse_bsets_stdin(content, isolated=False) + output = recursive_round(output, 12) + data_regression.check(output) + + +@pytest.mark.parametrize('name,filepath,atoms', ( + ('mgo_sto3g', ('crystal', 'mgo_sto3g_scf', 'INPUT'), (12, 8)), + ('nio_sto3g', ('crystal', 'nio_sto3g_afm_scf', 'INPUT'), (28, 28, 8, 8)), +)) +def test_compute_orbitals(name, filepath, atoms, data_regression): + path = os.path.join(TEST_FILES, *filepath) + with 
open(path) as handle: + content = handle.read() + basis_sets = parse_bases.parse_bsets_stdin(content, isolated=False) + orbitals = parse_bases.compute_orbitals(atoms, basis_sets) + data_regression.check(dict(orbitals._asdict())) diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_compute_orbitals_mgo_sto3g_filepath0_atoms0_.yml b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_compute_orbitals_mgo_sto3g_filepath0_atoms0_.yml new file mode 100644 index 0000000..4c106c0 --- /dev/null +++ b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_compute_orbitals_mgo_sto3g_filepath0_atoms0_.yml @@ -0,0 +1,90 @@ +ao_indices: + 1: + atom: 0 + element: Mg + index: 1 + type: S + 2: + atom: 0 + element: Mg + index: 1 + type: SP + 3: + atom: 0 + element: Mg + index: 1 + type: SP + 4: + atom: 0 + element: Mg + index: 1 + type: SP + 5: + atom: 0 + element: Mg + index: 1 + type: SP + 6: + atom: 0 + element: Mg + index: 2 + type: SP + 7: + atom: 0 + element: Mg + index: 2 + type: SP + 8: + atom: 0 + element: Mg + index: 2 + type: SP + 9: + atom: 0 + element: Mg + index: 2 + type: SP + 10: + atom: 1 + element: O + index: 1 + type: S + 11: + atom: 1 + element: O + index: 1 + type: SP + 12: + atom: 1 + element: O + index: 1 + type: SP + 13: + atom: 1 + element: O + index: 1 + type: SP + 14: + atom: 1 + element: O + index: 1 + type: SP +core_electrons: 12 +electrons: 20 +number_ao: 14 +orbital_types: +- - Mg + - S + - 1 +- - Mg + - SP + - 1 +- - Mg + - SP + - 2 +- - O + - S + - 1 +- - O + - SP + - 1 diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_compute_orbitals_nio_sto3g_filepath1_atoms1_.yml b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_compute_orbitals_nio_sto3g_filepath1_atoms1_.yml new file mode 100644 index 0000000..097da05 --- /dev/null +++ b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_compute_orbitals_nio_sto3g_filepath1_atoms1_.yml @@ -0,0 
+1,256 @@ +ao_indices: + 1: + atom: 0 + element: Ni + index: 1 + type: S + 2: + atom: 0 + element: Ni + index: 1 + type: SP + 3: + atom: 0 + element: Ni + index: 1 + type: SP + 4: + atom: 0 + element: Ni + index: 1 + type: SP + 5: + atom: 0 + element: Ni + index: 1 + type: SP + 6: + atom: 0 + element: Ni + index: 2 + type: SP + 7: + atom: 0 + element: Ni + index: 2 + type: SP + 8: + atom: 0 + element: Ni + index: 2 + type: SP + 9: + atom: 0 + element: Ni + index: 2 + type: SP + 10: + atom: 0 + element: Ni + index: 3 + type: SP + 11: + atom: 0 + element: Ni + index: 3 + type: SP + 12: + atom: 0 + element: Ni + index: 3 + type: SP + 13: + atom: 0 + element: Ni + index: 3 + type: SP + 14: + atom: 0 + element: Ni + index: 1 + type: D + 15: + atom: 0 + element: Ni + index: 1 + type: D + 16: + atom: 0 + element: Ni + index: 1 + type: D + 17: + atom: 0 + element: Ni + index: 1 + type: D + 18: + atom: 0 + element: Ni + index: 1 + type: D + 19: + atom: 1 + element: Ni + index: 1 + type: S + 20: + atom: 1 + element: Ni + index: 1 + type: SP + 21: + atom: 1 + element: Ni + index: 1 + type: SP + 22: + atom: 1 + element: Ni + index: 1 + type: SP + 23: + atom: 1 + element: Ni + index: 1 + type: SP + 24: + atom: 1 + element: Ni + index: 2 + type: SP + 25: + atom: 1 + element: Ni + index: 2 + type: SP + 26: + atom: 1 + element: Ni + index: 2 + type: SP + 27: + atom: 1 + element: Ni + index: 2 + type: SP + 28: + atom: 1 + element: Ni + index: 3 + type: SP + 29: + atom: 1 + element: Ni + index: 3 + type: SP + 30: + atom: 1 + element: Ni + index: 3 + type: SP + 31: + atom: 1 + element: Ni + index: 3 + type: SP + 32: + atom: 1 + element: Ni + index: 1 + type: D + 33: + atom: 1 + element: Ni + index: 1 + type: D + 34: + atom: 1 + element: Ni + index: 1 + type: D + 35: + atom: 1 + element: Ni + index: 1 + type: D + 36: + atom: 1 + element: Ni + index: 1 + type: D + 37: + atom: 2 + element: O + index: 1 + type: S + 38: + atom: 2 + element: O + index: 1 + type: SP + 39: + atom: 2 + 
element: O + index: 1 + type: SP + 40: + atom: 2 + element: O + index: 1 + type: SP + 41: + atom: 2 + element: O + index: 1 + type: SP + 42: + atom: 3 + element: O + index: 1 + type: S + 43: + atom: 3 + element: O + index: 1 + type: SP + 44: + atom: 3 + element: O + index: 1 + type: SP + 45: + atom: 3 + element: O + index: 1 + type: SP + 46: + atom: 3 + element: O + index: 1 + type: SP +core_electrons: 40 +electrons: 72 +number_ao: 46 +orbital_types: +- - Ni + - S + - 1 +- - Ni + - SP + - 1 +- - Ni + - SP + - 2 +- - Ni + - SP + - 3 +- - Ni + - D + - 1 +- - O + - S + - 1 +- - O + - SP + - 1 diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_full_files_mgo_sto3g_filepath0_.yml b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_full_files_mgo_sto3g_filepath0_.yml new file mode 100644 index 0000000..e22311d --- /dev/null +++ b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_full_files_mgo_sto3g_filepath0_.yml @@ -0,0 +1,21 @@ +Mg: + bs: + - functions: + - STO-nG(nd) type 3-21G core shell + type: S + - functions: + - STO-nG(nd) type 3-21G core shell + type: SP + - functions: + - STO-nG(nd) type 3-21G core shell + type: SP + type: all-electron +O: + bs: + - functions: + - STO-nG(nd) type 3-21G core shell + type: S + - functions: + - STO-nG(nd) type 3-21G core shell + type: SP + type: all-electron diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_full_files_nio_sto3g_filepath1_.yml b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_full_files_nio_sto3g_filepath1_.yml new file mode 100644 index 0000000..a367e5d --- /dev/null +++ b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_full_files_nio_sto3g_filepath1_.yml @@ -0,0 +1,27 @@ +Ni: + bs: + - functions: + - STO-nG(nd) type 3-21G core shell + type: S + - functions: + - STO-nG(nd) type 3-21G core shell + type: SP + - functions: + - STO-nG(nd) type 3-21G core shell + type: SP + - functions: + - 
STO-nG(nd) type 3-21G core shell + type: SP + - functions: + - STO-nG(nd) type 3-21G core shell + type: D + type: all-electron +O: + bs: + - functions: + - STO-nG(nd) type 3-21G core shell + type: S + - functions: + - STO-nG(nd) type 3-21G core shell + type: SP + type: all-electron diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_3_21g_.yml b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_3_21g_.yml new file mode 100644 index 0000000..001cc1a --- /dev/null +++ b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_3_21g_.yml @@ -0,0 +1,15 @@ +Si: + bs: + - functions: + - 3(6)-21G(nd) type 3-21G core shell + type: S + - functions: + - 3(6)-21G(nd) type 3-21G core shell + type: SP + - functions: + - 3(6)-21G(nd) type n-21G inner valence shell + type: SP + - functions: + - 3(6)-21G(nd) type n-21G outer valence shell + type: SP + type: all-electron diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_3_21g_star_.yml b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_3_21g_star_.yml new file mode 100644 index 0000000..0f8b108 --- /dev/null +++ b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_3_21g_star_.yml @@ -0,0 +1,18 @@ +Si: + bs: + - functions: + - 3(6)-21G(nd) type 3-21G core shell + type: S + - functions: + - 3(6)-21G(nd) type 3-21G core shell + type: SP + - functions: + - 3(6)-21G(nd) type n-21G inner valence shell + type: SP + - functions: + - 3(6)-21G(nd) type n-21G outer valence shell + type: SP + - functions: + - 3(6)-21G(nd) type n-21G outer valence shell + type: D + type: all-electron diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_barthe_.yml b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_barthe_.yml new file mode 100644 index 0000000..729d461 --- /dev/null +++ 
b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_barthe_.yml @@ -0,0 +1,32 @@ +Ni: + bs: + - functions: + - - 1.55 + - 0.24985 + - 1.0 + - - 1.24 + - -0.41636 + - 1.0 + type: SP + - functions: + - - 0.0818 + - 1.0 + - 1.0 + type: SP + - functions: + - - 43.842 + - 0.03337 + - - 12.069 + - 0.17443 + - - 3.9173 + - 0.42273 + - - 1.1997 + - 0.48809 + type: D + - functions: + - - 0.333 + - 1.0 + type: D + ecp: + - Durand-Barthelat + type: valence-electron diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_free_.yml b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_free_.yml new file mode 100644 index 0000000..f43d72e --- /dev/null +++ b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_free_.yml @@ -0,0 +1,30 @@ +Si: + bs: + - functions: + - - 16115.9 + - 0.00195948 + - - 2425.58 + - 0.0149288 + - - 553.867 + - 0.0728478 + - - 156.34 + - 0.24613 + - - 50.0683 + - 0.485914 + - - 17.0178 + - 0.325002 + type: S + - functions: + - - 1.07913 + - -0.376108 + - 0.067103 + - - 0.302422 + - 1.25165 + - 0.956883 + type: SP + - functions: + - - 0.123 + - 1.0 + - 1.0 + type: SP + type: all-electron diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_free_ecp_.yml b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_free_ecp_.yml new file mode 100644 index 0000000..6882da3 --- /dev/null +++ b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_free_ecp_.yml @@ -0,0 +1,85 @@ +Ni: + bs: + - functions: + - - 1.257 + - 1.0 + - 1.0 + type: SP + - functions: + - - 1.052 + - 1.0 + - 1.0 + type: SP + - functions: + - - 0.079 + - 1.0 + - 1.0 + type: SP + - functions: + - - 43.58 + - 0.03204 + - - 11.997 + - 0.17577 + - - 3.8938 + - 0.41461 + - - 1.271 + - 0.46122 + type: D + - functions: + - - 0.385 + - 1.0 + type: D + ecp: + - - W0 + - - 344.841 + - -18.0 + - -1.0 + 
- - 64.82281 + - -117.95937 + - 0.0 + - - 14.28477 + - -29.4397 + - 0.0 + - - 3.82101 + - -10.38626 + - 0.0 + - - 1.16976 + - -0.89249 + - 0.0 + - - P0 + - - 18.64238 + - 3.0 + - -2.0 + - - 4.89161 + - 19.2449 + - -1.0 + - - 1.16606 + - 23.9306 + - 0.0 + - - 0.95239 + - -9.35414 + - 0.0 + - - P1 + - - 30.6007 + - 5.0 + - -2.0 + - - 14.30081 + - 19.81155 + - -1.0 + - - 15.03304 + - 54.33856 + - 0.0 + - - 4.64601 + - 54.08782 + - 0.0 + - - 0.98106 + - 7.31027 + - 0.0 + - - P2 + - - 4.56008 + - 0.26292 + - 0.0 + - - 0.67647 + - -0.43862 + - 0.0 + type: valence-electron diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_haywlc_.yml b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_haywlc_.yml new file mode 100644 index 0000000..3057e7a --- /dev/null +++ b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_haywlc_.yml @@ -0,0 +1,32 @@ +Ni: + bs: + - functions: + - - 1.257 + - 0.113 + - 0.02676 + - - 1.052 + - -0.1742 + - -0.01961 + type: SP + - functions: + - - 0.079 + - 1.0 + - 1.0 + type: SP + - functions: + - - 43.58 + - 0.03204 + - - 11.997 + - 0.17577 + - - 3.8938 + - 0.41461 + - - 1.271 + - 0.46122 + type: D + - functions: + - - 0.385 + - 1.0 + type: D + ecp: + - Hay-Wadt large core + type: valence-electron diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_tzvp_.yml b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_tzvp_.yml new file mode 100644 index 0000000..1691ed0 --- /dev/null +++ b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_bases/test_single_bases_tzvp_.yml @@ -0,0 +1,101 @@ +Ni: + bs: + - functions: + - - 351535.72935 + - 0.000225293869 + - - 52695.809283 + - 0.001746861622 + - - 11992.468293 + - 0.009084999214 + - - 3394.5776689 + - 0.036940748447 + - - 1105.3594585 + - 0.1203281995 + - - 397.14677769 + - 0.28596715057 + - - 154.27542974 + - 0.40983020196 + - - 61.01872378 + 
- 0.21620642851 + type: S + - functions: + - - 384.45559739 + - -0.024651279268 + - - 119.04879199 + - -0.11658505277 + - - 19.137012223 + - 0.54864126676 + - - 8.1526718562 + - 0.52640051122 + type: S + - functions: + - - 12.579408642 + - -0.22797884293 + - - 2.0870866081 + - 0.70703738215 + type: S + - functions: + - - 2.94802967 + - 1.0 + type: S + - functions: + - - 0.87308901 + - 1.0 + type: S + - functions: + - - 0.16787354 + - 1.0 + type: S + - functions: + - - 1883.0907486 + - 0.002374825844 + - - 445.9515532 + - 0.019289457172 + - - 143.08430815 + - 0.090718211507 + - - 53.372920722 + - 0.26181414117 + - - 21.321919357 + - 0.42309149832 + - - 8.6643561994 + - 0.24641686015 + type: P + - functions: + - - 34.144255211 + - -0.029677129163 + - - 4.7122455921 + - 0.55616824096 + - - 1.8709231845 + - 0.9635776646 + type: P + - functions: + - - 1.04536508 + - 1.0 + type: P + - functions: + - - 0.42764684 + - 1.0 + type: P + - functions: + - - 74.591603465 + - 0.012077454672 + - - 21.590632752 + - 0.074637262154 + - - 7.624614258 + - 0.23236775502 + - - 2.8632206762 + - 0.3904265168 + type: D + - functions: + - - 1.0075942 + - 1.0 + type: D + - functions: + - - 0.31672099 + - 1.0 + type: D + - functions: + - - 2.174 + - 1.0 + type: F + type: all-electron diff --git a/aiida_crystal17/tests/test_parsers/test_raw/test_parse_fort34.py b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_fort34.py new file mode 100644 index 0000000..1552d81 --- /dev/null +++ b/aiida_crystal17/tests/test_parsers/test_raw/test_parse_fort34.py @@ -0,0 +1,116 @@ +import os +import numpy as np +import pytest + +from aiida_crystal17.tests import TEST_FILES +from aiida_crystal17.parsers.raw.parse_fort34 import (parse_fort34, gui_file_write, get_centering_code, + get_crystal_type_code, structure_to_symmetry) + + +@pytest.mark.parametrize( + 'gui_filename,num_symops,space_group', + (('cubic-rocksalt.crystal.gui', 48, 225), ('cubic-zincblende.crystal.gui', 24, 216), + 
('greigite.crystal.gui', 48, 227), ('mackinawite.crystal.gui', 16, 129), ('marcasite.crystal.gui', 8, 58), + ('pyrite.crystal.gui', 24, 205), ('pyrrhotite-4c-monoclinic.crystal.gui', 4, 15), + ('troilite-hex-p63mc.crystal.gui', 12, 186), ('troilite-hex-p63mmc.crystal.gui', 24, 194), + ('troilite-hexagonal.crystal.gui', 12, 190), ('troilite-mnp.crystal.gui', 8, 62))) +def test_gui_file_read(gui_filename, num_symops, space_group): + path = os.path.join(TEST_FILES, 'gui', 'out', gui_filename) + with open(path) as handle: + lines = handle.read().splitlines() + structdata, symmdata = parse_fort34(lines) + assert len(symmdata['operations']) == num_symops + assert symmdata['space_group'] == space_group + + +@pytest.mark.parametrize( + 'hall_number,centering_code,crystal_code', + [ + (90, 4, 2), # pyrrhotite-4c 15, 'C2/c' + (501, 1, 6), # pyrite 205, 'Pa3' + (275, 1, 3), # marcasite 58, 'Pnnm' + (484, 1, 5), # troilite 190 'P-62c' + (409, 1, 4), # mackinawite 129, 'P4/nmm' + (526, 5, 6) # greigite 227, 'Fd3m' + ]) +def test_symmetry_codes(hall_number, centering_code, crystal_code): + assert get_crystal_type_code(hall_number=hall_number) == crystal_code + assert get_centering_code(hall_number) == centering_code + + +@pytest.mark.parametrize( + 'gui_filename,num_symops,space_group', + (('cubic-rocksalt.crystal.gui', 48, 225), ('cubic-zincblende.crystal.gui', 24, 216), + ('greigite.crystal.gui', 48, 227), ('mackinawite.crystal.gui', 16, 129), ('marcasite.crystal.gui', 8, 58), + ('pyrite.crystal.gui', 24, 205), ('pyrrhotite-4c-monoclinic.crystal.gui', 4, 15), + ('troilite-hex-p63mc.crystal.gui', 12, 186), ('troilite-hex-p63mmc.crystal.gui', 24, 194), + ('troilite-hexagonal.crystal.gui', 12, 190), ('troilite-mnp.crystal.gui', 8, 62))) +def test_structure_to_symmetry(db_test_app, gui_filename, num_symops, space_group): + """ we test that we can go round trip, + reading a gui file and comparing the parsed symmetry to the computed one + """ + path = os.path.join(TEST_FILES, 'gui', 
'out', gui_filename) + with open(path) as handle: + lines = handle.read().splitlines() + structdata, symmdata = parse_fort34(lines) + + symmdata2 = structure_to_symmetry(structdata) + assert len(symmdata['operations']) == len(symmdata2['operations']) + assert symmdata['space_group'] == symmdata2['space_group'] + assert symmdata['crystal_type_code'] == symmdata2['crystal_type_code'] + assert symmdata['centring_code'] == symmdata2['centring_code'] + + +@pytest.mark.parametrize( + 'gui_filename,num_symops,space_group', + (('cubic-rocksalt.crystal.gui', 48, 225), ('cubic-zincblende.crystal.gui', 24, 216), + ('greigite.crystal.gui', 48, 227), ('mackinawite.crystal.gui', 16, 129), ('marcasite.crystal.gui', 8, 58), + ('pyrite.crystal.gui', 24, 205), ('pyrrhotite-4c-monoclinic.crystal.gui', 4, 15), + ('troilite-hex-p63mc.crystal.gui', 12, 186), ('troilite-hex-p63mmc.crystal.gui', 24, 194), + ('troilite-hexagonal.crystal.gui', 12, 190), ('troilite-mnp.crystal.gui', 8, 62))) +def test_structure_to_symmetry_operations(db_test_app, gui_filename, num_symops, space_group): + """ we test that we can go round trip, + reading a gui file and comparing the parsed symmetry to the computed one + """ + path = os.path.join(TEST_FILES, 'gui', 'out', gui_filename) + with open(path) as handle: + lines = handle.read().splitlines() + structdata, symmdata = parse_fort34(lines) + + symmdata2 = structure_to_symmetry(structdata, as_cartesian=True) + assert len(symmdata['operations']) == len(symmdata2['operations']) + ops1 = np.sort(symmdata['operations'], axis=0) + ops2 = np.sort(symmdata2['operations'], axis=0) + assert np.allclose(ops1, ops2) + + +def test_gui_file_write(): + structure_data = { + 'lattice': [[1, 0, 0], [0, 1, 0], [0, 0, 1]], + 'ccoords': [[0, 0, 0]], + 'atomic_numbers': [1], + 'pbc': [True, True, True] + } + symmetry_data = { + 'operations': [[1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0]], + 'basis': 'cartesian', + 'space_group': 4, + 'crystal_type_code': 5, + 'centring_code': 6 + } + + 
outstr = gui_file_write(structure_data, symmetry_data) + + print(outstr) + + expected = [ + '3 6 5', ' 1.000000000E+00 0.000000000E+00 0.000000000E+00', + ' 0.000000000E+00 1.000000000E+00 0.000000000E+00', + ' 0.000000000E+00 0.000000000E+00 1.000000000E+00', '1', + ' 1.000000000E+00 0.000000000E+00 0.000000000E+00', + ' 0.000000000E+00 1.000000000E+00 0.000000000E+00', + ' 0.000000000E+00 0.000000000E+00 1.000000000E+00', + ' 0.000000000E+00 0.000000000E+00 0.000000000E+00', '1', + ' 1 0.000000000E+00 0.000000000E+00 0.000000000E+00', '4 1', '' + ] + assert outstr == expected diff --git a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct.py b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct.py index cc2badd..50c2bd7 100644 --- a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct.py +++ b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct.py @@ -9,23 +9,20 @@ def test_no_settings(db_test_app): """test no settings dict """ with pytest.raises(ValueError): - wflow_cls = WorkflowFactory("crystal17.sym3d") + wflow_cls = WorkflowFactory('crystal17.sym3d') results, node = run_get_node(wflow_cls) def test_bad_settings(db_test_app): """test bad settings dict """ with pytest.raises(ValidationError): - results, node = run_get_node( - WorkflowFactory("crystal17.sym3d"), - settings=DataFactory("dict")(dict={"a": 1})) + results, node = run_get_node(WorkflowFactory('crystal17.sym3d'), settings=DataFactory('dict')(dict={'a': 1})) def test_no_structure(db_test_app): """test no StructureData or CifData """ - wflow_cls = WorkflowFactory("crystal17.sym3d") - results, node = run_get_node( - wflow_cls, settings=DataFactory("dict")(dict={"symprec": 0.01})) + wflow_cls = WorkflowFactory('crystal17.sym3d') + results, node = run_get_node(wflow_cls, settings=DataFactory('dict')(dict={'symprec': 0.01})) assert node.is_failed, node.exit_status assert node.exit_status == wflow_cls.exit_codes.ERROR_INVALID_INPUT_RESOURCES.status @@ -33,79 
+30,88 @@ def test_no_structure(db_test_app): def test_with_structure(db_test_app, get_structure, data_regression): """test computing symmetry from StructureData """ results, node = run_get_node( - WorkflowFactory("crystal17.sym3d"), - settings=DataFactory("dict")(dict={"symprec": 0.01}), - structure=get_structure("pyrite")) + WorkflowFactory('crystal17.sym3d'), + settings=DataFactory('dict')(dict={ + 'symprec': 0.01 + }), + structure=get_structure('pyrite')) assert node.is_finished_ok, node.exit_status - assert "symmetry" in results - assert "structure" not in results - data_regression.check(results["symmetry"].attributes) + assert 'symmetry' in results + assert 'structure' not in results + attributes = results['symmetry'].attributes + attributes['computation'].pop('symmetry_version') + attributes['computation'].pop('computation_version') + data_regression.check(attributes) def test_with_cif(db_test_app, get_cif, data_regression): """test computing symmetry from CifData """ results, node = run_get_node( - WorkflowFactory("crystal17.sym3d"), - settings=DataFactory("dict")(dict={"symprec": 0.01}), - cif=get_cif("pyrite")) + WorkflowFactory('crystal17.sym3d'), settings=DataFactory('dict')(dict={ + 'symprec': 0.01 + }), cif=get_cif('pyrite')) assert node.is_finished_ok, node.exit_status - assert "symmetry" in results - assert "structure" in results - data_regression.check(results["symmetry"].attributes) - - -@pytest.mark.parametrize('compute_primitive,standardize_cell', [ - (True, False), - (False, True), - (True, True) -]) -def test_symmetrise_structure(db_test_app, get_structure, - compute_primitive, standardize_cell, data_regression): + assert 'symmetry' in results + assert 'structure' in results + attributes = results['symmetry'].attributes + attributes['computation'].pop('symmetry_version') + attributes['computation'].pop('computation_version') + data_regression.check(attributes) + + +@pytest.mark.parametrize('compute_primitive,standardize_cell', [(True, False), 
(False, True), (True, True)]) +def test_symmetrise_structure(db_test_app, get_structure, compute_primitive, standardize_cell, data_regression): """symmetrising structure with different options """ results, node = run_get_node( - WorkflowFactory("crystal17.sym3d"), - settings=DataFactory("dict")(dict={ - "symprec": 0.01, - "compute_primitive": compute_primitive, - "standardize_cell": standardize_cell + WorkflowFactory('crystal17.sym3d'), + settings=DataFactory('dict')(dict={ + 'symprec': 0.01, + 'compute_primitive': compute_primitive, + 'standardize_cell': standardize_cell }), - structure=get_structure("zincblende")) + structure=get_structure('zincblende')) # data_regression.check(node.attributes) assert node.is_finished_ok, node.exit_status - assert "symmetry" in results - assert "structure" in results - data_regression.check(results["symmetry"].attributes) + assert 'symmetry' in results + assert 'structure' in results + attributes = results['symmetry'].attributes + attributes['computation'].pop('symmetry_version') + attributes['computation'].pop('computation_version') + data_regression.check(attributes) -@pytest.mark.parametrize('compute_primitive', [ - True, - False -]) +@pytest.mark.parametrize('compute_primitive', [True, False]) def test_new_kind_names(db_test_app, get_structure, compute_primitive, data_regression): """test add kind names to StructureData """ results, node = run_get_node( - WorkflowFactory("crystal17.sym3d"), - settings=DataFactory("dict")(dict={ - "symprec": 0.01, - "kind_names": ["Fe1", "Fe1", "Fe2", "Fe2", "S", "S", "S", "S"], - "compute_primitive": compute_primitive}), - structure=get_structure("zincblende")) + WorkflowFactory('crystal17.sym3d'), + settings=DataFactory('dict')(dict={ + 'symprec': 0.01, + 'kind_names': ['Fe1', 'Fe1', 'Fe2', 'Fe2', 'S', 'S', 'S', 'S'], + 'compute_primitive': compute_primitive + }), + structure=get_structure('zincblende')) assert node.is_finished_ok, node.exit_status - assert "symmetry" in results - assert 
"structure" in results - data_regression.check(results["symmetry"].attributes) + assert 'symmetry' in results + assert 'structure' in results + attributes = results['symmetry'].attributes + attributes['computation'].pop('symmetry_version') + attributes['computation'].pop('computation_version') + data_regression.check(attributes) def test_new_kind_names_fail(db_test_app, get_structure): """test add kind names to StructureData """ - wflow_cls = WorkflowFactory("crystal17.sym3d") + wflow_cls = WorkflowFactory('crystal17.sym3d') results, node = run_get_node( - wflow_cls, settings=DataFactory("dict")(dict={ - "symprec": 0.01, - "kind_names": ["A"]}), - structure=get_structure("zincblende")) + wflow_cls, + settings=DataFactory('dict')(dict={ + 'symprec': 0.01, + 'kind_names': ['A'] + }), + structure=get_structure('zincblende')) assert node.is_failed, node.exit_status assert node.exit_status == wflow_cls.exit_codes.ERROR_RESET_KIND_NAMES.status diff --git a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_new_kind_names_False_.yml b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_new_kind_names_False_.yml index 2227c69..a176643 100644 --- a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_new_kind_names_False_.yml +++ b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_new_kind_names_False_.yml @@ -2,9 +2,7 @@ basis: fractional computation: angle_tolerance: null computation_class: aiida_crystal17.symmetry.symmetry - computation_version: 0.9.1b5 symmetry_program: spglib - symmetry_version: 1.13.0 symprec: 0.01 equivalent_sites: - 0 diff --git a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_new_kind_names_True_.yml b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_new_kind_names_True_.yml index dc89f74..584ab04 100644 --- a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_new_kind_names_True_.yml +++ 
b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_new_kind_names_True_.yml @@ -2,9 +2,7 @@ basis: fractional computation: angle_tolerance: null computation_class: aiida_crystal17.symmetry.symmetry - computation_version: 0.9.1b5 symmetry_program: spglib - symmetry_version: 1.13.0 symprec: 0.01 equivalent_sites: - 0 diff --git a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_symmetrise_structure_False_True_.yml b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_symmetrise_structure_False_True_.yml index 0303c15..dd7428a 100644 --- a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_symmetrise_structure_False_True_.yml +++ b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_symmetrise_structure_False_True_.yml @@ -2,9 +2,7 @@ basis: fractional computation: angle_tolerance: null computation_class: aiida_crystal17.symmetry.symmetry - computation_version: 0.9.1b5 symmetry_program: spglib - symmetry_version: 1.13.0 symprec: 0.01 equivalent_sites: - 0 diff --git a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_symmetrise_structure_True_False_.yml b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_symmetrise_structure_True_False_.yml index a5e668d..145d7fb 100644 --- a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_symmetrise_structure_True_False_.yml +++ b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_symmetrise_structure_True_False_.yml @@ -2,9 +2,7 @@ basis: fractional computation: angle_tolerance: null computation_class: aiida_crystal17.symmetry.symmetry - computation_version: 0.9.1b5 symmetry_program: spglib - symmetry_version: 1.13.0 symprec: 0.01 equivalent_sites: - 0 diff --git a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_symmetrise_structure_True_True_.yml b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_symmetrise_structure_True_True_.yml 
index a5e668d..145d7fb 100644 --- a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_symmetrise_structure_True_True_.yml +++ b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_symmetrise_structure_True_True_.yml @@ -2,9 +2,7 @@ basis: fractional computation: angle_tolerance: null computation_class: aiida_crystal17.symmetry.symmetry - computation_version: 0.9.1b5 symmetry_program: spglib - symmetry_version: 1.13.0 symprec: 0.01 equivalent_sites: - 0 diff --git a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_with_cif.yml b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_with_cif.yml index 0ca7480..e66951e 100644 --- a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_with_cif.yml +++ b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_with_cif.yml @@ -2,9 +2,7 @@ basis: fractional computation: angle_tolerance: null computation_class: aiida_crystal17.symmetry.symmetry - computation_version: 0.9.1b5 symmetry_program: spglib - symmetry_version: 1.13.0 symprec: 0.01 equivalent_sites: - 0 diff --git a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_with_structure.yml b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_with_structure.yml index 0ca7480..e66951e 100644 --- a/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_with_structure.yml +++ b/aiida_crystal17/tests/test_workflows/test_symmetrise_3d_struct/test_with_structure.yml @@ -2,9 +2,7 @@ basis: fractional computation: angle_tolerance: null computation_class: aiida_crystal17.symmetry.symmetry - computation_version: 0.9.1b5 symmetry_program: spglib - symmetry_version: 1.13.0 symprec: 0.01 equivalent_sites: - 0 diff --git a/aiida_crystal17/validation/__init__.py b/aiida_crystal17/validation/__init__.py index ec46ace..c6a87e6 100644 --- a/aiida_crystal17/validation/__init__.py +++ b/aiida_crystal17/validation/__init__.py @@ -1,3 +1,18 @@ 
+#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. import json import os @@ -56,9 +71,8 @@ def load_validator(schema): def is_array(checker, instance): return isinstance(instance, (tuple, list)) - type_checker = validator_cls.TYPE_CHECKER.redefine("array", is_array) - validator_cls = jsonschema.validators.extend( - validator_cls, type_checker=type_checker) + type_checker = validator_cls.TYPE_CHECKER.redefine('array', is_array) + validator_cls = jsonschema.validators.extend(validator_cls, type_checker=type_checker) validator = validator_cls(schema=schema) return validator @@ -91,10 +105,8 @@ def validate_against_schema(data, schema): # validator.validate(data) errors = sorted(validator.iter_errors(data), key=lambda e: e.path) if errors: - raise jsonschema.ValidationError( - "\n".join(["- {} [key path: '{}']".format( - error.message, "/".join([str(p) for p in error.path])) - for error in errors]) - ) + raise jsonschema.ValidationError('\n'.join([ + "- {} [key path: '{}']".format(error.message, '/'.join([str(p) for p in error.path])) for error in errors + ])) return True diff --git a/aiida_crystal17/validation/doss_input.schema.json b/aiida_crystal17/validation/doss_input.schema.json index 8c75596..c9fb654 100644 --- a/aiida_crystal17/validation/doss_input.schema.json +++ b/aiida_crystal17/validation/doss_input.schema.json @@ -51,11 +51,13 @@ "array", "null" ], + "maxItems": 15, "items": { "type": "array", "minItems": 1, "items": { - "type": "integer" + "type": "integer", + 
"minimum": 0 } }, "default": null @@ -66,14 +68,16 @@ "array", "null" ], + "maxItems": 15, "items": { "type": "array", "minItems": 1, "items": { - "type": "integer" + "type": "integer", + "minimum": 1 } }, "default": null } } -} \ No newline at end of file +} diff --git a/aiida_crystal17/workflows/__init__.py b/aiida_crystal17/workflows/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/workflows/__init__.py +++ b/aiida_crystal17/workflows/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. diff --git a/aiida_crystal17/workflows/common/__init__.py b/aiida_crystal17/workflows/common/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/workflows/common/__init__.py +++ b/aiida_crystal17/workflows/common/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
diff --git a/aiida_crystal17/workflows/common/restart.py b/aiida_crystal17/workflows/common/restart.py index c7b378e..cfb9b52 100644 --- a/aiida_crystal17/workflows/common/restart.py +++ b/aiida_crystal17/workflows/common/restart.py @@ -1,4 +1,18 @@ +#!/usr/bin/env python # -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. # pylint: disable=inconsistent-return-statements,no-member """Base implementation of `WorkChain` class that implements a simple automated restart mechanism for calculations. diff --git a/aiida_crystal17/workflows/crystal_main/base.py b/aiida_crystal17/workflows/crystal_main/base.py index 9c6489f..6fd805b 100644 --- a/aiida_crystal17/workflows/crystal_main/base.py +++ b/aiida_crystal17/workflows/crystal_main/base.py @@ -1,4 +1,18 @@ +#!/usr/bin/env python # -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
from aiida import orm from aiida.common import AttributeDict from aiida.common.exceptions import InputValidationError diff --git a/aiida_crystal17/workflows/crystal_props/__init__.py b/aiida_crystal17/workflows/crystal_props/__init__.py index e69de29..10f1044 100644 --- a/aiida_crystal17/workflows/crystal_props/__init__.py +++ b/aiida_crystal17/workflows/crystal_props/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. diff --git a/aiida_crystal17/workflows/crystal_props/cry_doss.py b/aiida_crystal17/workflows/crystal_props/cry_doss.py index 94b7837..92bbebc 100644 --- a/aiida_crystal17/workflows/crystal_props/cry_doss.py +++ b/aiida_crystal17/workflows/crystal_props/cry_doss.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. 
from aiida.common import AttributeDict, LinkType from aiida.engine import if_, ToContext, WorkChain from aiida.manage.caching import disable_caching @@ -100,12 +115,12 @@ def submit_scf_calculation(self): incoming = self.inputs.wf_folder.get_incoming( node_class=CryCalculation, link_type=LinkType.CREATE).all_nodes() if not incoming: - self.report("{} was not created by a CryMainCalculation".format(self.inputs.wf_folder)) + self.report('{} was not created by a CryMainCalculation'.format(self.inputs.wf_folder)) return self.exit_codes.ERROR_NO_INCOMING_CALC previous_calc = incoming[0] if not previous_calc.is_finished_ok: - self.report("{} did not finish ok: {}".format(previous_calc, previous_calc.exit_status)) + self.report('{} did not finish ok: {}'.format(previous_calc, previous_calc.exit_status)) return self.exit_codes.ERROR_FAILED_INCOMING_CALC # create a restart calculation @@ -114,9 +129,9 @@ def submit_scf_calculation(self): # we only want to run a single-point calculation, so can remove any geometry optimisation try: params = builder.parameters.get_dict() - params.get("geometry", {}).pop("optimise", None) + params.get('geometry', {}).pop('optimise', None) except AttributeError: - self.report("{} has no `parameters` intput".format(previous_calc)) + self.report('{} has no `parameters` intput'.format(previous_calc)) return self.exit_codes.ERROR_INCOMPLETE_INCOMING_CALC self.ctx.calc_params = params @@ -124,40 +139,40 @@ def submit_scf_calculation(self): try: self.ctx.calc_options = builder.metadata.options except AttributeError: - self.report("{} has no `metadata.options` set".format(previous_calc)) + self.report('{} has no `metadata.options` set'.format(previous_calc)) return self.exit_codes.ERROR_INCOMPLETE_INCOMING_CALC # if new metadata options have been supplied then use them - if "meta_options" in self.inputs.cry: - self.report("replacing metadata of calculation") - self.ctx.calc_options.update(self.inputs.cry["meta_options"]) + if 'meta_options' in 
self.inputs.cry: + self.report('replacing metadata of calculation') + self.ctx.calc_options.update(self.inputs.cry['meta_options']) # use the final structure (output if the previous calculation was an optimization) - if "structure" in previous_calc.outputs: - self.report("using optimised structure") + if 'structure' in previous_calc.outputs: + self.report('using optimised structure') builder.structure = previous_calc.outputs.structure # we want to use the final structure, so the input wavefunction will not apply - if "wf_folder" in builder: - builder.pop("wf_folder") + if 'wf_folder' in builder: + builder.pop('wf_folder') # TODO add a `remove_restarts` function to CryMainCalculation, # to remove e.g. GUESSP, HESSOPT, RESTART keywords - self.ctx.calc_params.setdefault("scf", {}).pop("GUESSP", None) + self.ctx.calc_params.setdefault('scf', {}).pop('GUESSP', None) builder.parameters = CryInputParamsData(data=params) builder.metadata.options = self.ctx.calc_options if 'test_run' in self.inputs and self.inputs.test_run.value: - self.report("`test_run` specified, stopping before submitting scf calculation") + self.report('`test_run` specified, stopping before submitting scf calculation') return self.exit_codes.END_OF_TEST_RUN # TODO could submit CryMainBaseWorkChain - builder.metadata.call_link_label = "scf_calc" + builder.metadata.call_link_label = 'scf_calc' try: with disable_caching(): calculation = self.submit(builder) except Exception as err: - self.report("{} submission failed: {}".format(previous_calc, err)) + self.report('{} submission failed: {}'.format(previous_calc, err)) return self.exit_codes.ERROR_INCOMPLETE_INCOMING_CALC self.report('launching SCF calculation {}'.format(calculation)) @@ -167,19 +182,19 @@ def submit_scf_calculation(self): def check_scf_calculation(self): if not self.ctx.calc_scf.is_finished_ok: - self.report("{} failed with exit code: {}".format(self.ctx.calc_scf, self.ctx.calc_scf.exit_status)) + self.report('{} failed with exit code: 
{}'.format(self.ctx.calc_scf, self.ctx.calc_scf.exit_status)) return self.exit_codes.ERROR_SCF_CALC_FAILED - self.report("{} finished successfully".format(self.ctx.calc_scf)) + self.report('{} finished successfully'.format(self.ctx.calc_scf)) self.ctx.wf_folder = self.ctx.calc_scf.outputs.remote_folder def submit_doss_calculation(self): if 'test_run' in self.inputs and self.inputs.test_run.value: - self.report("`test_run` specified, stopping before submitting doss calculation") + self.report('`test_run` specified, stopping before submitting doss calculation') return self.exit_codes.END_OF_TEST_RUN inputs = AttributeDict(self.exposed_inputs(CryDossCalculation, self._doss_namespace)) inputs.wf_folder = self.ctx.wf_folder - inputs['metadata']['call_link_label'] = "doss_calc" + inputs['metadata']['call_link_label'] = 'doss_calc' calculation = self.submit(CryDossCalculation, **inputs) self.report('launching DOSS calculation {}'.format(calculation)) return ToContext(calc_doss=calculation) @@ -187,11 +202,11 @@ def submit_doss_calculation(self): def check_doss_calculation(self): if not self.ctx.calc_doss.is_finished_ok: - self.report("{} failed with exit code: {}".format( + self.report('{} failed with exit code: {}'.format( self.ctx.calc_doss, self.ctx.calc_doss.exit_status)) return self.exit_codes.ERROR_DOSS_CALC_FAILED - self.report("{} finished successfully".format(self.ctx.calc_doss)) + self.report('{} finished successfully'.format(self.ctx.calc_doss)) namespace_separator = self.spec().namespace_separator for link_triple in self.ctx.calc_doss.get_outgoing(link_type=LinkType.CREATE).link_triples: @@ -201,7 +216,7 @@ def on_terminated(self): """Clean the working directories of all child calculations if `clean_workdir=True` in the inputs.""" super(CryPropertiesWorkChain, self).on_terminated() - if "clean_workdir" not in self.inputs or self.inputs.clean_workdir.value is False: + if 'clean_workdir' not in self.inputs or self.inputs.clean_workdir.value is False: 
self.report('remote folders will not be cleaned') return diff --git a/aiida_crystal17/workflows/symmetrise_3d_struct.py b/aiida_crystal17/workflows/symmetrise_3d_struct.py index 83476ca..8835076 100644 --- a/aiida_crystal17/workflows/symmetrise_3d_struct.py +++ b/aiida_crystal17/workflows/symmetrise_3d_struct.py @@ -1,10 +1,23 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2019 Chris Sewell +# +# This file is part of aiida-crystal17. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms and conditions +# of version 3 of the GNU Lesser General Public License. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. """a work flow to symmetrise a structure and compute the symmetry operations""" import traceback from aiida.plugins import DataFactory from aiida.engine import WorkChain, calcfunction -from aiida_crystal17.symmetry import ( - reset_kind_names, - standardize_cell, find_primitive, compute_symmetry_dict) +from aiida_crystal17.symmetry import (reset_kind_names, standardize_cell, find_primitive, compute_symmetry_dict) from aiida_crystal17.validation import validate_against_schema from aiida.orm.nodes.data.base import to_aiida_type @@ -15,59 +28,49 @@ @calcfunction def standard_structure(structure, settings): - atol = settings.get_attribute("angle_tolerance", None) - return standardize_cell( - structure, settings["symprec"], atol, - to_primitive=False, no_idealize=True) + atol = settings.get_attribute('angle_tolerance', None) + return standardize_cell(structure, settings['symprec'], atol, to_primitive=False, no_idealize=True) @calcfunction def standard_primitive_structure(structure, settings): - atol = settings.get_attribute("angle_tolerance", None) - return standardize_cell( - structure, 
settings["symprec"], atol, - to_primitive=True, no_idealize=True) + atol = settings.get_attribute('angle_tolerance', None) + return standardize_cell(structure, settings['symprec'], atol, to_primitive=True, no_idealize=True) @calcfunction def standard_primitive_ideal_structure(structure, settings): - atol = settings.get_attribute("angle_tolerance", None) - return standardize_cell( - structure, settings["symprec"], atol, - to_primitive=True, no_idealize=False) + atol = settings.get_attribute('angle_tolerance', None) + return standardize_cell(structure, settings['symprec'], atol, to_primitive=True, no_idealize=False) @calcfunction def standard_ideal_structure(structure, settings): - atol = settings.get_attribute("angle_tolerance", None) - return standardize_cell( - structure, settings["symprec"], atol, - to_primitive=False, no_idealize=False) + atol = settings.get_attribute('angle_tolerance', None) + return standardize_cell(structure, settings['symprec'], atol, to_primitive=False, no_idealize=False) @calcfunction def primitive_structure(structure, settings): - atol = settings.get_attribute("angle_tolerance", None) - return find_primitive( - structure, settings["symprec"], atol) + atol = settings.get_attribute('angle_tolerance', None) + return find_primitive(structure, settings['symprec'], atol) @calcfunction def change_kind_names(structure, settings): - return reset_kind_names(structure, settings["kind_names"]) + return reset_kind_names(structure, settings['kind_names']) @calcfunction def compute_symmetry(structure, settings): - atol = settings.get_attribute("angle_tolerance", None) - data = compute_symmetry_dict( - structure, settings["symprec"], atol) + atol = settings.get_attribute('angle_tolerance', None) + data = compute_symmetry_dict(structure, settings['symprec'], atol) return SymmetryData(data=data) @calcfunction def cif_to_structure(cif): - return cif.get_structure(converter="ase") + return cif.get_structure(converter='ase') class 
Symmetrise3DStructure(WorkChain): @@ -79,80 +82,84 @@ class Symmetrise3DStructure(WorkChain): @classmethod def define(cls, spec): super(Symmetrise3DStructure, cls).define(spec) - spec.input("structure", valid_type=StructureData, required=False) - spec.input("cif", valid_type=DataFactory("cif"), required=False) - spec.input("settings", valid_type=DataFactory("dict"), - serializer=to_aiida_type, - required=True, validator=cls.validate_settings) - - spec.outline( - cls.validate_inputs, - cls.compute - ) - - spec.output("symmetry", valid_type=SymmetryData, required=True) - spec.output("structure", valid_type=StructureData, required=False) - - spec.exit_code(300, 'ERROR_INVALID_INPUT_RESOURCES', - message='one of either a structure or cif input must be supplied') - spec.exit_code(301, 'ERROR_NON_3D_STRUCTURE', - message='the supplied structure must be 3D (i.e. have all dimensions pbc=True)"') - spec.exit_code(302, 'ERROR_COMPUTE_OPTIONS', - message='idealize can only be used when standardize=True') - spec.exit_code(303, 'ERROR_RESET_KIND_NAMES', - message='the kind names supplied are not compatible with the structure') - spec.exit_code(304, 'ERROR_NEW_STRUCTURE', - message='error creating new structure') - spec.exit_code(305, 'ERROR_COMPUTING_SYMMETRY', - message='error computing symmetry operations') + spec.input('structure', valid_type=StructureData, required=False) + spec.input('cif', valid_type=DataFactory('cif'), required=False) + spec.input( + 'settings', + valid_type=DataFactory('dict'), + serializer=to_aiida_type, + required=True, + validator=cls.validate_settings) + + spec.outline(cls.validate_inputs, cls.compute) + + spec.output('symmetry', valid_type=SymmetryData, required=True) + spec.output('structure', valid_type=StructureData, required=False) + + spec.exit_code( + 300, 'ERROR_INVALID_INPUT_RESOURCES', message='one of either a structure or cif input must be supplied') + spec.exit_code( + 301, + 'ERROR_NON_3D_STRUCTURE', + message='the supplied structure 
must be 3D (i.e. have all dimensions pbc=True)"') + spec.exit_code(302, 'ERROR_COMPUTE_OPTIONS', message='idealize can only be used when standardize=True') + spec.exit_code( + 303, 'ERROR_RESET_KIND_NAMES', message='the kind names supplied are not compatible with the structure') + spec.exit_code(304, 'ERROR_NEW_STRUCTURE', message='error creating new structure') + spec.exit_code(305, 'ERROR_COMPUTING_SYMMETRY', message='error computing symmetry operations') @classmethod def get_settings_schema(cls): return { - "$schema": "http://json-schema.org/draft-07/schema", - "type": "object", - "required": [ - "symprec" - ], - "properties": { - "symprec": { - "description": ("Length tolerance for symmetry finding: " - "0.01 is fairly strict and works well for properly refined structures"), - "default": 0.01, - "type": "number", - "exclusiveMinimum": 0 + '$schema': 'http://json-schema.org/draft-07/schema', + 'type': 'object', + 'required': ['symprec'], + 'properties': { + 'symprec': { + 'description': ('Length tolerance for symmetry finding: ' + '0.01 is fairly strict and works well for properly refined structures'), + 'default': + 0.01, + 'type': + 'number', + 'exclusiveMinimum': + 0 }, - "angle_tolerance": { - "description": "Angle tolerance for symmetry finding, in the unit of angle degrees", - "default": None, - "type": ["number", "null"], - "exclusiveMinimum": 0 + 'angle_tolerance': { + 'description': 'Angle tolerance for symmetry finding, in the unit of angle degrees', + 'default': None, + 'type': ['number', 'null'], + 'exclusiveMinimum': 0 }, - "kind_names": { - "description": "a list of kind names, to assign each Site of the StructureData", - "type": "array", - "minItems": 1, - "items": { - "type": "string" + 'kind_names': { + 'description': 'a list of kind names, to assign each Site of the StructureData', + 'type': 'array', + 'minItems': 1, + 'items': { + 'type': 'string' } }, - "compute_primitive": { - "description": "whether to convert the structure to its primitive 
form", - "type": "boolean", - "default": False + 'compute_primitive': { + 'description': 'whether to convert the structure to its primitive form', + 'type': 'boolean', + 'default': False }, - "standardize_cell": { - "description": ( - "whether to standardize the structure, see " - "https://atztogo.github.io/spglib/definition.html#conventions-of-standardized-unit-cell"), - "type": "boolean", - "default": False + 'standardize_cell': { + 'description': + ('whether to standardize the structure, see ' + 'https://atztogo.github.io/spglib/definition.html#conventions-of-standardized-unit-cell'), + 'type': + 'boolean', + 'default': + False }, - "idealize_cell": { - "description": ("whether to remove distortions of the unit cell's atomic positions, " - "using obtained symmetry operations"), - "type": "boolean", - "default": False + 'idealize_cell': { + 'description': ("whether to remove distortions of the unit cell's atomic positions, " + 'using obtained symmetry operations'), + 'type': + 'boolean', + 'default': + False } } } @@ -180,10 +187,10 @@ def validate_inputs(self): return self.exit_codes.ERROR_NON_3D_STRUCTURE settings_dict = self.inputs.settings.get_dict() - self.ctx.kind_names = settings_dict.get("kind_names", None) - self.ctx.compute_primitive = settings_dict.get("compute_primitive", False) - self.ctx.standardize_cell = settings_dict.get("standardize_cell", False) - self.ctx.idealize_cell = settings_dict.get("idealize_cell", False) + self.ctx.kind_names = settings_dict.get('kind_names', None) + self.ctx.compute_primitive = settings_dict.get('compute_primitive', False) + self.ctx.standardize_cell = settings_dict.get('standardize_cell', False) + self.ctx.idealize_cell = settings_dict.get('idealize_cell', False) if self.ctx.idealize_cell and not self.ctx.standardize_cell: return self.exit_codes.ERROR_COMPUTE_OPTIONS @@ -197,34 +204,30 @@ def compute(self): structure = change_kind_names(structure, self.inputs.settings) except AssertionError as err: 
traceback.print_exc() - self.logger.error("reset_kind_names: {}".format(err)) + self.logger.error('reset_kind_names: {}'.format(err)) return self.exit_codes.ERROR_RESET_KIND_NAMES self.ctx.new_structure = True try: if self.ctx.standardize_cell: if self.ctx.compute_primitive and self.ctx.idealize_cell: - structure = standard_primitive_ideal_structure( - structure, self.inputs.settings) + structure = standard_primitive_ideal_structure(structure, self.inputs.settings) self.ctx.new_structure = True elif self.ctx.compute_primitive: - structure = standard_primitive_structure( - structure, self.inputs.settings) + structure = standard_primitive_structure(structure, self.inputs.settings) self.ctx.new_structure = True elif self.ctx.idealize_cell: - structure = standard_ideal_structure( - structure, self.inputs.settings) + structure = standard_ideal_structure(structure, self.inputs.settings) self.ctx.new_structure = True else: - structure = standard_structure( - structure, self.inputs.settings) + structure = standard_structure(structure, self.inputs.settings) self.ctx.new_structure = True elif self.ctx.compute_primitive: structure = primitive_structure(structure, self.inputs.settings) self.ctx.new_structure = True except Exception as err: traceback.print_exc() - self.logger.error("structure creation: {}".format(err)) + self.logger.error('structure creation: {}'.format(err)) return self.exit_codes.ERROR_NEW_STRUCTURE if self.ctx.new_structure: @@ -234,7 +237,7 @@ def compute(self): symmetry = compute_symmetry(structure, self.inputs.settings) except Exception as err: traceback.print_exc() - self.logger.error("symmetry computation: {}".format(err)) + self.logger.error('symmetry computation: {}'.format(err)) return self.exit_codes.ERROR_COMPUTING_SYMMETRY self.out('symmetry', symmetry) diff --git a/conda_dev_env.yml b/conda_dev_env.yml index 12afb73..1fa68ab 100644 --- a/conda_dev_env.yml +++ b/conda_dev_env.yml @@ -15,7 +15,7 @@ dependencies: - jsonschema - pycifrw==4.4 - 
ruamel.yaml -- six +- six >=1.12.0 - spglib>=1.10.0,<2.0.0 # testing - coverage diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst new file mode 100644 index 0000000..67d2262 --- /dev/null +++ b/docs/source/changelog.rst @@ -0,0 +1,459 @@ +Changelog +========= + +v0.9.2b5 (2019-08-01) +--------------------- + +- Add documentation. +- Add doss fort.25 parsing from cmndline. +- Add ``compute_orbitals`` function. +- Implement parse_bsets_stdin to create BasisSetData. +- Add licence to all python files and pre-commit format. +- Change copyright license. +- Add basis set parser and tests. +- Rename gui_parse to parse_fort34. +- Make check for num_operations==nsymops in 'gui_file_read' optional. + + This is not set for TESTGEOM output. +- Allow for 'trigonal' and 'rhombohedral' crystal types in gulp geometry + input (these are subsets of 'hexagonal'). + + +v0.9.1b5 (2019-07-25) +--------------------- + +- Remove ``version`` key from process attribute comparisons in tests. + +- Add command line functions for parsing crystal stdout and stdin files + (#16) +- Update Documentation. + + Restructure documentation into separate indexes and add guides for: + + - ``CryDossCalculation`` + - ``BaseRestartWorkChain`` + - ``CryPropertiesWorkChain`` + + Also add ``doc8`` to ``pre-commit`` and fix ``conda_dev_env.yaml`` +- ``BaseRestartWorkChain``: allow handlers to override exit status. + + Mirrors aiidateam/aiida-quantumespresso commit 35299616a03625fe899d63051f6a7c78dc53408e + + Sometimes a calculation returns a non-zero exit code indicating it is + failed, but it can actually still be considered successful by the + workchain. In this case, the handler wants to override the exit code of + the calculation and return ``ExitCode(0)`` to indicate that the workchain + is successful despite the last calculation being failed. To facilitate + this the logic in the ``BaseRestartWorkChain`` is updated to detect zero + being returned by a handler. 
+ + In addition, to make this work, the default value of the ``exit_code`` for + an ``ErrorHandlerReport`` tuple needs to be ``None`` to be able to + distinguish when ``ExitCode(0)`` is purposefully returned. +- Improve getting started documentation. +- Add development instuctions for creating documentation. +- Fix heading levels. +- Improve install and development instructions. +- Add pip dev install of root package to conda usage instuctions. +- Add ``aiida-core.services`` to conda development environment. +- Upgrade ipypublish dependancy to 0.10.7. +- Update pre-commit configuration and upgrade RTD's to Sphinx v2 (#14) + +- Add warning for fortran floating-point exceptions. +- Minor fixes (#13) + + - fix ``crystal17.doss`` test (when run with non-mock executable) + - reformat crystal_stdout.py (with yapf line_length=120) + - added additional tests for stdout parser (molecule and testgeom) + - add description of how computation runs for serial and parallel modes + - for ase requirements ase 3.18 is not compatible with python < 3.5. + + +v0.9.0b5 (2019-07-18) +--------------------- + +- Added documentation for ``crystal17.main.base`` workflow. +- Updated documentation and removed graph.py (now in aiida-core) +- Upgraded to aiida-core==1.0.0b5. +- Moved fort.25 parsing internally, and removed ejplugins dependancy. +- Use internal stdout parser for ``crystal17.main`` + + This builds and improves on the original ejplugins implementation: + making the parsing flow more easy to understand, + adding additional data parsing (some taken from tilde), + and restructuring the output json. +- Record the order of configuration names in the ``gulp.fitting`` results + node. +- Output a new potential, resulting from the ``gulp.fitting`` +- Hard code breaking terms in ``read_atom_section`` +- Add line breaking (with ``&``) to reaxff potential lines longer than 78 + characters. +- Add reading of lennard potential files. +- Format lennard-jones number valuesin input file. 
+- Add band gap calcfunction. +- Add ``CryPropertiesWorkChain`` and tests. + + Also: + + - sort output of basis sets (by element) in crystal INPUT file + - moved/re-computed doss/fermi raw test files + - added environmental variable to run test computations in non-tempdir + - doc string improvements. +- Fix reading gulp tables that have values replaced with + \*\*\*\*\*\*\*s. + + Sometimes values can be output as \*'s (presumably if they are too large) +- Improve docstring of ``CryInputParams`` +- Added functionality to run GULP calculations with 1-d structures. +- Add a settings input node to ``GulpFittingCalculation`` +- Update package version in tests. +- Version bump. +- Add extra info to fitting parser. +- Rewrote GULP execution and parsing. + + - The input file is no streamed to ``gulp`` via stdin and outputs are captured from stdout and stderr. + - Single/Opt raw parser rewrote, to be inline with fitting parser + - Exit codes updated and added + - stderr file read and added to 'warnings' key of results + - added dump file to fitting output + - made calculation have data_regression checks. +- Store names of files in potential repo (rather than using class + attributes) +- Retrieve fitting flag info from potential creation, and store + potential dict in repo (rather than as attributes) +- Added input creation for reaxff fitting. +- Added outout of fitting. +- Finalised creation of fitting input file (implemented for ``lj``) +- Add checks for index keys. +- Refactored reaxff keys and gulp write (in preparation for adding + fitting flags) +- Create gulp_fitting_flags.yaml. +- Store full potential file in PotentialData (rather than creating on + calculation submission) + + Then we don't have to rely on the external modules being there at calculation time. + Also change potential keys from ``id1.id2`` to ``id1-id2`` (since AiiDa doesn't allow attributes with '.'s) +- Standardised GULP potentials. 
+ + All potentials should share the a common jsonschema + + Also added reaxff tests, and initial implementation of fitting calculation. +- Restructure gulp raw test files. +- Run program directly from ``crystal`` executable, and add + ``CryMainBaseWorkChain`` (#9) + + Before the calculations were running from ``runcry17``, + which is a bash script that copies the files to/from a temporary folder, + and changes the names of the files. + This functionality should all be handled by other parts of the AiiDA machinery, + so running from the base binary is more appropriate, and allows for more functionality. + + Additionally: + + - added restart functionality to ``CryMainCalculation`` (\*via\* a fort.9 in a remote folder) + - added checks and error codes for PbsPro messages to ``_scheduler_stderr.txt`` (e.g. walltime limit reached) + - allow SHRINK IS input to be a list ([IS1, IS2, IS3]) + - added output of ``TrajectoryData`` of optimisation steps for ``CryMainCalculation`` + - added ``CryMainBaseWorkChain`` (a replica of ``PwBaseWorkChain`` from ``aiida-quanumespresso``) + - improved testing infrastructure + - updated documentation. +- Fix ``KindData`` docstrings. +- Move test files to correct place. +- Combatibility test fixes. +- Added crystal.fermi calculation. +- Add crystal17.doss calculation. +- Change doss input format. +- Make num_regression test optional (on pandas ImportError) +- Added DOSS output (f25) raw parser. +- Add DOSS raw input parsers. +- Minor updates. +- Ensure cif to structure conversion provenance is stored. +- Update calc_main_immigrant.ipynb. +- Move tests to central folder. +- Rewrite immigration functions. +- Update aiida-core version to 1.0.0b4. +- Graph improvements. + + - add global_edge_style + - color excepted processes red. +- Graph improvements. 
+ + - Change ``include_calculation_intputs`` -> ``include_process_intputs``, + and ``include_calculation_outputs`` -> ``include_process_outputs`` + - include link_pair in edge set, so that multiple (unique) links can exist between nodes + - add sublabel for Str, Bool and UpfData. + +- Allow additional keys in the dictionary (so it can be used for other + purposes) +- Improve ``crystal17.sym3d`` + + ``Symmetrise3DStructure`` now accepts a settings ``Dict`` containing the settings data. + This is validated against a jsonschema. + + - kind names can also now be reset + - add exit codes + - add additional tests + - update documentation. +- Add some helpful methods for manipulating StructureData. + + +v0.6.0b3 (2019-06-22) +--------------------- +- Improve fractional <-> cartesian conversion. + + Use efficient numpy functions. +- Use kinds from input structure, in ``gulp.optimize`` parser. +- Fix ``gulp.optimize`` parser, if the optimisation does not converge. + + - ensure the correct exit_code is returned + - ensure the output cif is still read, and the output structure node created + - add test. +- Improve crystal17.main error reporting, and add tests. + + Added lots more error codes, and the parser maps the error messages, + extracted from the CRYSTAL output file, to the most appropriate one. +- Move raw file content parsers to a submodule. + + To make it more obvious what is the aiida Parser plugin. +- Move pytest timeout to configuration file. +- Update readme conda install. +- Update conda installation command. +- Don't retrieve input file (since it is already stored in CalcJob repo) +- Fix creation of output structure from cif. +- Add gulp potential class to entry points. +- Add EmpiricalPotential node type for gulp potential input. +- Use ase for cif converter. +- Update Symmetrise3DStructure workflow and add tests. +- Move structure creation in tests to pytest fixture. +- Add an exit code for non optimised calculations. 
+- Fix symmetry restricted computations for GULP. + + When including symmetry restrictions in GULP input files, + only symmetry inequivalent sites (and) positions should be listed. + We parse these in the symmetry input node. +- Retrieve input file for GULP computations. +- Add method for getting the spacegroup info of a symmetry node. +- Require correct symmetry input node type (crystal17.symmetry) +- Version bump. +- Spelling error fix. +- Remove pypi deployment flag from python=2.7 tests. + + +v0.5.0b3 (2019-06-13) +--------------------- +- Add GULP calculations (#4) + + - update aiida-core to v1.0.0b3 + - added GULP calculations, tests and documentation + - add dependencies for reading CIF files + - implement calculation submission tests (using process.prepare_for_submission) + - implement new calculation immigration method + - re-number calculation exit codes + - update readthedocs build. +- Update .travis.yml. +- Update to aiida-core v1.0.0b2 (#2) + + Essentially rewrote the entire package. + + +v0.4.1 (2019-03-03) +------------------- +- Bug fix for pbc not 3. +- Added conda install info. +- Update test_parse_geometry.py. + + +v0.4.0 (2019-03-02) +------------------- +- Round coordinates. +- Change mock_runcry17 to an entry_point. +- Replace aiida_core atomic_tools extras with subset. +- Update geometry.py. +- Update test_cry_basic.py. +- Remove pymatgen dependency from tests. +- Fix pymatgen dependencies. +- Update .travis.yml. +- Setup for conda dist. +- Test style fix. +- Updated computer get method for develop (1.0.0a2) + + +v0.3.2a1 (2018-09-15) +--------------------- +- Updated version. + + +v0.3.1a1 (2018-09-15) +--------------------- +- Remove file. +- Install in development mode. +- Fix coverage (4) +- Fix coverage (3) +- Only cover package. +- Omit tests from coverage report. +- Updated doc on installation. +- Updated readme and added pypi deployment. + + +v0.3.0a1 (2018-09-12) +--------------------- +- Update version. +- Finished documentation. 
+- Updated documentation. +- Potential fix for aiida v0.12 process runs. +- Added cmndline tests. +- Added cmnd line plugins. +- Add to test. +- Don't output structure in no optimisation. +- Added test. +- Store fractional symops instead of cartesian. +- Convert output operations fractional coordinates to cartesian + coordinates. +- Compare_operations improvement. +- Moved operation comparison to StructSettingsData. +- Test update. +- Replaced output_arrays with output_settings. +- Refactored structure manipulation as two-step process. +- Tests fix. +- Added full run test for main calc. +- Use input structure to get kinds. +- Added run_get_node util. +- Added StructSettingsData (and tests) +- Added skipif mark in pytest. +- Roll back commit. +- Possible fix for sqlalchemy get_authinfo. +- Refactored test utils and added allowed fails. +- Remove ignored tests. +- Revert "test" + + This reverts commit ba2047e5465f0f826ca08a0cb6b5e3a552bba22c. +- Added development sqlalchemy test. +- Turn off caching. +- Added full execution test. +- Added immigrant documentation. +- Added input linking to immigrant creation. +- Added immigrant example. +- Added retrieved folder to outputs of immigrant. +- Added rabbitmq to services. +- Added pytest-tornado. +- Added pytest-timeout. +- Added migration workflow function. +- Doc fix. +- Api documentation update. +- Refactored parser to extract mainout parsing. +- Added immigrant as plugin. +- Add to test. +- Added CryMainImmigrant (and tests) +- Sqlalchemy fix. +- Aiida v1 test fix. +- V1 test fix. +- Added computer configuration to computer configuration. +- Added migrate.create_inputs. +- Added basis set validation. +- Style test fix. +- Added read_inputd12 (and tests) +- Removed diff modules and updated version. + + +v0.2.0a0 (2018-09-05) +--------------------- +- Finished initial crystal17.main documentation. +- Refactored geometry and added documentation. +- Added initial Settings documentation. +- Added full api to docs. 
+- Added test documentation. +- Minor doc update. +- Initial addition of main calculation documentation. +- End-to-end test fixes (relaying atomid_kind_map to parser) +- Added crystal.main example. +- Moved atom_props creation to own method and added to + prepare_and_validate. +- Added test with spin. +- Added atom specific properties to output d12. +- Move validation to separate module. +- Pre-commit test fix. +- Break symmetry by kind. +- Added kinds section to settings dict. +- Added BasisSetData input to .d12 creation. +- Refactored BasisSetData to store file content separately to metadata. +- Added python 3 compatibility. +- Prospector test fix. +- Added BasisSetData plugin (and tests) +- Added settings schema. +- Added inputd12 writer. +- Added .gui creation and input schema. +- Added template implementation of crystal17.main calculation plugin. +- Remove stdout since file isn't actually created via this. +- Coverage only for package. +- Add coverage setting and badge. +- Add testing requirement for coverage. +- Add test coverage reporting. +- Skip failing test for develop branch. +- Added initial .gui read/write and testing. +- Added symmops and arraydata output. +- Remove separate pip install. +- Updated readme on code style. +- Update pre-commit versions and ignored folders. +- Merged style and version into pre-commit. +- Changed diff to crystal17.diff and added correct requirement extra. +- Run yapf formatting. +- Added todo extension. +- Updated documentation. +- Added mulliken parsing. +- Improvements to tests. +- Remove computers workdir after testing. +- Enforce pytest 3.6.3 (for aiida develop) + + See https://github.com/aiidateam/aiida_core/issues/1911#issuecomment-416470291. +- Added user guide for ``crystal17.basic`` +- Added example and documentation. +- Remove ase install. +- Attempt to fix build failure of readthedocs. +- Added to readme. +- Updated some things in line with aiida-plugin-cutter. 
+ + Up to commit on Aug 27, 2018 873921e327a0944884088a11ae1548b00ccff7e7. +- Added optional input of external geometry file (and testing) +- Added initial parser and tests. +- Initial implementation and testing of crystal17.basic parser. +- Typo stopped extras installing. +- Added output file check. +- Test for files output by calculation. +- Fixed locating executable scripts created by pip install. +- Added tests for process execution. +- Added basic crystal parser. +- Test running diff calc. +- Style correction. +- Added mock CRYSTAL17 executable and refactored testing. +- Added basic crystal computation and submission test. +- Test correction. +- Corrected cry17_script location. +- Split version and style checks. +- Added local CRYSTAL17 setup scripts. +- Changed example entrance points. +- Split session scoped test fixture into overarching conftest.py. + + As per https://docs.pytest.org/en/latest/fixture.html#conftest-py-sharing-fixture-functions. +- Changes to pass pylint test. +- Pre commit fix. +- Revert "try adding pre-commit test (6)" + + This reverts commit 6e7a33d1ac4baa2f406f200e799484376d087f13. +- Revert "try without reentry scan" + + This reverts commit a12dc048c9168b4718c00ecc39865de70d125bc9. +- Refactored modules and updated test setup. +- Travis: ignore examples folder. +- Travis: load plugins. +- Change tests from unittest to pytest. +- Remove version check for travis. +- Changed to template from https://github.com/aiidateam/aiida-plugin- + cutter. +- Commit to activate travis. +- Updated setup information. +- Replaced template name with crystal17. + + Step 3 of https://aiida-core.readthedocs.io/en/latest/developer_guide/plugins/quickstart.html. +- Added plugin template from https://github.com/aiidateam/aiida-plugin- + template/archive/master. +- Initial commit. 
diff --git a/docs/source/index.rst b/docs/source/index.rst index aa32760..3d26154 100755 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -36,9 +36,10 @@ The aiida-crystal17 plugin for `AiiDA`_ .. toctree:: :maxdepth: 1 - :caption: API reference + :caption: Additional :hidden: + changelog api_index If you use this plugin for your research, please cite the Github repository @@ -51,7 +52,7 @@ If you use AiiDA for your research, please cite the following work: for computational science*, Comp. Mat. Sci 111, 218-230 (2016); http://dx.doi.org/10.1016/j.commatsci.2015.09.013; http://www.aiida.net. -``aiida-crystal17`` is released under the MIT license. +``aiida-crystal17`` is released under the GNU Lesser General Public License. Please contact chrisj_sewell@hotmail.com for information concerning ``aiida-crystal17`` and the `AiiDA mailing list `_ for questions concerning ``aiida`` diff --git a/docs/source/user_guide/calc_basic.ipynb b/docs/source/user_guide/calc_basic.ipynb index faf9a90..5876478 100644 --- a/docs/source/user_guide/calc_basic.ipynb +++ b/docs/source/user_guide/calc_basic.ipynb @@ -100,7 +100,7 @@ " 210: The main (stdout) output file was not found\u001b[0m\n", " 211: The temporary retrieved folder was not found\u001b[0m\n", " 300: An error was flagged trying to parse the crystal exec stdout file\u001b[0m\n", - " 301: An error occurred parsing the 'opta'/'optc' geomerty files\u001b[0m\n", + " 301: An error occurred parsing the 'opta'/'optc' geometry files\u001b[0m\n", " 350: The input file could not be read by crystal\u001b[0m\n", " 351: Crystal could not find the required wavefunction file\u001b[0m\n", " 400: The calculation stopped prematurely because it ran out of walltime.\u001b[0m\n", diff --git a/docs/source/user_guide/calc_doss.rst b/docs/source/user_guide/calc_doss.rst index 154e31c..91bc681 100644 --- a/docs/source/user_guide/calc_doss.rst +++ b/docs/source/user_guide/calc_doss.rst @@ -10,9 +10,12 @@ DOSS Calculation The 
:py:class:`~.aiida_crystal17.calculations.cry_doss.CryDossCalculation` can be used to run the `properties` executable for DOSS calculations, from an existing ``fort.9``. -.. code:: shell +.. nbinput:: ipython + + !verdi plugin list aiida.calculations crystal17.doss + +.. nboutput:: - $ verdi plugin list aiida.calculations crystal17.doss Inputs code: required Code The Code to use for this job. parameters: required Dict the input parameters to create the DOSS input file. @@ -34,7 +37,7 @@ executable for DOSS calculations, from an existing ``fort.9``. 210: The main (stdout) output file was not found 211: The temporary retrieved folder was not found 300: An error was flagged trying to parse the crystal exec stdout file - 301: An error occurred parsing the 'opta'/'optc' geomerty files + 301: An error occurred parsing the 'opta'/'optc' geometry files 302: The crystal exec stdout file denoted that the run was a testgeom 350: The input file could not be read by crystal 351: Crystal could not find the required wavefunction file @@ -52,7 +55,10 @@ executable for DOSS calculations, from an existing ``fort.9``. 520: Primitive symmops were not found in the output file -The :ref:`doss_input_schema` gives the allowed format of the input dictionary, for example:: +The :ref:`doss_input_schema` gives the allowed format of the input dictionary, for example: + +.. nbinput:: python + :no-output: from aiida.orm import Dict Dict(dict={ @@ -63,3 +69,156 @@ The :ref:`doss_input_schema` gives the allowed format of the input dictionary, f "band_maximum": 10, "band_units": "eV" }) + +.. _doss_projections: + +Computing Projections +~~~~~~~~~~~~~~~~~~~~~ + +Projections can be added per atom or per orbital set: + +.. nbinput:: python + :no-output: + + Dict(dict={ + "shrink_is": 18, + "shrink_isp": 36, + "npoints": 100, + "band_minimum": -10, + "band_maximum": 10, + "band_units": "eV", + "atomic_projections": [0, 1], + "orbital_projections": [[1, 2, 3]] + }) + +.. 
note:: + + A maximum of 15 projections are allowed per calculation. + +In order to create orbital sets, +it is possible to compute the nature of each orbital, +using the atomic structure and basis sets used to create the ``fort.9``: + +.. nbinput:: python + + from aiida_crystal17.tests import get_test_structure_and_symm + from aiida_crystal17.symmetry import print_structure + structure, _ = get_test_structure_and_symm('NiO_afm') + print_structure(structure) + +.. nboutput:: + + StructureData Summary + Lattice + abc : 2.944 2.944 4.164 + angles : 90.0 90.0 90.0 + volume : 36.1 + pbc : True True True + A : 2.944 0.0 0.0 + B : 0.0 2.944 0.0 + C : 0.0 0.0 4.164 + Kind Symbols Position + ---- ------- -------- + Ni1 Ni 0.0 0.0 0.0 + Ni2 Ni 1.472 1.472 2.082 + O O 0.0 0.0 2.082 + O O 1.472 1.472 0.0 + +.. nbinput:: python + + from aiida.plugins import DataFactory + basis_cls = DataFactory('crystal17.basisset') + basis_sets = basis_cls.get_basissets_from_structure(structure, 'sto3g') + basis_data = {k: v.get_data() for k, v in basis_sets.items()} + basis_data + +.. nboutput:: + + {'Ni': {'type': 'all-electron', + 'bs': [{'type': 'S', 'functions': ['STO-nG(nd) type 3-21G core shell']}, + {'type': 'SP', 'functions': ['STO-nG(nd) type 3-21G core shell']}, + {'type': 'SP', 'functions': ['STO-nG(nd) type 3-21G core shell']}, + {'type': 'SP', 'functions': ['STO-nG(nd) type 3-21G core shell']}, + {'type': 'D', 'functions': ['STO-nG(nd) type 3-21G core shell']}]}, + 'O': {'type': 'all-electron', + 'bs': [{'type': 'S', 'functions': ['STO-nG(nd) type 3-21G core shell']}, + {'type': 'SP', 'functions': ['STO-nG(nd) type 3-21G core shell']}]}} + +.. nbinput:: python + + from aiida_crystal17.parsers.raw.parse_bases import compute_orbitals + result = compute_orbitals(structure.get_ase().numbers, basis_data) + print("number of electrons: ", result.electrons) + print("number of core electrons: ", result.core_electrons) + result.ao_indices + +.. 
nboutput:: + + number of electrons: 72 + number of core electrons: 40 + { 1: {'atom': 0, 'element': 'Ni', 'type': 'S', 'index': 1}, + 2: {'atom': 0, 'element': 'Ni', 'type': 'SP', 'index': 1}, + 3: {'atom': 0, 'element': 'Ni', 'type': 'SP', 'index': 1}, + 4: {'atom': 0, 'element': 'Ni', 'type': 'SP', 'index': 1}, + 5: {'atom': 0, 'element': 'Ni', 'type': 'SP', 'index': 1}, + 6: {'atom': 0, 'element': 'Ni', 'type': 'SP', 'index': 2}, + 7: {'atom': 0, 'element': 'Ni', 'type': 'SP', 'index': 2}, + 8: {'atom': 0, 'element': 'Ni', 'type': 'SP', 'index': 2}, + 9: {'atom': 0, 'element': 'Ni', 'type': 'SP', 'index': 2}, + 10: {'atom': 0, 'element': 'Ni', 'type': 'SP', 'index': 3}, + 11: {'atom': 0, 'element': 'Ni', 'type': 'SP', 'index': 3}, + 12: {'atom': 0, 'element': 'Ni', 'type': 'SP', 'index': 3}, + 13: {'atom': 0, 'element': 'Ni', 'type': 'SP', 'index': 3}, + 14: {'atom': 0, 'element': 'Ni', 'type': 'D', 'index': 1}, + 15: {'atom': 0, 'element': 'Ni', 'type': 'D', 'index': 1}, + 16: {'atom': 0, 'element': 'Ni', 'type': 'D', 'index': 1}, + 17: {'atom': 0, 'element': 'Ni', 'type': 'D', 'index': 1}, + 18: {'atom': 0, 'element': 'Ni', 'type': 'D', 'index': 1}, + 19: {'atom': 1, 'element': 'Ni', 'type': 'S', 'index': 1}, + 20: {'atom': 1, 'element': 'Ni', 'type': 'SP', 'index': 1}, + 21: {'atom': 1, 'element': 'Ni', 'type': 'SP', 'index': 1}, + 22: {'atom': 1, 'element': 'Ni', 'type': 'SP', 'index': 1}, + 23: {'atom': 1, 'element': 'Ni', 'type': 'SP', 'index': 1}, + 24: {'atom': 1, 'element': 'Ni', 'type': 'SP', 'index': 2}, + 25: {'atom': 1, 'element': 'Ni', 'type': 'SP', 'index': 2}, + 26: {'atom': 1, 'element': 'Ni', 'type': 'SP', 'index': 2}, + 27: {'atom': 1, 'element': 'Ni', 'type': 'SP', 'index': 2}, + 28: {'atom': 1, 'element': 'Ni', 'type': 'SP', 'index': 3}, + 29: {'atom': 1, 'element': 'Ni', 'type': 'SP', 'index': 3}, + 30: {'atom': 1, 'element': 'Ni', 'type': 'SP', 'index': 3}, + 31: {'atom': 1, 'element': 'Ni', 'type': 'SP', 'index': 3}, + 32: {'atom': 1, 
'element': 'Ni', 'type': 'D', 'index': 1}, + 33: {'atom': 1, 'element': 'Ni', 'type': 'D', 'index': 1}, + 34: {'atom': 1, 'element': 'Ni', 'type': 'D', 'index': 1}, + 35: {'atom': 1, 'element': 'Ni', 'type': 'D', 'index': 1}, + 36: {'atom': 1, 'element': 'Ni', 'type': 'D', 'index': 1}, + 37: {'atom': 2, 'element': 'O', 'type': 'S', 'index': 1}, + 38: {'atom': 2, 'element': 'O', 'type': 'SP', 'index': 1}, + 39: {'atom': 2, 'element': 'O', 'type': 'SP', 'index': 1}, + 40: {'atom': 2, 'element': 'O', 'type': 'SP', 'index': 1}, + 41: {'atom': 2, 'element': 'O', 'type': 'SP', 'index': 1}, + 42: {'atom': 3, 'element': 'O', 'type': 'S', 'index': 1}, + 43: {'atom': 3, 'element': 'O', 'type': 'SP', 'index': 1}, + 44: {'atom': 3, 'element': 'O', 'type': 'SP', 'index': 1}, + 45: {'atom': 3, 'element': 'O', 'type': 'SP', 'index': 1}, + 46: {'atom': 3, 'element': 'O', 'type': 'SP', 'index': 1}} + + +To observe DoS at the fermi level, +these results can also be used to choose a sensible range of bands: + +.. 
nbinput:: python + :no-output: + + filled_bands = int(result.electrons / 2) + first_band = int(result.core_electrons / 2) + 1 + last_band = min([first_band + 2 * (filled_bands - first_band), result.number_ao]) + + Dict(dict={ + "shrink_is": 18, + "shrink_isp": 36, + "npoints": 1000, + "band_minimum": first_band, + "band_maximum": last_band, + "band_units": "bands" + }) diff --git a/docs/source/user_guide/calc_main.ipynb b/docs/source/user_guide/calc_main.ipynb index 1d81cc8..b0f642d 100644 --- a/docs/source/user_guide/calc_main.ipynb +++ b/docs/source/user_guide/calc_main.ipynb @@ -114,7 +114,7 @@ " 210: The main (stdout) output file was not found\u001b[0m\n", " 211: The temporary retrieved folder was not found\u001b[0m\n", " 300: An error was flagged trying to parse the crystal exec stdout file\u001b[0m\n", - " 301: An error occurred parsing the 'opta'/'optc' geomerty files\u001b[0m\n", + " 301: An error occurred parsing the 'opta'/'optc' geometry files\u001b[0m\n", " 350: The input file could not be read by crystal\u001b[0m\n", " 351: Crystal could not find the required wavefunction file\u001b[0m\n", " 400: The calculation stopped prematurely because it ran out of walltime.\u001b[0m\n", diff --git a/setup.json b/setup.json index 8740b12..2e564eb 100644 --- a/setup.json +++ b/setup.json @@ -13,7 +13,7 @@ "Topic :: Scientific/Engineering :: Physics", "Framework :: AiiDA" ], - "version": "0.9.1b5", + "version": "0.9.2b5", "entry_points": { "console_scripts": [ "mock_crystal17 = aiida_crystal17.tests.mock_crystal17:main", @@ -64,7 +64,7 @@ "reentry_register": true, "install_requires": [ "aiida-core==1.0.0b5", - "six", + "six >=1.12.0", "ruamel.yaml", "jsonextended>=0.7.10", "jsonschema",