Dependencies: Add support for aiida-core~=2.0
sphuber committed Oct 4, 2023
1 parent 3f43fba commit 7fdf3dc
Showing 15 changed files with 80 additions and 75 deletions.
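The diff applies the same few aiida-core 2.0 migrations throughout: entry points gain the `core.` prefix, `orm.Dict` takes its dictionary as a positional argument instead of the `dict=` keyword, node attributes are read via `node.base.attributes`, and retrieved files are accessed via `node.base.repository`. A minimal before/after sketch of these idioms (illustrative values, not code from the plugin):

```python
# Sketch of the aiida-core 1.6 -> 2.0 idioms applied in this commit; the
# values are illustrative and a configured AiiDA profile is assumed.
from aiida import load_profile, orm, plugins

load_profile()

# Entry points now carry the ``core.`` prefix.
StructureData = plugins.DataFactory('core.structure')    # was: DataFactory('structure')
KpointsData = plugins.DataFactory('core.array.kpoints')  # was: DataFactory('array.kpoints')

# ``Dict`` takes the dictionary positionally.
parameters = orm.Dict({'calculator': {'name': 'gpaw'}})  # was: orm.Dict(dict={...})

# Attributes and repository content are accessed through ``node.base``:
#   node.base.attributes.get('output_filename')   # was: node.get_attribute(...)
#   node.base.repository.open('results.json')     # was: node.open(...)
```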
2 changes: 1 addition & 1 deletion .github/workflows/cd.yml
@@ -86,7 +86,7 @@ jobs:
run: sudo apt update && sudo apt install postgresql

- name: Install Python dependencies
run: pip install -e .[tests] && reentry scan
run: pip install -e .[tests]

- name: Run pytest
run: pytest -sv tests
4 changes: 3 additions & 1 deletion .github/workflows/ci.yml
@@ -67,7 +67,9 @@ jobs:
run: sudo apt update && sudo apt install postgresql

- name: Install Python package and dependencies
run: pip install -e .[tests] && reentry scan
run: pip install -e .[tests]

- name: Run pytest
env:
AIIDA_WARN_v3: True
run: pytest -sv tests
2 changes: 1 addition & 1 deletion README.md
@@ -95,7 +95,7 @@ Some addition utility functions are:
1. If using GPAW it is possible to run parallel calculations using `/path/to/execut/gpaw python run_gpaw.py`. Set up the code through AiiDA by adding the `gpaw` executable, then add the `python` tag using the command line option shown below (a fuller builder sketch follows this excerpt)
```
settings = {'CMDLINE': ['python']}
builder.settings = orm.Dict(dict=settings)
builder.settings = orm.Dict(settings)
```

2. If the code you are interested in is present in the AiiDA plugin registry, it might make more sense to use that plugin instead: https://aiidateam.github.io/aiida-registry/
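For reference, the example scripts updated in this commit (e.g. `examples/calculations/example_grid_gpaw_relax.py`) wire the `CMDLINE` setting into a builder roughly as in the following sketch; the code label is an assumption.

```python
# Minimal sketch based on the example scripts in this commit; the code label
# ``gpaw-21.6.0@localhost`` is an assumption and must match your own setup.
from aiida import load_profile, orm, plugins

load_profile()

AseCalculation = plugins.CalculationFactory('ase.ase')

builder = AseCalculation.get_builder()
builder.code = orm.load_code('gpaw-21.6.0@localhost')  # Code pointing at the gpaw executable

# Prepend ``python`` so the job runs as ``gpaw python <input script>``.
builder.settings = orm.Dict({'CMDLINE': ['python']})

# structure, kpoints, parameters and metadata.options are set as in the
# example scripts before calling ``aiida.engine.submit(builder)``.
```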
8 changes: 4 additions & 4 deletions examples/calculations/example_grid_gpaw_relax.py
@@ -32,7 +32,7 @@ def runner():
]

# BaTiO3 cubic structure
StructureData = DataFactory('structure')
StructureData = DataFactory('core.structure')
structure = StructureData(cell=cell)
structure.append_atom(position=(0., 0., 0.), symbols=['Ba'])
structure.append_atom(position=(alat / 2., alat / 2., alat / 2.), symbols=['Ti'])
@@ -43,7 +43,7 @@ def runner():
builder.structure = structure

# k-point information
KpointsData = DataFactory('array.kpoints')
KpointsData = DataFactory('core.array.kpoints')
kpoints = KpointsData()
kpoints.set_kpoints_mesh([2,2,2])
builder.kpoints = kpoints
@@ -69,11 +69,11 @@ def runner():
}


builder.parameters = orm.Dict(dict=parameters)
builder.parameters = orm.Dict(parameters)

# Running the calculation using gpaw python
settings = {'CMDLINE': ['python']}
builder.settings = orm.Dict(dict=settings)
builder.settings = orm.Dict(settings)

builder.metadata.options.resources = {'num_machines': 1}
builder.metadata.options.max_wallclock_seconds = 1 * 30 * 60
14 changes: 7 additions & 7 deletions examples/calculations/example_pw_gpaw_relax.py
@@ -6,9 +6,9 @@
# Change the following value to the ``Code`` that you have configured
CODE_NAME = 'gpaw-21.6.0@localhost'

Dict = plugins.DataFactory('dict')
StructureData = plugins.DataFactory('structure')
KpointsData = plugins.DataFactory('array.kpoints')
Dict = plugins.DataFactory('core.dict')
StructureData = plugins.DataFactory('core.structure')
KpointsData = plugins.DataFactory('core.array.kpoints')
AseCalculation = plugins.CalculationFactory('ase.ase')


@@ -33,7 +33,7 @@ def main():
]

# BaTiO3 cubic structure
StructureData = DataFactory('structure')
StructureData = DataFactory('core.structure')
structure = StructureData(cell=cell)
structure.append_atom(position=(0., 0., 0.), symbols=['Ba'])
structure.append_atom(position=(alat / 2., alat / 2., alat / 2.), symbols=['Ti'])
@@ -44,7 +44,7 @@ def main():
builder.structure = structure

# k-point information
KpointsData = DataFactory('array.kpoints')
KpointsData = DataFactory('core.array.kpoints')
kpoints = KpointsData()
kpoints.set_kpoints_mesh([2,2,2])
builder.kpoints = kpoints
@@ -79,8 +79,8 @@ def main():
builder.code = load_code(CODE_NAME)
builder.structure = structure
builder.kpoints = kpoints
builder.parameters = orm.Dict(dict=parameters)
builder.settings = orm.Dict(dict=settings)
builder.parameters = orm.Dict(parameters)
builder.settings = orm.Dict(settings)
builder.metadata.options.resources = {'num_machines': 1}
builder.metadata.options.max_wallclock_seconds = 30 * 60 # 30 minutes
builder.metadata.options.withmpi = False
14 changes: 7 additions & 7 deletions examples/calculations/example_pw_gpaw_scf.py
@@ -6,20 +6,20 @@
# Change the following value to the ``Code`` that you have configured
CODE_NAME = 'gpaw-21.6.0@localhost'

Dict = plugins.DataFactory('dict')
StructureData = plugins.DataFactory('structure')
KpointsData = plugins.DataFactory('array.kpoints')
Dict = plugins.DataFactory('core.dict')
StructureData = plugins.DataFactory('core.structure')
KpointsData = plugins.DataFactory('core.array.kpoints')
AseCalculation = plugins.CalculationFactory('ase.ase')


def main():
# generate an example structure
atoms = bulk('Si', 'diamond', a=5.4)
StructureData = DataFactory('structure')
StructureData = DataFactory('core.structure')
structure = StructureData(ase=atoms)

# k-point information
KpointsData = DataFactory('array.kpoints')
KpointsData = DataFactory('core.array.kpoints')
kpoints = KpointsData()
kpoints.set_kpoints_mesh([1,1,1])

@@ -49,8 +49,8 @@ def main():
builder.code = load_code(CODE_NAME)
builder.structure = structure
builder.kpoints = kpoints
builder.parameters = orm.Dict(dict=parameters)
builder.settings = orm.Dict(dict=settings)
builder.parameters = orm.Dict(parameters)
builder.settings = orm.Dict(settings)
builder.metadata.options.resources = {'num_machines': 1}
builder.metadata.options.max_wallclock_seconds = 30 * 60 # 30 minutes
builder.metadata.options.withmpi = False
8 changes: 4 additions & 4 deletions examples/workflows/example_base_gpaw.py
@@ -31,7 +31,7 @@ def runner():
]

# BaTiO3 cubic structure
StructureData = DataFactory('structure')
StructureData = DataFactory('core.structure')
structure = StructureData(cell=cell)
structure.append_atom(position=(0., 0., 0.), symbols=['Ba'])
structure.append_atom(position=(alat / 2., alat / 2., alat / 2.), symbols=['Ti'])
@@ -44,7 +44,7 @@ def runner():
builder.gpaw.code = code

# k-point information
KpointsData = DataFactory('array.kpoints')
KpointsData = DataFactory('core.array.kpoints')
kpoints = KpointsData()
kpoints.set_kpoints_mesh([2,2,2])
builder.gpaw.kpoints = kpoints
@@ -68,11 +68,11 @@ def runner():
},
}

builder.gpaw.parameters = orm.Dict(dict=parameters)
builder.gpaw.parameters = orm.Dict(parameters)

# Running the calculation using gpaw python
settings = {'CMDLINE': ['python']}
builder.gpaw.settings = orm.Dict(dict=settings)
builder.gpaw.settings = orm.Dict(settings)

builder.gpaw.metadata.options.resources = {'num_machines': 1}
builder.gpaw.metadata.options.max_wallclock_seconds = 1 * 30 * 60
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -25,7 +25,7 @@ classifiers = [
keywords = ['aiida', 'workflows', 'ase']
requires-python = '>=3.8'
dependencies = [
'aiida-core~=1.6',
'aiida-core~=2.0',
'ase',
]

12 changes: 6 additions & 6 deletions src/aiida_ase/calculations/ase.py
@@ -2,9 +2,9 @@
"""`CalcJob` implementation that can be used to wrap around the ASE calculators."""
from aiida import common, engine, orm, plugins

Dict = plugins.DataFactory('dict')
StructureData = plugins.DataFactory('structure')
KpointsData = plugins.DataFactory('array.kpoints')
Dict = plugins.DataFactory('core.dict')
StructureData = plugins.DataFactory('core.structure')
KpointsData = plugins.DataFactory('core.array.kpoints')


class AseCalculation(engine.CalcJob):
@@ -218,7 +218,7 @@ def return_a_function(v):
else:
raise ValueError('format for extra imports not recognized')

if self.options.withmpi:
if self.options.get('withmpi', False):
all_imports.append('from ase.parallel import paropen')

all_imports_string = '\n'.join(all_imports) + '\n'
@@ -291,7 +291,7 @@ def return_a_function(v):

input_txt += '\n'
# Dump results to file
right_open = 'paropen' if self.options.withmpi else 'open'
right_open = 'paropen' if self.options.get('withmpi', False) else 'open'
input_txt += f"with {right_open}('{self._OUTPUT_FILE_NAME}', 'w') as f:\n"
input_txt += ' json.dump(results,f)'
input_txt += '\n'
@@ -307,7 +307,7 @@ def return_a_function(v):
input_txt += '\n'

# write all the input script to a file
with folder.open(self._INPUT_FILE_NAME, 'w') as handle:
with folder.base.repository.open(self._INPUT_FILE_NAME, 'w') as handle:
handle.write(input_txt)

# ============================ calcinfo ================================
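The `withmpi` handling above ends up in the generated run script: when MPI is enabled, results are written through `ase.parallel.paropen` so only the master rank writes the JSON output. A simplified, self-contained sketch of that fragment (filenames and values are placeholders):

```python
# Simplified sketch of the script fragment AseCalculation generates; the
# filename and results are placeholders, and WITH_MPI stands in for the
# calculation's ``withmpi`` option.
import json

WITH_MPI = False  # set by the plugin via self.options.get('withmpi', False)

if WITH_MPI:
    from ase.parallel import paropen as open_func  # only the MPI master rank writes
else:
    open_func = open

results = {'energy': -123.45}  # dictionary assembled by the generated script

with open_func('results.json', 'w') as handle:
    json.dump(results, handle)
```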
18 changes: 9 additions & 9 deletions src/aiida_ase/parsers/ase.py
@@ -6,9 +6,9 @@
from ase.io import read
import numpy

Dict = plugins.DataFactory('dict')
ArrayData = plugins.DataFactory('array')
StructureData = plugins.DataFactory('structure')
Dict = plugins.DataFactory('core.dict')
ArrayData = plugins.DataFactory('core.array')
StructureData = plugins.DataFactory('core.structure')
AseCalculation = plugins.CalculationFactory('ase.ase')


@@ -20,7 +20,7 @@ def parse(self, **kwargs): # pylint: disable=inconsistent-return-statements
retrieved = self.retrieved

# check what is inside the folder
list_of_files = retrieved.list_object_names()
list_of_files = retrieved.base.repository.list_object_names()

# at least the stdout should exist
if AseCalculation._OUTPUT_FILE_NAME not in list_of_files: # pylint: disable=protected-access
@@ -29,15 +29,15 @@ def parse(self, **kwargs): # pylint: disable=inconsistent-return-statements

# output structure
if AseCalculation._output_aseatoms in list_of_files: # pylint: disable=protected-access
with retrieved.open(AseCalculation._output_aseatoms, 'r') as handle: # pylint: disable=protected-access
with retrieved.base.repository.open(AseCalculation._output_aseatoms, 'r') as handle: # pylint: disable=protected-access
atoms = read(handle, format='json')
structure = StructureData(ase=atoms)
self.out('structure', structure)

filename_stdout = self.node.get_attribute('output_filename')
filename_stdout = self.node.base.attributes.get('output_filename')

# load the results dictionary
with retrieved.open(filename_stdout, 'r') as handle:
with retrieved.base.repository.open(filename_stdout, 'r') as handle:
json_params = json.load(handle)

# extract arrays from json_params
@@ -48,7 +48,7 @@ def parse(self, **kwargs): # pylint: disable=inconsistent-return-statements

# look at warnings
warnings = []
with retrieved.open('_scheduler-stderr.txt', 'r') as handle:
with retrieved.base.repository.open('_scheduler-stderr.txt', 'r') as handle:
errors = handle.read()
if errors:
warnings = [errors]
@@ -61,6 +61,6 @@ def parse(self, **kwargs): # pylint: disable=inconsistent-return-statements
self.out('array', array_data)

if json_params:
self.out('parameters', Dict(dict=json_params))
self.out('parameters', Dict(json_params))

return
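The same repository and attribute accessors can be combined into a small helper; this is a hedged sketch of the aiida-core 2.0 access pattern used by the parsers above, not code from the plugin:

```python
# Hedged sketch of the aiida-core 2.0 access pattern used in these parsers;
# the helper name and its return convention are illustrative.
import json


def read_results(node, retrieved, filename_attribute='output_filename'):
    """Return the JSON results written by the ASE script, or ``None`` if absent."""
    filename = node.base.attributes.get(filename_attribute)
    if filename not in retrieved.base.repository.list_object_names():
        return None
    with retrieved.base.repository.open(filename, 'r') as handle:
        return json.load(handle)
```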
28 changes: 15 additions & 13 deletions src/aiida_ase/parsers/gpaw.py
@@ -7,10 +7,10 @@
from ase.io import read
import numpy

Dict = plugins.DataFactory('dict')
ArrayData = plugins.DataFactory('array')
StructureData = plugins.DataFactory('structure')
TrajectoryData = plugins.DataFactory('array.trajectory')
Dict = plugins.DataFactory('core.dict')
ArrayData = plugins.DataFactory('core.array')
StructureData = plugins.DataFactory('core.structure')
TrajectoryData = plugins.DataFactory('core.array.trajectory')
AseCalculation = plugins.CalculationFactory('ase.ase')


@@ -60,7 +60,7 @@ def parse(self, **kwargs): # pylint: disable=inconsistent-return-statements,too
"""Parse the retrieved files from a ``AseCalculation``."""

# check what is inside the folder
list_of_files = self.retrieved.list_object_names()
list_of_files = self.retrieved.base.repository.list_object_names()

# check if it was a relaxation
optimizer = self.node.inputs.parameters.get_dict().pop('optimizer', None)
@@ -74,7 +74,7 @@ def parse(self, **kwargs): # pylint: disable=inconsistent-return-statements,too
# Probably helpful for restarts
self.logger.error('Output results was not found, inspecting log file')
# Checking for possible errors common to all calculations
with self.retrieved.open('_scheduler-stderr.txt', 'r') as handle:
with self.retrieved.base.repository.open('_scheduler-stderr.txt', 'r') as handle:
lines = handle.readlines()
if check_paw_missing(lines):
self.logger.error('Could not find paw potentials')
@@ -87,7 +87,9 @@ def parse(self, **kwargs): # pylint: disable=inconsistent-return-statements,too
# This is a relaxation calculation that did not complete
# try to get all the structures that are available
try:
with self.retrieved.open(self.node.get_attribute('log_filename'), 'r') as handle:
with self.retrieved.base.repository.open(
self.node.base.attributes.get('log_filename'), 'r'
) as handle:
all_ase_traj = read(handle, index=':', format='gpaw-out')
trajectory = store_to_trajectory_data(all_ase_traj)
self.outputs.trajectory = trajectory
@@ -108,19 +110,19 @@ def parse(self, **kwargs): # pylint: disable=inconsistent-return-statements,too
# Check if output structure is needed
if optimizer is not None:
# If we are here the calculation did complete successfully
with self.retrieved.open(AseCalculation._output_aseatoms, 'r') as handle: # pylint: disable=protected-access
with self.retrieved.base.repository.open(AseCalculation._output_aseatoms, 'r') as handle: # pylint: disable=protected-access
atoms = read(handle, format='json')
self.out('structure', StructureData(ase=atoms))
# Store the trajectory as well
with self.retrieved.open(self.node.get_attribute('log_filename'), 'r') as handle:
with self.retrieved.base.repository.open(self.node.base.attributes.get('log_filename'), 'r') as handle:
all_ase_traj = read(handle, index=':', format='gpaw-out')
self.outputs.trajectory = store_to_trajectory_data(all_ase_traj)
# load the results dictionary
with self.retrieved.open(AseCalculation._OUTPUT_FILE_NAME, 'r') as handle: # pylint: disable=protected-access
with self.retrieved.base.repository.open(AseCalculation._OUTPUT_FILE_NAME, 'r') as handle: # pylint: disable=protected-access
json_params = json.load(handle)

# get the relevant data from the log file for the final structure
with self.retrieved.open(self.node.get_attribute('log_filename'), 'r') as handle:
with self.retrieved.base.repository.open(self.node.base.attributes.get('log_filename'), 'r') as handle:
atoms_log = read(handle, format='gpaw-out')
create_output_parameters(atoms_log, json_params)

@@ -130,7 +132,7 @@ def parse(self, **kwargs): # pylint: disable=inconsistent-return-statements,too
return self.exit_codes.ERROR_FERMI_LEVEL_INF

# look at warnings
with self.retrieved.open('_scheduler-stderr.txt', 'r') as handle:
with self.retrieved.base.repository.open('_scheduler-stderr.txt', 'r') as handle:
errors = handle.read()
if errors:
json_params['warnings'] = [errors]
@@ -148,6 +150,6 @@ def parse(self, **kwargs): # pylint: disable=inconsistent-return-statements,too
self.out('array', array_data)

if json_params:
self.out('parameters', Dict(dict=json_params))
self.out('parameters', Dict(json_params))

return
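The `store_to_trajectory_data` helper used above is not part of this diff; as an illustration only, the ASE frames read from the GPAW log could be packed into a `TrajectoryData` node along these lines:

```python
# Illustrative only: the commit's ``store_to_trajectory_data`` helper is not
# shown in this diff, so this is an assumption about one possible shape.
from aiida import plugins

StructureData = plugins.DataFactory('core.structure')
TrajectoryData = plugins.DataFactory('core.array.trajectory')


def frames_to_trajectory(frames):
    """Pack a list of ``ase.Atoms`` frames into a ``TrajectoryData`` node."""
    structures = [StructureData(ase=atoms) for atoms in frames]
    return TrajectoryData(structurelist=structures)
```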