
Commit

Merge branch '404-units-of-size-and-origin-missing' into 'development'
DADF5 1.1

Closes #404

See merge request damask/DAMASK!946
sharanroongta committed May 29, 2024
2 parents 56dffa7 + 5647e92 commit 3db7798
Showing 5 changed files with 51 additions and 23 deletions.
2 changes: 1 addition & 1 deletion PRIVATE
13 changes: 11 additions & 2 deletions python/damask/_result.py
@@ -2113,11 +2113,16 @@ def export_DADF5(self,
Names of the datasets to export.
Defaults to '*', in which case all visible datasets are exported.
mapping : numpy.ndarray of int, shape (:,:,:), optional
Indices for regridding.
Indices for regridding. Only applicable for grid
solver results.
"""
if Path(fname).expanduser().absolute() == self.fname:
raise PermissionError(f'cannot overwrite {self.fname}')
raise PermissionError(f'cannot overwrite "{self.fname}"')

if mapping is not None and not self.structured:
raise PermissionError('cannot regrid unstructured mesh')


def cp(path_in,path_out,label,mapping):
if mapping is None:
@@ -2135,7 +2140,11 @@ def cp(path_in,path_out,label,mapping):
if mapping is not None:
cells = mapping.shape
mapping_flat = mapping.flatten(order='F')

f_out['geometry'].attrs['cells'] = cells
if self.version_major == 1 and self.version_minor > 0:
f_out['geometry']['cells'][...] = cells

f_out.create_group('cell_to') # ToDo: attribute missing
mappings = {'phase':{},'homogenization':{}} # type: ignore

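For context, the regridding path changed above is reached through `Result.export_DADF5`. A minimal usage sketch, assuming a grid-solver result and a trivial identity mapping (file names and grid size are illustrative, not taken from this commit):

```python
import numpy as np
import damask

# open an existing grid-solver result (file name is only an example)
result = damask.Result('tensionX.hdf5')

# trivial identity mapping: flat Fortran-order indices pointing back into
# the original grid; its shape defines the cells of the regridded file
cells = (16,16,16)                                               # assumed grid size
mapping = np.arange(np.prod(cells)).reshape(cells,order='F')

# exporting onto the source file raises PermissionError, and with this
# commit so does passing a mapping for an unstructured (mesh-solver) result
result.export_DADF5('tensionX_regridded.hdf5',mapping=mapping)
```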
12 changes: 6 additions & 6 deletions src/HDF5_utilities.f90
@@ -981,7 +981,7 @@ end subroutine HDF5_read_int5

#if defined(__GFORTRAN__)
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type real with 1 dimension
!> @brief Write dataset of type real with 1 dimension.
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_real1(dataset,loc_id,datasetName,parallel)

@@ -1018,7 +1018,7 @@ subroutine HDF5_write_real1(dataset,loc_id,datasetName,parallel)
end subroutine HDF5_write_real1

!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type real with 2 dimensions
!> @brief Write dataset of type real with 2 dimensions.
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_real2(dataset,loc_id,datasetName,parallel)

@@ -1055,7 +1055,7 @@ subroutine HDF5_write_real2(dataset,loc_id,datasetName,parallel)
end subroutine HDF5_write_real2

!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type real with 3 dimensions
!> @brief Write dataset of type real with 3 dimensions.
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_real3(dataset,loc_id,datasetName,parallel)

@@ -1092,7 +1092,7 @@ subroutine HDF5_write_real3(dataset,loc_id,datasetName,parallel)
end subroutine HDF5_write_real3

!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type real with 4 dimensions
!> @brief Write dataset of type real with 4 dimensions.
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_real4(dataset,loc_id,datasetName,parallel)

@@ -1130,7 +1130,7 @@ end subroutine HDF5_write_real4


!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type real with 5 dimensions
!> @brief Write dataset of type real with 5 dimensions.
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_real5(dataset,loc_id,datasetName,parallel)

@@ -1169,7 +1169,7 @@ end subroutine HDF5_write_real5
#else

!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type real with 1-5 dimension
!> @brief Write dataset of type real with 1-5 dimensions.
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_real(dataset,loc_id,datasetName,parallel)

22 changes: 16 additions & 6 deletions src/grid/discretization_grid.f90
@@ -19,6 +19,8 @@ module discretization_grid
use CLI
use IO
use config
use HDF5
use HDF5_utilities
use result
use discretization
use geometry_plastic_nonlocal
@@ -73,6 +75,7 @@ subroutine discretization_grid_init(restart)
displs, sendcounts
character(len=:), allocatable :: &
fileContent, fname
integer(HID_T) :: handle


print'(/,1x,a)', '<<<+- discretization_grid init -+>>>'; flush(IO_STDOUT)
@@ -145,12 +148,19 @@

!--------------------------------------------------------------------------------------------------
! store geometry information for post processing
if (.not. restart) then
call result_openJobFile()
call result_closeGroup(result_addGroup('geometry'))
call result_addAttribute('cells', cells, '/geometry')
call result_addAttribute('size', geomSize,'/geometry')
call result_addAttribute('origin',origin, '/geometry')
if (.not. restart .and. worldrank == 0) then
call result_openJobFile(parallel=.false.)
handle = result_addGroup('geometry')
call HDF5_write(cells, handle,'cells', .false.)
call HDF5_write(geomSize,handle,'size', .false.)
call HDF5_write(origin, handle,'origin',.false.)
call HDF5_addAttribute(handle,'unit','1','cells')
call HDF5_addAttribute(handle,'unit','','size')
call HDF5_addAttribute(handle,'unit','m','origin')
call result_addAttribute('cells', cells, '/geometry') ! legacy for DADF5 1.x
call result_addAttribute('size', geomSize,'/geometry') ! legacy for DADF5 1.x
call result_addAttribute('origin',origin, '/geometry') ! legacy for DADF5 1.x
call result_closeGroup(handle)
call result_closeJobFile()
end if

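With this change, cells, size, and origin are written as datasets under /geometry, each carrying a 'unit' attribute, while the old group attributes are kept as legacy entries for DADF5 1.x readers. A minimal inspection sketch with h5py, assuming the file name and that the 'unit' attribute sits on each dataset as the HDF5_addAttribute calls above suggest:

```python
import h5py

# file name is only an example
with h5py.File('tensionX.hdf5','r') as f:
    geom = f['geometry']
    for name in ('cells','size','origin'):
        dset = geom[name]                                        # dataset, new in DADF5 1.1
        print(name,dset[...],'unit:',dset.attrs['unit'])
    # legacy DADF5 1.x attributes are still written on the group itself
    print(geom.attrs['cells'],geom.attrs['size'],geom.attrs['origin'])
```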
25 changes: 17 additions & 8 deletions src/result.f90
@@ -90,12 +90,21 @@ subroutine result_init(restart)
if (.not. restart) then
resultFile = HDF5_openFile(getSolverJobName()//'.hdf5','w')
call result_addAttribute('DADF5_version_major',1)
call result_addAttribute('DADF5_version_minor',0)
call result_addAttribute('DADF5_version_minor',1)
call get_command_argument(0,commandLine)
call result_addAttribute('creator',trim(commandLine)//' '//DAMASKVERSION)
call result_addAttribute('created',now())
call get_command(commandLine)
call result_addAttribute('call',trim(commandLine))
#ifdef DAMASK_GRID
call result_addAttribute('solver','grid')
#endif
#ifdef DAMASK_MESH
call result_addAttribute('solver','mesh')
#endif
#ifdef MARC4DAMASK
call result_addAttribute('solver','Marc')
#endif
call result_closeGroup(result_addGroup('cell_to'))
call result_addAttribute('description','mappings to place data in space','cell_to')
call result_closeGroup(result_addGroup('setup'))
@@ -118,7 +127,7 @@ end subroutine result_init


!--------------------------------------------------------------------------------------------------
!> @brief opens the result file to append data
!> @brief Open the result file to append data.
!--------------------------------------------------------------------------------------------------
subroutine result_openJobFile(parallel)

@@ -131,7 +140,7 @@ end subroutine result_openJobFile


!--------------------------------------------------------------------------------------------------
!> @brief closes the result file
!> @brief Close the result file.
!--------------------------------------------------------------------------------------------------
subroutine result_closeJobFile

@@ -141,7 +150,7 @@ end subroutine result_closeJobFile


!--------------------------------------------------------------------------------------------------
!> @brief creates the group of increment and adds time as attribute to the file
!> @brief Creates a group for the increment and add time as attribute to the file.
!--------------------------------------------------------------------------------------------------
subroutine result_addIncrement(inc,time)

@@ -160,8 +169,8 @@ end subroutine result_addIncrement


!--------------------------------------------------------------------------------------------------
!> @brief finalize increment
!> @details remove soft link
!> @brief Finalize increment.
!> @details Remove soft link.
!--------------------------------------------------------------------------------------------------
subroutine result_finalizeIncrement

@@ -478,7 +487,7 @@ end subroutine result_writeTensorDataset_int


!--------------------------------------------------------------------------------------------------
!> @brief adds the unique mapping from spatial position and constituent ID to results
!> @brief Add the unique mapping from spatial position and constituent ID to results.
!--------------------------------------------------------------------------------------------------
subroutine result_mapping_phase(ID,entry,label)

@@ -631,7 +640,7 @@ end subroutine result_mapping_phase


!--------------------------------------------------------------------------------------------------
!> @brief adds the unique mapping from spatial position and constituent ID to results
!> @brief Add the unique mapping from spatial position and constituent ID to results.
!--------------------------------------------------------------------------------------------------
subroutine result_mapping_homogenization(ID,entry,label)

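The result.f90 hunk bumps the DADF5 minor version to 1 and records the solver ('grid', 'mesh', or 'Marc') as a root attribute. A downstream reader might branch on these, as in the following hedged sketch (file name assumed, attribute and dataset names taken from the diff above):

```python
import h5py

# file name is only an example
with h5py.File('tensionX.hdf5','r') as f:
    major  = int(f.attrs['DADF5_version_major'])
    minor  = int(f.attrs['DADF5_version_minor'])
    solver = f.attrs.get('solver','unknown')                     # new root attribute

    if (major,minor) >= (1,1):
        cells = f['geometry/cells'][...]                         # dataset (DADF5 >= 1.1)
    else:
        cells = f['geometry'].attrs['cells']                     # attribute only (DADF5 1.0)
    print(f'DADF5 {major}.{minor}, solver: {solver}, cells: {cells}')
```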
