Skip to content

Commit a47daab

Browse files
authored
Merge pull request FABLE-3DXRD#462 from haixing0a/master
minor update with exporting xdmf (including 5 dimensional datasets, e.g. eps_sample, UBI etc) for grainmaps.py
2 parents f526f2c + 003bb67 commit a47daab

File tree

1 file changed

+103
-0
lines changed

1 file changed

+103
-0
lines changed

ImageD11/forward_model/grainmaps.py

Lines changed: 103 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -446,6 +446,109 @@ def DS_merge_and_identify_grains_sub(DS, FirstGrainID = 0, min_misori = 3.0, dis
446446
print('{} grains identified out of {} regions.'.format(i+1, id0))
447447

448448
return DS_merge
449+
450+
451+
def DS_to_paraview(DS, h5name = 'DS.h5'):
    """
    Write an .xdmf file that lets you read the DS data with ParaView.

    3D datasets are exported as scalar fields, 4D datasets as vector
    fields (e.g. IPF colours) and 5D datasets (a 3x3 matrix per voxel,
    e.g. eps_sample, UBI) as nine scalar component fields selected with
    XDMF HyperSlab DataItems. Entries of any other rank are skipped.

    Arguments:
    DS -- a dictionary containing a DS-like map from the output of the
          ImageD11.forward_model.grainmap class; must contain "labels"
          (3D array) and "voxel_size" (3 spacings, assumed to be in the
          same axis order as "labels" -- TODO confirm with caller)
    h5name -- the corresponding h5 filename; if it does not exist, I
              will create one holding every entry of DS
    """
    assert 'labels' in DS.keys() and 'voxel_size' in DS.keys(), 'DS keys must contain "labels" and "voxel_size"'
    h5_relpath = os.path.split(h5name)[1]
    # Derive the .xdmf name with splitext: str.replace('.h5', '.xdmf')
    # would corrupt names containing '.h5' mid-string (my.h5data.h5 ->
    # my.xdmfdata.xdmf) and, for any other extension, would leave the
    # name unchanged and overwrite the data file itself.
    xdmf_filename = os.path.splitext(h5name)[0] + '.xdmf'
    # write an .h5 file if it does not exist
    if not os.path.exists(h5name):
        print('{} is not found; I am creating one ...'.format(h5name))
        with h5py.File(h5name, 'w') as hout:
            for key, value in DS.items():
                hout.create_dataset(key, data = value)
        print('Done with saving DS to {}'.format(h5name))

    dims = DS['labels'].shape
    tensor_dims = dims + (3, 3,)
    # cell-centred data: the mesh has one more node than cells per axis
    MeshDimensions = (dims[0] + 1, dims[1] + 1, dims[2] + 1)

    MeshDimensionsStr = 'Dimensions="%d %d %d"' % MeshDimensions
    ScalarDimensionsStr = 'Dimensions="%d %d %d"' % dims
    TensorDimensionsStr = 'Dimensions="%d %d %d %d %d"' % tensor_dims

    steps = tuple(DS['voxel_size'])

    # Write .xdmf file
    with open(xdmf_filename, 'wt') as fileID:
        fileID.write('<?xml version="1.0"?>\n')
        fileID.write('<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd"[]>\n')
        fileID.write('<Xdmf xmlns:xi="http://www.w3.org/2003/XInclude" Version="2.2">\n')
        fileID.write(' <Domain>\n')
        fileID.write('  <Grid Name="GM3D" GridType="Uniform">\n')
        fileID.write('   <Topology TopologyType="3DCoRectMesh" %s></Topology>\n' % MeshDimensionsStr)
        fileID.write('    <Geometry Type="ORIGIN_DXDYDZ">\n')
        fileID.write('    <!-- Origin Z, Y, X -->\n')
        fileID.write('    <DataItem Format="XML" Dimensions="3">0 0 0</DataItem>\n')
        fileID.write('    <!-- DxDyDz (Spacing/Resolution) Z, Y, X -->\n')
        fileID.write('    <DataItem Format="XML" Dimensions="3">%.6f %.6f %.6f</DataItem>\n' % steps)
        fileID.write('    </Geometry>\n')

        # iterate over all the keys
        for key_name, array in DS.items():
            # Skip entries that are not array-like (e.g. voxel_size given
            # as a plain list) instead of crashing on a missing .shape.
            map_shape = getattr(array, 'shape', None)
            if map_shape is None:
                continue
            n_dims = len(map_shape)
            if n_dims == 3:
                # scalar field
                fileID.write('    <Attribute Name="%s" AttributeType="Scalar" Center="Cell">\n' % key_name)
                fileID.write('      <DataItem Format="HDF" %s NumberType="Float" Precision="6" >%s:/%s</DataItem>\n' % (
                    ScalarDimensionsStr, h5_relpath, '/' + key_name))
                fileID.write('    </Attribute>\n')
            elif n_dims == 4:
                # vector field (like IPF); declare the dataset's own shape so
                # the xdmf dimensions always match the data on disk
                fileID.write('    <Attribute Name="%s" AttributeType="Vector" Center="Cell">\n' % key_name)
                fileID.write('      <DataItem Format="HDF" Dimensions="%d %d %d %d" NumberType="Float" Precision="6" >%s:/%s</DataItem>\n' % (
                    map_shape + (h5_relpath, '/' + key_name)))
                fileID.write('    </Attribute>\n')
            elif n_dims == 5:
                assert map_shape == tensor_dims, "Tensor {} shape {} does not match {}".format(key_name, map_shape, tensor_dims)
                # Name the 9 tensor components, e.g. (xx, xy, xz, yx, yy, yz,
                # zx, zy, zz) for eps_sample, (11, 12, ...) otherwise.
                if key_name == 'eps_sample':
                    suffixes = ('xx', 'xy', 'xz', 'yx', 'yy', 'yz', 'zx', 'zy', 'zz')
                else:
                    suffixes = ('11', '12', '13', '21', '22', '23', '31', '32', '33')
                tensor_components = [(suffixes[3 * i + j], i, j) for i in range(3) for j in range(3)]
                for comp_name, i, j in tensor_components:
                    attr_name = "{}_{}".format(key_name, comp_name)
                    fileID.write('    <Attribute Name="%s" AttributeType="Scalar" Center="Cell">\n' % attr_name)
                    # HyperSlab picks the fixed (i, j) component over the full 3D map
                    fileID.write('      <DataItem ItemType="HyperSlab" %s>\n' % ScalarDimensionsStr)
                    fileID.write('        <DataItem Dimensions="3 5" Format="XML">\n')
                    fileID.write('        %d %d %d %d %d\n' % (0, 0, 0, i, j))  # Origin: fix i, j for the component
                    fileID.write('        %d %d %d %d %d\n' % (1, 1, 1, 1, 1))  # Stride: 1 in all dims
                    fileID.write('        %d %d %d %d %d\n' % (dims[0], dims[1], dims[2], 1, 1))  # Count: full 3D, 1x1 in tensor dims
                    fileID.write('        </DataItem>\n')
                    fileID.write('        <DataItem Format="HDF" NumberType="Float" Precision="6" %s >%s:/%s</DataItem>\n' % (
                        TensorDimensionsStr, h5_relpath, '/' + key_name))
                    fileID.write('      </DataItem>\n')
                    fileID.write('    </Attribute>\n')
            # datasets of any other rank are skipped silently
        fileID.write('  </Grid>\n')
        fileID.write(' </Domain>\n')
        fileID.write('</Xdmf>\n')
    print('Done with writing xdmf file to {}'.format(xdmf_filename))
449552

450553

451554
def indexing_iterative(cf_strong, grains, ds, ucell, pars, ds_max = 1.6, tol_angle = 0.25, tol_pixel =3, peak_assign_tol = 0.25, tol_misori = 3, crystal_system='cubic', **kwargs):

0 commit comments

Comments
 (0)