diff --git a/README.rst b/README.rst index 6fe3e3b60ee9db6c1c57abb2f65430dff5aab9e0..bc568f81cda6d6ce8325385121e096740a0f578a 100644 --- a/README.rst +++ b/README.rst @@ -38,6 +38,8 @@ Per default, only the strictly required libraries are installed, but there are a * ``colors`` will install the `cmocean <https://matplotlib.org/cmocean/>`_ package, whose ``balance`` color map is used as a default for the ``imshow`` command, if available. +* ``3d`` will install the `mayavi <https://docs.enthought.com/mayavi/mayavi/>`_ package for 3D plotting capabilities. + * ``tests`` will install all dependencies that are needed to test the package (usually not necessary for the average user). * ``all`` will install all of the dependencies listed above. diff --git a/docs/fields.rst b/docs/fields.rst index e5b3e9ee1e382b3a5536ecadfb9ccaf96fa2e264..901c3ea2e5389306be4d6162424452c3664df085 100644 --- a/docs/fields.rst +++ b/docs/fields.rst @@ -3,6 +3,8 @@ The Field container class General empyre.fields docu here! +All functions implemented in the subpackages can be accessed directly in the ``empyre.fields`` namespace. + The field module ---------------- diff --git a/docs/io.rst b/docs/io.rst new file mode 100644 index 0000000000000000000000000000000000000000..66ecee38bf78a08b9affc46af8249b5d23a60eca --- /dev/null +++ b/docs/io.rst @@ -0,0 +1,20 @@ +The io input/output submodule +============================= + +General empyre.io docu here! + + +The io_field module +------------------- + +.. automodule:: empyre.io.io_field + :members: + :show-inheritance: + + +The field_plugins module +------------------------ + +.. automodule:: empyre.io.field_plugins + :members: + :show-inheritance: diff --git a/docs/vis.rst b/docs/vis.rst index 517d646eacc53ae006b1abfc17943f8557675a1b..565c29c1dab3783190c823d29c9250f4cec40b37 100644 --- a/docs/vis.rst +++ b/docs/vis.rst @@ -3,9 +3,32 @@ The vis visualization submodule General empyre.vis docu here! +All functions implemented in the subpackages can be accessed directly in the ``empyre.vis`` namespace, with the exception +of the ``colors`` module, whose methods are accessible via ``empyre.vis.colors``. + Note that the y-axis of all image plots is flipped in comparison to ``matplotlib.pyplot.imshow``, i.e. the origin is `'lower'` in this case instead of `'upper'`. +Please note that 3D plots only work in your Jupyter notebook after first installing and then enabling the corresponding +mayavi extension: + +.. code-block:: bash + + $ jupyter nbextension install --py mayavi --user + $ jupyter nbextension enable mayavi --user --py + +If you'd like to utilize 3D plots in your Jupyter Notebook or in the Python Interactive Window, you also have to +initialize the notebook backend first: + +.. code-block:: python + + >>> from mayavi import mlab + >>> mlab.init_notebook('x3d') + +The backend chosen by the `init_notebook` function can be `ipy` (the default, which produces static images in Jupyter +Notebook and does not work in VSCode), `x3d` (which produces interactive plots in Jupyter Notebook, but does not seem to work in +VSCode), or `png` (which produces PNG images that work in both Jupyter Notebooks and VSCode). +For more information and a quick introduction visit the `mayavi tips & tricks section <https://docs.enthought.com/mayavi/mayavi/tips.html>`_.
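+
+As a minimal usage sketch (assuming mayavi is installed via the ``3d`` extra and, for notebooks, initialized as shown
+above), a vector field created with the ``empyre.fields`` helpers can be passed directly to the 3D plotting functions:
+
+.. code-block:: python
+
+    >>> from empyre import fields, vis
+    >>> field = fields.create_vector_vortex(dim=(32, 32, 32))  # 3-component vortex field on a 32x32x32 grid
+    >>> vis.quiver3d(field, ar_dens=4)
+
+Here ``ar_dens=4`` plots only every fourth arrow, which keeps larger fields responsive.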
+ The plot2d module ----------------- diff --git a/environment.yml b/environment.yml index aa4e226b9104f4b416cd1991329c05501c6b012d..b605553e40ca7e6f3df0b8a9e56b62133b1cc6d6 100644 --- a/environment.yml +++ b/environment.yml @@ -22,13 +22,15 @@ dependencies: # File IO: - hyperspy=1.5 - hyperspy-gui-ipywidgets=1.2 - #- h5py=2.9 # TODO: depends on hyperspy? Not needed here? + #- h5py=2.9 # TODO: dependency of hyperspy? Not needed here? # Plotting and colors: - matplotlib=3.1 - Pillow=6.1 - cmocean=2.0 - #- qt=5.9 # TODO: only needed for mayavi? which version? - - mayavi=4.6 # TODO: Get rid of! + # 3D plotting: + - mayavi=4.7 + - ipyevents=0.7 + - ipywidgets=7.5 # Testing: - pytest=5.0 - pytest-cov=2.7 diff --git a/setup.cfg b/setup.cfg index 19c5d29969dcff73b499faaa7eebbdd3615a3e71..d63b2a8a357234c96bc1a7c070b34a17f2ba429b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -7,7 +7,7 @@ [metadata] name = empyre -version = 0.1.0 +version = 0.2.0 author = Jan Caron author-email = j.caron@fz-juelich.de description = Electron Microscopy Python Reconstruction @@ -55,6 +55,8 @@ fftw = pyfftw colors = cmocean +3d = + mayavi >= 4.7 all = pyfftw cmocean @@ -68,6 +70,7 @@ tests = # TODO: pip install .[io] is not working because hyperspy depends on trait which has no wheels on PyPI at the moment... # TODO: See https://github.com/hyperspy/hyperspy/issues/2315 and https://github.com/enthought/traits/issues/357 # TODO: Add hyperspy back as soon (if?) this is resolved... Until then: install hyperspy with conda! +# TODO: mayavi has the same exact problem and tvtk is apparently also a problem... # CONFIGURATION FOR TESTING: diff --git a/src/empyre/fields/field.py b/src/empyre/fields/field.py index fe8ccf61b66ebf4eee2a7b14ed04901aa09376c6..66f3f00fb559655362a724b320de11d4d8673877 100644 --- a/src/empyre/fields/field.py +++ b/src/empyre/fields/field.py @@ -95,13 +95,13 @@ class Field(NDArrayOperatorsMixin): @scale.setter def scale(self, scale): if isinstance(scale, Number): # Scale is the same for each dimension! - self.__scale = (scale,) * len(self.dim) + self.__scale = (float(scale),) * len(self.dim) elif isinstance(scale, tuple): ndim = len(self.dim) assert len(scale) == ndim, f'Each of the {ndim} dimensions needs a scale, but {scale} was given!' self.__scale = scale else: - raise AssertionError('Scaling has to be a number or a tuple of numbers!') + raise AssertionError(f'Scaling has to be a number or a tuple of numbers, was {scale} instead!') @property def shape(self): @@ -135,13 +135,13 @@ class Field(NDArrayOperatorsMixin): def mask(self): # TODO: Function or property? Philosophical question? return Field(np.where(np.asarray(self.amp) > 0, True, False), self.scale, vector=False) - def __init__(self, data, scale=1, vector=False): + def __init__(self, data, scale=1.0, vector=False): self._log.debug('Calling __init__') self.__data = data self.vector = vector # Set vector before scale, because scale needs to know if vector (via calling dim)! self.scale = scale if self.vector: - assert self.ncomp in (2, 3), 'Only 2- or 3-dimensional vector fields are supported!' + assert self.ncomp in (2, 3), 'Only 2- or 3-component vector fields are supported!' 
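+            # NOTE: For vector fields, the components are expected along the last axis of the underlying data array.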
self._log.debug('Created ' + str(self)) def __repr__(self): @@ -292,7 +292,7 @@ class Field(NDArrayOperatorsMixin): return cls(np.stack(scalar_list, axis=-1), scalar_list[0].scale, vector=True) @classmethod - def from_signal(cls, signal, scale=None, vector=False): + def from_signal(cls, signal, scale=None, vector=None, comp_pos=-1): """Convert a :class:`~hyperspy.signals.Signal` object to a :class:`~.Field` object. Parameters ---------- @@ -310,6 +310,11 @@ If set to True, forces the signal to be interpreted as a vector field. EMPyRe will check if the first axis is named 'vector components' (EMPyRe saves vector fields like this). If this is the case, vector will be automatically set to True and the signal will also be interpreted as a vector field. + comp_pos: int, optional + The index of the axis containing the vector components (if `vector=True`). EMPyRe needs this to be the last + axis (index `-1`, which is the default). In case another position is given, the vector components will be + moved to the last axis. Old Pyramid files will have this axis at index `0`, so use this for backwards + compatibility. Notes ----- @@ -318,10 +323,12 @@ """ cls._log.debug('Calling from_signal') data = signal.data - if signal.axes_manager[0].name == 'vector components': - vector = True # Automatic detection! - if scale is None: # If not provided, try to read from axes_manager: - scale = [signal.axes_manager[i].scale for i in range(len(data.shape) - vector)] # One less axis if vector! + if vector and comp_pos != -1: # Component axis should be last, but is currently at comp_pos -> move it to the end: + data = np.moveaxis(data, source=comp_pos, destination=-1) + if vector is None: # Automatic detection: + vector = True if signal.axes_manager[0].name == 'vector components' else False + if scale is None: # If not provided, try to read from axes_manager, one less axis if vector!: + scale = tuple([signal.axes_manager[i].scale for i in range(len(data.shape) - vector)]) return cls(data, scale, vector) def to_signal(self): diff --git a/src/empyre/fields/shapes.py b/src/empyre/fields/shapes.py index 792e2ce47f486474e4293f5524cf15e11c82d44d..bb20cd3964dc121395e3d8ed0571c6469351e535 100644 --- a/src/empyre/fields/shapes.py +++ b/src/empyre/fields/shapes.py @@ -20,7 +20,7 @@ _log = logging.getLogger(__name__) # TODO: TEST!!! -def create_shape_slab(dim, center=None, width=None, scale=1): +def create_shape_slab(dim, center=None, width=None, scale=1.0): """Creates a Field object with the shape of a slab as a scalar field in arbitrary dimensions. Attributes @@ -51,7 +51,7 @@ def create_shape_slab(dim, center=None, width=None, scale=1): return Field(data=data, scale=scale, vector=False) -def create_shape_disc(dim, center=None, radius=None, height=None, axis=0, scale=1): +def create_shape_disc(dim, center=None, radius=None, height=None, axis=0, scale=1.0): """Creates a Field object with the shape of a cylindrical disc in 2D or 3D. Attributes @@ -102,7 +102,7 @@ def create_shape_disc(dim, center=None, radius=None, height=None, axis=0, scale= return Field(data=data, scale=scale, vector=False) -def create_shape_ellipse(dim, center=None, width=None, height=None, axis=0, scale=1): +def create_shape_ellipse(dim, center=None, width=None, height=None, axis=0, scale=1.0): """Creates a Field object with the shape of an ellipse in 2D or 3D.
Attributes @@ -154,7 +154,7 @@ def create_shape_ellipse(dim, center=None, width=None, height=None, axis=0, scal return Field(data=data, scale=scale, vector=False) -def create_shape_ellipsoid(dim, center=None, width=None, scale=1): +def create_shape_ellipsoid(dim, center=None, width=None, scale=1.0): """Creates a Field object with the shape of an ellipsoid in arbitrary dimensions. Attributes @@ -182,7 +182,7 @@ def create_shape_ellipsoid(dim, center=None, width=None, scale=1): return Field(data=data, scale=scale, vector=False) -def create_shape_sphere(dim, center=None, radius=None, scale=1): +def create_shape_sphere(dim, center=None, radius=None, scale=1.0): """Creates a Field object with the shape of a sphere in arbitrary dimensions. Attributes @@ -211,7 +211,7 @@ def create_shape_sphere(dim, center=None, radius=None, scale=1): return Field(data=data, scale=scale, vector=False) -def create_shape_filament(dim, pos=None, axis=0, scale=1): +def create_shape_filament(dim, pos=None, axis=0, scale=1.0): """Creates a Field object with the shape of a filament in arbitrary dimension. Parameters @@ -239,7 +239,7 @@ def create_shape_filament(dim, pos=None, axis=0, scale=1): return Field(data=data, scale=scale, vector=False) -def create_shape_voxel(dim, pos=None, scale=1): +def create_shape_voxel(dim, pos=None, scale=1.0): """Creates a Field object with the shape of a single voxel in arbitrary dimension. Parameters diff --git a/src/empyre/fields/vectors.py b/src/empyre/fields/vectors.py index f6925fdc87997964c06ef54afa84890b452317c0..9d4305a34f870723190f2297c9a99934e7c010b0 100644 --- a/src/empyre/fields/vectors.py +++ b/src/empyre/fields/vectors.py @@ -17,7 +17,7 @@ __all__ = ['create_vector_homog', 'create_vector_vortex', 'create_vector_skyrmio _log = logging.getLogger(__name__) -def create_vector_homog(dim, phi=0, theta=None, scale=1): +def create_vector_homog(dim, phi=0, theta=None, scale=1.0): """Field subclass implementing a homogeneous vector field with 3 components in 2 or 3 dimensions. Attributes @@ -37,11 +37,13 @@ def create_vector_homog(dim, phi=0, theta=None, scale=1): assert len(dim) in (2, 3), 'Disc can only be build in 2 or 3 dimensions!' assert isinstance(phi, Number), 'phi has to be an angle in radians!' assert isinstance(theta, Number) or theta is None, 'theta has to be an angle in radians or None!' - if theta is None: + if len(dim) == 2 and theta is None: # 2D and 3rd component not explicitely wanted: y_comp = np.ones(dim) * np.sin(phi) x_comp = np.ones(dim) * np.cos(phi) data = np.stack([x_comp, y_comp], axis=-1) - else: + else: # 3D, always have a third component: + if theta is None: + theta = np.pi/2 # xy-plane if not specified otherwise! z_comp = np.ones(dim) * np.cos(theta) y_comp = np.ones(dim) * np.sin(theta) * np.sin(phi) x_comp = np.ones(dim) * np.sin(theta) * np.cos(phi) @@ -49,7 +51,7 @@ def create_vector_homog(dim, phi=0, theta=None, scale=1): return Field(data=data, scale=scale, vector=True) -def create_vector_vortex(dim, center=None, core_r=0, oop_r=None, axis=0, scale=1): +def create_vector_vortex(dim, center=None, core_r=0, oop_r=None, axis=0, scale=1.0): # TODO: oop and core_r documentation!!! General description! # TODO: CHIRALITY """Field subclass implementing a vortex vector field with 3 components in 2 or 3 dimensions. 
@@ -123,7 +125,7 @@ def create_vector_vortex(dim, center=None, core_r=0, oop_r=None, axis=0, scale=1 return Field(data=data, scale=scale, vector=True) -def create_vector_skyrmion(dim, center=None, phi_0=0, skyrm_d=None, wall_d=None, axis=0, scale=1): +def create_vector_skyrmion(dim, center=None, phi_0=0, skyrm_d=None, wall_d=None, axis=0, scale=1.0): """Create a 3-dimensional magnetic Bloch or Neel type skyrmion distribution. Parameters @@ -230,7 +232,7 @@ def create_vector_skyrmion(dim, center=None, phi_0=0, skyrm_d=None, wall_d=None, return Field(data=data, scale=scale, vector=True) -def create_vector_singularity(dim, center=None, scale=1): +def create_vector_singularity(dim, center=None, scale=1.0): """Create a 3-dimensional magnetic distribution of a homogeneous magnetized object. Parameters diff --git a/src/empyre/io/__init__.py b/src/empyre/io/__init__.py index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..9ef67883e4f9cc8148f4f14afb46a90e9db38d0f 100644 --- a/src/empyre/io/__init__.py +++ b/src/empyre/io/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 by Forschungszentrum Juelich GmbH +# Author: J. Caron +# +"""Subpackage containing EMPyRe IO functionality for several EMPyRe classes.""" + + +from .io_field import * + + +__all__ = [] +__all__.extend(io_field.__all__) + + +del io_field diff --git a/src/empyre/io/field_plugins/__init__.py b/src/empyre/io/field_plugins/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7c8ac2a212394e92a8870acf37ffce09021a0f15 --- /dev/null +++ b/src/empyre/io/field_plugins/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 by Forschungszentrum Juelich GmbH +# Author: J. Caron +# +"""Subpackage containing EMPyRe IO functionality for the Field class.""" + + +from . import llg, numpy, ovf, tec, text, vtk + + +plugin_list = [llg, numpy, ovf, tec, text, vtk] +__all__ = ['plugin_list'] diff --git a/src/empyre/io/field_plugins/llg.py b/src/empyre/io/field_plugins/llg.py new file mode 100644 index 0000000000000000000000000000000000000000..895c492daf0da09650e8c278f6e142ca0f60be3d --- /dev/null +++ b/src/empyre/io/field_plugins/llg.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 by Forschungszentrum Juelich GmbH +# Author: J. Caron +# +"""IO plugin for LLG format.""" + + +import logging + +import numpy as np + +from ...fields.field import Field + + +_log = logging.getLogger(__name__) + +file_extensions = ('.llg',) # Recognised file extensions + + +def reader(filename, scale=None, vector=None, **kwargs): + _log.debug('Call reader') + if vector is None: + vector = True + assert vector is True, 'Only vector fields can be read from the llg file format!' + SCALE = 1.0E-9 / 1.0E-2 # From cm to nm + data_columns = np.genfromtxt(filename, skip_header=2) + dim = tuple(np.genfromtxt(filename, dtype=int, skip_header=1, skip_footer=len(data_columns[:, 0]))) + if scale is None: # Otherwise overwrite! 
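+        # The llg format stores the grid positions in cm with x varying fastest, so the grid spacing along each
+        # axis can be recovered from coordinate differences at the strides computed below (this assumption about
+        # the file layout matches the reshaping of the magnetization columns further down).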
+ stride_x = 1 # x varies fastest + stride_y = dim[2] # one y step for dim[2] x steps + stride_z = dim[1] * dim[2] # one z step for one x-y layer (dim[1]*dim[2]) + scale_x = (data_columns[stride_x, 0] - data_columns[0, 0]) / SCALE # first column varies in x + scale_y = (data_columns[stride_y, 1] - data_columns[0, 1]) / SCALE # second column varies in y + scale_z = (data_columns[stride_z, 2] - data_columns[0, 2]) / SCALE # third column varies in z + scale = (scale_z, scale_y, scale_x) + x_mag, y_mag, z_mag = data_columns[:, 3:6].T + data = np.stack((x_mag.reshape(dim), y_mag.reshape(dim), z_mag.reshape(dim)), axis=-1) + return Field(data, scale, vector=True) + + +def writer(filename, field, **kwargs): + _log.debug('Call writer') + assert field.vector and len(field.dim) == 3, 'Only 3D vector fields can be saved to the llg file format!' + SCALE = 1.0E-9 / 1.0E-2 # from nm to cm + # Create 3D meshgrid and reshape it and the field into a list where x varies first: + zzz, yyy, xxx = (np.indices(field.dim) + 0.5) * np.reshape(field.scale, (3, 1, 1, 1)) * SCALE # broadcast shape! + z_coord, y_coord, x_coord = np.ravel(zzz), np.ravel(yyy), np.ravel(xxx) # Turn into vectors! + x_comp, y_comp, z_comp = field.comp # Extract scalar field components! + x_vec, y_vec, z_vec = np.ravel(x_comp.data), np.ravel(y_comp.data), np.ravel(z_comp.data) # Turn into vectors! + data = np.array([x_coord, y_coord, z_coord, x_vec, y_vec, z_vec]).T + # Save data to file: + with open(filename, 'w') as mag_file: + mag_file.write('LLGFileCreator: EMPyRe vector Field\n') + mag_file.write(' {:d} {:d} {:d}\n'.format(*field.dim)) + mag_file.writelines('\n'.join(' '.join('{:7.6e}'.format(cell) for cell in row) for row in data)) diff --git a/src/empyre/io/field_plugins/numpy.py b/src/empyre/io/field_plugins/numpy.py new file mode 100644 index 0000000000000000000000000000000000000000..fabb6e8e93a2baab64ee5a728c84d2e3bb597972 --- /dev/null +++ b/src/empyre/io/field_plugins/numpy.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 by Forschungszentrum Juelich GmbH +# Author: J. Caron +# +"""IO plugin for the numpy format.""" + + +import logging + +import numpy as np + +from ...fields.field import Field + + +_log = logging.getLogger(__name__) + +file_extensions = ('.npy', '.npz') # Recognised file extensions + + +def reader(filename, scale=None, vector=None, **kwargs): + _log.debug('Call reader') + if vector is None: + vector = False + if scale is None: + scale = 1.0 + return Field(np.load(filename, **kwargs), scale, vector) + + +def writer(filename, field, **kwargs): + _log.debug('Call writer') + np.save(filename, field.data, **kwargs) diff --git a/src/empyre/io/field_plugins/ovf.py b/src/empyre/io/field_plugins/ovf.py new file mode 100644 index 0000000000000000000000000000000000000000..80e59df3be3cdb2ff50f388a88f5aac1f365d278 --- /dev/null +++ b/src/empyre/io/field_plugins/ovf.py @@ -0,0 +1,176 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 by Forschungszentrum Juelich GmbH +# Author: J. 
Caron +# +"""IO plugin for simple text format.""" + + +import logging +import os + +import numpy as np + +from ...fields.field import Field + + +_log = logging.getLogger(__name__) + +file_extensions = ('.ovf', '.omf', '.ohf', 'obf') # Recognised file extensions + + +def reader(filename, scale=None, vector=None, segment=None, **kwargs): + """More info at: + + http://math.nist.gov/oommf/doc/userguide11b2/userguide/vectorfieldformat.html + http://math.nist.gov/oommf/doc/userguide12a5/userguide/OVF_2.0_format.html + + """ + _log.debug('Calling reader') + if vector is None: + vector = True + assert vector is True, 'Only vector fields can be loaded from ovf-files!' + with open(filename, 'rb') as load_file: + line = load_file.readline() + assert line.startswith(b'# OOMMF') # File has OVF format! + read_header, read_data = False, False + header = {'version': line.split()[-1].decode('utf-8')} + x_mag, y_mag, z_mag = [], [], [] + data_mode = None + while True: + # --- READ START OF FILE OR IN BETWEEN SEGMENTS LINE BY LINE --------------------------- + if not read_header and not read_data: # Start of file or between segments! + line = load_file.readline() + if line == b'': + break # End of file is reached! + if line.startswith(b'# Segment count'): + seg_count = int(line.split()[-1]) # Total number of segments (often just 1)! + seg_curr = 0 # Current segment (0: not in first segment, yet!) + if seg_count > 1: # If multiple segments, check if "segment" was set correctly: + assert segment is not None, (f'Multiple ({seg_count}) segments were found! ' + 'Chose one via the segment parameter!') + elif segment is None: # Only one segment AND parameter not set: + segment = 1 # Default to the first/only segment! + assert 0 < segment <= seg_count, (f'parameter segment={segment} out of bounds, ' + f'Use value between 1 and {seg_count}!') + header['segment_count'] = seg_count + elif line.startswith(b'# Begin: Segment'): # Segment start! + seg_curr += 1 + if seg_curr > segment: + break # Stop reading the file! + elif line.startswith(b'# Begin: Header'): # Header start! + read_header = True + elif line.startswith(b'# Begin: Data'): # Data start! + read_data = True + data_mode = ' '.join(line.decode('utf-8').split()[3:]) + assert data_mode in ['text', 'Text', 'Binary 4', 'Binary 8'], \ + f'Data mode {data_mode} is currently not supported by this reader!' + assert header.get('meshtype') == 'rectangular', \ + 'Only rectangular grids can be currently read!' + # --- READ HEADER LINE BY LINE --------------------------------------------------------- + elif read_header: + line = load_file.readline() + if line.startswith(b'# End: Header'): # Header is done: + read_header = False + continue + line = line.decode('utf-8') # Decode to use strings here! 
+ line_list = line.split() + if '##' in line_list: # Strip trailing comments: + del line_list[line_list.index('##'):] + if len(line_list) <= 1: # Just '#' or empty line: + continue + key, value = line_list[1].strip(':'), ' '.join(line_list[2:]) + if key not in header: # Add new key, value pair if not existant: + header[key] = value + elif key == 'Desc': # Description can go over several lines: + header['Desc'] = ' '.join([header['Desc'], value]) + # --- READ DATA LINE BY LINE ----------------------------------------------------------- + elif read_data: # Currently in a data block: + if data_mode in ['text', 'Text']: # Read data as text, line by line: + line = load_file.readline() + if line.startswith(b'# End: Data'): + read_data = False # Stop reading data and search for new segments! + continue + elif seg_curr < segment: # Do nothing with the line if wrong segment! + continue + else: + x, y, z = [float(i) for i in line.split()] + x_mag.append(x) + y_mag.append(y) + z_mag.append(z) + elif 'Binary' in data_mode: # Read data as binary, all bytes at the same time: + # Currently every segment is read until the wanted one is processed. Only that one is returned! + count = int(data_mode.split()[-1]) # Either 4 or 8! + if header['version'] == '1.0': # Big endian float: + dtype = f'>f{count}' + elif header['version'] == '2.0': # Little endian float: + dtype = f'<f{count}' + test = np.fromfile(load_file, dtype=dtype, count=1) # Read test byte! + if count == 4: # Binary 4: + assert test == 1234567.0, 'Wrong test bytes!' + elif count == 8: # Binary 8: + assert test == 123456789012345.0, 'Wrong test bytes!' + dim = (int(header['znodes']), int(header['ynodes']), int(header['xnodes'])) + data_raw = np.fromfile(load_file, dtype=dtype, count=3*np.prod(dim)) + x_mag, y_mag, z_mag = data_raw[0::3], data_raw[1::3], data_raw[2::3] + read_data = False # Stop reading data and search for new segments (if any). + # --- READING DONE ------------------------------------------------------------------------- + # Format after reading: + dim = (int(header['znodes']), int(header['ynodes']), int(header['xnodes'])) + x_mag = np.asarray(x_mag).reshape(dim) + y_mag = np.asarray(y_mag).reshape(dim) + z_mag = np.asarray(z_mag).reshape(dim) + data = np.stack((x_mag, y_mag, z_mag), axis=-1) * float(header.get('valuemultiplier', 1)) + if scale is None: + unit = header.get('meshunit', 'nm') + if unit == 'unspecified': + unit = 'nm' + _log.info(f'unit: {unit}') + unit_scale = {'m': 1e9, 'mm': 1e6, 'µm': 1e3, 'nm': 1}[unit] + xstep = float(header.get('xstepsize')) * unit_scale + ystep = float(header.get('ystepsize')) * unit_scale + zstep = float(header.get('zstepsize')) * unit_scale + scale = (zstep, ystep, xstep) + return Field(data, scale=scale, vector=True) + + +def writer(filename, field, **kwargs): + _log.debug('Call writer') + assert field.vector and len(field.dim) == 3, 'Only 3D vector fields can be saved to ovf files!' 
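+    # This writer emits a single-segment OVF 2.0 file in 'Data Text' mode; the rectangular mesh geometry is given
+    # implicitly through the step sizes and node counts written to the header below.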
+ with open(filename, 'w') as save_file: + save_file.write('# OOMMF OVF 2.0\n') + save_file.write('# Segment count: 1\n') + save_file.write('# Begin: Segment\n') + # Write Header: + save_file.write('# Begin: Header\n') + name = os.path.split(filename)[1] + save_file.write(f'# Title: PYRAMID-VECTORDATA {name}\n') + save_file.write('# meshtype: rectangular\n') + save_file.write('# meshunit: nm\n') + save_file.write('# valueunit: A/m\n') + save_file.write('# valuemultiplier: 1.\n') + save_file.write('# xmin: 0.\n') + save_file.write('# ymin: 0.\n') + save_file.write('# zmin: 0.\n') + save_file.write(f'# xmax: {field.scale[2] * field.dim[2]}\n') + save_file.write(f'# ymax: {field.scale[1] * field.dim[1]}\n') + save_file.write(f'# zmax: {field.scale[0] * field.dim[0]}\n') + save_file.write('# xbase: 0.\n') + save_file.write('# ybase: 0.\n') + save_file.write('# zbase: 0.\n') + save_file.write(f'# xstepsize: {field.scale[2]}\n') + save_file.write(f'# ystepsize: {field.scale[1]}\n') + save_file.write(f'# zstepsize: {field.scale[0]}\n') + save_file.write(f'# xnodes: {field.dim[2]}\n') + save_file.write(f'# ynodes: {field.dim[1]}\n') + save_file.write(f'# znodes: {field.dim[0]}\n') + save_file.write('# End: Header\n') + # Write data: + save_file.write('# Begin: Data Text\n') + x_mag, y_mag, z_mag = field.comp + x_mag = x_mag.data.ravel() + y_mag = y_mag.data.ravel() + z_mag = z_mag.data.ravel() + for i in range(np.prod(field.dim)): + save_file.write(f'{x_mag[i]:g} {y_mag[i]:g} {z_mag[i]:g}\n') + save_file.write('# End: Data Text\n') + save_file.write('# End: Segment\n') diff --git a/src/empyre/io/field_plugins/tec.py b/src/empyre/io/field_plugins/tec.py new file mode 100644 index 0000000000000000000000000000000000000000..1fd7c1c1b6eb01e85b302eb5ef2e9dc622792088 --- /dev/null +++ b/src/empyre/io/field_plugins/tec.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 by Forschungszentrum Juelich GmbH +# Author: J. Caron +# +"""IO plugin for simple text format.""" + + +import logging +import re + +import numpy as np + +from ...fields.field import Field +from ...utils.misc import interp_to_regular_grid + + +_log = logging.getLogger(__name__) + +file_extensions = ('.tec',) # Recognised file extensions + + +def reader(filename, scale=None, vector=None, **kwargs): + assert isinstance(scale, tuple), 'The scale must be a tuple, each entry corresponding to one grid dimensions!' + _log.debug('Call reader') + if vector is None: + vector = True + assert vector is True, 'Only vector fields can be loaded from tec-files!' + with open(filename, 'r') as mag_file: + lines = mag_file.readlines() # Read in lines! + match = re.search(R'N=(\d+)', lines[2]) # Extract number of points from third line! 
+ if match: + n_points = int(match.group(1)) + else: + raise IOError('File does not seem to match .tec format!') + n_head, n_foot = 3, len(lines) - (3 + n_points) + # Read in data: + data_raw = np.genfromtxt(filename, skip_header=n_head, skip_footer=n_foot) + if scale is None: + raise ValueError('For the interpolation of unstructured grids, the `scale` parameter is required!') + data = interp_to_regular_grid(data_raw[:, :3], data_raw[:, 3:], scale, **kwargs) + return Field(data, scale, vector=vector) + + +def writer(filename, field, **kwargs): + _log.debug('Call writer') + raise NotImplementedError('A writer for this extension is not yet implemented!') diff --git a/src/empyre/io/field_plugins/text.py b/src/empyre/io/field_plugins/text.py new file mode 100644 index 0000000000000000000000000000000000000000..c63bd113cb957896d7e1a1d61596aef94f83367a --- /dev/null +++ b/src/empyre/io/field_plugins/text.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 by Forschungszentrum Juelich GmbH +# Author: J. Caron +# +"""IO plugin for simple text format.""" + + +import logging +import ast + +import numpy as np + +from ...fields.field import Field + + +_log = logging.getLogger(__name__) + +file_extensions = ('.txt',) # Recognised file extensions + + +def reader(filename, scale=None, vector=None, **kwargs): + _log.debug('Call reader') + if vector is None: + vector = False + assert vector is False, 'Only scalar 2D fields can currently be read with this file reader!' + with open(filename, 'r') as load_file: # Read data: + empyre_format = load_file.readline().startswith('EMPYRE-FORMAT') + if empyre_format: # File has EMPyRe structure: + scale = ast.literal_eval(load_file.readline()[8:-4]) # [8:-4] takes just the scale string! + data = np.loadtxt(filename, delimiter='\t', skiprows=2) # skips header! + else: # Try default with provided kwargs: + scale = 1.0 if scale is None else scale # Set default if not provided! + data = np.loadtxt(filename, **kwargs) + return Field(data, scale=scale, vector=False) + + +def writer(filename, field, with_header=True, **kwargs): + _log.debug('Call writer') + assert not field.vector, 'Vector fields can currently not be saved to text!' + assert len(field.dim) == 2, 'Only 2D fields can currenty be saved to text!' + if with_header: # write header: + with open(filename, 'w') as save_file: + save_file.write('EMPYRE-FORMAT\n') + save_file.write(f'scale = {field.scale} nm\n') + save_kwargs = {'fmt': '%7.6e', 'delimiter': '\t'} + else: + save_kwargs = kwargs + with open(filename, 'ba') as save_file: # the 'a' is for append! + np.savetxt(save_file, field.data, **save_kwargs) diff --git a/src/empyre/io/field_plugins/vtk.py b/src/empyre/io/field_plugins/vtk.py new file mode 100644 index 0000000000000000000000000000000000000000..43ac784f6f363b8954aaa910037938a092185595 --- /dev/null +++ b/src/empyre/io/field_plugins/vtk.py @@ -0,0 +1,131 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 by Forschungszentrum Juelich GmbH +# Author: J. 
Caron +# +"""IO plugin for simple text format.""" + + +import logging +from numbers import Number + +import numpy as np + +from ...fields.field import Field +from ...utils.misc import interp_to_regular_grid +from ...vis import colors + + +_log = logging.getLogger(__name__) + +file_extensions = ('.vtk',) # Recognised file extensions + + +def reader(filename, scale=None, vector=None, **kwargs): + """More infos at: + + overview: https://docs.enthought.com/mayavi/mayavi/data.html + writing: https://vtk.org/Wiki/VTK/Writing_VTK_files_using_python + format: https://www.vtk.org/wp-content/uploads/2015/04/file-formats.pdf + + """ + _log.debug('Calling reader') + try: + from tvtk.api import tvtk + except ImportError: + _log.error('This extension recquires the tvtk package!') + return + if vector is None: + vector = True + # Setting up reader: + reader = tvtk.DataSetReader(file_name=filename, read_all_scalars=True, read_all_vectors=vector) + reader.update() + # Getting output: + output = reader.output + assert output is not None, 'File reader could not find data or file "{}"!'.format(filename) + # Reading points and vectors: + if isinstance(output, tvtk.ImageData): # tvtk.StructuredPoints is a subclass of tvtk.ImageData! + # Connectivity: implicit; described by: 3D data array and spacing along each axis! + _log.info('geometry: ImageData') + # Load relevant information from output (reverse to get typical Python order z,y,x): + dim = output.dimensions[::-1] + origin = output.origin[::-1] + spacing = output.spacing[::-1] + _log.info(f'dim: {dim}, origin: {origin}, spacing: {spacing}') + assert len(dim) == 3, 'Data has to be three-dimensional!' + if scale is None: + scale = tuple(spacing) + if vector: # Extract vector compontents and create magnitude array: + vector_array = np.asarray(output.point_data.vectors, dtype=np.float) + x_mag, y_mag, z_mag = vector_array.T + data = np.stack((x_mag.reshape(dim), y_mag.reshape(dim), z_mag.reshape(dim)), axis=-1) + else: # Extract scalar data and create magnitude array: + scalar_array = np.asarray(output.point_data.scalars, dtype=np.float) + data = scalar_array.reshape(dim) + elif isinstance(output, tvtk.RectilinearGrid): + # Connectivity: implicit; described by: 3D data array and 1D array of spacing for each axis! + _log.info('geometry: RectilinearGrid') + raise NotImplementedError('RectilinearGrid is currently not supported!') + elif isinstance(output, tvtk.StructuredGrid): + # Connectivity: implicit; described by: 3D data array and 3D position arrays for each axis! + _log.info('geometry: StructuredGrid') + raise NotImplementedError('StructuredGrid is currently not supported!') + elif isinstance(output, tvtk.PolyData): + # Connectivity: explicit; described by: x, y, z positions of vertices and arrays of surface cells! + _log.info('geometry: PolyData') + raise NotImplementedError('PolyData is currently not supported!') + elif isinstance(output, tvtk.UnstructuredGrid): + # Connectivity: explicit; described by: x, y, z positions of vertices and arrays of volume cells! + _log.info('geometry: UnstructuredGrid') + # Load relevant information from output: + point_array = np.asarray(output.points, dtype=np.float) + if vector: + data_array = np.asarray(output.point_data.vectors, dtype=np.float) + else: + data_array = np.asarray(output.point_data.scalars, dtype=np.float) + if scale is None: + raise ValueError('For the interpolation of unstructured grids, the `scale` parameter is required!') + elif isinstance(scale, Number): # Scale is the same for each dimension x, y, z! 
+ scale = (scale,) * 3 + elif isinstance(scale, tuple): + assert len(scale) == 3, f'Each dimension (z, y, x) needs a scale, but {scale} was given!' + data = interp_to_regular_grid(point_array, data_array, scale, **kwargs) + else: + raise TypeError('Data type of {} not understood!'.format(output)) + return Field(data, scale, vector=vector) + + +def writer(filename, field, **kwargs): + _log.debug('Call writer') + assert len(field.dim) == 3, 'Currently only 3D fields can be saved to vtk!' + try: + from tvtk.api import tvtk, write_data + except ImportError: + _log.error('This extension requires the tvtk package!') + return + # Create dataset: + origin = (0, 0, 0) + spacing = (field.scale[2], field.scale[1], field.scale[0]) + dimensions = (field.dim[2], field.dim[1], field.dim[0]) + sp = tvtk.StructuredPoints(origin=origin, spacing=spacing, dimensions=dimensions) + # Fill with data from field: + if field.vector: # Handle vector fields: + # Put vector components in corresponding array: + vectors = field.data.reshape(-1, 3) + sp.point_data.vectors = vectors + sp.point_data.vectors.name = 'vectors' + # Calculate colors: + x_mag, y_mag, z_mag = field.comp + magvec = np.asarray((x_mag.data.ravel(), y_mag.data.ravel(), z_mag.data.ravel())) + rgb = colors.CMAP_CIRCULAR_DEFAULT.rgb_from_vector(magvec) + point_colors = tvtk.UnsignedIntArray() + point_colors.number_of_components = 3 + point_colors.name = 'colors' + point_colors.from_array(rgb) + sp.point_data.scalars = point_colors + sp.point_data.scalars.name = 'colors' + else: # Handle scalar fields: + scalars = field.data.ravel() + sp.point_data.scalars = scalars + sp.point_data.scalars.name = 'scalars' + # Write the data to file: + write_data(sp, filename) diff --git a/src/empyre/io/io_field.py b/src/empyre/io/io_field.py new file mode 100644 index 0000000000000000000000000000000000000000..20a011e3c92842c505dc358f39ce6bece6dafc84 --- /dev/null +++ b/src/empyre/io/io_field.py @@ -0,0 +1,142 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 by Forschungszentrum Juelich GmbH +# Author: J. Caron +# +"""IO functionality for Field objects.""" + + +import logging +import os + +from ..fields.field import Field +from .field_plugins import plugin_list + + +__all__ = ['load_field', 'save_field'] +_log = logging.getLogger(__name__) + + +def load_field(filename, scale=None, vector=None, **kwargs): + """Load supported file into a :class:`~..fields.Field` instance. + + The function loads the file according to the extension. Scalar fields: + - hdf5 for HDF5. + - EMD Electron Microscopy Dataset format (also HDF5). + - rpl for Ripple (useful to export to Digital Micrograph). + - dm3 and dm4 for Digital Micrograph files. + - unf for SEMPER unf binary format. + - txt format. + - npy or npz for numpy formats. + - Many image formats such as png, tiff, jpeg... + + Vector fields: + - hdf5 for HDF5. # TODO: You can use comp_pos here!!! + - EMD Electron Microscopy Dataset format (also HDF5). + - llg format. + - ovf format. + - vtk format. + - tec format. + - npy or npz for numpy formats. + + Any extra keyword is passed to the corresponding reader. For available options see their individual documentation. + + Parameters + ---------- + filename: str + The filename to be loaded. + scale: tuple of float, optional + Scaling along the dimensions of the underlying data. If not given, the reader will try to deduce it from the + file (defaulting to 1 otherwise). + vector: bool, optional + True if the data should be interpreted as a vector field, False for a scalar field. If not set, the default + depends on the file format (the dedicated vector formats default to True).
+ + Returns + ------- + field: `Field` + A `Field` object containing the loaded data. + + Notes + ----- + Falls back to HyperSpy routines for loading data; make sure it is installed if you need the full capabilities. + + """ + _log.debug('Calling load_field') + extension = os.path.splitext(filename)[1] + for plugin in plugin_list: # Iterate over all plugins: + if extension in plugin.file_extensions: # Check if extension is recognised: + return plugin.reader(filename, scale=scale, vector=vector, **kwargs) + # If nothing was found, try HyperSpy + _log.debug('Using HyperSpy') + try: + import hyperspy.api as hs + except ImportError: + _log.error('This extension requires the hyperspy package!') + return + comp_pos = kwargs.pop('comp_pos', -1) + return Field.from_signal(hs.load(filename, **kwargs), scale=scale, vector=vector, comp_pos=comp_pos) + + +def save_field(filename, field, **kwargs): + """Saves the Field in the specified format. + + The function gets the format from the extension: + - hdf5 for HDF5. + - EMD Electron Microscopy Dataset format (also HDF5). + - llg format (3D vector fields). + - ovf format (3D vector fields). + - vtk format. + - txt format (2D scalar fields). + - npy or npz for numpy formats. + - Many image formats such as png, tiff, jpeg... + + If no extension is provided, 'hdf5' is used. Most formats are saved with the HyperSpy package (internally the field + is first converted to a HyperSpy Signal). + Each format accepts a different set of parameters. For details see the specific format documentation. + + Parameters + ---------- + filename : str + Name of the file into which the Field is saved. The extension determines the saving procedure. + field : `Field` + The field object which is saved into the file. + + """ + _log.debug('Calling save_field') + extension = os.path.splitext(filename)[1] + for plugin in plugin_list: # Iterate over all plugins: + if extension in plugin.file_extensions: # Check if extension is recognised: + plugin.writer(filename, field, **kwargs) + return + # If nothing was found, try HyperSpy: + _log.debug('Using HyperSpy') + field.to_signal().save(filename, **kwargs) diff --git a/src/empyre/utils/misc.py b/src/empyre/utils/misc.py index 138fe635282a1410fe488a4d4e7bb070237e53c9..90e9fdaca762a49f9a4c6a6e58227c10e870bd97 100644 --- a/src/empyre/utils/misc.py +++ b/src/empyre/utils/misc.py @@ -24,7 +24,7 @@ def levi_civita(i, j, k): return (j-i) * (k-i) * (k-j) / (np.abs(j-i) * np.abs(k-i) * np.abs(k-j)) -def interp_to_regular_grid(points, values, scale, scale_factor=1, step=1, convex=True): +def interp_to_regular_grid(points, values, scale, scale_factor=1, step=1, convex=True, distance_upper_bound=None): """Interpolate values on points to regular grid. Parameters ---------- @@ -45,6 +45,12 @@ convex : bool, optional By default True. If this is set to False, additional measures are taken to find holes in the point cloud. WARNING: this is an experimental feature that should be used with caution! + distance_upper_bound: float, optional + Only used if `convex=False`. Upper bound that determines if a point of the new (interpolated) grid is too + far away from all original points. Such points are assumed to lie in a "hole" and their values are set to zero. + Set this value in nm; it will be converted to the local unit of the original points internally. If not set + while `convex=False`, twice the mean of `scale` is used (which can be troublesome if the scales vary + drastically). Returns ------- @@ -66,10 +72,11 @@ _log.info(f'y-range: {y_min:.2g} <-> {y_max:.2g} ({y_diff:.2g})') _log.info(f'z-range: {z_min:.2g} <-> {z_max:.2g} ({z_diff:.2g})') # Determine dimensions from given grid spacing a: - dim = tuple(np.round(np.asarray((z_diff/scale, y_diff/scale, x_diff/scale), dtype=int))) - x = x_min + scale * (np.arange(dim[2]) + 0.5) # +0.5: shift to pixel center! - y = y_min + scale * (np.arange(dim[1]) + 0.5) # +0.5: shift to pixel center! - z = z_min + scale * (np.arange(dim[0]) + 0.5) # +0.5: shift to pixel center! + dim = tuple(np.asarray(np.round((z_diff/scale[0], y_diff/scale[1], x_diff/scale[2])), dtype=int)) + assert all(d > 0 for d in dim), f'All dimensions of dim={dim} need to be > 0, please adjust the scale accordingly!' + z = z_min + scale[0] * (np.arange(dim[0]) + 0.5) # +0.5: shift to pixel center! + y = y_min + scale[1] * (np.arange(dim[1]) + 0.5) # +0.5: shift to pixel center! + x = x_min + scale[2] * (np.arange(dim[2]) + 0.5) # +0.5: shift to pixel center!
# Create points for new Euclidian grid; fliplr for (x, y, z) order: points_euc = np.fliplr(np.asarray(list(itertools.product(z, y, x)))) # Make values 2D (if not already); double .T so that a new axis is added at the END (n, 1): @@ -92,12 +99,25 @@ def interp_to_regular_grid(points, values, scale, scale_factor=1, step=1, convex # If NOT convex, we have to check for additional holes in the structure (EXPERIMENTAL): if not convex: # Only necessary if the user expects holes in the (-> nonconvex) distribution: # Create k-dimensional tree for queries: + _log.info('Create cKDTree...') + tick = time() tree = cKDTree(points) + tock = time() + _log.info(f'cKDTree creation complete (took {tock-tick:.2f} s)!') # Query the tree for nearest neighbors, x: points to query, k: number of neighbors, p: norm # to use (here: 2 - Euclidean), distance_upper_bound: maximum distance that is searched! - data, leafsize = tree.query(x=points_euc, k=1, p=2, distance_upper_bound=2*np.mean(scale)) + if distance_upper_bound is None: # Take the mean of the scale for the upper bound: + distance_upper_bound = 2 * np.mean(scale) # NOTE: could be problematic for wildly varying scale numbers. + else: # Convert to local scale: + distance_upper_bound *= scale_factor + _log.info('Start cKDTree neighbour query...') + tick = time() + data, leafsize = tree.query(x=points_euc, k=1, p=2, distance_upper_bound=distance_upper_bound) + tock = time() + _log.info(f'cKDTree neighbour query complete (took {tock-tick:.2f} s)!') # Create boolean mask that determines which interpolation points have no neighbor near enough: mask = np.isinf(data).reshape(dim) # Points further away than upper bound were marked 'inf'! - for i in tqdm(range(values.shape[-1])): # Set these points to zero (NOTE: This can take a looooong time): - interpolation[..., i].ravel()[mask.ravel()] = 0 + _log.info(f'{np.sum(mask)} of {points_euc.shape[0]} points were assumed to be in holes of the point cloud!') + # Set these points to zero (NOTE: This can take a looooong time): + interpolation[mask, :] = 0 return np.squeeze(interpolation) diff --git a/src/empyre/vis/__init__.py b/src/empyre/vis/__init__.py index ca3ac98c988918ea84d65440e46b3e489e09b297..c340af48fc2f4725e84d8d55368b040a7e9da326 100644 --- a/src/empyre/vis/__init__.py +++ b/src/empyre/vis/__init__.py @@ -8,14 +8,17 @@ from . import colors from .plot2d import * from .decorators import * from .tools import * +from .plot3d import * __all__ = ['colors'] __all__.extend(plot2d.__all__) __all__.extend(decorators.__all__) __all__.extend(tools.__all__) +__all__.extend(plot3d.__all__) -del decorators del plot2d +del decorators del tools +del plot3d diff --git a/src/empyre/vis/decorators.py b/src/empyre/vis/decorators.py index a3509fea671eb14a8e9cda58ccb4b51c05d3e62f..aa300344fe50c1528cf07b5e25ab1aec90f4625a 100644 --- a/src/empyre/vis/decorators.py +++ b/src/empyre/vis/decorators.py @@ -161,7 +161,7 @@ def quiverkey(quiv, field, axis=None, unit='', loc='lower right', **kwargs): quiv : Quiver instance The quiver instance returned by a call to quiver. field : `Field` or ndarray - The vector data as a `Field` or a numpy array (in the latter case, `vector=True` and `scale=1` are assumed). + The vector data as a `Field` or a numpy array (in the latter case, `vector=True` and `scale=1.0` are assumed). axis : :class:`~matplotlib.axes.AxesSubplot`, optional Axis to which the quiverkey is added, by default None, which will pick the last used axis via `gca`. 
unit: str, optional @@ -183,7 +183,7 @@ def quiverkey(quiv, field, axis=None, unit='', loc='lower right', **kwargs): if axis is None: # If no axis is set, find the current or create a new one: axis = plt.gca() if not isinstance(field, Field): # Try to convert input to Field if it is not already one: - field = Field(data=np.asarray(field), scale=1, vector=True) + field = Field(data=np.asarray(field), scale=1.0, vector=True) length = field.amp.data.max() shift = 1 / field.squeeze().dim[1] # equivalent to one pixel distance in axis coords! label = f'{length:.3g} {unit}' diff --git a/src/empyre/vis/plot2d.py b/src/empyre/vis/plot2d.py index 2c6b6c34d59a68d14aea24b9582ef8988372ff11..3b9abf5a19c0793c7c5491f3424356a5845411ca 100644 --- a/src/empyre/vis/plot2d.py +++ b/src/empyre/vis/plot2d.py @@ -34,7 +34,7 @@ def imshow(field, axis=None, cmap=None, **kwargs): Parameters ---------- field : `Field` or ndarray - The image data as a `Field` or a numpy array (in the latter case, `vector=False` and `scale=1` are assumed). + The image data as a `Field` or a numpy array (in the latter case, `vector=False` and `scale=1.0` are assumed). axis : `matplotlib.axes.Axes` object, optional The axis to which the image should be added, by default None, which will pick the last use axis via `gca`. cmap : str or `matplotlib.colors.Colormap`, optional @@ -59,7 +59,7 @@ def imshow(field, axis=None, cmap=None, **kwargs): """ _log.debug('Calling imshow') if not isinstance(field, Field): # Try to convert input to Field if it is not already one: - field = Field(data=np.asarray(field), scale=1, vector=False) + field = Field(data=np.asarray(field), scale=1.0, vector=False) assert not field.vector, 'Can only plot scalar fields!' # Get squeezed data and make sure it's 2D scalar: squeezed_field = field.squeeze() @@ -104,7 +104,7 @@ def contour(field, axis=None, **kwargs): Parameters ---------- field : `Field` or ndarray - The contour data as a `Field` or a numpy array (in the latter case, `vector=False` and `scale=1` are assumed). + The contour data as a `Field` or a numpy array (in the latter case, `vector=False` and `scale=1.0` are assumed). axis : `matplotlib.axes.Axes` object, optional The axis to which the contour should be added, by default None, which will pick the last use axis via `gca`. @@ -124,7 +124,7 @@ def contour(field, axis=None, **kwargs): """ _log.debug('Calling contour') if not isinstance(field, Field): # Try to convert input to Field if it is not already one: - field = Field(data=np.asarray(field), scale=1, vector=False) + field = Field(data=np.asarray(field), scale=1.0, vector=False) assert not field.vector, 'Can only plot scalar fields!' # Get squeezed data and make sure it's 2D scalar: squeezed_field = field.squeeze() @@ -156,7 +156,7 @@ def colorvec(field, axis=None, **kwargs): Parameters ---------- field : `Field` or ndarray - The image data as a `Field` or a numpy array (in the latter case, `vector=True` and `scale=1` are assumed). + The image data as a `Field` or a numpy array (in the latter case, `vector=True` and `scale=1.0` are assumed). axis : `matplotlib.axes.Axes` object, optional The axis to which the image should be added, by default None, which will pick the last use axis via `gca`. 
@@ -185,7 +185,7 @@ def colorvec(field, axis=None, **kwargs): """ _log.debug('Calling colorvec') if not isinstance(field, Field): # Try to convert input to Field if it is not already one: - field = Field(data=np.asarray(field), scale=1, vector=True) + field = Field(data=np.asarray(field), scale=1.0, vector=True) assert field.vector, 'Can only plot vector fields!' assert len(field.dim) <= 3, 'Unusable for vector fields with dimension higher than 3!' assert len(field.dim) == field.ncomp, ('Assignment of vector components to dimensions is ambiguous!' @@ -216,7 +216,7 @@ def cosine_contours(field, axis=None, gain='auto', cmap=None, **kwargs): Parameters ---------- field : `Field` or ndarray - The contour data as a `Field` or a numpy array (in the latter case, `vector=False` and `scale=1` are assumed). + The contour data as a `Field` or a numpy array (in the latter case, `vector=False` and `scale=1.0` are assumed). axis : `matplotlib.axes.Axes` object, optional The axis to which the contour should be added, by default None, which will pick the last use axis via `gca`. gain : float or 'auto', optional @@ -241,7 +241,7 @@ def cosine_contours(field, axis=None, gain='auto', cmap=None, **kwargs): """ _log.debug('Calling cosine_contours') if not isinstance(field, Field): # Try to convert input to Field if it is not already one: - field = Field(data=np.asarray(field), scale=1, vector=False) + field = Field(data=np.asarray(field), scale=1.0, vector=False) assert not field.vector, 'Can only plot scalar fields!' # Get squeezed data and make sure it's 2D scalar: squeezed_field = field.squeeze() @@ -274,7 +274,7 @@ def quiver(field, axis=None, color_angles=False, cmap=None, n_bin='auto', bin_wi Parameters ---------- field : `Field` or ndarray - The vector data as a `Field` or a numpy array (in the latter case, `vector=True` and `scale=1` are assumed). + The vector data as a `Field` or a numpy array (in the latter case, `vector=True` and `scale=1.0` are assumed). axis : `matplotlib.axes.Axes` object, optional The axis to which the image should be added, by default None, which will pick the last use axis via `gca`. color_angles : bool, optional @@ -312,7 +312,7 @@ def quiver(field, axis=None, color_angles=False, cmap=None, n_bin='auto', bin_wi """ _log.debug('Calling quiver') if not isinstance(field, Field): # Try to convert input to Field if it is not already one: - field = Field(data=np.asarray(field), scale=1, vector=True) + field = Field(data=np.asarray(field), scale=1.0, vector=True) assert field.vector, 'Can only plot vector fields!' assert len(field.dim) <= 3, 'Unusable for vector fields with dimension higher than 3!' assert len(field.dim) == field.ncomp, ('Assignment of vector components to dimensions is ambiguous!' diff --git a/src/empyre/vis/plot3d.py b/src/empyre/vis/plot3d.py new file mode 100644 index 0000000000000000000000000000000000000000..54cda15eca4af8a043f5c4c0767c96046e4fc5a2 --- /dev/null +++ b/src/empyre/vis/plot3d.py @@ -0,0 +1,203 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 by Forschungszentrum Juelich GmbH +# Author: J. Caron +# +"""This module provides functions for 3D plots based on the `mayavi` library.""" + + +import logging + +import numpy as np + +from . import colors + + +__all__ = ['contour3d', 'mask3d', 'quiver3d'] + +_log = logging.getLogger(__name__) + + +# TODO: Docstrings and signature! + +def contour3d(field, title='Field Distribution', contours=10, opacity=0.25, size=None, new_fig=True, **kwargs): + """Plot a field as a 3D-contour plot. 
+ + Parameters + ---------- + title: string, optional + The title for the plot. + contours: int, optional + Number of contours which should be plotted. + opacity: float, optional + Defines the opacity of the contours. Default is 0.25. + + Returns + ------- + plot : :class:`mayavi.modules.vectors.Vectors` + The plot object. + + """ + _log.debug('Calling contour3d') + try: + from mayavi import mlab + except ImportError: + _log.error('This extension recquires the mayavi package!') + return + if size is None: + size = (750, 700) + if new_fig: + mlab.figure(size=size, bgcolor=(0.5, 0.5, 0.5), fgcolor=(0., 0., 0.)) + zzz, yyy, xxx = np.indices(field.dim) + np.reshape(field.scale, (3, 1, 1, 1)) / 2 # shifted by half of scale! + zzz, yyy, xxx = zzz.T, yyy.T, xxx.T # Transpose because of VTK order! + field_amp = field.amp.data.T # Transpose because of VTK order! + if not isinstance(contours, (list, tuple, np.ndarray)): # Calculate the contours: + contours = list(np.linspace(field_amp.min(), field_amp.max(), contours)) + extent = np.ravel(list(zip((0, 0, 0), field_amp.shape))) + cont = mlab.contour3d(xxx, yyy, zzz, field_amp, contours=contours, opacity=opacity, **kwargs) + mlab.outline(cont, extent=extent) + mlab.axes(cont, extent=extent) + mlab.title(title, height=0.95, size=0.35) + mlab.orientation_axes() + cont.scene.isometric_view() + return cont + + +def mask3d(field, title='Mask', threshold=0, grid=True, labels=True, + orientation=True, size=None, new_fig=True, **kwargs): + """Plot the mask as a 3D-contour plot. + + Parameters + ---------- + title: string, optional + The title for the plot. + threshold : float, optional + A pixel only gets masked, if it lies above this threshold . The default is 0. + + Returns + ------- + plot : :class:`mayavi.modules.vectors.Vectors` + The plot object. + + """ + _log.debug('Calling mask3d') + try: + from mayavi import mlab + except ImportError: + _log.error('This extension recquires the mayavi package!') + return + if size is None: + size = (750, 700) + if new_fig: + mlab.figure(size=size, bgcolor=(0.5, 0.5, 0.5), fgcolor=(0., 0., 0.)) + zzz, yyy, xxx = np.indices(field.dim) + np.reshape(field.scale, (3, 1, 1, 1)) / 2 # shifted by half of scale! + zzz, yyy, xxx = zzz.T, yyy.T, xxx.T # Transpose because of VTK order! + mask = field.mask.data.T.astype(int) # Transpose because of VTK order! + extent = np.ravel(list(zip((0, 0, 0), mask.shape))) + cont = mlab.contour3d(xxx, yyy, zzz, mask, contours=[1], **kwargs) + if grid: + mlab.outline(cont, extent=extent) + if labels: + mlab.axes(cont, extent=extent) + mlab.title(title, height=0.95, size=0.35) + if orientation: + oa = mlab.orientation_axes() + oa.marker.set_viewport(0, 0, 0.4, 0.4) + mlab.draw() + engine = mlab.get_engine() + scene = engine.scenes[0] + scene.scene.isometric_view() + return cont + + +def quiver3d(field, title='Vector Field', limit=None, cmap='jet', mode='2darrow', + coloring='angle', ar_dens=1, opacity=1.0, grid=True, labels=True, + orientation=True, size=(700, 750), new_fig=True, view='isometric', + position=None, bgcolor=(0.5, 0.5, 0.5)): + """Plot the vector field as 3D-vectors in a quiverplot. + + Parameters + ---------- + title : string, optional + The title for the plot. + limit : float, optional + Plotlimit for the vector field arrow length used to scale the colormap. + cmap : string, optional + String describing the colormap which is used for amplitude encoding (default is 'jet'). + ar_dens: int, optional + Number defining the arrow density which is plotted. 
A higher ar_dens number skips more + arrows (a number of 2 plots every second arrow). Default is 1. + mode: string, optional + Mode, determining the glyphs used in the 3D plot. Default is '2darrow', which + corresponds to 2D arrows. For smaller amounts of arrows, 'arrow' (3D) is prettier. + coloring : {'angle', 'amplitude'}, optional + Color coding mode of the arrows. Use 'angle' (default) or 'amplitude'. + opacity: float, optional + Defines the opacity of the arrows. Default is 1.0 (completely opaque). + + Returns + ------- + plot : :class:`mayavi.modules.vectors.Vectors` + The plot object. + + """ + _log.debug('Calling quiver_plot3D') + try: + from mayavi import mlab + except ImportError: + _log.error('This extension recquires the mayavi package!') + return + if limit is None: + limit = np.max(np.nan_to_num(field.amp)) + ad = ar_dens + # Create points and vector components as lists: + zzz, yyy, xxx = (np.indices(field.dim) + 1 / 2) + zzz = zzz[::ad, ::ad, ::ad].ravel() + yyy = yyy[::ad, ::ad, ::ad].ravel() + xxx = xxx[::ad, ::ad, ::ad].ravel() + x_mag = field.data[::ad, ::ad, ::ad, 0].ravel() + y_mag = field.data[::ad, ::ad, ::ad, 1].ravel() + z_mag = field.data[::ad, ::ad, ::ad, 2].ravel() + # Plot them as vectors: + if new_fig: + mlab.figure(size=size, bgcolor=(0.5, 0.5, 0.5), fgcolor=(0., 0., 0.)) + if coloring == 'angle': # Encodes the full angle via colorwheel and saturation: + _log.debug('Encoding full 3D angles') + vecs = mlab.quiver3d(xxx, yyy, zzz, x_mag, y_mag, z_mag, mode=mode, opacity=opacity, + scalars=np.arange(len(xxx)), line_width=2) + vector = np.asarray((x_mag.ravel(), y_mag.ravel(), z_mag.ravel())) + rgb = colors.CMAP_CIRCULAR_DEFAULT.rgb_from_vector(vector) + rgba = np.hstack((rgb, 255 * np.ones((len(xxx), 1), dtype=np.uint8))) + vecs.glyph.color_mode = 'color_by_scalar' + vecs.module_manager.scalar_lut_manager.lut.table = rgba + mlab.draw() + elif coloring == 'amplitude': # Encodes the amplitude of the arrows with the jet colormap: + _log.debug('Encoding amplitude') + vecs = mlab.quiver3d(xxx, yyy, zzz, x_mag, y_mag, z_mag, + mode=mode, colormap=cmap, opacity=opacity, line_width=2) + mlab.colorbar(label_fmt='%.2f') + mlab.colorbar(orientation='vertical') + else: + raise AttributeError('Coloring mode not supported!') + vecs.glyph.glyph_source.glyph_position = 'center' + vecs.module_manager.vector_lut_manager.data_range = np.array([0, limit]) + extent = np.ravel(list(zip((0, 0, 0), (field.dim[2], field.dim[1], field.dim[0])))) + if grid: + mlab.outline(vecs, extent=extent) + if labels: + mlab.axes(vecs, extent=extent) + mlab.title(title, height=0.95, size=0.35) + if orientation: + oa = mlab.orientation_axes() + oa.marker.set_viewport(0, 0, 0.4, 0.4) + mlab.draw() + engine = mlab.get_engine() + scene = engine.scenes[0] + if view == 'isometric': + scene.scene.isometric_view() + elif view == 'x_plus_view': + scene.scene.x_plus_view() + elif view == 'y_plus_view': + scene.scene.y_plus_view() + if position: + scene.scene.camera.position = position + return vecs diff --git a/src/empyre/vis/tools.py b/src/empyre/vis/tools.py index 28cdde811f073d85735c10e81c7071fd221a9705..f51cca48388b7e096ae67856e9abf6667642fd97 100644 --- a/src/empyre/vis/tools.py +++ b/src/empyre/vis/tools.py @@ -23,7 +23,7 @@ __all__ = ['new', 'savefig', 'calc_figsize', 'use_style', 'copy_mpl_stylesheets' _log = logging.getLogger(__name__) -def new(nrows=1, ncols=1, mode='image', figsize=None, textwidth=None, width_scale=1, aspect=None, **kwargs): +def new(nrows=1, ncols=1, mode='image', figsize=None, 
textwidth=None, width_scale=1.0, aspect=None, **kwargs): R"""Convenience function for the creation of a new subplot grid (wraps `~matplotlib.pyplot.subplots`). If you use the `textwidth` parameter, plot sizes are fitting into publications with LaTeX. Requires two stylesheets @@ -110,7 +110,7 @@ def savefig(fname, **kwargs): plt.savefig(fname, **kwargs) -def calc_figsize(textwidth, width_scale=1, aspect=1): +def calc_figsize(textwidth, width_scale=1.0, aspect=1): R"""Helper function to calculate the figure size from various parameters. Useful for publications via LaTeX. Parameters