
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (27)
Showing 1239 additions and 547 deletions
workflow:
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
      when: never
    - if: $CI_COMMIT_BRANCH
    - if: $CI_COMMIT_TAG
before_script:
  # Install requirements for empyre:
-  - conda env create -q  # -q: quiet/ no progressbar, because it spams the log!
-  - conda init bash  # needed from conda 4.4 onwards, see https://stackoverflow.com/a/55507956/2286972
-  - source ~/.bashrc  # Reload bashrc to apply changes made by conda init
+  - source ${CONDA_DIR}/etc/profile.d/conda.sh
+  - conda create -q --name empyre  # -q: quiet/ no progressbar, because it spams the log!
  - conda activate empyre
+  - conda install -y python numpy sphinx pip setuptools jupyter pandoc uv
+  - uv pip install .[tests,docs]
  - conda info --envs
  # Install jutil via deploy token access:  # TODO: still needed?
  #- pip install git+https://empyre:"$JUTIL_DEPLOY_TOKEN"@jugit.fz-juelich.de/j.ungermann/jutil.git
@@ -14,18 +23,19 @@ stages:
test_style:
  stage: test
-  image: continuumio/miniconda3:latest
+  image: condaforge/miniforge3:latest
  script:
-    # -m: only run tests marked with "flake8"
-    - pyroma . --min=10  # Checks setup.py for cheese! Maximum cheese 10!
-    - python setup.py test --addopts "--flake8 -m flake8"
+    - pip install pre-commit
+    - pre-commit install
+    - pre-commit run --all-files

test_function:
  stage: test
-  image: continuumio/miniconda3:latest
+  image: condaforge/miniforge3:latest
  script:
    # Execute all tests and also check coverage with --cov:
-    - python setup.py test --addopts "--cov"
+    - pip install "numpy>=2"
+    - pytest --cov
  artifacts:
    paths:
      - .coverage
@@ -33,13 +43,13 @@ test_function:
test_docs:
  stage: test
-  image: continuumio/miniconda3:latest
+  image: condaforge/miniforge3:latest
  script:
    # -f: Force overwriting of any existing generated files.
    # -e: Put documentation for each module on its own page.
    # -o: Directory to place the output files. If it does not exist, it is created.
    # last parameter: module path
-    - python setup.py clean  # TODO: Simplest way to generate version.py without doing anything else?
+    # - python setup.py clean  # TODO: Simplest way to generate version.py without doing anything else?
    #- sphinx-apidoc -f -e -o docs/api src/empyre
    # Build the documentation from 'docs' and put into 'build/sphinx':
    - sphinx-build docs build/sphinx -W  # -W fails the build if warnings appear!
@@ -50,9 +60,10 @@ test_docs:
test_install:
  stage: test
-  image: continuumio/miniconda3:latest
+  image: condaforge/miniforge3:latest
  before_script: []  # before_script not needed here!
  script:
+    - conda install -y "python<3.12"
    - pip install .[all]

pages:
@@ -71,11 +82,11 @@ pages:
pypi:
  stage: deploy
-  image: continuumio/miniconda3:latest
+  image: condaforge/miniforge3:latest
  before_script: []  # before_script not needed here!
  script:
-    - pip install twine
-    - python setup.py sdist bdist_wheel
+    - pip install hatch twine
+    - hatch build
    - twine upload -u __token__ -p $PYPI_ACCESS_TOKEN dist/*  # -u user -p password upload_source
  rules:  # similar to only/except, but newer!
    # Job is executed if branch is master AND if a tag is building which matches the regular expression!
......
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.6.0
    hooks:
      - id: trailing-whitespace
        files: ^src/
      - id: end-of-file-fixer
        files: ^src/
      - id: check-yaml
      - id: check-added-large-files
  - repo: https://github.com/pycqa/flake8
    rev: 7.0.0
    hooks:
      - id: flake8
        exclude: ^prototypes/
  - repo: https://github.com/regebro/pyroma
    rev: "4.2"
    hooks:
      - id: pyroma
\ No newline at end of file
include src/empyre/vis/mplstyles/*.mplstyle
graft tests
@@ -38,5 +38,13 @@
"gitlab": "wessels",
"contribution": "Discussions, testing",
"orcid": "0000-0001-5526-639X"
},
{
"displayname": "Matthew Bryan",
"authorname": "Bryan, Matthew",
"affiliation": "CEA-Leti",
"gitlab": "bryan",
"orcid": "0000-0001-9134-384X",
"contribution": "Small bugfixes, CI and conda packaging"
}
]
\ No newline at end of file
This diff is collapsed.
This diff is collapsed.
@@ -33,13 +33,11 @@ author = 'Jan Caron'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
-extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.viewcode',
-    'numpydoc',
-    'nbsphinx',
-    'nbsphinx_link'
-]
+extensions = ['sphinx.ext.autodoc',
+              'sphinx.ext.viewcode',
+              'numpydoc',
+              'nbsphinx',
+              'nbsphinx_link']
numpydoc_show_class_members = False
add_module_names = False
@@ -78,3 +76,7 @@ html_logo = 'EMPyRe Logo.png'
# the doc. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = 'icon.ico'
# cannot cache unpickable configuration value: 'nbsphinx_custom_formats'
# See https://github.com/sphinx-doc/sphinx/issues/12300
suppress_warnings = ["config.cache"]
# Create environment with "conda env create", update with "conda env update"!
# This file contains the top level dependencies of the empyre project!
# requirements.txt also contains sub-dependencies, generated by "pip freeze > requirements.txt"!
# Add new dependencies here, then "conda env update", then "pip freeze > requirements.txt"!
# To see if compatible upgrades are available for the current packages, use "conda upgrade --all"!
# When packages are deprecated/deleted, it may be best to recreate the environment from scratch!
name: empyre
channels:
- defaults # Default conda channels, on top to keep highest priority!
- conda-forge # Used for hyperspy, pyFFTW!
dependencies:
# Basic:
- python=3.7
- setuptools
- numpy=1.17
- scipy=1.3
- tqdm=4.36
- scikit-image=0.15
# File IO:
- hyperspy=1.5
- hyperspy-gui-ipywidgets=1.2
#- h5py=2.9 # TODO: dependency of hyperspy? Not needed here?
# Plotting and colors:
- matplotlib=3.1
- Pillow=6.1
- cmocean=2.0
# 3D plotting:
- mayavi=4.7
- ipyevents=0.7
- ipywidgets=7.5
# Testing:
- pytest=5.0
- pytest-cov=2.7
- pytest-flake8=1.0
- pytest-runner=5.1
- pyroma # Checks setup.py for cheese!
#- pytest-mpl=0.10 # Needed for testing hyperspy! # TODO: Use for pyramid/plotting library, too!
- coverage=4.5
# Documentation:
- sphinx=2.4
- numpydoc=0.9
- sphinx_rtd_theme=0.4
- nbsphinx=0.7
- nbsphinx-link=1.3
- pandoc=2.11
# IPython and notebooks:
- ipython=7.7
- jupyter=1.0
- nb_conda=2.2
#- ptvsd=4.3 # Cell debugging in VS Code
- rope=0.16 # for refactoring in VS Code
# TODO: - ipywidgets
# TODO: Add back GUI dependencies!
# TODO: Get Jutil from gitlab (currently doesn't work, git and cygwin don't play nice,...
# TODO: ...because one is Unix, the other is Windows).
# Fast computation:
- pyFFTW=0.11
# TODO: ? - pathos # pathos.multiprocessing uses dill instead of pickle
# PIP installations:
#- pip=19.0
# - pip:
# # ALSO NEEDS JUTIL!
# - "git+ssh://gitlab@iffgit.fz-juelich.de/unger/jutil.git"
# Misc.:
- nodejs=12.8 # TODO: Needs to be fixed to prevent errors! Check again if needed in future!
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[project]
name = "empyre"
dynamic = ["version"]
description = "Electron Microscopy Python Reconstruction"
readme = "README.rst"
license = "GPL-3.0-or-later"
requires-python = ">=3.6"
authors = [
{ name = "Jan Caron", email = "j.caron@fz-juelich.de" },
]
keywords = [
"Electron Microscopy",
"Inverse Problem Solving",
"Model-based Reconstrution",
]
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Topic :: Scientific/Engineering",
]
dependencies = [
"matplotlib >= 3.2",
"numpy >= 1.17",
"Pillow",
"scikit-image",
"scipy",
"tqdm",
]
[project.optional-dependencies]
3d = [
"mayavi >= 4.7",
]
all = [
"cmocean",
"pyfftw",
]
colors = [
"cmocean",
]
docs = [
"nbsphinx",
"nbsphinx_link",
"numpydoc",
"pandoc",
"sphinx",
"sphinx_rtd_theme",
]
fftw = [
"pyfftw",
]
io = [
"hyperspy",
"tvtk",
]
tests = [
"coverage",
"pyroma",
"pytest",
"pytest-cov",
"pytest-flake8",
"pytest-runner",
]
[project.urls]
Homepage = "https://iffgit.fz-juelich.de/empyre/empyre"
[tool.hatch.version]
path = "src/empyre/__init__.py"
[tool.hatch.build.targets.sdist]
include = [
"/src",
"/tests",
]
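A note on the dynamic version above: [tool.hatch.version] with path = "src/empyre/__init__.py" tells hatchling to read the __version__ = "..." assignment from that file at build time (this is roughly what hatchling's default regex version source does). A minimal, purely illustrative Python sketch of the same idea; read_version is a made-up helper, not hatchling code:

import re
from pathlib import Path

def read_version(init_path="src/empyre/__init__.py"):
    """Illustrative only: pull the version string that [tool.hatch.version] points at."""
    text = Path(init_path).read_text(encoding="utf-8")
    match = re.search(r'^__version__\s*=\s*["\']([^"\']+)["\']', text, flags=re.MULTILINE)
    if match is None:
        raise RuntimeError(f"No __version__ assignment found in {init_path}")
    return match.group(1)

print(read_version())  # e.g. "0.3.4" with the __init__.py shown further down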
@@ -2,71 +2,6 @@
# setup.cfg
# :copyright: Copyright 2020 Jan Caron
# CONFIGURATION FOR SETUP.PY:
[metadata]
name = empyre
version = 0.2.1
author = Jan Caron
author-email = j.caron@fz-juelich.de
description = Electron Microscopy Python Reconstruction
long-description = file: README.rst
url = https://iffgit.fz-juelich.de/empyre/empyre
license = GPLv3
classifiers =
Development Status :: 3 - Alpha
Intended Audience :: Developers
Intended Audience :: Science/Research
License :: OSI Approved :: GNU General Public License v3 (GPLv3)
Programming Language :: Python :: 3.7
Topic :: Scientific/Engineering
Operating System :: OS Independent
keywords =
Electron Microscopy
Inverse Problem Solving
Model-based Reconstrution
[options]
zip_safe = False
include_package_data = True
package_dir =
=src
packages = find:
python_requires = >=3.6
setup_requires =
setuptools
install_requires =
numpy >= 1.17
matplotlib >= 3
scikit-image
tqdm
scipy
Pillow
[options.packages.find]
where = src
[options.extras_require]
io =
hyperspy
tvtk
fftw =
pyfftw
colors =
cmocean
3d =
mayavi >= 4.7
all =
pyfftw
cmocean
tests =
pyroma
coverage
pytest
pytest-cov
pytest-flake8
pytest-runner
# TODO: pip install .[io] is not working because hyperspy depends on trait which has no wheels on PyPI at the moment...
# TODO: See https://github.com/hyperspy/hyperspy/issues/2315 and https://github.com/enthought/traits/issues/357
# TODO: Add hyperspy back as soon as (if?) this is resolved... Until then: install hyperspy with conda!
@@ -86,13 +21,20 @@ omit =
[flake8]
max-line-length = 120
ignore =
-    E402  # module import not at top of file
-    E124  # closing bracket does not match visual indentation
-    E125  # continuation line with same indent as next logical line
-    E226  # missing whitespace around arithmetic operator
-    W503  # line break before binary operator
-    W504  # line break after binary operator
-    E741  # do not use variables named ‘l’, ‘O’, or ‘I’
+    # module import not at top of file
+    E402
+    # closing bracket does not match visual indentation
+    E124
+    # continuation line with same indent as next logical line
+    E125
+    # missing whitespace around arithmetic operator
+    E226
+    # line break before binary operator
+    W503
+    # line break after binary operator
+    W504
+    # do not use variables named ‘l’, ‘O’, or ‘I’
+    E741
per-file-ignores =
    */__init__.py: F401, F403, F405, F821
    # F401: module imported but unused
......
#!python
# coding=utf-8
"""Setup for testing, building, distributing and installing the 'EMPyRe'-package"""

import os
import subprocess
from setuptools import setup, config

# Read version from setup.cfg:
version = config.read_configuration('setup.cfg')['metadata']['version']
# Get current git revision:
try:
    git_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip().decode()
except Exception:
    git_rev = "???"
# Write both to version.py:
print(R'writing src\empyre\version.py')
with open(os.path.join(os.path.dirname(__file__), 'src', 'empyre', 'version.py'), 'w') as vfile:
    vfile.write('# -*- coding: utf-8 -*-\n' +
                '"""This file was automatically generated by `setup.py`"""\n' +
                f'version = "{version}"\n' +
                f'git_revision = "{git_rev}"\n')
# Run setup (reads metadata & options from setup.cfg):
print(R'running setup.py')
setup()
@@ -11,16 +11,13 @@ from . import models
from . import reconstruct
from . import vis
from . import utils
-from .version import version as __version__
-from .version import git_revision as __git_revision__
+__version__ = "0.3.4"
+__git_revision__ = "undefined"

import logging
_log = logging.getLogger(__name__)
-_log.info(f'Imported EMPyRe V-{__version__} GIT-{__git_revision__}')
+_log.info(f'Imported EMPyRe V-{__version__}')
del logging

__all__ = ['fields', 'io', 'models', 'reconstruct', 'vis', 'utils']
-del version
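With version.py gone, the hard-coded __version__ string above is the single source of truth (hatchling reads it at build time, see the pyproject.toml section). Downstream code can also query the installed distribution with the standard library; a small sketch, assuming the package is installed under the name "empyre":

from importlib.metadata import PackageNotFoundError, version

try:
    print(version("empyre"))  # e.g. "0.3.4"
except PackageNotFoundError:
    print("empyre is not installed in this environment")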
@@ -10,8 +10,13 @@ from numbers import Number
from numpy.lib.mixins import NDArrayOperatorsMixin
import numpy as np
-from numpy.core import numeric
-from scipy.ndimage import interpolation
+try:
+    # numpy >= 2
+    from numpy.lib.array_utils import normalize_axis_tuple
+except ModuleNotFoundError:
+    # numpy < 2
+    from numpy.core.numeric import normalize_axis_tuple
+from scipy import ndimage

from ..utils import Quaternion
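For readers unfamiliar with the helper this compatibility shim imports: normalize_axis_tuple converts scalar and negative axis arguments into a canonical tuple of non-negative axes for an array of the given dimensionality. A short, self-contained sketch using the same try/except pattern as above:

try:
    from numpy.lib.array_utils import normalize_axis_tuple  # numpy >= 2
except (ImportError, ModuleNotFoundError):
    from numpy.core.numeric import normalize_axis_tuple     # numpy < 2

print(normalize_axis_tuple(-1, 3))       # (2,)
print(normalize_axis_tuple((0, -1), 4))  # (0, 3)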
@@ -163,6 +168,8 @@ class Field(NDArrayOperatorsMixin):
        # outputs = kwargs.pop('out', ())  # TODO: Necessary?
        outputs = kwargs.pop('out', (None,)*ufunc.nout)  # Defaults to tuple of None (currently: nout=1 all the time)
        outputs_arr = tuple([np.asarray(out) if isinstance(out, Field) else out for out in outputs])
+        if all(a is None for a in outputs_arr):
+            outputs_arr = None  # No output arrays supplied: hand out=None to the ufunc instead of a tuple of Nones.
        # Cannot handle items that have __array_ufunc__ (other than our own).
        for item in inputs + outputs:
            if hasattr(item, '__array_ufunc__') and not isinstance(item, Field):  # Something else with __array_ufunc__:
@@ -197,7 +204,7 @@ class Field(NDArrayOperatorsMixin):
        axis = ax_full if axis is None else axis  # This keeps possible components untouched if axis was None!
        # axis=-1 reduces over the vector components, if they exist
        # Takes care of pot. neg. indices, ensures tuple!
-        axis = numeric.normalize_axis_tuple(axis, len(field.shape))
+        axis = normalize_axis_tuple(axis, len(field.shape))
        kwargs['axis'] = axis  # Put normalized axis back into kwargs!
        if tuple(sorted(axis)) in (ax_full, ax_full_wc):
            full_reduction = True  # Full reduction (or reduction to just components) takes place:
@@ -386,7 +393,7 @@ class Field(NDArrayOperatorsMixin):
        return Field(self.data.copy(), self.scale, self.vector)

    def rotate(self, angle, axis='z', **kwargs):
-        """Rotate the :class:`~.Field`, based on :meth:`~scipy.ndimage.interpolation.rotate`.
+        """Rotate the :class:`~.Field`, based on :meth:`~scipy.ndimage.rotate`.

        Rotation direction is from the first towards the second axis. Works for 2D and 3D Fields.
@@ -404,9 +411,9 @@ class Field(NDArrayOperatorsMixin):
        Notes
        -----
-        All additional kwargs are passed through to :meth:`~scipy.ndimage.interpolation.rotate`.
+        All additional kwargs are passed through to :meth:`~scipy.ndimage.rotate`.
        The `reshape` parameter, controlling if the output shape is adapted so that the input array is contained
-        completely in the output is False per default, contrary to :meth:`~scipy.ndimage.interpolation.rotate`,
+        completely in the output is False per default, contrary to :meth:`~scipy.ndimage.rotate`,
        where it is True.

        """
@@ -425,12 +432,12 @@ class Field(NDArrayOperatorsMixin):
        if axis in ('x', 'z'):
            np_angle *= -1
        if not self.vector:  # Scalar field:
-            data_new = interpolation.rotate(self.data, np_angle, axes=axes, **kwargs)
+            data_new = ndimage.rotate(self.data, np_angle, axes=axes, **kwargs)
        else:  # Vector field:
            # Rotate coordinate system:
            comps = [np.asarray(comp) for comp in self.comp]
            if self.ncomp == 3:
-                data_new = np.stack([interpolation.rotate(c, np_angle, axes=axes, **kwargs) for c in comps], axis=-1)
+                data_new = np.stack([ndimage.rotate(c, np_angle, axes=axes, **kwargs) for c in comps], axis=-1)
                # Up till now, only the coordinates are rotated, now we need to rotate the vectors inside the voxels:
                rot_axis = {'x': 2, 'y': 1, 'z': 0}[axis]
                i, j, k = axes[0], axes[1], rot_axis  # next line only works if i != j != k
@@ -442,8 +449,8 @@ class Field(NDArrayOperatorsMixin):
                data_new = quat.matrix.dot(data_new.reshape((-1, 3)).T).T.reshape(data_new.shape)
            elif self.ncomp == 2:
                u_comp, v_comp = comps
-                u_rot = interpolation.rotate(u_comp, np_angle, axes=axes, **kwargs)
-                v_rot = interpolation.rotate(v_comp, np_angle, axes=axes, **kwargs)
+                u_rot = ndimage.rotate(u_comp, np_angle, axes=axes, **kwargs)
+                v_rot = ndimage.rotate(v_comp, np_angle, axes=axes, **kwargs)
                # Up till now, only the coordinates are rotated, now we need to rotate the vectors inside the voxels:
                ang_rad = np.deg2rad(angle)
                u_mix = np.cos(ang_rad)*u_rot - np.sin(ang_rad)*v_rot
@@ -667,10 +674,10 @@ class Field(NDArrayOperatorsMixin):
        """
        self._log.debug('Calling zoom')
        if not self.vector:  # Scalar field:
-            data_new = interpolation.zoom(self.data, zoom, **kwargs)
+            data_new = ndimage.zoom(self.data, zoom, **kwargs)
        else:  # Vector field:
            comps = [np.asarray(comp) for comp in self.comp]
-            data_new = np.stack([interpolation.zoom(comp, zoom, **kwargs) for comp in comps], axis=-1)
+            data_new = np.stack([ndimage.zoom(comp, zoom, **kwargs) for comp in comps], axis=-1)
        if isinstance(zoom, Number):  # Zoom is the same for each dimension!
            zoom = (zoom,) * len(self.dim)
        scale_new = tuple([self.scale[i]/z for i, z in enumerate(zoom)])
@@ -687,7 +694,7 @@ class Field(NDArrayOperatorsMixin):
        self._log.debug('Calling flip')
        if self.vector and axis is None:
            axis = tuple(range(len(self.dim)))
-        axis = numeric.normalize_axis_tuple(axis, len(self.shape))  # Make sure, axis is a tuple!
+        axis = normalize_axis_tuple(axis, len(self.shape))  # Make sure, axis is a tuple!
        data_new = np.flip(self.data, axis, **kwargs).copy()  # Only flips space, not component direction!
        if self.vector:
            flip_vec = [
......
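Taken together, the field.py changes only swap deprecated NumPy/SciPy import locations (numpy.core.numeric and scipy.ndimage.interpolation are replaced by numpy.lib.array_utils with a fallback and by scipy.ndimage); the public behaviour of Field should be unchanged. A small usage sketch, assuming the Field(data, scale, vector) constructor visible in the hunks above and that empyre.fields re-exports Field:

import numpy as np
from empyre.fields import Field

data = np.zeros((8, 8))
data[2:6, 2:6] = 1.0                       # simple scalar test pattern
field = Field(data, scale=(1.0, 1.0), vector=False)
rotated = field.rotate(45)                 # now delegates to scipy.ndimage.rotate
zoomed = field.zoom(2)                     # now delegates to scipy.ndimage.zoom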
@@ -11,7 +11,7 @@ from numbers import Number
import numpy as np

from ...fields.field import Field
-from ...utils.misc import interp_to_regular_grid
+from ...utils.misc import interp_to_regular_grid, restrict_points
from ...vis import colors

@@ -20,7 +20,7 @@ _log = logging.getLogger(__name__)
file_extensions = ('.vtk',)  # Recognised file extensions

-def reader(filename, scale=None, vector=None, **kwargs):
+def reader(filename, scale=None, vector=None, bounds=None, **kwargs):
    """More infos at:

    overview: https://docs.enthought.com/mayavi/mayavi/data.html
@@ -88,6 +88,10 @@ def reader(filename, scale=None, vector=None, **kwargs):
            scale = (scale,) * 3
        elif isinstance(scale, tuple):
            assert len(scale) == 3, f'Each dimension (z, y, x) needs a scale, but {scale} was given!'
+        # Crop data to required range, if necessary
+        if bounds is not None:
+            _log.info('Restrict data')
+            point_array, data_array = restrict_points(point_array, data_array, bounds)
        data = interp_to_regular_grid(point_array, data_array, scale, **kwargs)
    else:
        raise TypeError('Data type of {} not understood!'.format(output))
......
@@ -15,7 +15,7 @@ from scipy.spatial import cKDTree, qhull
from scipy.interpolate import LinearNDInterpolator

-__all__ = ['levi_civita', 'interp_to_regular_grid']
+__all__ = ['levi_civita', 'interp_to_regular_grid', 'restrict_points']

_log = logging.getLogger(__name__)

@@ -121,3 +121,39 @@ def interp_to_regular_grid(points, values, scale, scale_factor=1, step=1, convex
    # Set these points to zero (NOTE: This can take a looooong time):
    interpolation[mask, :] = 0
    return np.squeeze(interpolation)
def restrict_points(point_array, data_array, bounds):
    """Restrict the range of point_array and data_array.

    Parameters
    ----------
    point_array : np.ndarray, (N, 3)
        Array of points, describing the location of the values that should be interpolated. Three columns x, y, z!
    data_array : np.ndarray, (N, c)
        Array of values that should be interpolated to the new grid. `c` is the number of components (`1` for scalar
        fields, `3` for normal 3D vector fields).
    bounds : tuple of 6 values
        Restrict the data range to the given bounds: x0, x1, y0, y1, z0, z1.

    Returns
    -------
    point_restricted : np.ndarray
        Cut-out of the array of points that lie inside the bounds. Three columns x, y, z!
    data_restricted : np.ndarray
        Cut-out of the array of values whose points lie inside the bounds.

    """
    point_restricted = []
    data_restricted = []
    for i, pos in enumerate(point_array):
        if bounds[0] <= pos[0] <= bounds[1]:
            if bounds[2] <= pos[1] <= bounds[3]:
                if bounds[4] <= pos[2] <= bounds[5]:
                    point_restricted.append(pos)
                    data_restricted.append(data_array[i])
    point_restricted = np.array(point_restricted)
    data_restricted = np.array(data_restricted)
    return point_restricted, data_restricted
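restrict_points above filters the points one by one in a Python loop. A loop-free equivalent with a NumPy boolean mask is sketched below; it is an alternative formulation for comparison, not the code under review, and the function name is made up:

import numpy as np

def restrict_points_vectorized(point_array, data_array, bounds):
    """Boolean-mask equivalent of restrict_points (illustrative sketch)."""
    x0, x1, y0, y1, z0, z1 = bounds
    # One boolean per point: True if the point lies inside all three intervals.
    mask = ((point_array[:, 0] >= x0) & (point_array[:, 0] <= x1) &
            (point_array[:, 1] >= y0) & (point_array[:, 1] <= y1) &
            (point_array[:, 2] >= z0) & (point_array[:, 2] <= z1))
    return point_array[mask], data_array[mask]

points = np.array([[0.0, 0.0, 0.0], [1.0, 2.0, 3.0], [5.0, 5.0, 5.0]])
values = np.array([[1.0], [2.0], [3.0]])
print(restrict_points_vectorized(points, values, bounds=(0, 2, 0, 2, 0, 3)))  # keeps the first two points

One behavioural difference worth noting: when nothing falls inside the bounds, the masked version returns empty arrays of shape (0, 3) and (0, c), whereas np.array([]) in the loop version yields shape (0,).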
@@ -64,7 +64,7 @@ class Colormap3D(colors.Colormap, metaclass=abc.ABCMeta):
        """
        self._log.debug('Calling rgb_from_vector')
-        x, y, z = np.asarray(vector)
+        x, y, z = vector
        R = np.sqrt(x ** 2 + y ** 2 + z ** 2)
        R_max = vmax if vmax is not None else R.max() + 1E-30
        # FIRST color dimension: HUE (1D ring/angular direction)
......
### MATPLOTLIB STYLESHEET FOR EMPYRE IMAGES
-text.usetex : True  ## use TeX to render text
font.family : serif  ## default font family (use serifs)
font.serif : cm  ## Computer Modern (LaTeX font)
......
### MATPLOTLIB STYLESHEET FOR EMPYRE PLOTS
-text.usetex : True  ## use TeX to render text
font.family : serif  ## default font family (use serifs)
font.serif : cm  ## Computer Modern (LaTeX font)
......
### MATPLOTLIB STYLESHEET FOR SAVING EMPYRE IMAGES AND PLOTS
-text.usetex : True  ## use TeX to render text
font.family : serif  ## default font family (use serifs)
font.serif : cm  ## Computer Modern (LaTeX font)
......