Commit 924261af authored by Jan Caron

Removed rest of the scripts (still available in Script Vault/pyramid IO).

parent d575f4fb
# -*- coding: utf-8 -*-
"""Create magnetization distributions from fortran sorted txt-files."""
import numpy as np
import pyramid as py
import matplotlib.pyplot as plt
###################################################################################################
filename = 'J=1.D=0.084.H=0.0067.Bobber.dat'
scale = 1
###################################################################################################
data = np.genfromtxt(filename, dtype=np.float32, delimiter=',', usecols=(0, 1, 2, 3, 4, 5))
x, y, z, xmag, ymag, zmag = data.T
a = (y[1] - y[0]) * scale
dim_z = len(np.unique(z))
dim_y = len(np.unique(y))
dim_x = len(np.unique(x))
dim = (dim_z, dim_x, dim_y) # Order of descending variance!
xmag = xmag.reshape(dim).swapaxes(1, 2)
ymag = ymag.reshape(dim).swapaxes(1, 2)
zmag = zmag.reshape(dim).swapaxes(1, 2)
magnitude = np.array((xmag, ymag, zmag))
mag_data = py.VectorData(a, magnitude)
mag_data.save_to_hdf5('magdata_dat_{}'.format(filename.replace('.dat', '.hdf5')), overwrite=True)
mag_data.quiver_plot3d(ar_dens=4, coloring='amplitude')
mag_data.quiver_plot3d(ar_dens=4, coloring='angle')
py.pm(mag_data).combined_plot(interpolation='bilinear')
plt.show()
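# Sketch (not part of the original script): a minimal check of the reshape/swapaxes convention used
# above. It assumes the .dat rows are sorted with y varying fastest, then x, then z (the "order of
# descending variance" z, x, y); all underscore-prefixed names are illustrative only.
_dz, _dy, _dx = 2, 3, 4
_zi, _xi, _yi = np.meshgrid(np.arange(_dz), np.arange(_dx), np.arange(_dy), indexing='ij')
_flat = (_zi * 100 + _yi * 10 + _xi).ravel()  # flat order: y fastest, then x, then z
_field = _flat.reshape((_dz, _dx, _dy)).swapaxes(1, 2)  # same recipe as above -> axes (z, y, x)
assert _field[1, 2, 3] == 1 * 100 + 2 * 10 + 3  # element at z=1, y=2, x=3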
# -*- coding: utf-8 -*-
"""Create magnetization distributions from fortran sorted txt-files."""
import numpy as np
import pyramid as py
###################################################################################################
filename = 'long_grain_remapped_0p0070.txt'
###################################################################################################
# Load data:
data = np.loadtxt(filename, delimiter=',')
# Get parameters:
a = 1000 * (data[1, 2] - data[0, 2])
dim = len(np.unique(data[:, 2])), len(np.unique(data[:, 1])), len(np.unique(data[:, 0]))
# Get magnetization:
mag_vec = np.concatenate([data[:, 3], data[:, 4], data[:, 5]])
x_mag = np.reshape(data[:, 3], dim, order='F')
y_mag = np.reshape(data[:, 4], dim, order='F')
z_mag = np.reshape(data[:, 5], dim, order='F')
magnitude = np.array((x_mag, y_mag, z_mag))
# Create and save VectorData object:
mag_data = py.VectorData(a, magnitude)
mag_name = 'magdata_txtfortran_{}'.format(filename.replace('.txt', '.hdf5'))
mag_data.save_to_hdf5(mag_name, overwrite=True)
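# Sketch (not part of the original script): the Fortran-order reshape above, with dim given as
# (z, y, x) sizes, assumes the flat columns are sorted with z varying fastest (consistent with the
# grid spacing being taken from consecutive z values). It is equivalent to a C-order reshape into
# the reversed shape followed by a transpose, as the toy check below illustrates.
_dim = (2, 3, 4)
_v = np.arange(np.prod(_dim))
assert np.array_equal(np.reshape(_v, _dim, order='F'),
                      np.reshape(_v, _dim[::-1], order='C').transpose())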
# -*- coding: utf-8 -*-
"""Create magnetization distributions from vtk-files."""
from time import sleep
import pyramid as pr
import matplotlib.pyplot as plt
import numpy as np
import vtk
from pylab import griddata  # removed from newer matplotlib versions; a scipy-based alternative is sketched after this script
from scipy.spatial import cKDTree as KDTree
from tqdm import tqdm
###################################################################################################
filename = 'tube_90x30x35nm.vtk'
b_0 = 1.
###################################################################################################
def enclosing_zero(x, y, nx=30, ny=30):
    """Construct a grid of points that lie some distance away from the points (x, y).

    Parameters
    ----------
    x, y : ndarray
        Coordinates of the scattered data points.
    nx, ny : int, optional
        Number of grid points along x and y used to construct the surrounding grid.

    Returns
    -------
    xp, yp : ndarray
        Coordinates of the grid points that are sufficiently far away from (x, y).

    """
    dx = x.ptp() / nx
    dy = y.ptp() / ny
    xp, yp = np.mgrid[x.min() - 2 * dx:x.max() + 2 * dx:(nx + 2) * 1j,
                      y.min() - 2 * dy:y.max() + 2 * dy:(ny + 2) * 1j]
    xp = xp.ravel()
    yp = yp.ravel()
    # Use a KDTree to answer the question: "which point of the set (x, y) is the
    # nearest neighbor of each point in (xp, yp)?"
    tree = KDTree(np.c_[x, y])
    dist, j = tree.query(np.c_[xp, yp], k=1)
    # Select only the points that are sufficiently far away from the data:
    m = (dist > np.hypot(dx, dy))
    return xp[m], yp[m]
print('LOAD VTK-DATA!')
# Setting up reader:
reader = vtk.vtkDataSetReader()
reader.SetFileName(filename)
reader.ReadAllScalarsOn()
reader.ReadAllVectorsOn()
reader.Update()
# Getting output:
output = reader.GetOutput()
# Reading points and vectors:
size = output.GetNumberOfPoints()
vtk_points = output.GetPoints().GetData()
vtk_vectors = output.GetPointData().GetVectors()
# Converting points to numpy array:
point_array = np.zeros(vtk_points.GetSize())
vtk_points.ExportToVoidPointer(point_array)
point_array = np.reshape(point_array, (-1, 3))
# Converting vectors to numpy array:
vector_array = np.zeros(vtk_vectors.GetSize())
vtk_vectors.ExportToVoidPointer(vector_array)
vector_array = np.reshape(vector_array, (-1, 3))
# Combining data:
data = np.hstack((point_array, vector_array)).astype(pr.fft.FLOAT)
# Discard unused stuff:
del reader, output, vtk_points, vtk_vectors, point_array, vector_array
# Scatter plot of all x-y-coordinates
axis = plt.figure().add_subplot(1, 1, 1)
axis.scatter(data[:, 0], data[:, 1])
print('INTERPOLATE ON REGULAR GRID!')
# Find unique z-slices:
z_uniq = np.unique(data[:, 2])
# Determine the grid spacing:
a = z_uniq[1] - z_uniq[0]
# Determine the size of object:
x_min, x_max = data[:, 0].min(), data[:, 0].max()
y_min, y_max = data[:, 1].min(), data[:, 1].max()
z_min, z_max = data[:, 2].min(), data[:, 2].max()
x_diff, y_diff, z_diff = np.ptp(data[:, 0]), np.ptp(data[:, 1]), np.ptp(data[:, 2])
x_cent, y_cent, z_cent = x_min + x_diff / 2., y_min + y_diff / 2., z_min + z_diff / 2.
# Create regular grid:
x = np.arange(x_cent - x_diff, x_cent + x_diff, a)
y = np.arange(y_cent - y_diff, y_cent + y_diff, a)
z = np.arange(z_min, z_max, a)
xx, yy = np.meshgrid(x, y)
# Create empty field:
magnitude = np.zeros((3, len(z), len(y), len(x)), dtype=pr.fft.FLOAT)
print('Mag Dimensions:', magnitude.shape[1:])
sleep(0.5)
# Fill the field slice by slice:
for i, zi in tqdm(enumerate(z), total=len(z)):
    # Take all points that lie within one z-voxel of the new regular grid into account (weighted):
    z_slice = data[np.abs(data[:, 2] - zi) <= a / 2., :]
    # If z is regular everywhere, the weights are always 1:
    weights = 1 - np.abs(z_slice[:, 2] - zi) * 2 / a
    # Prepare fake data points (NaN fence far away from the data, see enclosing_zero):
    x_nan, y_nan = enclosing_zero(z_slice[:, 0], z_slice[:, 1], nx=len(x) // 10, ny=len(y) // 10)
    z_nan = np.empty_like(x_nan)
    z_nan[:] = np.nan
    grid_x = np.r_[z_slice[:, 0], x_nan]
    grid_y = np.r_[z_slice[:, 1], y_nan]
    for j in range(3):  # For all 3 components!
        grid_z = np.r_[weights * z_slice[:, 3 + j], z_nan]
        gridded_subdata = griddata(grid_x, grid_y, grid_z, xx, yy)
        magnitude[j, i, :, :] = gridded_subdata.filled(fill_value=0).astype(pr.fft.FLOAT)
print('CREATE AND SAVE MAGDATA OBJECT!')
# Convert a to nm:
a *= 10
mag_data = pr.VectorData(a, magnitude)
mag_data.save_to_hdf5('magdata_vtk_{}'.format(filename.replace('.vtk', '.hdf5')), overwrite=True)
# Plot stuff:
pr.pm(mag_data).combined_plot()
plt.show()
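# Sketch (not part of the original script): pylab.griddata has been removed from newer matplotlib
# versions; scipy.interpolate.griddata can serve as a replacement for the per-slice gridding above.
# The NaN "fence" from enclosing_zero blanks regions far from any sample point (e.g. the hollow
# core of the tube), which plain convex-hull interpolation would otherwise fill in. Toy data only;
# all underscore-prefixed names are illustrative.
from scipy.interpolate import griddata as scipy_griddata
_rng = np.random.default_rng(0)
_phi = _rng.uniform(0, 2 * np.pi, 500)
_r = _rng.uniform(1., 2., 500)  # ring-shaped sample cloud with a hole in the middle
_px, _py = _r * np.cos(_phi), _r * np.sin(_phi)
_vals = np.hypot(_px, _py)  # toy scalar stand-in for one magnetization component
_fx, _fy = enclosing_zero(_px, _py, nx=20, ny=20)  # fence of far-away points
_fv = np.full_like(_fx, np.nan)
_gx, _gy = np.meshgrid(np.linspace(-2.5, 2.5, 64), np.linspace(-2.5, 2.5, 64))
_grid = scipy_griddata((np.r_[_px, _fx], np.r_[_py, _fy]), np.r_[_vals, _fv],
                       (_gx, _gy), method='linear')
_grid = np.nan_to_num(_grid, nan=0.)  # zero far away from / outside the sample cloud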
# -*- coding: utf-8 -*-
"""Create magnetization distributions from DM3 (Digital Micrograph 3) files."""
import os
import numpy as np
from PIL import Image
import hyperspy.api as hs
import pyramid as pr
import matplotlib.pyplot as plt
###################################################################################################
path_mag = '14p5kx_m0150mT_q3_pha_sb400_sc512_magn.dm3'
path_mask = '14p5kx_m0150mT_q3_pha_sb400_sc512_magn_mask.txt'
path_conf = None
filename = 'phasemap_dm3_{}.hdf5'.format(os.path.splitext(path_mag)[0])
print(filename)
a = 3.898  # nm
dim_uv = None
threshold = 0.5
flip_up_down = True
###################################################################################################
# Load images:
im_mag_hp = hs.load(path_mag)
im_mag = Image.fromarray(im_mag_hp.data)
if path_mask is not None:
    # The mask is provided as a plain text file here; for a dm3 mask, hs.load(path_mask).data
    # could be used instead of np.genfromtxt (assumption, depending on the available files).
    mask_data = np.genfromtxt(path_mask, delimiter=',').astype(np.float32)
    im_mask = Image.fromarray(mask_data)  # float32, so PIL maps it to mode 'F'
else:
    im_mask = Image.new('F', im_mag.size, 'white')
if path_conf is not None:
    im_conf_hp = hs.load(path_conf)
    im_conf = Image.fromarray(im_conf_hp.data)
else:
    im_conf = Image.new('F', im_mag.size, 'white')
if flip_up_down:
    im_mag = im_mag.transpose(Image.FLIP_TOP_BOTTOM)
    im_mask = im_mask.transpose(Image.FLIP_TOP_BOTTOM)
    im_conf = im_conf.transpose(Image.FLIP_TOP_BOTTOM)
if dim_uv is not None:
    im_mag = im_mag.resize(dim_uv)
    im_mask = im_mask.resize(dim_uv)
    im_conf = im_conf.resize(dim_uv)
# Calculate phase, mask and confidence:
phase = np.asarray(im_mag)
mask = np.where(np.asarray(im_mask) >= threshold, True, False)
confidence = np.where(np.asarray(im_conf) >= threshold, 1, 0)
# Create and save PhaseMap object:
phase_map = pr.PhaseMap(a, phase, mask, confidence, unit='rad')
phase_map.save_to_hdf5(filename, overwrite=True)
phase_map.combined_plot()
plt.show()
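# Sketch (not part of the original script): instead of hard-coding a = 3.898, the pixel size can
# usually be read from the dm3 metadata via hyperspy's axes manager. The attribute names below
# follow the hyperspy API as far as known here and should be checked against the installed version.
_axis = im_mag_hp.axes_manager[0]
print('pixel size from dm3 metadata:', _axis.scale, _axis.units)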
# -*- coding: utf-8 -*-
"""Create magnetization distributions from image-files."""
import numpy as np
from PIL import Image
import pyramid as py
import matplotlib.pyplot as plt
###################################################################################################
path_mag = 'Arnaud_M.tif'
path_mask = 'Arnaud_MIP_mask.tif'
filename = 'phasemap_{}_{}.hdf5'.format(*list(reversed(path_mag.split('.'))))
a = 2 # nm
dim_uv = None
max_phase = 1
threshold = 0.5
offset = 0
flip_up_down = False
###################################################################################################
# Load magnetic phase image:
im_mag = Image.open(path_mag).convert('P')
if flip_up_down:
    im_mag = im_mag.transpose(Image.FLIP_TOP_BOTTOM)
if dim_uv is not None:
    im_mag = im_mag.resize(dim_uv)
phase = np.asarray(im_mag) / 255. * max_phase - offset
# Create mask:
mask = None
if path_mask is not None:
    im_mask = Image.open(path_mask).convert('P')
    if flip_up_down:
        im_mask = im_mask.transpose(Image.FLIP_TOP_BOTTOM)
    if dim_uv is not None:
        im_mask = im_mask.resize(dim_uv)
    mask = np.where(np.asarray(im_mask) / 255. >= threshold, True, False)
# Create and save PhaseMap object:
phase_map = py.PhaseMap(a, phase, mask, confidence=None, unit='rad')
phase_map.save_to_hdf5(filename, overwrite=True)
phase_map.combined_plot()
plt.show()
# -*- coding: utf-8 -*-
"""Create magnetization distributions from a raw image format."""
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image
import pyramid as pr
###################################################################################################
path_mag = '83-225x148.raw'
path_mask = path_mag
filename = 'skyrmion_cutout_83.hdf5'
im_size = (225, 148)
dim_uv = None
a = 1.
threshold = -1.9
offset = 0.
###################################################################################################
# Load images:
with open(path_mag, 'rb') as raw_file:
    raw_data = raw_file.read()
im_mag = Image.fromstring('F', im_size, raw_data, 'raw')  # Image.frombytes in newer Pillow versions
with open(path_mask, 'rb') as raw_file:
    raw_data = raw_file.read()
im_mask = Image.fromstring('F', im_size, raw_data, 'raw')  # Image.frombytes in newer Pillow versions
if dim_uv is not None:
    im_mag = im_mag.resize(dim_uv)
    im_mask = im_mask.resize(dim_uv)
# Calculate phase and mask:
phase = np.asarray(im_mag) - offset
mask = np.where(np.asarray(im_mask) >= threshold, True, False)
# Create and save PhaseMap object:
phase_map = pr.PhaseMap(a, phase, mask, confidence=None, unit='rad')
phase_map.save_to_hdf5(filename, overwrite=True)
phase_map.combined_plot()
plt.show()
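# Sketch (not part of the original script): Image.fromstring was removed from newer Pillow versions
# (Image.frombytes is its replacement). For plain 32-bit float raw data, numpy can also read the
# file directly; the assumed dtype and the (height, width) ordering below should be checked against
# the actual acquisition format.
_raw = np.fromfile(path_mag, dtype=np.float32).reshape(im_size[1], im_size[0])
print('raw image shape:', _raw.shape)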
@@ -144,7 +144,6 @@ setup(name=DISTNAME,
       include_dirs=[numpy.get_include()],
       requires=['numpy', 'scipy', 'matplotlib', 'Pillow',
                 'mayavi', 'pyfftw', 'hyperspy', 'nose'],
-      scripts=get_files('scripts'),
       test_suite='nose.collector',
       cmdclass={'build_ext': build_ext, 'build': build})
 print('-------------------------------------------------------------------------------\n')