Commit 2a685018 authored by Ingo Meyer's avatar Ingo Meyer

Merge branch 'develop'

parents f9b696ab 3d7df9ff
......@@ -2,8 +2,8 @@ pyMolDyn
########
pyMolDyn is a molecule viewer capable of computing molecular cavities. It is assumed that the analyzed molecule resides
in a cell shape corresponding to one of the seven Bravais lattice systems.
pyMolDyn is a viewer for atomic clusters, crystalline and amorphous materials in a unit cell corresponding to one of
the seven 3D Bravais lattices. The program calculates cavities (vacancies, voids) according to three definitions.
.. image:: https://pgi-jcns.fz-juelich.de/portal/static/images/pymoldyn-gui.png
:width: 80%
......@@ -13,17 +13,18 @@ in a cell shape corresponding to one of the seven bravais lattice systems.
Features
========
- Interactive molecule viewer based on GR3
- Computation of molecular cavities in all seven Bravais lattice systems
- Interactive viewer based on GR3
- Computation of cavities in all seven 3D Bravais lattice systems
- Creation of high resolution images appropriate for publications
- Video creation from a set of input frames to analyse cavity changes in materials over time
- Video creation from a set of input frames to analyze cavity changes in materials over time
- Statistics including
- Surfaces, volumes and surface volume ratios of cavities and domains
- Radial distribution, bonds, bond (dihedral) angles
- Pair distribution functions (including cavities), bonds, bond (dihedral) angles
- Gyration tensor, asphericity, acylindricity
- Filter for atoms and cavities
- Batch mode for processing several input files at once
- Batch mode for simultaneous processing of multiple files
Documentation
......
......@@ -18,6 +18,8 @@ array_contains() {
python_flags() {
local DARWIN_LOCAL_FRAMEWORK="/usr/local/Library/Frameworks"
local DARWIN_FRAMEWORK="/System/Library/Frameworks"
local DARWIN_LOCAL_INCLUDE="/usr/local/Library/Frameworks/Python.framework/Versions/2.7/include/python2.7"
local DARWIN_INCLUDE="/System/Library/Frameworks/Python.framework/Versions/2.7/include/python2.7"
local LINUX_LOCAL_INCLUDE="/usr/local/include/python2.7"
local LINUX_INCLUDE="/usr/include/python2.7"
local LINUX_LOCAL_LIB="/usr/local/lib/python2.7"
......@@ -28,10 +30,10 @@ python_flags() {
case "${OS}" in
Darwin)
if [ -d "${DARWIN_LOCAL_FRAMEWORK}" ]; then
INC_FLAGS="-F${DARWIN_LOCAL_FRAMEWORK}"
INC_FLAGS="-F${DARWIN_LOCAL_FRAMEWORK} -I${DARWIN_LOCAL_INCLUDE}"
LIB_FLAGS="-F${DARWIN_LOCAL_FRAMEWORK} -framework Python"
else
INC_FLAGS="-F${DARWIN_FRAMEWORK}"
INC_FLAGS="-F${DARWIN_FRAMEWORK} -I${DARWIN_INCLUDE}"
LIB_FLAGS="-F${DARWIN_FRAMEWORK} -framework Python"
fi
;;
......@@ -51,34 +53,22 @@ python_flags() {
}
numpy_flags() {
local DARWIN_LOCAL_INCLUDE="/usr/local/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/numpy/core/include"
local DARWIN_INCLUDE="/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/numpy/core/include"
local LINUX_LOCAL_INCLUDE="/usr/local/lib/python2.7/site-packages/numpy/core/include"
local LINUX_INCLUDE="/usr/lib/python2.7/site-packages/numpy/core/include"
local LINUX_ALT_INCLUDE="/usr/lib64/python2.7/site-packages/numpy/core/include"
local DEBIAN_INCLUDE="/usr/lib/python2.7/dist-packages/numpy/core/include"
local REL_NUMPY_INCLUDE="core/include"
local PYTHON_EXEC
local INC_FLAGS
case "${OS}" in
Darwin)
if [ -d "${DARWIN_LOCAL_INCLUDE}" ]; then
INC_FLAGS="-I${DARWIN_LOCAL_INCLUDE}"
else
INC_FLAGS="-I${DARWIN_INCLUDE}"
fi
;;
Linux)
if [ -d "${LINUX_LOCAL_INCLUDE}" ]; then
INC_FLAGS="-I${LINUX_LOCAL_INCLUDE}"
elif [ -d "${DEBIAN_INCLUDE}" ]; then
INC_FLAGS="-I${DEBIAN_INCLUDE}"
elif [ -d "${LINUX_INCLUDE}" ]; then
INC_FLAGS="-I${LINUX_INCLUDE}"
elif [ -d "${LINUX_ALT_INCLUDE}" ]; then
INC_FLAGS="-I${LINUX_ALT_INCLUDE}"
fi
;;
esac
if [ -f "/usr/local/bin/python" ]; then
PYTHON_EXEC="/usr/local/bin/python"
else
PYTHON_EXEC=$(which python)
fi
INC_FLAGS=$(${PYTHON_EXEC} <<EOF
import numpy
import os.path
numpy_package_path = os.path.dirname(numpy.__file__)
print('-I{}'.format(os.path.join(numpy_package_path, 'core/include')))
EOF)
eval "${1}=\"${INC_FLAGS}\""
eval "${2}=\"\""
......
#!/usr/bin/env python
# coding: utf-8
"""Render one SVG plot per smoothing kernel defined in ``statistics.rdf.Kernels``.

Each public kernel function is evaluated on [-1.25, 1.25] and its curve is
written to ``<kernelname>.svg`` via the GR plotting framework.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
import os
import numpy as np
import gr
# Make the project sources importable; ``statistics`` here is the project's own
# package under ../src, not the Python standard-library module of the same name.
sys.path.insert(0, os.path.abspath('../src'))
from statistics.rdf import Kernels

# Sample points covering slightly more than the plotted window in x.
x = np.linspace(-1.25, 1.25, 500)
for name in dir(Kernels):
    # Skip private attributes and 'bandwidth', which is presumably a kernel
    # parameter rather than a kernel function -- TODO confirm against Kernels.
    if name.startswith('_') or name == 'bandwidth':
        continue
    kernel = getattr(Kernels, name)
    y = kernel(x)
    gr.beginprint(name.lower()+'.svg')
    gr.clearws()
    gr.setwsviewport(0, 0.1, 0, 0.06)
    gr.setviewport(0, 1, 0, 1)
    gr.setwindow(-1.25, 1.25, -0.25, 1.25)
    gr.grid(0.1, 0.1, 0, 0, 5, 5)
    gr.axes(0.1, 0.1, 0, 0, 5, 5, -0.01)
    # Draw the kernel curve with a thicker, colored line, then restore defaults.
    gr.setlinewidth(2)
    gr.setlinecolorind(2)
    gr.polyline(x, y)
    gr.setlinecolorind(1)
    gr.setlinewidth(1)
    gr.updatews()
    gr.endprint()
\ No newline at end of file
......@@ -3,7 +3,7 @@
# platform: osx-64
dateutil=2.4.1=py27_0
freetype=2.5.2=2
gr=0.17.1=py27_0
gr=0.18.0=py27_0
h5py=2.5.0=np19py27_3
hdf5=1.8.15.1=1
jinja2=2.8=py27_0
......
__version_info__ = (0, 7, 0)
__version_info__ = (0, 8, 0)
__version__ = '.'.join(map(str, __version_info__))
......@@ -129,6 +129,15 @@ class Cli(object):
"default": None,
"type": "int",
"help": "maximum number of cached files"},
{"special_type": None,
"name": "no cache files",
"short": "",
"long": "--no-cache",
"action": "store_true",
"dest": "no_cache",
"default": False,
"type": None,
"help": "Do not use any unnecessary cache files. One temporary cache file is always necessary."},
# {"special_type": "parameter",
# "name": "bond delta",
# "short": "-b",
......@@ -191,6 +200,8 @@ class Cli(object):
config.Computation.atom_radius = self.options.atom_radius
if self.options.max_cachefiles is not None:
config.Computation.max_cachefiles = self.options.max_cachefiles
elif self.options.no_cache:
config.Computation.max_cachefiles = 1
print 'processing files:'
print file_list
......
import itertools
from computation.split_and_merge.domain_centers.calculate_domain_centers import calculate_domain_centers as calc_dom
from computation.split_and_merge.util.pos_bool_type import PosBoolType
from computation.split_and_merge.util.numpy_extension.find_index_of_first_element_not_equivalent import find_index_of_first_element_not_equivalent
from computation.split_and_merge.util.node_border_iterator import iterate_node_border_with_adjacent_node_cells
def is_equivalent(a, b):
    """Logical biconditional: truthy iff `a` and `b` share the same truth value."""
    a_implies_b = not a or b
    b_implies_a = not b or a
    return a_implies_b and b_implies_a
it = itertools.count()


class ObjectType:
    """Enumeration of segmentation targets: DOMAIN == 0, CAVITY == 1."""
    # Use the builtin `next(it)` instead of the Python-2-only `it.next()`
    # method so this module also works on Python 3.
    DOMAIN = next(it)
    CAVITY = next(it)
def is_homogenous_split(data_part, mask_part):
    """Check whether a sub-volume is homogeneous for the split step.

    Returns a ``PosBoolType`` wrapping the index of the first element where
    ``data_part`` and ``mask_part`` are not equivalent -- presumably falsy when
    no such element exists (i.e. the part is homogeneous); TODO confirm against
    ``computation.split_and_merge.util.pos_bool_type.PosBoolType``.
    """
    return PosBoolType(find_index_of_first_element_not_equivalent(data_part, mask_part))
def is_homogenous_merge(data_part, merge_data):
    """Return whether the two parts' representative corner cells are logically
    equivalent (both truthy or both falsy).

    NOTE(review): shadowed by the sign-based ``is_homogenous_merge`` defined
    later in this module, so this version is effectively dead code left over
    from the merge diff.
    """
    return is_equivalent(data_part[0, 0, 0], merge_data[0, 0, 0])
def is_homogenous_merge(image_data_part, image_merge_data):
    """Two image parts may be merged when their representative corner cells
    carry the same sign (negative, zero, or positive)."""
    def sign(value):
        if value > 0:
            return 1
        elif value < 0:
            return -1
        return 0
    return sign(image_data_part[0, 0, 0]) == sign(image_merge_data[0, 0, 0])
def is_relevant_part(hom_image_data_part, object_type):
    """Dispatch the relevance check for a homogeneous part to the predicate
    matching `object_type` (raises KeyError for unknown types)."""
    dispatch = {
        ObjectType.DOMAIN: is_domain_part,
        ObjectType.CAVITY: is_cavity_part,
    }
    check = dispatch[object_type]
    return check(hom_image_data_part)
def is_atom_part(hom_data_part):
    """A homogeneous part belongs to an atom iff its representative corner cell
    is truthy (non-zero)."""
    representative = hom_data_part[0, 0, 0]
    return bool(representative)
def is_domain_part(hom_image_data_part):
    """A homogeneous part belongs to a domain iff its representative corner
    cell equals zero."""
    representative = hom_image_data_part[0, 0, 0]
    return representative == 0
def is_cavity_part(hom_image_data_part):
    """A homogeneous part belongs to a cavity iff its representative corner
    cell is negative."""
    representative = hom_image_data_part[0, 0, 0]
    return representative < 0
def is_inside_volume(hom_mask_part):
......@@ -32,19 +50,38 @@ def is_neighboring(node1, node2):
return x-w_ <= x_ <= x+w and y-h_ <= y_ <= y+h and z-d_ <= z_ <= z+d
def split(data, mask, graph):
def split(data, mask, graph, object_type):
    """Recursively split the data volume into homogeneous box nodes.

    Starting from a single node covering all of ``data``, every inhomogeneous
    node is split via ``graph.split_node`` and its children are re-examined.
    Homogeneous nodes that are irrelevant for `object_type`, or that lie
    outside the cell volume according to ``mask``, are removed from the graph.
    Finally further splitting is locked via ``graph.forbid_splitting()``.

    A node is a tuple ``((x, y, z) position, (w, h, d) dimensions)`` into the
    3D arrays ``data`` and ``mask``.
    """
    node = ((0, 0, 0), data.shape)
    graph.set_initial_node(node)
    stack = [node]
    while len(stack) > 0:
        pos, dim = stack.pop()
        # NOTE(review): the next two assignments compute the same expression;
        # the single-line form appears to be flattened-diff residue of the
        # reformatted (wrapped) call below it.
        is_hom = is_homogenous_split(data[pos[0]:pos[0]+dim[0], pos[1]:pos[1]+dim[1], pos[2]:pos[2]+dim[2]], mask[pos[0]:pos[0]+dim[0], pos[1]:pos[1]+dim[1], pos[2]:pos[2]+dim[2]])
        is_hom = is_homogenous_split(data[pos[0]:pos[0]+dim[0], pos[1]:pos[1]+dim[1], pos[2]:pos[2]+dim[2]],
                                     mask[pos[0]:pos[0]+dim[0], pos[1]:pos[1]+dim[1], pos[2]:pos[2]+dim[2]])
        if not is_hom:
            # Split and revisit the resulting child nodes.
            nodes = graph.split_node((pos, dim), is_hom)
            for node in nodes:
                stack.append(node)
        elif (is_relevant_part(data[pos[0]:pos[0]+dim[0], pos[1]:pos[1]+dim[1], pos[2]:pos[2]+dim[2]], object_type) or
              not is_inside_volume(mask[pos[0]:pos[0]+dim[0], pos[1]:pos[1]+dim[1], pos[2]:pos[2]+dim[2]])):
            # Homogeneous but not part of the searched object (or outside the
            # cell volume): drop it from the graph.
            graph.remove_node((pos, dim))
    graph.forbid_splitting()
def merge(data, graph):
    """Merge neighboring graph nodes whose sub-arrays are mutually homogeneous
    (as judged by ``is_homogenous_merge`` on the representative cells).

    Iterates every (node, neighbor) adjacency of `graph`; ``graph.is_merged``
    guards against redundant work when two nodes already belong to the same
    merged area.
    """
    for node, neighbors in graph.iteritems():
        for neighbor in neighbors:
            if not graph.is_merged(node, neighbor):
                # A node is ((x, y, z) position, (w, h, d) dimensions) into `data`.
                pos_node, dim_node = node
                pos_neighbor, dim_neighbor = neighbor
                data_node = data[pos_node[0]:pos_node[0]+dim_node[0],
                                 pos_node[1]:pos_node[1]+dim_node[1],
                                 pos_node[2]:pos_node[2]+dim_node[2]]
                data_neighbor = data[pos_neighbor[0]:pos_neighbor[0]+dim_neighbor[0],
                                     pos_neighbor[1]:pos_neighbor[1]+dim_neighbor[1],
                                     pos_neighbor[2]:pos_neighbor[2]+dim_neighbor[2]]
                if is_homogenous_merge(data_node, data_neighbor):
                    graph.merge_nodes(node, neighbor)
def add_periodic_neighbors(graph):
......@@ -54,22 +91,27 @@ def add_periodic_neighbors(graph):
for m in border_nodes[i+1:]:
m_x, m_y, m_z = m[0]
for translation_vector in border_node_translation_vectors[m]:
translated_node = ((m_x+translation_vector[0], m_y+translation_vector[1], m_z+translation_vector[2]), m[1])
translated_node = ((m_x+translation_vector[0], m_y+translation_vector[1], m_z+translation_vector[2]),
m[1])
if is_neighboring(n, translated_node):
graph.add_neighbors(n, [m])
graph.add_neighbors(n, [m], translation_vectors=[translation_vector])
break
def merge(data, graph):
    # NOTE(review): truncated duplicate of merge() left over from the flattened
    # diff -- it extracts the two sub-arrays but never compares or merges them.
    # Being defined after the complete version, it shadows it at import time;
    # presumably only one definition exists in the real post-merge file.
    for node, neighbors in graph.iteritems():
        for neighbor in neighbors:
            if not graph.is_merged(node, neighbor):
                pos_node, dim_node = node
                pos_neighbor, dim_neighbor = neighbor
                data_node = data[pos_node[0]:pos_node[0]+dim_node[0], pos_node[1]:pos_node[1]+dim_node[1], pos_node[2]:pos_node[2]+dim_node[2]]
                data_neighbor = data[pos_neighbor[0]:pos_neighbor[0]+dim_neighbor[0], pos_neighbor[1]:pos_neighbor[1]+dim_neighbor[1], pos_neighbor[2]:pos_neighbor[2]+dim_neighbor[2]]
def merge_periodic_border(data, graph):
    """Merge homogeneous node pairs that are adjacent across periodic cell borders.

    Like ``merge`` but iterates the border adjacencies
    (``graph.iter_border_items``) and asks ``is_merged`` to detect cyclic
    merges, which can occur when an area wraps around the periodic boundary.

    BUGFIX: the merged diff left ``graph.merge_nodes(node, neighbor)`` in the
    body, referencing names that do not exist in this function; the call must
    use ``border_node`` / ``border_neighbor``.
    """
    for border_node, border_neighbors in graph.iter_border_items():
        for border_neighbor in border_neighbors:
            if not graph.is_merged(border_node, border_neighbor, detect_cyclic_merge=True):
                # A node is ((x, y, z) position, (w, h, d) dimensions) into `data`.
                pos_node, dim_node = border_node
                pos_neighbor, dim_neighbor = border_neighbor
                data_node = data[pos_node[0]:pos_node[0]+dim_node[0],
                                 pos_node[1]:pos_node[1]+dim_node[1],
                                 pos_node[2]:pos_node[2]+dim_node[2]]
                data_neighbor = data[pos_neighbor[0]:pos_neighbor[0]+dim_neighbor[0],
                                     pos_neighbor[1]:pos_neighbor[1]+dim_neighbor[1],
                                     pos_neighbor[2]:pos_neighbor[2]+dim_neighbor[2]]
                if is_homogenous_merge(data_node, data_neighbor):
                    graph.merge_nodes(border_node, border_neighbor)
def mark_domain_points(data, areas):
......@@ -84,6 +126,14 @@ def calculate_domain_centers(atoms, combined_translation_vectors, areas):
return calc_dom(atoms, combined_translation_vectors, areas)
def get_domain_area_cells(areas):
    """For each area, expand its (position, dimension) boxes into the explicit
    list of grid cell coordinates they cover.

    :param areas: iterable of areas, each an iterable of ``(pos, dim)`` boxes.
    :returns: one list of ``(x, y, z)`` tuples per area, in box order.
    """
    cell_positions = []
    for area in areas:
        cells = []
        for pos, dim in area:
            for x, y, z in itertools.product(range(dim[0]), range(dim[1]), range(dim[2])):
                cells.append((pos[0] + x, pos[1] + y, pos[2] + z))
        cell_positions.append(cells)
    return cell_positions
def get_domain_surface_cells(data, mask, areas):
domain = None
......
from computation.split_and_merge import algorithm
from computation.split_and_merge.algorithm import ObjectType
from computation.split_and_merge.util.graph import GraphForSplitAndMerge
def start_split_and_merge_pipeline(data, mask, atoms, combined_translation_vectors,
                                   get_translation_vector, object_type):
    """Run the full split-and-merge segmentation for domains or cavities.

    BUGFIX: the flattened merge diff left two interleaved versions of this
    function (a duplicate ``def`` header, a ``graph`` use before its creation,
    and stale calls without `object_type`); this is the reconstructed
    post-merge version.

    :param object_type: ``ObjectType.DOMAIN`` or ``ObjectType.CAVITY``; selects
        the relevance predicate and the shape of the return value.
    :returns: for domains ``(centers, areas_cells, non_translated_areas_cells,
        surface_cells, cyclic_area_indices)``; for cavities
        ``(areas_cells, non_translated_areas_cells, cyclic_area_indices)``.
    """
    def is_relevant_part(hom_image_data_part):
        # Bind the chosen object type into the predicate handed to the graph.
        return algorithm.is_relevant_part(hom_image_data_part, object_type)

    graph = GraphForSplitAndMerge(data, mask, get_translation_vector, is_relevant_part)
    algorithm.split(data, mask, graph, object_type)
    algorithm.merge(data, graph)
    algorithm.add_periodic_neighbors(graph)
    algorithm.merge_periodic_border(data, graph)
    areas, non_translated_areas, cyclic_area_indices = graph.get_all_areas(apply_translation=True,
                                                                           with_non_translated_nodes=True,
                                                                           mark_cyclic_areas=True)
    areas_cells, non_translated_areas_cells = map(algorithm.get_domain_area_cells, [areas, non_translated_areas])
    if object_type == ObjectType.DOMAIN:
        # Centers and surface cells are only meaningful for domains.
        algorithm.mark_domain_points(data, non_translated_areas)
        centers = algorithm.calculate_domain_centers(atoms, combined_translation_vectors, non_translated_areas)
        surface_cells = algorithm.get_domain_surface_cells(data, mask, non_translated_areas)
        return centers, areas_cells, non_translated_areas_cells, surface_cells, cyclic_area_indices
    else:
        return areas_cells, non_translated_areas_cells, cyclic_area_indices
......@@ -45,11 +45,13 @@ class Configuration(ConfigNode):
# camera_position =
# offset = (0.0, 0.0, 0.0)
self.gl_window_size = [400, 400]
self.atom_radius = 0.4
self.bond_radius = 0.1
pass
class Computation(ConfigNode):
    """Default parameters for cavity computations."""
    def __init__(self):
        # NOTE(review): atom_radius and std_cutoff_radius carry the same value;
        # atom_radius looks like flattened-diff residue of the renamed setting
        # -- confirm only std_cutoff_radius exists in the merged file.
        self.atom_radius = 2.8
        # Default cutoff radius (presumably in the input file's length units
        # -- TODO confirm).
        self.std_cutoff_radius = 2.8
        # Default discretization resolution for the cavity grid.
        self.std_resolution = 64
        # Upper bound on cached result files; semantics of 0 (disabled vs.
        # unlimited) not visible here -- TODO confirm against the cache code.
        self.max_cachefiles = 0
......
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import collections
import datetime
import dateutil
import json
import os.path
import sys
from .configuration import CONFIG_DIRECTORY
from util.logger import Logger
logger = Logger("config.cutoff_history")
logger.setstream("default", sys.stdout, Logger.WARNING)
DEFAULT_CONFIG_FILE = os.path.expanduser(os.path.join(CONFIG_DIRECTORY, 'cutoff_history.json'))
class HistoryEntry(collections.namedtuple('HistoryEntry', ['filename', 'frame', 'time', 'radii'])):
    """One cutoff-radii history record: ``(filename, frame, time, radii)``.

    ``time`` is normalized to a :class:`HistoryEntry.Date` on construction,
    accepting either a ``datetime.datetime`` instance or a parseable date
    string (as read back from the JSON history file).
    """

    class Date(object):
        """Thin ``datetime`` wrapper with a fixed ``'%m/%d/%y, %H:%M'`` display."""

        def __init__(self, *args, **kwargs):
            # Accept either an existing datetime instance or the positional /
            # keyword arguments of the datetime constructor itself.
            if not isinstance(args[0], datetime.datetime):
                self._date = datetime.datetime(*args, **kwargs)
            else:
                self._date = args[0]

        def __str__(self):
            return repr(self)

        def __repr__(self):
            return self._date.strftime('%m/%d/%y, %H:%M')

        @property
        def datetime_obj(self):
            """The wrapped ``datetime.datetime`` instance."""
            return self._date

    def __new__(cls, filename, frame, time, radii):
        if isinstance(time, datetime.datetime):
            time = HistoryEntry.Date(time)
        else:
            # BUGFIX: ``import dateutil`` alone does not load the ``parser``
            # submodule, so the module-level import is not enough here; import
            # it explicitly (lazily, only when a string has to be parsed).
            import dateutil.parser
            time = HistoryEntry.Date(dateutil.parser.parse(time))
        return super(HistoryEntry, cls).__new__(cls, filename, frame, time, radii)
class CutoffHistory(object):
    """Persistent history of cutoff-radii choices, stored as a JSON file.

    Entries are ``HistoryEntry`` tuples ``(filename, frame, time, radii)``;
    ``add`` keeps the most recently added entry at the front.
    """

    def __init__(self, config_filepath=None):
        """Load the history from `config_filepath`.

        The default (``DEFAULT_CONFIG_FILE``) is resolved at call time instead
        of at function-definition time, so constructing with ``None`` always
        picks up the current module-level setting.
        """
        if config_filepath is None:
            config_filepath = DEFAULT_CONFIG_FILE
        self._config_filepath = config_filepath
        self._history = None
        self.read()

    def read(self):
        """(Re-)read the history file.

        A missing, unreadable or corrupt file yields an empty history instead
        of propagating the error (this class is instantiated at import time).
        """
        try:
            with open(self._config_filepath, 'r') as f:
                history_json = json.load(f)
                self._history = [HistoryEntry(*entry) for entry in history_json]
        except (IOError, ValueError):
            # IOError: file absent/unreadable; ValueError: invalid JSON.
            self._history = []

    def save(self):
        """Write the history to disk; warns and re-raises on I/O errors."""
        def serialization_helper(obj):
            # Only HistoryEntry.Date needs custom serialization (ISO format);
            # any other non-JSON type is a programming error.
            if isinstance(obj, HistoryEntry.Date):
                return obj.datetime_obj.isoformat()
            raise TypeError('cannot serialize object of type {}'.format(type(obj)))
        try:
            with open(self._config_filepath, 'w') as f:
                json.dump(self._history, f, default=serialization_helper)
        except IOError:
            logger.warn('Could not save cutoff radii history')
            raise

    def filtered_history(self, elements, entries_with_additional_elements=True, preferred_filenames_with_frames=None):
        """Return the history entries whose radii cover `elements`.

        :param elements: iterable of element symbols that must be present.
        :param entries_with_additional_elements: if True, entries may contain
            extra elements (superset match); if False, the element sets must be
            exactly equal.
        :param preferred_filenames_with_frames: optional mapping
            ``{filename: [frames]}``; matching entries are sorted to the front
            (the sort is stable, so relative order is otherwise preserved).
        """
        elements = set(elements)
        filtered_history = []
        for entry in self._history:
            current_elements = set(entry.radii.keys())
            if (entries_with_additional_elements and elements.issubset(current_elements)) or \
               (not entries_with_additional_elements and elements == current_elements):
                filtered_history.append(entry)
        if preferred_filenames_with_frames is not None:
            def key_func(entry):
                """Sort key that prefers filenames (and frames) from the given
                mapping by assigning them lower integer weights."""
                if entry.filename in preferred_filenames_with_frames:
                    if entry.frame in preferred_filenames_with_frames[entry.filename]:
                        weight = 0
                    else:
                        weight = 1
                else:
                    weight = 2
                return (weight, entry.filename)
            filtered_history.sort(key=key_func)
        return filtered_history

    @property
    def history(self):
        """A shallow copy of the history list (safe for callers to mutate)."""
        return list(self._history)

    def add(self, entry):
        """Insert `entry` as the front (most recent) element."""
        self._history.insert(0, entry)

    def extend(self, entry_list):
        """Add every entry of `entry_list` to the history.

        NOTE(review): entries are sorted newest-first and then each is inserted
        at the front, which leaves the OLDEST of `entry_list` in front --
        confirm this inversion is intended.  ``HistoryEntry.Date`` defines no
        ordering operators, so the sort relies on fallback comparison.
        """
        entry_list = sorted(entry_list, key=lambda x: x.time, reverse=True)
        for entry in entry_list:
            self.add(entry)

    def remove(self, entry):
        """Remove the first occurrence of `entry` (ValueError if absent)."""
        self._history.remove(entry)

    def remove_list(self, entry_list):
        """Remove every entry in `entry_list`."""
        for entry in entry_list:
            self.remove(entry)
cutoff_history = CutoffHistory()
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import collections
import json
import os.path
import sys
from .configuration import CONFIG_DIRECTORY
from util.logger import Logger
logger = Logger("config.cutoff_presets")
logger.setstream("default", sys.stdout, Logger.WARNING)
DEFAULT_CONFIG_FILE = os.path.expanduser(os.path.join(CONFIG_DIRECTORY, 'cutoff_presets.json'))
Preset = collections.namedtuple('Preset', ['name', 'radii'])
class CutoffPresets(object):
    """Persistence for named cutoff-radii presets, stored as a JSON file.

    Presets are ``Preset`` tuples ``(name, radii)``; ``add`` keeps the most
    recently added preset at the front.
    """

    def __init__(self, config_filepath=None):
        """Load the presets from `config_filepath`.

        The default (``DEFAULT_CONFIG_FILE``) is resolved at call time instead
        of at function-definition time, so constructing with ``None`` always
        picks up the current module-level setting.
        """
        if config_filepath is None:
            config_filepath = DEFAULT_CONFIG_FILE
        self._config_filepath = config_filepath
        self._presets = None
        self.read()

    def read(self):
        """(Re-)read the presets file.

        A missing, unreadable or corrupt file yields an empty preset list
        instead of propagating the error (this class is instantiated at import
        time).
        """
        try:
            with open(self._config_filepath, 'r') as f:
                presets_json = json.load(f)
                self._presets = [Preset(*entry) for entry in presets_json]
        except (IOError, ValueError):
            # IOError: file absent/unreadable; ValueError: invalid JSON.
            self._presets = []

    def save(self):
        """Write the presets to disk; warns and re-raises on I/O errors."""
        try:
            with open(self._config_filepath, 'w') as f:
                json.dump(self._presets, f)
        except IOError:
            logger.warn('Could not save cutoff radii presets')
            raise

    @property
    def presets(self):
        """A shallow copy of the preset list (safe for callers to mutate)."""
        return list(self._presets)

    def add(self, entry):
        """Insert `entry` as the front (most recent) element."""
        self._presets.insert(0, entry)

    def extend(self, entry_list):
        """Add every entry of `entry_list`; the last element ends up in front."""
        for entry in entry_list:
            self.add(entry)

    def remove(self, entry):
        """Remove the first occurrence of `entry` (ValueError if absent)."""
        self._presets.remove(entry)

    def remove_list(self, entry_list):
        """Remove every entry in `entry_list`."""
        for entry in entry_list:
            self.remove(entry)
cutoff_presets = CutoffPresets()
This diff is collapsed.
......@@ -9,7 +9,6 @@ import os
import core.data as data
import core.file
from core.file import File
from datetime import datetime
from algorithm import CavityCalculation, DomainCalculation, FakeDomainCalculation
from discretization import DiscretizationCache, AtomDiscretization
import util.message as message
......@@ -17,7 +16,6 @@ from hashlib import sha256
from config.configuration import config
from util.logger import Logger
import sys
import numpy as np
import core.bonds
__all__ = ["Calculation",
......@@ -73,9 +71,11 @@ class CalculationSettings(object):
def __init__(self,
datasets,
resolution=config.Computation.std_resolution,
cutoff_radii=config.Computation.std_cutoff_radius,
domains=False,
surface_cavities=False,
center_cavities=False,
gyration_tensor=False,
recalculate=False,
exporthdf5=False,
exporttext=False,
......@@ -84,9 +84,11 @@ class CalculationSettings(object):
"""
self.datasets = datasets
self.resolution = resolution
self.cutoff_radii = cutoff_radii
self.domains = domains
self.surface_cavities = surface_cavities
self.center_cavities = center_cavities
self.gyration_tensor = gyration_tensor
self.recalculate = recalculate
self.exporthdf5 = exporthdf5
self.exporttext = exporttext
......@@ -103,9 +105,11 @@ class CalculationSettings(object):
for filename, frames in self.datasets.iteritems():
datasets[filename] = [f for f in frames]