Nodemap to vtk converter
brei-er committed Jan 25, 2023
2 parents ee1d6f8 + f6fc2de commit 075d856
Showing 12 changed files with 354,090 additions and 18 deletions.
2 changes: 1 addition & 1 deletion crackpy/__init__.py
@@ -5,4 +5,4 @@
import crackpy.structure_elements

# package information
__version__ = "1.0.2"
__version__ = "1.0.3"
187 changes: 173 additions & 14 deletions crackpy/fracture_analysis/data_processing.py
@@ -1,7 +1,7 @@
import os

import numpy as np

import pyvista
from pyvista import CellType
from crackpy.structure_elements import data_files
from crackpy.structure_elements.material import Material

@@ -12,14 +12,17 @@ class InputData:
Methods:
* read_header - method to import metadata from header (if ``data_file`` is provided)
* read_data_file - method to import input data from file (if ``data_file`` is provided)
* set_connection_file - method to set connection file path
* set_data_manually - set physical data manually instead of from nodemap file
* set_data_file - set data file path
* transform_data - with a coordinate shift and angle rotation
* calc_eps_vm - calculate Von Mises strain (stored as attribute)
* calc_stresses - calculate stresses using material law
* calc_facet_size - calculate the facet size of DIC data
* to_vtk - convert nodemap data to a PyVista object and save it in vtk format
"""

def __init__(self, nodemap: data_files.Nodemap or None = None, meta_keywords: dict or None = None):
@@ -32,19 +35,27 @@ def __init__(self, nodemap: data_files.Nodemap or None = None, meta_keywords: di
"""

if nodemap is not None:
self.data_file = os.path.join(nodemap.folder, nodemap.name)
self.nodemap_folder = nodemap.folder
self.nodemap_name = nodemap.name
self.data_file = os.path.join(self.nodemap_folder, self.nodemap_name)
self.nodemap_structure = nodemap.structure
else:
self.nodemap_folder = None
self.nodemap_name = None
self.data_file = None
self.nodemap_structure = data_files.NodemapStructure()

self.meta_keywords = meta_keywords
self.connection_file = None

# input data attributes
self.facet_id = None
self.coor_x = None
self.coor_y = None
self.coor_z = None
self.disp_x = None
self.disp_y = None
self.disp_z = None
self.eps_x = None
self.eps_y = None
self.eps_xy = None
@@ -53,6 +64,11 @@ def __init__(self, nodemap: data_files.Nodemap or None = None, meta_keywords: di
self.sig_y = None
self.sig_xy = None
self.sig_vm = None
self.eps_1 = None
self.eps_2 = None
self.sig_1 = None
self.sig_2 = None
self.connections = None

# meta data attributes
self.meta_attributes = [
@@ -77,9 +93,25 @@ def __init__(self, nodemap: data_files.Nodemap or None = None, meta_keywords: di
self.read_data_file()

def set_data_file(self, data_file: str):
"""Set data file path."""
"""Set data file path.
Args:
data_file: name of data file
"""
self.data_file = data_file

def set_connection_file(self, connection_file: str, folder: str):
"""Set connection file path.
Args:
connection_file: name of connection file
folder: folder of connection file
"""
connection_file_path = os.path.join(folder, connection_file)
np_df = np.genfromtxt(connection_file_path, dtype=int, delimiter=';', skip_header=1)
self.connections = self._cut_none_elements(np_df)

def read_header(self, meta_attributes_to_keywords: dict = None):
"""Get meta data by reading from header.
@@ -114,16 +146,18 @@ def read_header(self, meta_attributes_to_keywords: dict = None):

def read_data_file(self):
"""Read data from nodemap file."""
df = np.genfromtxt(self.data_file,
delimiter=";", encoding="windows-1252")
np_df = np.asarray(df, dtype=np.float64)
raw_inp = np.genfromtxt(self.data_file,
delimiter=";", encoding="windows-1252")
np_df = np.asarray(raw_inp, dtype=np.float64)
# cut nans (necessary since version 2020)
nodemap_data = self._cut_nans(np_df)

self.facet_id = nodemap_data[:, self.nodemap_structure.index_facet_id]
self.coor_x = nodemap_data[:, self.nodemap_structure.index_coor_x]
self.coor_y = nodemap_data[:, self.nodemap_structure.index_coor_y]
self.coor_z = nodemap_data[:, self.nodemap_structure.index_coor_z]
self.disp_x = nodemap_data[:, self.nodemap_structure.index_disp_x]
self.disp_y = nodemap_data[:, self.nodemap_structure.index_disp_y]
self.disp_z = nodemap_data[:, self.nodemap_structure.index_disp_z]

if self.nodemap_structure.strain_is_percent:
self.eps_x = nodemap_data[:, self.nodemap_structure.index_eps_x] / 100.0
@@ -141,7 +175,7 @@ def read_data_file(self):
self.sig_vm = self._calc_sig_vm()

def set_data_manually(self, coor_x: np.array, coor_y: np.array, disp_x: np.array, disp_y: np.array,
eps_x: np.array, eps_y: np.array, eps_xy: np.array, eps_vm: np.array = None):
eps_x: np.array, eps_y: np.array, eps_xy: np.array, eps_vm: np.array = None):
"""Manually set data, e.g. for in-situ calculation in Aramis software.
Args:
@@ -232,7 +266,7 @@ def calc_eps_vm(self):
"""
# under plane stress assumption with nu = 0.5
nu = 0.5
eps_z = -nu/(1-nu)*(self.eps_x + self.eps_y)
eps_z = -nu / (1 - nu) * (self.eps_x + self.eps_y)

# total strain: eps = [[eps_x, eps_xy, 0], [eps_xy, eps_y, 0], [0, 0, eps_z]]
eps = np.array([[self.eps_x, self.eps_xy, np.zeros_like(self.eps_x)],
@@ -260,21 +294,145 @@ def calc_facet_size(self) -> float:
"""Returns DIC facet size."""
return np.min(
np.sqrt(
(self.coor_x[1:] - self.coor_x[0])**2.0
+ (self.coor_y[1:] - self.coor_y[0])**2.0
(self.coor_x[1:] - self.coor_x[0]) ** 2.0
+ (self.coor_y[1:] - self.coor_y[0]) ** 2.0
)
)

def _calc_sig_vm(self):
"""Returns the Von Mises stress."""
return np.sqrt(self.sig_x ** 2 + self.sig_y ** 2 - self.sig_x * self.sig_y + 3 * self.sig_xy ** 2)

def _calculate_principal_tensor_components(self, tensor_components: np.array):
"""Calculate principal tensor components
Args:
tensor_components: array of tensor components (e.g. stress or strain) with shape (3)
"""
xx, yy, xy = tensor_components

tensor = np.array([[xx, xy],
[xy, yy]])

# Compute the eigenvalues of the stress tensor
eigenvalues, _ = np.linalg.eig(tensor)
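# For this symmetric 2x2 tensor the eigenvalues also have the closed form
# (xx + yy)/2 +/- sqrt(((xx - yy)/2)**2 + xy**2), i.e. the classical in-plane
# principal values, which np.linalg.eig reproduces numerically.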

# Sort the eigenvalues to obtain the principal stresses
eigenvalues_sorted = np.sort(eigenvalues)[::-1]
return eigenvalues_sorted

def _calculate_principal_strains(self):
"""Calculate principal strains"""
strains = np.stack([self.eps_x, self.eps_y, self.eps_xy], axis=1)
principal_strains = np.apply_along_axis(self._calculate_principal_tensor_components, 1, strains)
self.eps_1 = principal_strains[:, 0]
self.eps_2 = principal_strains[:, 1]

def _calculate_principal_stresses(self):
"""Calculate principal stresses"""
stresses = np.stack([self.sig_x, self.sig_y, self.sig_xy], axis=1)
principal_stresses = np.apply_along_axis(self._calculate_principal_tensor_components, 1, stresses)
self.sig_1 = principal_stresses[:, 0]
self.sig_2 = principal_stresses[:, 1]

def _renumber_nodes(self, node_number: int, mapping: dict):
"""Renumber nodes according to mapping table.
Args:
node_number: node number
mapping: mapping of old node numbers to new node numbers
"""
return mapping[node_number]

def to_vtk(self, output_folder: str = None):
"""Returns a vtk file of the DIC data.
Args:
output_folder: path to output folder; If None, no vtk file will be saved. The output file name will be the
same as the input file name with the extension .vtk
Returns:
PyVista mesh object
"""
# create node
nodes = np.stack((self.coor_x, self.coor_y, np.zeros_like(self.coor_x)), axis=1)

# remap node numbers
old_facet_id = self.facet_id - 1
new_facet_id = np.arange(0, len(self.facet_id))
mapping = dict(zip(old_facet_id, new_facet_id))
renumber_nodes_vec = np.vectorize(self._renumber_nodes)

# define elements
elements = self.connections[:, 1:5]
elements[:, 0] = 3
elements[:, 1:] = renumber_nodes_vec(elements[:, 1:], mapping)

# define cell types
cell_types = np.full(len(elements[:, 0]), fill_value=CellType.TRIANGLE, dtype=np.uint8)

# define mesh
mesh = pyvista.UnstructuredGrid(elements, cell_types, nodes)

# Check if input data is 2D
if self.coor_z is None:
self.coor_z = np.zeros_like(self.coor_x)
if self.disp_z is None:
self.disp_z = np.zeros_like(self.coor_x)

# add data
mesh.point_data['x [mm]'] = self.coor_x
mesh.point_data['y [mm]'] = self.coor_y
mesh.point_data['z [mm]'] = self.coor_z
mesh.point_data['u_x [mm]'] = self.disp_x
mesh.point_data['u_y [mm]'] = self.disp_y
mesh.point_data['u_z [mm]'] = self.disp_z
mesh.point_data['eps_x [%]'] = self.eps_x * 100.0
mesh.point_data['eps_y [%]'] = self.eps_y * 100.0
mesh.point_data['eps_xy [1]'] = self.eps_xy
mesh.point_data['eps_vm [%]'] = self.eps_vm * 100.0
mesh.point_data['sig_x [MPa]'] = self.sig_x
mesh.point_data['sig_y [MPa]'] = self.sig_y
mesh.point_data['sig_xy [MPa]'] = self.sig_xy
mesh.point_data['sig_vm [MPa]'] = self.sig_vm

# Compute the principal stresses
self._calculate_principal_stresses()
mesh.point_data['sig_1 [MPa]'] = self.sig_1
mesh.point_data['sig_2 [MPa]'] = self.sig_2

# Compute the principal strains
self._calculate_principal_strains()
mesh.point_data['eps_1 [%]'] = self.eps_1 * 100.0
mesh.point_data['eps_2 [%]'] = self.eps_2 * 100.0

# add metadata
self.read_header()
for meta_attribute in self.meta_attributes:
meta_data = getattr(self, meta_attribute)
mesh.field_data[meta_attribute] = [meta_data]

# write vtk file
if output_folder is not None:
path = os.path.join(output_folder, self.nodemap_name[:-4] + '.vtk')
mesh.save(path, binary=False)
return mesh

@staticmethod
def _cut_nans(df):
"""Reads an array and deletes each row containing any nan value."""
cut_nans_array = df[~np.isnan(df).any(axis=1)]
return cut_nans_array

@staticmethod
def _cut_none_elements(df):
"""Reads an array and deletes each row containing any '-1' value."""
mask = np.any(df == -1, axis=1)
cut_none_elements = df[~mask]
return cut_none_elements


def apply_mask(data: InputData, mask: np.array) -> InputData:
masked_data = InputData()
@@ -299,8 +457,9 @@ class CrackTipInfo:
Methods:
* set_manually - manually redefine crack tip information
"""

def __init__(
self,
crack_tip_x: float = None,
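Taken together, the additions above form the nodemap-to-VTK converter named in the commit title. A minimal usage sketch follows; set_connection_file() and to_vtk() are taken from the diff above, while the file names, the Nodemap and Material constructor arguments, and the calc_stresses signature are not shown in this commit and are assumptions for illustration only.

from crackpy.fracture_analysis.data_processing import InputData
from crackpy.structure_elements.data_files import Nodemap
from crackpy.structure_elements.material import Material

# Hypothetical nodemap location; the Nodemap constructor arguments are assumed.
nodemap = Nodemap(name='example_nodemap.txt', folder='data/nodemaps')
data = InputData(nodemap=nodemap)

# The connection file provides the facet connectivity needed to build the triangular cells.
data.set_connection_file('example_connections.txt', folder='data/connections')

# to_vtk also writes stress fields, so stresses should be computed first,
# e.g. from the material law (assumed call signature).
data.calc_stresses(Material())

# Writes <nodemap name>.vtk into output_folder and returns the PyVista mesh.
mesh = data.to_vtk(output_folder='results/vtk')

The returned object is a pyvista.UnstructuredGrid, so any PyVista filter or plotter can be applied to it directly, e.g. mesh.plot(scalars='eps_vm [%]').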
6 changes: 6 additions & 0 deletions crackpy/structure_elements/data_files.py
@@ -12,10 +12,13 @@ class NodemapStructure(FileStructure):
def __init__(
self,
row_length=10,
index_facet_id=0,
index_coor_x=1,
index_coor_y=2,
index_coor_z=3,
index_disp_x=4,
index_disp_y=5,
index_disp_z=6,
index_eps_x=7,
index_eps_y=8,
index_eps_xy=9,
@@ -24,10 +27,13 @@
):
super().__init__()
self.row_length = row_length
self.index_facet_id = index_facet_id
self.index_coor_x = index_coor_x
self.index_coor_y = index_coor_y
self.index_coor_z = index_coor_z
self.index_disp_x = index_disp_x
self.index_disp_y = index_disp_y
self.index_disp_z = index_disp_z
self.index_eps_x = index_eps_x
self.index_eps_y = index_eps_y
self.index_eps_xy = index_eps_xy
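The defaults above now carry the facet id and the z coordinate/displacement columns used by the converter. A short sketch of constructing such a structure explicitly, using only the parameters visible in this diff (all other constructor arguments keep their defaults):

from crackpy.structure_elements.data_files import NodemapStructure

# Column indices mirror the defaults shown above, including the newly added
# facet-id and z-component columns.
structure = NodemapStructure(
    row_length=10,
    index_facet_id=0,
    index_coor_x=1, index_coor_y=2, index_coor_z=3,
    index_disp_x=4, index_disp_y=5, index_disp_z=6,
    index_eps_x=7, index_eps_y=8, index_eps_xy=9,
)

A Nodemap carries such a structure (see nodemap.structure in data_processing.py above), which InputData.read_data_file uses to pick the facet-id and z columns; how the structure is attached to the Nodemap is not shown in this commit.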
@@ -45,7 +45,7 @@ def test_interpolation(self):
for eps_vm in interp_eps_vm.values():
self.assertIsInstance(eps_vm, np.ndarray)
self.assertEqual(eps_vm.shape, (256, 256))
self.assertAlmostEqual(eps_vm[0, 0], 0.000952, delta=1e-6)
self.assertAlmostEqual(eps_vm[0, 0], 0.001070, delta=1e-6)


if __name__ == '__main__':
@@ -20,7 +20,7 @@ def setUp(self):
self.data.set_data_manually(coor_x, coor_y, disp_x, disp_y, eps_x, eps_y, eps_xy)

def test_calc_eps_vm(self):
eps_vm_exp = np.asarray([5.887841, 7.423686, 9.018500])
eps_vm_exp = np.asarray([5.88784058, 8., 10.06644591])
self.data.calc_eps_vm()

# check equality to calculated results
1 change: 1 addition & 0 deletions requirements.txt
@@ -4,6 +4,7 @@ numpy>=1.23.2
opencv_python>=4.5.4.60
pandas>=1.4.3
Pillow>=9.2.0
pyvista>=0.37.0
scikit_image>=0.19.3
scikit_learn>=1.1.2
scipy>=1.9.0