Scientific colormaps, improved detection stability, bugfixes
melc-da committed Mar 17, 2023
2 parents 1cdff1d + 89a530d commit aaa86a0
Showing 23 changed files with 213 additions and 222 deletions.
2 changes: 1 addition & 1 deletion crackpy/__init__.py
@@ -5,4 +5,4 @@
import crackpy.structure_elements

# package information
__version__ = "1.0.4"
__version__ = "1.1.0"
2 changes: 1 addition & 1 deletion crackpy/crack_detection/data/interpolation.py
@@ -44,7 +44,7 @@ def interpolate_on_array(input_by_nodemap, interp_size, offset=(0, 0), pixels=25
return interp_coors_by_nodemap, interp_disps_by_nodemap, interp_eps_vm_by_nodemap


def interpolate(input_data_object: InputData, size: int, offset=(0, 0), pixels: int=256):
def interpolate(input_data_object: InputData, size: int or float, offset=(0, 0), pixels: int=256):
"""Extracts and interpolates Coors, Disps and EpsVonMises
from an InputData object onto arrays of size pixels x pixels.
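A call sketch (not part of this commit) for the widened size annotation, assuming an already loaded InputData instance named input_data; the three return values are inferred from the docstring and from interpolate_on_array above:

    # size now accepts int or float, e.g. a 35.5 mm interpolation window
    coors, disps, eps_vm = interpolate(input_data, size=35.5, offset=(0, 0), pixels=256)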
4 changes: 2 additions & 2 deletions crackpy/crack_detection/deep_learning/attention.py
@@ -323,7 +323,7 @@ def plot(self,
triang.set_mask(mask)
plot = ax.tricontourf(triang,
heatmap.flatten(), contour_vector,
extend='neither', cmap='jet')
extend='neither')
ax.autoscale(False)
# ax.axis('off') # uncomment to turn off axis labels and ticks

@@ -431,7 +431,7 @@ def plot_overview(output, maps, side: str, scale: str = 'QUALITATIVE'):
# plot heatmap
plot = ax.tricontourf(coor_x.flatten(), coor_y.flatten(),
heatmap.flatten(), contour_vector,
extend='neither', cmap='jet')
extend='neither')
ax.autoscale(False)

# calculate crack tip segmentation
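In both heatmap plots above, dropping the cmap='jet' argument makes matplotlib fall back to its default perceptually uniform colormap (viridis), consistent with the commit's move to scientific colormaps. A sketch (not part of the diff) of passing such a map explicitly:

    plot = ax.tricontourf(triang, heatmap.flatten(), contour_vector,
                          extend='neither', cmap='viridis')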
14 changes: 12 additions & 2 deletions crackpy/crack_detection/detection.py
@@ -26,7 +26,7 @@ def __init__(self, side: str = 'right', detection_window_size: float = 70, offse
Args:
side: of the specimen ('left' or 'right')
window: size of the detection window in mm
detection_window_size: size of the detection window in mm
offset: tuple of (x,y)-offset in mm
angle_det_radius: radius in mm around crack tip considered for angle detection
device: (torch.device)
@@ -54,6 +54,13 @@ def preprocess(interp_disps: np.ndarray) -> torch.Tensor:
"""
input_ch = torch.tensor(interp_disps, dtype=torch.float32)

# check if NaNs are present
if torch.isnan(input_ch).any():
print('NaNs in displacement data! '
'Please make sure that no NaNs are present '
'E.g. make the detection_window_boundary smaller.')

input_ch = preprocess.normalize(input_ch).unsqueeze(0)

return input_ch
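A shape sketch (not part of the commit); the exact channel count is an assumption:

    # interp_disps.shape == (n_channels, 256, 256)       displacement fields on a 256 x 256 grid
    # input_ch.shape     == (1, n_channels, 256, 256)    after normalize(...) and unsqueeze(0)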
@@ -105,7 +112,8 @@ def calculate_position_in_mm(self, crack_tip_px: list):
"""
# Transform to global coordinate system
crack_tip_x = crack_tip_px[1] * self.detection.detection_window_size / 255
crack_tip_y = crack_tip_px[0] * self.detection.detection_window_size / 255 - self.detection.detection_window_size / 2
crack_tip_y = crack_tip_px[0] * self.detection.detection_window_size / 255 \
- self.detection.detection_window_size / 2
if self.detection.side == 'left': # mirror x-value of crack tip position to left-hand side
crack_tip_x *= -1
crack_tip_x += self.detection.offset[0]
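A worked example (not part of the commit), assuming detection_window_size = 70 mm, offset = (10, 0) and side = 'right':

    # crack_tip_px = [128, 200]                 # [row, column] in the 256 x 256 detection window
    # crack_tip_x  = 200 * 70 / 255 + 10 ≈ 64.9 mm
    # crack_tip_y  = 128 * 70 / 255 - 35 ≈ 0.1 mm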
@@ -159,6 +167,7 @@ class CrackPathDetection:
* predict_path - predict the crack path and return the segmentation and skeletonized path
"""

def __init__(self, detection: CrackDetection, path_detector: UNet):
"""Initialize class arguments.
@@ -221,6 +230,7 @@ class CrackAngleEstimation:
* predict_angle - estimate the crack tip angle by means of a linear regression
"""

def __init__(self, detection: CrackDetection, crack_tip_in_px: list):
"""Initialize class arguments.
43 changes: 36 additions & 7 deletions crackpy/crack_detection/pipeline/pipeline.py
@@ -3,6 +3,7 @@
import numpy as np
import torch

from crackpy.crack_detection.utils.plot import plot_prediction
from crackpy.crack_detection.utils.utilityfunctions import get_nodemaps_and_stage_nums
from crackpy.crack_detection.detection import CrackTipDetection, CrackPathDetection, CrackAngleEstimation, CrackDetection
from crackpy.fracture_analysis.data_processing import InputData
@@ -17,6 +18,7 @@ def __init__(
sides: list = None,
stage_nums: range = 'All',
detection_window_size: float = None,
detection_boundary: tuple = None,
start_offset: tuple = (0, 0),
angle_det_radius: float = 10
):
@@ -28,6 +30,11 @@ def __init__(
stage_nums: numbers of stages to predict the crack tip (Default: 'All' uses all files of later given path)
detection_window_size: window size used to predict the crack tip
(if None: detection_window_size is equal to specimen_size / 2 - 10)
detection_boundary: (x_min, x_max, y_min, y_max) hard boundary of the crack detection window (to avoid NaNs)
The 'left' side is mirrored to the 'right' side.
Example: (0, 70, -35, 35) for MT160 specimen
If None: detection_boundary is set to
(0, specimen_size / 2, -specimen_size / 4, specimen_size / 4)
start_offset: (offset_x, offset_y) offset from the standard starting window
adapted automatically during the pipeline
angle_det_radius: radius (in mm) around the crack tip
@@ -38,6 +45,10 @@ def __init__(

self.window_size = detection_window_size if detection_window_size is not None else (specimen_size / 2 - 10)
self.start_offset = start_offset
if detection_boundary is not None:
self.detection_boundary = detection_boundary
else:
self.detection_boundary = (0, specimen_size / 2, -specimen_size / 4, specimen_size / 4)
self.angle_det_radius = angle_det_radius

self.stage_nums = stage_nums
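A construction sketch (not part of this commit) showing the new detection_boundary argument with the MT160 example from the docstring. The class name and the parameters not shown in this diff are assumptions:

    setup = DetectionSetup(                     # hypothetical class name
        specimen_size=160,
        sides=['right'],
        stage_nums='All',
        detection_window_size=70,
        detection_boundary=(0, 70, -35, 35),    # (x_min, x_max, y_min, y_max) in mm
        start_offset=(0, 0),
        angle_det_radius=10,
    )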
@@ -155,9 +166,7 @@ def run_detection(self) -> dict:
offset_x *= -1 # left side: offset in negative x-direction
stages_to_results = {}

for i, stage in enumerate(self.detection_stages):
print(f'\r Progress... {i+1}/{len(self.detection_stages)}', end='')

for stage in sorted(self.detection_stages):
# Init
results = {}

@@ -176,7 +185,7 @@
)

# Interpolate data on arrays (256 x 256 pixels)
interp_disps, _ = det.interpolate(data)
interp_disps, interp_eps_vm = det.interpolate(data)

# Preprocess input
input_ch = det.preprocess(interp_disps)
@@ -189,6 +198,7 @@

# Make prediction
pred = ct_det.make_prediction(input_ch)
crack_tip_seg = ct_det.calculate_segmentation(pred)

# Calculate segmentation and most likely crack tip position
crack_tip_pixels = ct_det.find_most_likely_tip_pos(pred)
@@ -203,7 +213,7 @@
cp_det = CrackPathDetection(detection=det, path_detector=self.path_detector)

# predict segmentation and path skeleton
cp_segmentation, _ = cp_det.predict_path(input_ch)
cp_segmentation, cp_skeleton = cp_det.predict_path(input_ch)

##########################
# Crack angle estimation
@@ -224,12 +234,13 @@

# Adjust crack detection window
###############################
if np.abs(offset_x) < self.setup.specimen_size / 2 - self.setup.window_size - 10:
x_min, x_max, y_min, y_max = self.setup.detection_boundary
if np.abs(offset_x) < x_max - self.setup.window_size:
if side == 'right' and crack_tip_x > offset_x + self.setup.window_size / 2:
offset_x += (crack_tip_x - offset_x - self.setup.window_size / 2)
if side == 'left' and crack_tip_x < offset_x - self.setup.window_size / 2:
offset_x -= (offset_x - self.setup.window_size / 2 - crack_tip_x)
if np.abs(offset_y) < self.setup.specimen_size / 2 - self.setup.window_size / 2 - 10:
if np.abs(offset_y) < np.minimum(y_min, y_max) - self.setup.window_size / 2:
if crack_tip_y > offset_y + self.setup.window_size / 4:
offset_y += (crack_tip_y - offset_y - self.setup.window_size / 4)
if crack_tip_y < offset_y - self.setup.window_size / 4:
@@ -240,6 +251,24 @@ def run_detection(self) -> dict:
results['crack_tip_y'] = crack_tip_y
results['angle'] = angle
stages_to_results[stage] = results
print(f'Stage {stage}/{sorted(self.detection_stages)[-1]}'
f' - crack tip: {crack_tip_x:.2f} mm, {crack_tip_y:.2f} mm, angle: {angle:.2f}°')

# Plot crack detection
plot_prediction(background=interp_eps_vm * 100,
interp_size=self.setup.window_size,
offset=(offset_x, offset_y),
save_name=nodemap[:-4],
crack_tip_prediction=np.asarray([crack_tip_pixels]),
crack_tip_seg=crack_tip_seg,
crack_tip_label=None,
crack_path=cp_skeleton,
f_min=0,
f_max=0.68,
title=nodemap[:-4] + f' - {side} side',
path=os.path.join(self.output_path, "plots", f"{side}"),
label='Von Mises strain [%]')

sides_to_results[side] = stages_to_results
self.sides_to_results = sides_to_results

29 changes: 13 additions & 16 deletions crackpy/crack_detection/utils/plot.py
@@ -8,17 +8,17 @@
def plot_prediction(
background: np.array,
interp_size: float or int,
offset: tuple=(0, 0),
crack_tip_prediction: np.array=None,
crack_tip_seg: np.array=None,
crack_tip_label: np.array=None,
crack_path: np.array=None,
f_min: float=None,
f_max: float=None,
save_name: str=None,
path: str=None,
title: str='',
label: str='Plot of Data [Unit]'
offset: tuple = (0, 0),
crack_tip_prediction: np.array = None,
crack_tip_seg: np.array = None,
crack_tip_label: np.array = None,
crack_path: np.array = None,
f_min: float = None,
f_max: float = None,
save_name: str = None,
path: str = None,
title: str = '',
label: str = 'Plot of Data [Unit]'
):
"""Plots crack tip labels and predictions over background.
@@ -49,9 +49,6 @@ def plot_prediction(
num_colors = 120
contour_vector = np.linspace(f_min, f_max, num_colors, endpoint=True)
label_vector = np.linspace(f_min, f_max, 10, endpoint=True)
# Colormap similar to Aramis
cm_jet = cm.get_cmap('jet', 512)
my_cmap = ListedColormap(cm_jet(np.linspace(0.1, 0.9, 256)))

fig = plt.figure(1)
ax = fig.add_subplot(111)
@@ -73,7 +70,7 @@ def plot_prediction(
triang.set_mask(mask)
plot = ax.tricontourf(triang,
background.flatten(), contour_vector,
extend=extend, cmap=my_cmap)
extend=extend)
ax.autoscale(False)
# ax.axis('off') # uncomment to turn of axis and labels

@@ -113,7 +110,7 @@ def plot_prediction(
if path is not None:
if not os.path.exists(path):
os.makedirs(path)
plt.savefig(os.path.join(path, save_name + '.png'), bbox_inches='tight', dpi=300)
plt.savefig(os.path.join(path, save_name + '.png'), bbox_inches='tight')
else:
plt.show()

9 changes: 5 additions & 4 deletions crackpy/fracture_analysis/analysis.py
@@ -145,7 +145,7 @@ def run(self, progress=None, task_id=None):

except:
print('CJP optimization failed.')
self.res_cjp = None
self.res_cjp = {'Error': np.nan, 'K_F': np.nan, 'K_R': np.nan, 'K_S': np.nan, 'K_II': np.nan, 'T': np.nan}

try:
# calculate Williams coefficients with fitting method
@@ -161,12 +161,13 @@ def run(self, progress=None, task_id=None):
K_II = -np.sqrt(2 * np.pi) * self.williams_fit_b_n[1] / np.sqrt(1000)
T = 4 * self.williams_fit_a_n[2]

self.sifs_fit = {'Error': williams_results.cost,
'K_I': K_I, 'K_II': K_II, 'T': T}
self.sifs_fit = {'Error': williams_results.cost, 'K_I': K_I, 'K_II': K_II, 'T': T}

except:
print('Williams optimization failed.')
self.sifs_fit = None
self.williams_fit_a_n = {n: np.nan for index, n in enumerate(self.optimization.terms)}
self.williams_fit_b_n = {n: np.nan for index, n in enumerate(self.optimization.terms)}
self.sifs_fit = {'Error': np.nan, 'K_I': np.nan, 'K_II': np.nan, 'T': np.nan}

if self.integral_properties is not None:
# calculate Williams coefficients with Bueckner-Chen integral method
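The NaN-filled fallback dictionaries above keep downstream result handling uniform when an optimization fails; a sketch (not part of the commit, variable names assumed):

    # Previously a failed fit left self.sifs_fit = None, which breaks e.g. dict unpacking:
    #   row = {'stage': stage, **analysis.sifs_fit}    # TypeError if sifs_fit is None
    # With the NaN-filled dict the same line still works and simply records NaN values.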
49 changes: 32 additions & 17 deletions crackpy/fracture_analysis/data_processing.py
@@ -336,15 +336,23 @@ def _calculate_principal_stresses(self):
self.sig_1 = principal_stresses[:, 0]
self.sig_2 = principal_stresses[:, 1]

def _renumber_nodes(self, node_number: int, mapping: dict):
def _renumber_nodes(self, old_node_number: int, mapping: dict):
"""Renumber nodes according to mapping table.
Args:
node_number: node number
old_node_number: node number
mapping: mapping of old node numbers to new node numbers
Returns:
new_node_number: it is -1 if the node number is not in the mapping table
"""
return mapping[node_number]
try:
new_node_number = mapping[old_node_number]
except KeyError:
new_node_number = -1

return new_node_number
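An illustration (not part of the commit) of the new fallback behaviour:

    # _renumber_nodes(5, mapping={5: 0, 7: 1}) -> 0
    # _renumber_nodes(9, mapping={5: 0, 7: 1}) -> -1   (node 9 is missing; its element is filtered out later)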

def to_vtk(self, output_folder: str = None):
"""Returns a vtk file of the DIC data.
@@ -357,30 +365,36 @@ def to_vtk(self, output_folder: str = None):
PyVista mesh object
"""
# create node
nodes = np.stack((self.coor_x, self.coor_y, np.zeros_like(self.coor_x)), axis=1)
nodes = np.stack((self.coor_x, self.coor_y, self.coor_z), axis=1)

# remap node numbers
old_facet_id = self.facet_id - 1
new_facet_id = np.arange(0, len(self.facet_id))
mapping = dict(zip(old_facet_id, new_facet_id))
renumber_nodes_vec = np.vectorize(self._renumber_nodes)

# define elements
elements = self.connections[:, 1:5]
elements[:, 0] = 3
elements[:, 1:] = renumber_nodes_vec(elements[:, 1:], mapping)
# create mesh
if self.connections is not None:
# define elements
elements = self.connections[:, 1:5]
elements[:, 0] = 3
elements[:, 1:] = renumber_nodes_vec(elements[:, 1:], mapping)

# define cell types
cell_types = np.full(len(elements[:, 0]), fill_value=CellType.TRIANGLE, dtype=np.uint8)
# Check if there are any elements with -1 as node number
mask = np.any(elements[:, 1:] == -1, axis=1)
elements = elements[~mask]

# define mesh
mesh = pyvista.UnstructuredGrid(elements, cell_types, nodes)
# define cell types
cell_types = np.full(len(elements[:, 0]), fill_value=CellType.TRIANGLE, dtype=np.uint8)

# define mesh
mesh = pyvista.UnstructuredGrid(elements, cell_types, nodes)

else:
print(f'No connectivity data provided for {self.nodemap_name}. Reconstructing a mesh from the nodes using Delaunay triangulation.')
cloud = pyvista.wrap(nodes)
mesh = cloud.delaunay_2d(alpha=1.0)

# Check if input data is 2D
if self.coor_z is None:
self.coor_z = np.zeros_like(self.coor_x)
if self.disp_z is None:
self.disp_z = np.zeros_like(self.coor_x)

# add data
mesh.point_data['x [mm]'] = self.coor_x
@@ -389,6 +403,7 @@ def to_vtk(self, output_folder: str = None):
mesh.point_data['u_x [mm]'] = self.disp_x
mesh.point_data['u_y [mm]'] = self.disp_y
mesh.point_data['u_z [mm]'] = self.disp_z
mesh.point_data['u_sum [mm]'] = np.sqrt(self.disp_x ** 2 + self.disp_y ** 2 + self.disp_z ** 2)
mesh.point_data['eps_x [%]'] = self.eps_x * 100.0
mesh.point_data['eps_y [%]'] = self.eps_y * 100.0
mesh.point_data['eps_xy [1]'] = self.eps_xy
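A usage sketch (not part of this commit) for the new Delaunay fallback, assuming data is an InputData instance loaded from a nodemap without connectivity information:

    mesh = data.to_vtk(output_folder='vtk_out')
    # Without connection data, the mesh is rebuilt by 2D Delaunay triangulation (alpha=1.0);
    # the returned PyVista grid carries coordinates, displacements (incl. u_sum) and strains as point data.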
(Diffs for the remaining changed files are not shown.)
