From 743220c056e4be443c5bff51e39f1ecb6a72a6da Mon Sep 17 00:00:00 2001
From: David Connolly
Date: Wed, 26 Jun 2024 15:01:40 +0100
Subject: [PATCH 1/6] Processing & Diff_Rotation

---
 smart/differential_rotation.py | 50 ++++++++++++++++++++++++
 smart/map_processing.py        | 69 ++++++++++++++++++++++++++++++++++
 2 files changed, 119 insertions(+)
 create mode 100644 smart/differential_rotation.py
 create mode 100644 smart/map_processing.py

diff --git a/smart/differential_rotation.py b/smart/differential_rotation.py
new file mode 100644
index 0000000..cb04cf9
--- /dev/null
+++ b/smart/differential_rotation.py
@@ -0,0 +1,50 @@
+import numpy as np
+from matplotlib import colors
+
+import astropy.units as u
+from astropy.coordinates import SkyCoord
+from astropy.wcs import WCS
+
+from sunpy.coordinates import Helioprojective, propagate_with_solar_surface
+from sunpy.map import all_coordinates_from_map, coordinate_is_on_solar_disk, make_fitswcs_header
+
+
+def diff_rotation(im_map):
+    """
+    Performing differential rotation (96 mins) on a map in order to correct for feature
+    motions due to solar rotation.
+
+    Parameters
+    ----------
+    im_map : sunpy.map.Map
+        Processed SunPy magnetogram map.
+
+    Returns
+    -------
+    diff_map : sunpy.map.Map
+        Input map with differential rotation of 96 mins.
+
+    """
+    in_time = im_map.date
+    out_time = in_time + 96 * u.min
+    out_frame = Helioprojective(observer="earth", obstime=out_time, rsun=im_map.coordinate_frame.rsun)
+
+    ##############################################################################
+    # Construct a WCS object for the output map. If one has an actual ``Map``
+    # object at the desired output time (e.g., the actual AIA observation at the
+    # output time), one can use the WCS object from that ``Map`` object (e.g.,
+    # ``mymap.wcs``) instead of constructing a custom WCS.
+    """So should I have a function that searches for a magnetogram at the desired time,
+    and also the one at time t - delta_t ?"""
+
+    out_center = SkyCoord(0 * u.arcsec, 0 * u.arcsec, frame=out_frame)
+    header = make_fitswcs_header(im_map.data.shape, out_center, scale=u.Quantity(im_map.scale))
+    out_wcs = WCS(header)
+
+    with propagate_with_solar_surface():
+        diff_map = im_map.reproject_to(out_wcs)
+
+    diff_map.data[~coordinate_is_on_solar_disk(all_coordinates_from_map(diff_map))] = np.nan
+    diff_map.cmap.set_bad("k")
+    diff_map.plot_settings["norm"] = colors.Normalize(vmin=-200, vmax=200)
+    return diff_map
diff --git a/smart/map_processing.py b/smart/map_processing.py
new file mode 100644
index 0000000..429e0dd
--- /dev/null
+++ b/smart/map_processing.py
@@ -0,0 +1,69 @@
+import numpy as np
+import skimage as ski
+from matplotlib import colors
+from skimage.morphology import disk
+
+from sunpy.map import Map, all_coordinates_from_map, coordinate_is_on_solar_disk
+
+
+def map_processing(file):
+    """
+    Creating a map from a fits file and processing said map.
+
+    Parameters
+    ----------
+    file : fits
+        fits file to turn into map.
+
+    Returns
+    -------
+    im_map : sunpy.map.Map
+        processed map created from fits file.
+
+    """
+    im_map = Map(file)
+    if im_map.meta["CROTA2"] >= 100:
+        data = np.flip(im_map.data, 1)[::-1]
+        im_map = Map(data, im_map.meta)
+    im_map.data[~coordinate_is_on_solar_disk(all_coordinates_from_map(im_map))] = np.nan
+    im_map.cmap.set_bad("k")
+    im_map.plot_settings["norm"] = colors.Normalize(vmin=-200, vmax=200)
+    return im_map
+
+
+def STL(im_map):
+    """
+
+    We apply Smoothing, a noise Threshold, and an LOS correction, respectively, to the data.
+ + Parameters + ---------- + im_map : sunpy.map.Map + Processed sunpy magnetogram map. + + Returns + ------- + smooth_map : sunpy.map.Map + Map after applying Gaussian smoothing. + filtered_labels : numpy.ndarray + 2D array with each pixel labelled. + mask_sizes : numpy.ndarray + Boolean array indicating the sizes of each labeled region. + + """ + thresh = 100 + negmask = im_map.data < -thresh + posmask = im_map.data > thresh + mask = negmask | posmask + + dilated_mask = ski.morphology.binary_dilation(mask, disk(5)) + smoothed_data = ski.filters.gaussian(np.nan_to_num(im_map.data) * ~dilated_mask, sigma=16) + smooth_map = Map(smoothed_data, im_map.meta) + + labels = ski.measure.label(dilated_mask) + min_size = 5000 + label_sizes = np.bincount(labels.ravel()) + mask_sizes = label_sizes > min_size + mask_sizes[0] = 0 + filtered_labels = mask_sizes[labels] + return smooth_map, filtered_labels, mask_sizes From 18963997323038d3776f9ca0d8cd39f100e0c405 Mon Sep 17 00:00:00 2001 From: David Connolly Date: Mon, 1 Jul 2024 15:04:19 +0100 Subject: [PATCH 2/6] cosine correction --- smart/map_processing.py | 87 +++++++++++++++++++++++++++++++++++++++-- 1 file changed, 83 insertions(+), 4 deletions(-) diff --git a/smart/map_processing.py b/smart/map_processing.py index 429e0dd..d1070eb 100644 --- a/smart/map_processing.py +++ b/smart/map_processing.py @@ -3,6 +3,8 @@ from matplotlib import colors from skimage.morphology import disk +import astropy.units as u + from sunpy.map import Map, all_coordinates_from_map, coordinate_is_on_solar_disk @@ -39,7 +41,7 @@ def STL(im_map): Parameters ---------- im_map : sunpy.map.Map - Processed sunpy magnetogram map. + Processed SunPy magnetogram map. Returns ------- @@ -51,13 +53,17 @@ def STL(im_map): Boolean array indicating the sizes of each labeled region. """ + + cosmap = ar_cosmap(im_map)[0] + cosmap_data = cosine_correction(im_map, cosmap)[0] + thresh = 100 - negmask = im_map.data < -thresh - posmask = im_map.data > thresh + negmask = cosmap_data < -thresh + posmask = cosmap_data > thresh mask = negmask | posmask dilated_mask = ski.morphology.binary_dilation(mask, disk(5)) - smoothed_data = ski.filters.gaussian(np.nan_to_num(im_map.data) * ~dilated_mask, sigma=16) + smoothed_data = ski.filters.gaussian(np.nan_to_num(cosmap_data) * ~dilated_mask, sigma=16) smooth_map = Map(smoothed_data, im_map.meta) labels = ski.measure.label(dilated_mask) @@ -67,3 +73,76 @@ def STL(im_map): mask_sizes[0] = 0 filtered_labels = mask_sizes[labels] return smooth_map, filtered_labels, mask_sizes + + +def ar_cosmap(im_map): + """ + Get the cosine map and off-limb pixel map using WCS. + + Parameters + ---------- + im_map : sunpy.map.Map + Processed SunPy magnetogram map. + + Returns + ------- + cos_cor : numpy.ndarray + Array of cosine correction factors for each pixel. Values greater than a threshold (edge) are set to 1. + d_angular : numpy.ndarray + Array of angular distances from the disk center in radians. + off_limb : numpy.ndarray + Binary array where pixels on the disk are 1 and pixels off the disk are 0. 
+ """ + + ## Take off an extra percent from the disk to get rid of limb effects + edge = 0.99 + + x, y = np.meshgrid(*[np.arange(v.value) for v in im_map.dimensions]) * u.pixel + hp_coords = im_map.pixel_to_world(x, y) + xx = hp_coords.Tx.value + yy = hp_coords.Ty.value + d_radial = np.sqrt((xx**2.0) + (yy**2.0)) + + cos_cor = np.copy(d_radial) + cos_cor_ratio = cos_cor / im_map.meta["RSUN_OBS"] + + cos_cor_ratio = np.clip(cos_cor_ratio, -1, 1) + d_angular = np.arcsin(cos_cor_ratio) + cos_cor = 1.0 / np.cos(d_angular) + + off_disk = np.where(d_radial > (im_map.meta["RSUN_OBS"] * edge)) + cos_cor[off_disk] = 1.0 + + off_limb = np.copy(d_radial) + off_disk_mask = np.where(d_radial >= (im_map.meta["RSUN_OBS"] * edge)) + off_limb[off_disk_mask] = 0.0 + on_disk_mask = np.where(d_radial < (im_map.meta["RSUN_OBS"] * edge)) + off_limb[on_disk_mask] = 1.0 + + return cos_cor, d_angular, off_limb + + +def cosine_correction(im_map, cosmap): + """ + Perform magnetic field cosine correction. + + Parameters + ---------- + inmap : sunpy.map.Map + Processed SunPy magnetogram map. + cosmap : numpy.ndarray + An array of the cosine correction factors for each pixel. + + Returns + ------- + corrected_data : numpy.ndarray + The magnetic field data after applying the cosine correction. + cosmap : numpy.ndarray + The cosine correction factors, limited to the max value allowed. + + """ + angle_limit = np.arcsin(1.0 - im_map.meta["CDELT1"] / im_map.meta["RSUN_OBS"]) + cos_limit = 1.0 / np.cos(angle_limit) + cosmap_limit = np.where((cosmap) > cos_limit) + cosmap[cosmap_limit] = cos_limit + return im_map.data * cosmap, cosmap From 5eb80953094f350568ef27daf52398ede9a8f698 Mon Sep 17 00:00:00 2001 From: David Connolly Date: Mon, 8 Jul 2024 17:18:37 +0100 Subject: [PATCH 3/6] removal of .meta, introduction of units and tests --- pyproject.toml | 7 +- smart/differential_rotation.py | 33 +++------ smart/map_processing.py | 81 +++++++++++++-------- smart/tests/test_map_processing.py | 109 +++++++++++++++++++++++++++++ 4 files changed, 175 insertions(+), 55 deletions(-) create mode 100644 smart/tests/test_map_processing.py diff --git a/pyproject.toml b/pyproject.toml index 0df80b8..f327440 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,12 @@ description = "SolarMonitor Active Regtion Tracking (SMART)" requires-python = ">=3.9" readme = { file = "README.rst", content-type = "text/x-rst" } license = { file = "licenses/LICENSE.rst", content-type = "text/plain" } -dependencies = [] +dependencies = [ + "numpy", + "sunpy", + "scikit-image", + "matplotlib" +] dynamic = ["version"] [project.optional-dependencies] diff --git a/smart/differential_rotation.py b/smart/differential_rotation.py index cb04cf9..830db19 100644 --- a/smart/differential_rotation.py +++ b/smart/differential_rotation.py @@ -1,48 +1,31 @@ import numpy as np from matplotlib import colors -import astropy.units as u -from astropy.coordinates import SkyCoord -from astropy.wcs import WCS +from sunpy.coordinates import propagate_with_solar_surface +from sunpy.map import all_coordinates_from_map, coordinate_is_on_solar_disk -from sunpy.coordinates import Helioprojective, propagate_with_solar_surface -from sunpy.map import all_coordinates_from_map, coordinate_is_on_solar_disk, make_fitswcs_header - -def diff_rotation(im_map): +def diff_rotation(im_map, delta_im_map): """ - Performing differential rotation (96 mins) on a map in order to to correct for feature + Performing differential rotation on a map in order to to correct for feature motions due to 
solar rotation. Parameters ---------- im_map : sunpy.map.Map Processed SunPy magnetogram map. + delta_im_map : sunpy.map.Map + Processed SunPy magnetogram taken at time Δt before im_map. Returns ------- diff_map : sunpy.map.Map - Input map with differential rotation of 96mins. + delta_im_map differentially rotated to match im_map. """ - in_time = im_map.date - out_time = in_time + 96 * u.min - out_frame = Helioprojective(observer="earth", obstime=out_time, rsun=im_map.coordinate_frame.rsun) - - ############################################################################## - # Construct a WCS object for the output map. If one has an actual ``Map`` - # object at the desired output time (e.g., the actual AIA observation at the - # output time), one can use the WCS object from that ``Map`` object (e.g., - # ``mymap.wcs``) instead of constructing a custom WCS. - """So should I have a function that searches for a magnetogram at the desired time, - and also the one at time t - delta_t ?""" - - out_center = SkyCoord(0 * u.arcsec, 0 * u.arcsec, frame=out_frame) - header = make_fitswcs_header(im_map.data.shape, out_center, scale=u.Quantity(im_map.scale)) - out_wcs = WCS(header) with propagate_with_solar_surface(): - diff_map = im_map.reproject_to(out_wcs) + diff_map = delta_im_map.reproject_to(im_map.wcs) diff_map.data[~coordinate_is_on_solar_disk(all_coordinates_from_map(diff_map))] = np.nan diff_map.cmap.set_bad("k") diff --git a/smart/map_processing.py b/smart/map_processing.py index d1070eb..51fa57d 100644 --- a/smart/map_processing.py +++ b/smart/map_processing.py @@ -24,16 +24,22 @@ def map_processing(file): """ im_map = Map(file) - if im_map.meta["CROTA2"] >= 100: - data = np.flip(im_map.data, 1)[::-1] - im_map = Map(data, im_map.meta) + if im_map.meta["CROTA2"] != 0: + im_map = im_map.rotate(order=3) im_map.data[~coordinate_is_on_solar_disk(all_coordinates_from_map(im_map))] = np.nan im_map.cmap.set_bad("k") im_map.plot_settings["norm"] = colors.Normalize(vmin=-200, vmax=200) return im_map -def STL(im_map): +@u.quantity_input +def STL( + im_map, + thresh=100, + dilation_radius: u.arcsec = 9000 * u.arcsec, + sigma: u.arcsec = 29000 * u.arcsec, + min_size: u.arcsec = 9000000 * u.arcsec, +): """ We apply Smoothing, a noise Threshold, and an LOS correction, respectively, to the data. @@ -42,6 +48,14 @@ def STL(im_map): ---------- im_map : sunpy.map.Map Processed SunPy magnetogram map. + thresh : int, optional + Threshold value (in Gauss) to identify regions of interest (default is 100). + dilation_radius : int, optional + Radius of the disk for binary dilation (default is 9000 arcsecs). + sigma : int, optional + Standard deviation for Gaussian smoothinß (default is 29000 arcsecs). + min_size : int, optional + Minimum size of regions to keep in final mask (default is 9000000 arcsecs**2). 
Returns ------- @@ -54,20 +68,22 @@ def STL(im_map): """ - cosmap = ar_cosmap(im_map)[0] - cosmap_data = cosine_correction(im_map, cosmap)[0] + arcsec_to_pixel = 1800 * (im_map.scale[0] + im_map.scale[1]) + dilation_radius = (np.round(dilation_radius / arcsec_to_pixel)).value + sigma = (np.round(sigma / arcsec_to_pixel)).value + min_size = (np.round(min_size / arcsec_to_pixel)).value + + cosmap_data = cosine_correction(im_map)[0] - thresh = 100 negmask = cosmap_data < -thresh posmask = cosmap_data > thresh mask = negmask | posmask - dilated_mask = ski.morphology.binary_dilation(mask, disk(5)) - smoothed_data = ski.filters.gaussian(np.nan_to_num(cosmap_data) * ~dilated_mask, sigma=16) + dilated_mask = ski.morphology.binary_dilation(mask, disk(dilation_radius)) + smoothed_data = ski.filters.gaussian(np.nan_to_num(cosmap_data) * ~dilated_mask, sigma) smooth_map = Map(smoothed_data, im_map.meta) labels = ski.measure.label(dilated_mask) - min_size = 5000 label_sizes = np.bincount(labels.ravel()) mask_sizes = label_sizes > min_size mask_sizes[0] = 0 @@ -75,7 +91,7 @@ def STL(im_map): return smooth_map, filtered_labels, mask_sizes -def ar_cosmap(im_map): +def get_cosine_correction(im_map): """ Get the cosine map and off-limb pixel map using WCS. @@ -94,35 +110,35 @@ def ar_cosmap(im_map): Binary array where pixels on the disk are 1 and pixels off the disk are 0. """ - ## Take off an extra percent from the disk to get rid of limb effects + # Take off an extra percent from the disk to get rid of limb effects edge = 0.99 x, y = np.meshgrid(*[np.arange(v.value) for v in im_map.dimensions]) * u.pixel hp_coords = im_map.pixel_to_world(x, y) - xx = hp_coords.Tx.value - yy = hp_coords.Ty.value - d_radial = np.sqrt((xx**2.0) + (yy**2.0)) + xx = hp_coords.Tx.to(u.arcsec) + yy = hp_coords.Ty.to(u.arcsec) + d_radial = np.sqrt(xx**2 + yy**2) cos_cor = np.copy(d_radial) - cos_cor_ratio = cos_cor / im_map.meta["RSUN_OBS"] + cos_cor_ratio = cos_cor / im_map.rsun_obs cos_cor_ratio = np.clip(cos_cor_ratio, -1, 1) d_angular = np.arcsin(cos_cor_ratio) - cos_cor = 1.0 / np.cos(d_angular) + cos_cor = 1 / np.cos(d_angular) - off_disk = np.where(d_radial > (im_map.meta["RSUN_OBS"] * edge)) - cos_cor[off_disk] = 1.0 + off_disk = d_radial > (im_map.rsun_obs * edge) + cos_cor[off_disk] = 1 - off_limb = np.copy(d_radial) - off_disk_mask = np.where(d_radial >= (im_map.meta["RSUN_OBS"] * edge)) + off_limb = np.copy(d_radial.value) + off_disk_mask = np.where(d_radial >= (im_map.rsun_obs * edge)) off_limb[off_disk_mask] = 0.0 - on_disk_mask = np.where(d_radial < (im_map.meta["RSUN_OBS"] * edge)) + on_disk_mask = np.where(d_radial < (im_map.rsun_obs * edge)) off_limb[on_disk_mask] = 1.0 return cos_cor, d_angular, off_limb -def cosine_correction(im_map, cosmap): +def cosine_correction(im_map, cosmap=None): """ Perform magnetic field cosine correction. @@ -130,8 +146,9 @@ def cosine_correction(im_map, cosmap): ---------- inmap : sunpy.map.Map Processed SunPy magnetogram map. - cosmap : numpy.ndarray + cosmap : numpy.ndarray, optional An array of the cosine correction factors for each pixel. + If not provided, computed using get_cosine_correction. Returns ------- @@ -141,8 +158,14 @@ def cosine_correction(im_map, cosmap): The cosine correction factors, limited to the max value allowed. 
""" - angle_limit = np.arcsin(1.0 - im_map.meta["CDELT1"] / im_map.meta["RSUN_OBS"]) - cos_limit = 1.0 / np.cos(angle_limit) - cosmap_limit = np.where((cosmap) > cos_limit) - cosmap[cosmap_limit] = cos_limit - return im_map.data * cosmap, cosmap + if cosmap is None: + cosmap = get_cosine_correction(im_map)[0] + + scale = (im_map.scale[0] + im_map.scale[1]) / 2 + + angle_limit = np.arcsin(1 - (scale / im_map.rsun_obs).value) + cos_limit = 1 / np.cos(angle_limit) + cosmap = np.clip(cosmap, None, cos_limit) + + corrected_data = im_map.data * cosmap + return corrected_data, cosmap diff --git a/smart/tests/test_map_processing.py b/smart/tests/test_map_processing.py new file mode 100644 index 0000000..8c1910b --- /dev/null +++ b/smart/tests/test_map_processing.py @@ -0,0 +1,109 @@ +import numpy as np +import pytest + +import astropy.units as u + +from sunpy.map import Map, all_coordinates_from_map, coordinate_is_on_solar_disk +from sunpy.map.mapbase import GenericMap + + +def mag_sample(): + return "https://solmon.dias.ie/data/2024/06/06/HMI/fits/hmi.m_720s_nrt.20240606_230000_TAI.3.magnetogram.fits" + + +def mag_map_sample(): + from smart.map_processing import map_processing + + return map_processing( + "https://solmon.dias.ie/data/2024/06/06/HMI/fits/hmi.m_720s_nrt.20240606_230000_TAI.3.magnetogram.fits" + ) + + +def create_fake_map(value, shape=((4098, 4098))): + fake_data = np.ones(shape) * value + return Map(fake_data, mag_map_sample().meta) + + +@pytest.fixture +def test_map_processing(file): + from smart.map_processing import map_processing + + processed_map = map_processing(file) + + assert isinstance(processed_map, GenericMap), "Result is not a SunPy Map." + + coordinates = all_coordinates_from_map(processed_map) + on_solar_disk = coordinate_is_on_solar_disk(coordinates) + assert np.all(np.isnan(processed_map.data[~on_solar_disk])), "Off-disk NaN values not set correctly." 
+ + +@pytest.fixture +def test_get_cosine_correction_shape(im_map): + from smart.map_processing import get_cosine_correction + + cos_cor, d_angular, off_limb = get_cosine_correction(im_map) + assert cos_cor.shape == im_map.data.shape, "cos_cor shape != im_map.data.shape" + assert d_angular.shape == im_map.data.shape, "d_angular shape != im_map.data.shape" + assert off_limb.shape == im_map.data.shape, "off_limb shape != im_map.data.shape" + + +@pytest.fixture +def test_get_cosine_correction_limits(im_map): + from smart.map_processing import get_cosine_correction + + cos_cor, d_angular, off_limb = get_cosine_correction(im_map) + + edge = 0.99 + x, y = np.meshgrid(*[np.arange(v.value) for v in im_map.dimensions]) * u.pixel + hp_coords = im_map.pixel_to_world(x, y) + xx = hp_coords.Tx.to(u.arcsec) + yy = hp_coords.Ty.to(u.arcsec) + d_radial = np.sqrt(xx**2 + yy**2) + + off_disk = d_radial >= (im_map.rsun_obs * edge) + on_disk = d_radial < (im_map.rsun_obs * edge) + + assert np.all(cos_cor >= 0), "cos_cor lower limits incorrect" + assert np.all(cos_cor <= 1 / np.cos(np.arcsin(edge))), "cos_cor upper limits incorrect" + + assert np.all(d_angular >= np.arcsin(-1) * u.rad), "d_angular lower limits incorrect" + assert np.all(d_angular <= np.arcsin(1) * u.rad), "d_angular upper limist incorrect" + + assert np.all(off_limb[off_disk] == 0), "not all off_disk values = 0" + assert np.all(off_limb[on_disk] == 1), "not all on_disk values = 1" + + +@pytest.fixture +def test_cosine_correction(): + from smart.map_processing import cosine_correction + + fake_data = np.ones((4098, 4098)) + fake_map = Map(fake_data, mag_map_sample().meta) + fake_cosmap = fake_data * 0.5 + + test_corrected_data, fake_cosmap = cosine_correction(fake_map, fake_cosmap) + assert np.all(test_corrected_data == 0.5), "corrected_data not being correctly calculated" + assert test_corrected_data.shape == fake_map.data.shape, "output data shape != original data shape" + + +""" +@pytest.fixture +def test_STL(): + from smart.map_processing import STL + + fake_map = create_fake_map(3) + smooth_map, filtered_labels, mask_sizes = STL( + fake_map, thresh=2, min_size=len(fake_map.data.flatten() + 1) + ) + + num_true = np.count_nonzero(filtered_labels == True) + + assert isinstance(smooth_map, type(fake_map)), "smooth_map no longer of type Map" + assert np.all(smooth_map.data == 0), "smooth_map.data not behaving as expected" + + assert filtered_labels.shape == fake_map.data.shape, "filtered_labels.shape != map.data.shape" + assert num_true == 0, "filtered_labels detected despite threshold" + + assert mask_sizes.shape[0] > 0, "no mask_sizes" + assert np.sum(mask_sizes) == 0, "no regions should have met the min_size requirement" +""" From 95c1b9ec7eae24dc1df1c1d576f31c42557feb5c Mon Sep 17 00:00:00 2001 From: David Connolly Date: Fri, 19 Jul 2024 15:08:41 +0100 Subject: [PATCH 4/6] IGM (indexed grown mask) --- smart/IGM.py | 116 +++++++++++++++++++++++++++++ smart/differential_rotation.py | 16 ++-- smart/map_processing.py | 83 +++++++++------------ smart/tests/test_map_processing.py | 42 ++++------- 4 files changed, 171 insertions(+), 86 deletions(-) create mode 100644 smart/IGM.py diff --git a/smart/IGM.py b/smart/IGM.py new file mode 100644 index 0000000..f2cf782 --- /dev/null +++ b/smart/IGM.py @@ -0,0 +1,116 @@ +import matplotlib.pyplot as plt +import numpy as np +import skimage as ski +from skimage.morphology import disk + +import astropy.units as u + +from sunpy.map import Map + +from smart.map_processing import STL + + +def IGM(im_map: Map, 
diff_map: Map, dilation_radius: u.Quantity[u.arcsec] = 2.5 * u.arcsec): + """ + Performing Indexing and Growing of the Mask (hence the name IGM). + + Transient features are removed by comparing the mask at time 't' and the mask differentially + rotated to time 't'. ARs are then assigned ascending integer values (starting from one) in + order of decreasing size. + + Parameters + ---------- + im_map : Map + Processed magnetogram map from time 't'. + diff_map : Map + Processed magnetogtam map from time 't - delta_t' differentially rotated to time t. + dilation_radius : int, optional + Radius of the disk for binary dilation (default is 2.5 arcsecs). + + Returns + ------- + sorted_labels : numpy.ndarray + Individual contiguous features are indexed by assigning ascending integer + values (beginning with one) in order of decreasing feature size. + + """ + arcsec_to_pixel = ((im_map.scale[0] + im_map.scale[1]) / 2) ** (-1) + dilation_radius = (np.round(dilation_radius * arcsec_to_pixel)).to_value(u.pix) + + filtered_labels = STL(im_map)[1] + filtered_labels_dt = STL(diff_map)[1] + + dilated_mask = ski.morphology.binary_dilation(filtered_labels, disk(dilation_radius)) + dilated_mask_dt = ski.morphology.binary_dilation(filtered_labels_dt, disk(dilation_radius)) + + transient_features = dilated_mask_dt & ~dilated_mask + final_mask = dilated_mask & ~transient_features + final_labels = ski.measure.label(final_mask) + + regions = ski.measure.regionprops(final_labels) + region_sizes = [(region.label, region.area) for region in regions] + + sorted_region_sizes = sorted(region_sizes, key=lambda x: x[1], reverse=True) + sorted_labels = np.zeros_like(final_labels) + for new_label, (old_label, _) in enumerate(sorted_region_sizes, start=1): + sorted_labels[final_labels == old_label] = new_label + + return sorted_labels + + +def plot_IGM(im_map: Map, diff_map: Map, contours=True, labels=True, figtext=True): + """ + Plotting the fully processed and segmented magnetogram with labels and AR contours optionally displayed. + + Parameters + ---------- + im_map : Map + Processed magnetogram map from time 't'. + diff_map : Map + Processed magnetogtam map from time 't - delta_t' differentially rotated to time t. + contours : bool, optional + If True, contours of the detected regions displayed on map (default is True). + labels : bool, optional + If True, labels with the region numbers will be overlaid on the regions (default is True). + figtext : bool, optional + If True, figtext with the total number of detected regions is displayed on the map (default is True). + + Returns + ------- + None. 
+ + """ + sorted_labels = IGM(im_map, diff_map) + + fig = plt.figure() + ax = fig.add_subplot(projection=im_map) + im_map.plot(axes=ax) + + unique_labels = np.unique(sorted_labels) + unique_labels = unique_labels[unique_labels != 0] + + if contours: + for label_value in unique_labels: + region_mask = sorted_labels == label_value + ax.contour(region_mask, colors="purple", linewidths=1) + + if labels: + for label_value in unique_labels: + region_mask = sorted_labels == label_value + region = ski.measure.regionprops(region_mask.astype(int))[0] + centroid = region.centroid + ax.text( + centroid[1], + centroid[0], + str(label_value), + color="red", + fontsize=12, + weight="bold", + ha="center", + va="center", + ) + + if figtext: + plt.figtext(0.47, 0.2, f"Number of regions = {len(unique_labels)}", color="white") + + plt.show() diff --git a/smart/differential_rotation.py b/smart/differential_rotation.py index 830db19..665d2b1 100644 --- a/smart/differential_rotation.py +++ b/smart/differential_rotation.py @@ -1,25 +1,22 @@ -import numpy as np -from matplotlib import colors - from sunpy.coordinates import propagate_with_solar_surface -from sunpy.map import all_coordinates_from_map, coordinate_is_on_solar_disk +from sunpy.map import Map -def diff_rotation(im_map, delta_im_map): +def diff_rotation(im_map: Map, delta_im_map: Map): """ Performing differential rotation on a map in order to to correct for feature motions due to solar rotation. Parameters ---------- - im_map : sunpy.map.Map + im_map : Map Processed SunPy magnetogram map. - delta_im_map : sunpy.map.Map + delta_im_map : Map Processed SunPy magnetogram taken at time Δt before im_map. Returns ------- - diff_map : sunpy.map.Map + diff_map : Map delta_im_map differentially rotated to match im_map. """ @@ -27,7 +24,4 @@ def diff_rotation(im_map, delta_im_map): with propagate_with_solar_surface(): diff_map = delta_im_map.reproject_to(im_map.wcs) - diff_map.data[~coordinate_is_on_solar_disk(all_coordinates_from_map(diff_map))] = np.nan - diff_map.cmap.set_bad("k") - diff_map.plot_settings["norm"] = colors.Normalize(vmin=-200, vmax=200) return diff_map diff --git a/smart/map_processing.py b/smart/map_processing.py index 51fa57d..1610682 100644 --- a/smart/map_processing.py +++ b/smart/map_processing.py @@ -8,24 +8,21 @@ from sunpy.map import Map, all_coordinates_from_map, coordinate_is_on_solar_disk -def map_processing(file): +def map_threshold(im_map): """ Creating a map from a fits file and processing said map. Parameters ---------- - file : fits - fits file to turn into map. + file : Map + Unprocessed magnetogram map. Returns ------- - im_map : sunpy.map.Map - processed map created from fits file. + im_map : Map + Processed magnetogram map. 
""" - im_map = Map(file) - if im_map.meta["CROTA2"] != 0: - im_map = im_map.rotate(order=3) im_map.data[~coordinate_is_on_solar_disk(all_coordinates_from_map(im_map))] = np.nan im_map.cmap.set_bad("k") im_map.plot_settings["norm"] = colors.Normalize(vmin=-200, vmax=200) @@ -34,11 +31,11 @@ def map_processing(file): @u.quantity_input def STL( - im_map, - thresh=100, - dilation_radius: u.arcsec = 9000 * u.arcsec, - sigma: u.arcsec = 29000 * u.arcsec, - min_size: u.arcsec = 9000000 * u.arcsec, + im_map: Map, + thresh: u.Quantity[u.Gauss] = 100 * u.Gauss, + dilation_radius: u.Quantity[u.arcsec] = 2 * u.arcsec, + sigma: u.Quantity[u.arcsec] = 10 * u.arcsec, + min_size: u.Quantity[u.arcsec] = 2250 * u.arcsec, ): """ @@ -46,20 +43,20 @@ def STL( Parameters ---------- - im_map : sunpy.map.Map + im_map : Map Processed SunPy magnetogram map. thresh : int, optional - Threshold value (in Gauss) to identify regions of interest (default is 100). + Threshold value to identify regions of interest (default is 100 Gauss). dilation_radius : int, optional - Radius of the disk for binary dilation (default is 9000 arcsecs). + Radius of the disk for binary dilation (default is 2 arcsecs). sigma : int, optional - Standard deviation for Gaussian smoothinß (default is 29000 arcsecs). + Standard deviation for Gaussian smoothing (default is 10 arcsecs). min_size : int, optional - Minimum size of regions to keep in final mask (default is 9000000 arcsecs**2). + Minimum size of regions to keep in final mask (default is 2250 arcsecs**2). Returns ------- - smooth_map : sunpy.map.Map + smooth_map : Map Map after applying Gaussian smoothing. filtered_labels : numpy.ndarray 2D array with each pixel labelled. @@ -68,12 +65,12 @@ def STL( """ - arcsec_to_pixel = 1800 * (im_map.scale[0] + im_map.scale[1]) - dilation_radius = (np.round(dilation_radius / arcsec_to_pixel)).value - sigma = (np.round(sigma / arcsec_to_pixel)).value - min_size = (np.round(min_size / arcsec_to_pixel)).value + arcsec_to_pixel = ((im_map.scale[0] + im_map.scale[1]) / 2) ** (-1) + dilation_radius = (np.round(dilation_radius * arcsec_to_pixel)).to_value(u.pix) + sigma = (np.round(sigma * arcsec_to_pixel)).to_value(u.pix) + min_size = (np.round(min_size * arcsec_to_pixel)).to_value(u.pix) - cosmap_data = cosine_correction(im_map)[0] + cosmap_data = cosine_correction(im_map) negmask = cosmap_data < -thresh posmask = cosmap_data > thresh @@ -91,13 +88,13 @@ def STL( return smooth_map, filtered_labels, mask_sizes -def get_cosine_correction(im_map): +def get_cosine_correction(im_map: Map): """ Get the cosine map and off-limb pixel map using WCS. Parameters ---------- - im_map : sunpy.map.Map + im_map : Map Processed SunPy magnetogram map. 
Returns @@ -113,15 +110,12 @@ def get_cosine_correction(im_map): # Take off an extra percent from the disk to get rid of limb effects edge = 0.99 - x, y = np.meshgrid(*[np.arange(v.value) for v in im_map.dimensions]) * u.pixel - hp_coords = im_map.pixel_to_world(x, y) - xx = hp_coords.Tx.to(u.arcsec) - yy = hp_coords.Ty.to(u.arcsec) - d_radial = np.sqrt(xx**2 + yy**2) - - cos_cor = np.copy(d_radial) - cos_cor_ratio = cos_cor / im_map.rsun_obs + coordinates = all_coordinates_from_map(im_map) + x = coordinates.Tx.to(u.arcsec) + y = coordinates.Ty.to(u.arcsec) + d_radial = np.sqrt(x**2 + y**2) + cos_cor_ratio = d_radial / im_map.rsun_obs cos_cor_ratio = np.clip(cos_cor_ratio, -1, 1) d_angular = np.arcsin(cos_cor_ratio) cos_cor = 1 / np.cos(d_angular) @@ -129,22 +123,19 @@ def get_cosine_correction(im_map): off_disk = d_radial > (im_map.rsun_obs * edge) cos_cor[off_disk] = 1 - off_limb = np.copy(d_radial.value) - off_disk_mask = np.where(d_radial >= (im_map.rsun_obs * edge)) - off_limb[off_disk_mask] = 0.0 - on_disk_mask = np.where(d_radial < (im_map.rsun_obs * edge)) - off_limb[on_disk_mask] = 1.0 - + off_limb = np.zeros_like(d_radial.value) + off_limb[off_disk] = 0 + off_limb[~off_disk] = 1 return cos_cor, d_angular, off_limb -def cosine_correction(im_map, cosmap=None): +def cosine_correction(im_map: Map, cosmap=None): """ Perform magnetic field cosine correction. Parameters ---------- - inmap : sunpy.map.Map + inmap : Map Processed SunPy magnetogram map. cosmap : numpy.ndarray, optional An array of the cosine correction factors for each pixel. @@ -153,9 +144,7 @@ def cosine_correction(im_map, cosmap=None): Returns ------- corrected_data : numpy.ndarray - The magnetic field data after applying the cosine correction. - cosmap : numpy.ndarray - The cosine correction factors, limited to the max value allowed. + The magnetic field data after applying the cosine correction (units = Gauss). """ if cosmap is None: @@ -167,5 +156,5 @@ def cosine_correction(im_map, cosmap=None): cos_limit = 1 / np.cos(angle_limit) cosmap = np.clip(cosmap, None, cos_limit) - corrected_data = im_map.data * cosmap - return corrected_data, cosmap + corrected_data = im_map.data * cosmap * u.Gauss + return corrected_data diff --git a/smart/tests/test_map_processing.py b/smart/tests/test_map_processing.py index 8c1910b..77b99b8 100644 --- a/smart/tests/test_map_processing.py +++ b/smart/tests/test_map_processing.py @@ -6,29 +6,27 @@ from sunpy.map import Map, all_coordinates_from_map, coordinate_is_on_solar_disk from sunpy.map.mapbase import GenericMap +from smart.map_processing import cosine_correction, get_cosine_correction, map_threshold -def mag_sample(): + +@pytest.fixture +def hmi_nrt(): return "https://solmon.dias.ie/data/2024/06/06/HMI/fits/hmi.m_720s_nrt.20240606_230000_TAI.3.magnetogram.fits" +@pytest.fixture def mag_map_sample(): - from smart.map_processing import map_processing - - return map_processing( - "https://solmon.dias.ie/data/2024/06/06/HMI/fits/hmi.m_720s_nrt.20240606_230000_TAI.3.magnetogram.fits" - ) + return map_threshold(Map(hmi_nrt())) +@pytest.fixture def create_fake_map(value, shape=((4098, 4098))): fake_data = np.ones(shape) * value return Map(fake_data, mag_map_sample().meta) -@pytest.fixture -def test_map_processing(file): - from smart.map_processing import map_processing - - processed_map = map_processing(file) +def test_map_threshold(im_map): + processed_map = map_threshold(im_map) assert isinstance(processed_map, GenericMap), "Result is not a SunPy Map." 
@@ -37,28 +35,21 @@ def test_map_processing(file): assert np.all(np.isnan(processed_map.data[~on_solar_disk])), "Off-disk NaN values not set correctly." -@pytest.fixture def test_get_cosine_correction_shape(im_map): - from smart.map_processing import get_cosine_correction - cos_cor, d_angular, off_limb = get_cosine_correction(im_map) assert cos_cor.shape == im_map.data.shape, "cos_cor shape != im_map.data.shape" assert d_angular.shape == im_map.data.shape, "d_angular shape != im_map.data.shape" assert off_limb.shape == im_map.data.shape, "off_limb shape != im_map.data.shape" -@pytest.fixture def test_get_cosine_correction_limits(im_map): - from smart.map_processing import get_cosine_correction - cos_cor, d_angular, off_limb = get_cosine_correction(im_map) edge = 0.99 - x, y = np.meshgrid(*[np.arange(v.value) for v in im_map.dimensions]) * u.pixel - hp_coords = im_map.pixel_to_world(x, y) - xx = hp_coords.Tx.to(u.arcsec) - yy = hp_coords.Ty.to(u.arcsec) - d_radial = np.sqrt(xx**2 + yy**2) + coordinates = all_coordinates_from_map(im_map) + x = coordinates.Tx.to(u.arcsec) + y = coordinates.Ty.to(u.arcsec) + d_radial = np.sqrt(x**2 + y**2) off_disk = d_radial >= (im_map.rsun_obs * edge) on_disk = d_radial < (im_map.rsun_obs * edge) @@ -73,21 +64,16 @@ def test_get_cosine_correction_limits(im_map): assert np.all(off_limb[on_disk] == 1), "not all on_disk values = 1" -@pytest.fixture def test_cosine_correction(): - from smart.map_processing import cosine_correction - fake_data = np.ones((4098, 4098)) fake_map = Map(fake_data, mag_map_sample().meta) fake_cosmap = fake_data * 0.5 - test_corrected_data, fake_cosmap = cosine_correction(fake_map, fake_cosmap) - assert np.all(test_corrected_data == 0.5), "corrected_data not being correctly calculated" + test_corrected_data = cosine_correction(fake_map, fake_cosmap) assert test_corrected_data.shape == fake_map.data.shape, "output data shape != original data shape" """ -@pytest.fixture def test_STL(): from smart.map_processing import STL From dcbd083e24fcd7d28826db00e673fd0812af72b6 Mon Sep 17 00:00:00 2001 From: David Connolly Date: Tue, 30 Jul 2024 18:38:11 +0100 Subject: [PATCH 5/6] minor changes --- smart/{ => segmentation}/IGM.py | 23 +++++---- .../differential_rotation.py | 2 +- smart/{ => segmentation}/map_processing.py | 10 ++-- smart/tests/test_map_processing.py | 51 +++++++++++-------- 4 files changed, 47 insertions(+), 39 deletions(-) rename smart/{ => segmentation}/IGM.py (83%) rename smart/{ => segmentation}/differential_rotation.py (94%) rename smart/{ => segmentation}/map_processing.py (93%) diff --git a/smart/IGM.py b/smart/segmentation/IGM.py similarity index 83% rename from smart/IGM.py rename to smart/segmentation/IGM.py index f2cf782..c35d172 100644 --- a/smart/IGM.py +++ b/smart/segmentation/IGM.py @@ -7,12 +7,14 @@ from sunpy.map import Map -from smart.map_processing import STL +from smart.segmentation.map_processing import smooth_los_threshold -def IGM(im_map: Map, diff_map: Map, dilation_radius: u.Quantity[u.arcsec] = 2.5 * u.arcsec): +def index_and_grow_mask( + current_map: Map, previous_map: Map, dilation_radius: u.Quantity[u.arcsec] = 2.5 * u.arcsec +): """ - Performing Indexing and Growing of the Mask (hence the name IGM). + Performing indexing and growing of the Mask. Transient features are removed by comparing the mask at time 't' and the mask differentially rotated to time 't'. 
ARs are then assigned ascending integer values (starting from one) in @@ -20,9 +22,9 @@ def IGM(im_map: Map, diff_map: Map, dilation_radius: u.Quantity[u.arcsec] = 2.5 Parameters ---------- - im_map : Map + current_map : Map Processed magnetogram map from time 't'. - diff_map : Map + previous_map : Map Processed magnetogtam map from time 't - delta_t' differentially rotated to time t. dilation_radius : int, optional Radius of the disk for binary dilation (default is 2.5 arcsecs). @@ -34,11 +36,11 @@ def IGM(im_map: Map, diff_map: Map, dilation_radius: u.Quantity[u.arcsec] = 2.5 values (beginning with one) in order of decreasing feature size. """ - arcsec_to_pixel = ((im_map.scale[0] + im_map.scale[1]) / 2) ** (-1) - dilation_radius = (np.round(dilation_radius * arcsec_to_pixel)).to_value(u.pix) + pixel_size = (current_map.scale[0] + current_map.scale[1]) / 2 + dilation_radius = (np.round(dilation_radius / pixel_size)).to_value(u.pix) - filtered_labels = STL(im_map)[1] - filtered_labels_dt = STL(diff_map)[1] + filtered_labels = smooth_los_threshold(current_map)[1] + filtered_labels_dt = smooth_los_threshold(previous_map)[1] dilated_mask = ski.morphology.binary_dilation(filtered_labels, disk(dilation_radius)) dilated_mask_dt = ski.morphology.binary_dilation(filtered_labels_dt, disk(dilation_radius)) @@ -58,7 +60,7 @@ def IGM(im_map: Map, diff_map: Map, dilation_radius: u.Quantity[u.arcsec] = 2.5 return sorted_labels -def plot_IGM(im_map: Map, diff_map: Map, contours=True, labels=True, figtext=True): +def plot_indexed_grown_mask(im_map: Map, sorted_labels, contours=True, labels=True, figtext=True): """ Plotting the fully processed and segmented magnetogram with labels and AR contours optionally displayed. @@ -80,7 +82,6 @@ def plot_IGM(im_map: Map, diff_map: Map, contours=True, labels=True, figtext=Tru None. 
""" - sorted_labels = IGM(im_map, diff_map) fig = plt.figure() ax = fig.add_subplot(projection=im_map) diff --git a/smart/differential_rotation.py b/smart/segmentation/differential_rotation.py similarity index 94% rename from smart/differential_rotation.py rename to smart/segmentation/differential_rotation.py index 665d2b1..01bec52 100644 --- a/smart/differential_rotation.py +++ b/smart/segmentation/differential_rotation.py @@ -24,4 +24,4 @@ def diff_rotation(im_map: Map, delta_im_map: Map): with propagate_with_solar_surface(): diff_map = delta_im_map.reproject_to(im_map.wcs) - return diff_map + return diff_map, delta_im_map.date diff --git a/smart/map_processing.py b/smart/segmentation/map_processing.py similarity index 93% rename from smart/map_processing.py rename to smart/segmentation/map_processing.py index 1610682..810fb0c 100644 --- a/smart/map_processing.py +++ b/smart/segmentation/map_processing.py @@ -30,7 +30,7 @@ def map_threshold(im_map): @u.quantity_input -def STL( +def smooth_los_threshold( im_map: Map, thresh: u.Quantity[u.Gauss] = 100 * u.Gauss, dilation_radius: u.Quantity[u.arcsec] = 2 * u.arcsec, @@ -65,10 +65,10 @@ def STL( """ - arcsec_to_pixel = ((im_map.scale[0] + im_map.scale[1]) / 2) ** (-1) - dilation_radius = (np.round(dilation_radius * arcsec_to_pixel)).to_value(u.pix) - sigma = (np.round(sigma * arcsec_to_pixel)).to_value(u.pix) - min_size = (np.round(min_size * arcsec_to_pixel)).to_value(u.pix) + pixel_size = (im_map.scale[0] + im_map.scale[1]) / 2 + dilation_radius = (np.round(dilation_radius / pixel_size)).to_value(u.pix) + sigma = (np.round(sigma / pixel_size)).to_value(u.pix) + min_size = (np.round(min_size / pixel_size)).to_value(u.pix) cosmap_data = cosine_correction(im_map) diff --git a/smart/tests/test_map_processing.py b/smart/tests/test_map_processing.py index 77b99b8..d3df159 100644 --- a/smart/tests/test_map_processing.py +++ b/smart/tests/test_map_processing.py @@ -6,7 +6,12 @@ from sunpy.map import Map, all_coordinates_from_map, coordinate_is_on_solar_disk from sunpy.map.mapbase import GenericMap -from smart.map_processing import cosine_correction, get_cosine_correction, map_threshold +from smart.segmentation.map_processing import ( + cosine_correction, + get_cosine_correction, + map_threshold, + smooth_los_threshold, +) @pytest.fixture @@ -64,32 +69,34 @@ def test_get_cosine_correction_limits(im_map): assert np.all(off_limb[on_disk] == 1), "not all on_disk values = 1" -def test_cosine_correction(): - fake_data = np.ones((4098, 4098)) - fake_map = Map(fake_data, mag_map_sample().meta) - fake_cosmap = fake_data * 0.5 +def test_cosine_correction(im_map): + coordinates = all_coordinates_from_map(im_map) + + los_radial = np.cos(coordinates.Tx.to(u.rad)) * np.cos(coordinates.Ty.to(u.rad)) + im_map.data[:, :] = los_radial - test_corrected_data = cosine_correction(fake_map, fake_cosmap) - assert test_corrected_data.shape == fake_map.data.shape, "output data shape != original data shape" + fake_map = Map(los_radial, im_map.meta) + fake_cosmap = np.ones((len(los_radial), len(los_radial))) + cosmap, corrected_data = cosine_correction(fake_map, fake_cosmap) + corrected_data_value = corrected_data.to_value(u.Gauss) + assert np.allclose(corrected_data_value, 1, atol=1e-4), "cosine corrected data not behaving as expected" -""" -def test_STL(): - from smart.map_processing import STL - fake_map = create_fake_map(3) - smooth_map, filtered_labels, mask_sizes = STL( - fake_map, thresh=2, min_size=len(fake_map.data.flatten() + 1) - ) +def 
test_smooth_los_threshold(): + under_thresh = create_fake_map(1) + over_thresh = create_fake_map(1000) - num_true = np.count_nonzero(filtered_labels == True) + smooth_under, fl_under, mask_under = smooth_los_threshold(under_thresh, thresh=500 * u.Gauss) + smooth_over, fl_over, mask_over = smooth_los_threshold(over_thresh, thresh=500 * u.Gauss) - assert isinstance(smooth_map, type(fake_map)), "smooth_map no longer of type Map" - assert np.all(smooth_map.data == 0), "smooth_map.data not behaving as expected" + assert isinstance(smooth_under, type(under_thresh)), "smooth_under is no longer a Map" + assert isinstance(smooth_over, type(over_thresh)), "smooth_over is no longer a Map" - assert filtered_labels.shape == fake_map.data.shape, "filtered_labels.shape != map.data.shape" - assert num_true == 0, "filtered_labels detected despite threshold" + assert np.sum(fl_under) == 0, "fl should all be False when all data is below threshold" + assert np.sum(fl_over) == len( + fl_over.flatten() + ), "fl should all be True when all data is above threshold " - assert mask_sizes.shape[0] > 0, "no mask_sizes" - assert np.sum(mask_sizes) == 0, "no regions should have met the min_size requirement" -""" + assert np.sum(mask_under) == 0, "no regions should have been detected in 'under_thresh'" + assert np.sum(mask_over) > 0, "background region should have been detected in 'over thresh'" From 2cecf18b21000909e03ff311334a3be8403c84ea Mon Sep 17 00:00:00 2001 From: David Connolly Date: Tue, 20 Aug 2024 14:24:41 +0100 Subject: [PATCH 6/6] PSL first funcs WIP --- smart/characterization/PSL.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 smart/characterization/PSL.py diff --git a/smart/characterization/PSL.py b/smart/characterization/PSL.py new file mode 100644 index 0000000..2a6abf4 --- /dev/null +++ b/smart/characterization/PSL.py @@ -0,0 +1,34 @@ +import numpy as np +import skimage as ski +from skimage.morphology import disk + +import astropy.units as u + + +@u.quantity_input +def separate_polarities(im_map, feature_map, dilation_radius: u.Quantity[u.arcsec] = 2 * u.arcsec): + """ + Separate feature masks into a negative mask and positive mask, and dilating both masks. + + Parameters + ---------- + im_map : Map + Processed SunPy magnetogram map. + feature_map : + """ + posmask = (feature_map > 0).astype(int) + negmask = (feature_map < 0).astype(int) + + arcsec_to_pixel = ((im_map.scale[0] + im_map.scale[1]) / 2) ** (-1) + dilation_radius = (np.round(dilation_radius * arcsec_to_pixel)).to_value(u.pix) + + dilated_posmask = ski.morphology.binary_dilation(posmask, disk(dilation_radius)) + dilated_negmask = ski.morphology.binary_dilation(negmask, disk(dilation_radius)) + return dilated_posmask, dilated_negmask + + +def PSL_length(dilated_posmask, dilated_negmask): + PSL_mask = ((dilated_posmask + dilated_negmask) > 1).astype(int) + PSL_thinmask = ski.morphology.thin(PSL_mask) + PSL_length = np.sum(PSL_thinmask) + return PSL_length
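
Taken together, these patches assemble an end-to-end detection pipeline. The sketch below is illustrative only and is not part of any patch above: it simply chains the functions as they stand after patch 6, the FITS file names are placeholders, and passing the signed field of the largest labelled region into separate_polarities is an assumption about how the WIP PSL step is meant to be driven.

    from sunpy.map import Map

    from smart.characterization.PSL import PSL_length, separate_polarities
    from smart.segmentation.differential_rotation import diff_rotation
    from smart.segmentation.IGM import index_and_grow_mask, plot_indexed_grown_mask
    from smart.segmentation.map_processing import map_threshold

    # Two magnetograms roughly 96 minutes apart (placeholder file names).
    current_map = map_threshold(Map("hmi_t.fits"))
    previous_map = map_threshold(Map("hmi_t_minus_96min.fits"))

    # Differentially rotate the earlier map to the current observation time,
    # then index the grown, transient-filtered mask.
    rotated_previous, _ = diff_rotation(current_map, previous_map)
    sorted_labels = index_and_grow_mask(current_map, rotated_previous)
    plot_indexed_grown_mask(current_map, sorted_labels)

    # WIP characterization: polarity separation line length of the largest region
    # (assumes the signed field masked to label 1 is the intended feature_map input).
    feature_map = current_map.data * (sorted_labels == 1)
    posmask, negmask = separate_polarities(current_map, feature_map)
    print("PSL length (pixels):", PSL_length(posmask, negmask))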