diff --git a/queens/data_processors/__init__.py b/queens/data_processors/__init__.py
index 6489b6dea..9d3004891 100644
--- a/queens/data_processors/__init__.py
+++ b/queens/data_processors/__init__.py
@@ -17,18 +17,16 @@
Modules for extracting and processing data from simulation output files.
"""
-from queens.data_processors.csv import Csv
-from queens.data_processors.ensight import Ensight
-from queens.data_processors.ensight_interface import EnsightInterfaceDiscrepancy
-from queens.data_processors.numpy import Numpy
-from queens.data_processors.pvd import Pvd
-from queens.data_processors.txt import Txt
+from queens.data_processors.csv_file import CsvFile
+from queens.data_processors.ensight_file import EnsightFile
+from queens.data_processors.numpy_file import NumpyFile
+from queens.data_processors.pvd_file import PvdFile
+from queens.data_processors.txt_file import TxtFile
VALID_TYPES = {
- "csv": Csv,
- "ensight": Ensight,
- "ensight_interface_discrepancy": EnsightInterfaceDiscrepancy,
- "numpy": Numpy,
- "pvd": Pvd,
- "txt": Txt,
+ "csv": CsvFile,
+ "ensight": EnsightFile,
+ "numpy": NumpyFile,
+ "pvd": PvdFile,
+ "txt": TxtFile,
}
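For downstream code, only the module paths and class names change; the string keys in VALID_TYPES are unchanged. A minimal sketch of an updated caller, assuming this revision of QUEENS (the file pattern and empty options dict are illustrative placeholders, not values from this patch):

    from queens.data_processors import VALID_TYPES
    from queens.data_processors.csv_file import CsvFile

    # Lookup by config key still works; "csv" now maps to CsvFile.
    processor_cls = VALID_TYPES["csv"]
    assert processor_cls is CsvFile

    # Constructor signature is unchanged by the rename.
    processor = processor_cls(
        file_name_identifier="output.csv",  # placeholder file pattern
        file_options_dict={},               # fill with CSV read-in options
        files_to_be_deleted_regex_lst=[],
    )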
diff --git a/queens/data_processors/data_processor.py b/queens/data_processors/_data_processor.py
similarity index 100%
rename from queens/data_processors/data_processor.py
rename to queens/data_processors/_data_processor.py
diff --git a/queens/data_processors/csv.py b/queens/data_processors/csv_file.py
similarity index 99%
rename from queens/data_processors/csv.py
rename to queens/data_processors/csv_file.py
index 2b691d485..08e7165d9 100644
--- a/queens/data_processors/csv.py
+++ b/queens/data_processors/csv_file.py
@@ -19,14 +19,14 @@
import numpy as np
import pandas as pd
-from queens.data_processors.data_processor import DataProcessor
+from queens.data_processors._data_processor import DataProcessor
from queens.utils.logger_settings import log_init_args
from queens.utils.valid_options import get_option
_logger = logging.getLogger(__name__)
-class Csv(DataProcessor):
+class CsvFile(DataProcessor):
"""Class for extracting data from csv files.
Attributes:
@@ -100,7 +100,7 @@ def __init__(
The paths can contain regex expressions.
Returns:
- Instance of Csv class
+ Instance of CsvFile class
"""
super().__init__(
file_name_identifier=file_name_identifier,
diff --git a/queens/data_processors/ensight.py b/queens/data_processors/ensight_file.py
similarity index 98%
rename from queens/data_processors/ensight.py
rename to queens/data_processors/ensight_file.py
index 44cdfa9ec..2e03dde03 100644
--- a/queens/data_processors/ensight.py
+++ b/queens/data_processors/ensight_file.py
@@ -20,13 +20,13 @@
import vtk
from vtkmodules.util.numpy_support import numpy_to_vtk, vtk_to_numpy
-from queens.data_processors.data_processor import DataProcessor
+from queens.data_processors._data_processor import DataProcessor
from queens.utils.logger_settings import log_init_args
_logger = logging.getLogger(__name__)
-class Ensight(DataProcessor):
+class EnsightFile(DataProcessor):
"""Class for data-processing ensight output.
Attributes:
@@ -83,9 +83,6 @@ def __init__(
The paths can contain regex expressions.
external_geometry (obj): QUEENS external geometry object
experimental_data_reader (obj): Experimental data reader object
-
- Returns:
- Instance of Ensight class (obj)
"""
super().__init__(
file_name_identifier=file_name_identifier,
@@ -313,7 +310,7 @@ def _get_data_from_experimental_coordinates(self, vtk_data_obj, time_value):
experimental_coordinates_for_snapshot, axis=1
)
# interpolate vtk solution to experimental coordinates
- interpolated_data = Ensight._interpolate_vtk(
+ interpolated_data = EnsightFile._interpolate_vtk(
experimental_coordinates_for_snapshot,
vtk_data_obj,
self.vtk_array_type,
@@ -366,7 +363,7 @@ def _get_data_from_geometric_set(
]
# interpolate vtk solution to experimental coordinates
- interpolated_data = Ensight._interpolate_vtk(
+ interpolated_data = EnsightFile._interpolate_vtk(
geometric_set_coordinates,
vtk_data_obj,
self.vtk_array_type,
diff --git a/queens/data_processors/ensight_interface.py b/queens/data_processors/ensight_interface.py
deleted file mode 100644
index 53ff47dd2..000000000
--- a/queens/data_processors/ensight_interface.py
+++ /dev/null
@@ -1,507 +0,0 @@
-#
-# SPDX-License-Identifier: LGPL-3.0-or-later
-# Copyright (c) 2024-2025, QUEENS contributors.
-#
-# This file is part of QUEENS.
-#
-# QUEENS is free software: you can redistribute it and/or modify it under the terms of the GNU
-# Lesser General Public License as published by the Free Software Foundation, either version 3 of
-# the License, or (at your option) any later version. QUEENS is distributed in the hope that it will
-# be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You
-# should have received a copy of the GNU Lesser General Public License along with QUEENS. If not,
-# see <https://www.gnu.org/licenses/>.
-#
-"""Data processor module for vtk ensight boundary data."""
-
-import re
-from pathlib import Path
-
-import numpy as np
-import vtk
-from vtkmodules.util.numpy_support import vtk_to_numpy
-
-from queens.data_processors.data_processor import DataProcessor
-from queens.utils.logger_settings import log_init_args
-
-
-class EnsightInterfaceDiscrepancy(DataProcessor):
- """Discrepancy measure for boundaries and shapes.
-
- *data_processor* class uses full ensight result in vtk to measure distance of
- surface from simulations to experiment.
-
- Attributes:
- time_tol (float): Time tolerance for given reference time points.
- visualization_bool (bool): Boolean for vtk visualization control.
- displacement_fields (str): String with exact field names for displacement to apply.
- problem_dimension (string): String to determine problems in spatial dimension.
- experimental_ref_data_lst (list): Experimental reference data to which the
- discrepancy measure is computed.
- """
-
- @log_init_args
- def __init__(
- self,
- file_name_identifier=None,
- file_options_dict=None,
- files_to_be_deleted_regex_lst=None,
- ):
- """Initialize ensight_interface class.
-
- Args:
- data_processor_name (str): Name of the data processor.
- file_name_identifier (str): Identifier of file name. The file prefix can contain regex
- expression and subdirectories.
- file_options_dict (dict): Dictionary with read-in options for the file:
- - path_to_ref_data (str): Path to experimental reference data to which the
- discrepancy measure is computed.
- - time_tol (float): time tolerance for given reference time points
- - visualization (bool): boolean for vtk visualization control
- - displacement_fields (str): String with exact field names for displacement to apply
- - problem_dimension (string): string to determine problems spatial dimension
- files_to_be_deleted_regex_lst (lst): List with paths to files that should be deleted.
- The paths can contain regex expressions.
- """
- super().__init__(
- file_name_identifier=file_name_identifier,
- file_options_dict=file_options_dict,
- files_to_be_deleted_regex_lst=files_to_be_deleted_regex_lst,
- )
-
- path_ref_data_str = file_options_dict.get("path_to_ref_data")
- if not path_ref_data_str:
- raise ValueError(
- "You must provide the option 'path_to_ref_data' within the 'file_options_dict' "
- f"in '{self.__class__.__name__}'. Abort ..."
- )
- path_ref_data = Path(path_ref_data_str)
- experimental_reference_data = self.read_monitorfile(path_ref_data)
-
- time_tol = file_options_dict.get("time_tol")
- if not time_tol:
- raise ValueError(
- "You must provide the option 'time_tol' within the 'file_options_dict' "
- f"in '{self.__class__.__name__}'. Abort ..."
- )
-
- visualization_bool = file_options_dict.get("visualization", False)
- if not isinstance(visualization_bool, bool):
- raise TypeError(
- "The option 'visualization' must be of type 'bool' "
- f"but you provided type {type(visualization_bool)}. Abort..."
- )
-
- displacement_fields = file_options_dict.get("displacement_fields", ["displacement"])
- if not isinstance(displacement_fields, list):
- raise TypeError(
- "The option 'displacement_fields' must be of type 'list' "
- f"but you provided type {type(displacement_fields)}. Abort..."
- )
-
- problem_dimension = file_options_dict.get("problem_dimension", "2d")
- if not isinstance(problem_dimension, str):
- raise TypeError(
- "The option 'problem_dimension' must be of type 'str' "
- f"but you provided type {type(problem_dimension)}. Abort..."
- )
-
- self.time_tol = time_tol
- self.visualization_bool = visualization_bool
- self.displacement_fields = displacement_fields
- self.problem_dimension = problem_dimension
- self.experimental_ref_data_lst = experimental_reference_data
-
- @staticmethod
- def read_monitorfile(path_to_experimental_reference_data):
- """Read Monitor file.
-
- The Monitor File contains measurements from the
- experiments.
-
- Args:
- path_to_experimental_reference_data (path obj):
- Path to experimental reference data
-
- Returns:
- monfile_data (list): Data from monitor file in numbers
- """
- with open(path_to_experimental_reference_data, encoding="utf-8") as my_file:
- lines = my_file.readlines()
- i = 0
- npoints = 0
- steps = 0
- # lines specifying number of spatial dimensions and dimension ids
- npoint_lines = []
- # measurements for all points in different time steps
- steps_lines = []
- # sort lines into npoint_lines and steps_lines
- for line in lines:
- if line.startswith("#"):
- continue
- line = line.strip()
- if line.startswith("steps"):
- firstline = line
- steps = re.findall("^(?:steps )(.+)(?= npoints)", firstline, re.M)
- steps = int(steps[0])
- npoints = re.findall("^(?:steps )(?:.+)?(?: npoints )(.+)", firstline, re.M)
- npoints = int(npoints[0])
- continue
- if i < npoints:
- npoint_lines.append(line.split())
- i += 1
- continue
- if i - npoints - 1 < steps:
- steps_lines.append(line.split())
-
- if npoints == 0 or steps == 0:
- raise ValueError(
- "read_monitorfile did not find useful content. Monitor format is probably wrong"
- )
-
- # read numeric content from file data
- npoint_lines = [[int(ii) for ii in i] for i in npoint_lines]
- steps_lines = [[float(ii) for ii in i] for i in steps_lines]
-
- # prefill monfile_data of adequate size with zeros
- # monfile_data has dimensions
- # [number of timesteps][2][number of points][2][3dim]
- # it contains pairs of points on the interface and in the domain (for distance
- # in prescribed direction) measured in experiment
- monfile_data = []
- for i in steps_lines:
- monfile_data.append(
- [[0.0e0], [[[0, 0, 0] for j in range(0, 2)] for k in range(0, npoints)]]
- )
-
- # for all npoint_lines read according data from steps_lines to monfile_data
- # loop over time steps
-
- for i, steps_line in enumerate(steps_lines):
- k = 1
- # save time value for time step
- monfile_data[i][0] = steps_line[0]
- # loop over points
- for ii, npoint_line in enumerate(npoint_lines):
- for x in range(0, 2):
- for iii in range(0, npoint_line[0]):
- monfile_data[i][1][ii][x][npoint_line[iii + 1]] = steps_line[k]
- k += 1
- return monfile_data
-
- def get_raw_data_from_file(self, file_path):
- """Read-in EnSight file using vtkGenericEnSightReader.
-
- Args:
- file_path (str): Actual path to the file of interest.
-
- Returns:
- raw_data (obj): Raw data from file.
- """
- raw_data = vtk.vtkGenericEnSightReader()
- raw_data.SetCaseFileName(file_path)
- raw_data.ReadAllVariablesOn()
- raw_data.Update()
- return raw_data
-
- def filter_and_manipulate_raw_data(self, raw_data):
- """Get deformed boundary from vtk.
-
- Create vtk representation of deformed external_geometry_obj and
- evaluate surface distance measurement for every given time step from the
- experiment.
-
- Args:
- raw_data (obj): Raw data from file.
-
- Returns:
- residual (list): Full residual from this data_processor class
- """
- residual_distance_lst = []
- points = vtk.vtkPoints()
- vertices = vtk.vtkCellArray()
-
- for current_step_experimental_data in self.experimental_ref_data_lst:
- grid = self.deformed_grid(raw_data, current_step_experimental_data[0])
- geo = vtk.vtkGeometryFilter()
- geo.SetInputData(grid)
- geo.Update()
- geometry_output = geo.GetOutput()
- outline_out, outline_data = self._get_dim_dependent_vtk_output(geometry_output)
-
- for measured_point_pair in current_step_experimental_data[1]:
- point_vector = self.stretch_vector(
- measured_point_pair[0], measured_point_pair[1], 10
- )
- intersection_points = self._get_intersection_points(
- outline_data, outline_out, point_vector
- )
- distance = self.compute_distance(intersection_points, measured_point_pair)
- residual_distance_lst.append(distance)
-
- self._visualize_intermediate_discrepancy_measure(
- points, vertices, intersection_points, point_vector
- )
-
- self._visualize_final_discrepancy_measure(outline_out, points, vertices)
-
- return residual_distance_lst
-
- def _get_intersection_points(self, outline_data, outline_out, point_vector):
- """Get intersection points."""
- counter = 0
- intersection_points_lst = []
-
- while counter < len(outline_data):
- numpoints = outline_data.item(counter)
- locpointids = outline_data[counter + 1 : counter + 1 + numpoints]
-
- locations = []
- for idx in np.nditer(locpointids):
- x = [0, 0, 0]
- outline_out.GetPoint(idx, x)
- locations.append(x)
-
- local_points = vtk.vtkPoints()
- for location in locations:
- local_points.InsertNextPoint(location)
-
- local_element = self._get_local_element(locations)
- local_element.Initialize(len(locations), local_points)
-
- intersection_point = [0, 0, 0]
- pcoords = [0, 0, 0]
-
- # key line of the algorithm: line intersection
- intersectionfound = local_element.IntersectWithLine(
- point_vector[0],
- point_vector[1],
- 1e-12,
- vtk.reference(0),
- intersection_point,
- pcoords,
- vtk.reference(0),
- )
-
- if intersectionfound:
- intersection_points_lst.append(intersection_point)
-
- counter += numpoints
- counter += 1
-
- return intersection_points_lst
-
- def _get_local_element(self, locations):
- """Get the local element based on input dimension."""
- if len(locations) == 2:
- local_element = vtk.vtkLine()
- elif len(locations) == 3:
- local_element = vtk.vtkTriangle()
- elif len(locations) == 4:
- local_element = vtk.vtkQuad()
- else:
- raise ValueError("Unknown local_element type for structure surface discretization.")
- return local_element
-
- def _get_dim_dependent_vtk_output(self, geoout):
- """Return the vtk output dependent of problem dimension."""
- if self.problem_dimension == "2d":
- outline = vtk.vtkFeatureEdges()
- outline.SetInputData(geoout)
- outline.Update()
- outlineout = outline.GetOutput()
-
- outlines = outlineout.GetLines()
- outline_data_vtk = outlines.GetData()
-
- elif self.problem_dimension == "3d":
- outlineout = geoout
- outlines = outlineout.GetPolys()
- outline_data_vtk = outlines.GetData()
-
- else:
- raise KeyError(
- "The problem dimension must be either '2d' or '3d' "
- f"but you provided {self.problem_dimension}! Abort..."
- )
-
- outline_data = vtk_to_numpy(outline_data_vtk)
- return outlineout, outline_data
-
- def _visualize_intermediate_discrepancy_measure(
- self, points, vertices, intersectionpoints, point_vector
- ):
- """Visualize intermediate discrepancy measure in vtk."""
- if self.visualization_bool:
- for j in point_vector:
- x = points.InsertNextPoint(j)
- vertices.InsertNextCell(1)
- vertices.InsertCellPoint(x)
-
- for idx, j in enumerate(intersectionpoints):
- x = points.InsertNextPoint(j)
- vertices.InsertNextCell(idx + 1)
- vertices.InsertCellPoint(x)
-
- def _visualize_final_discrepancy_measure(self, outlineout, points, vertices):
- """Visualize the final discrepancy measure in vtk."""
- if self.visualization_bool:
- colors = vtk.vtkNamedColors()
- renderer = vtk.vtkRenderer()
- ren_win = vtk.vtkRenderWindow()
- ren_win.AddRenderer(renderer)
- iren = vtk.vtkRenderWindowInteractor()
- iren.SetRenderWindow(ren_win)
-
- pointdata = vtk.vtkPolyData()
-
- pointdata.SetPoints(points)
- pointdata.SetVerts(vertices)
-
- ugrid_mapper = vtk.vtkDataSetMapper()
- ugrid_mapper.SetInputData(outlineout)
-
- point_mapper = vtk.vtkPolyDataMapper()
-
- point_mapper.SetInputData(pointdata)
- point_actor = vtk.vtkActor()
- point_actor.SetMapper(point_mapper)
- point_actor.GetProperty().SetColor([0.0, 0.0, 1.0])
- point_actor.GetProperty().SetPointSize(10)
- point_actor.GetProperty().SetRenderPointsAsSpheres(True)
-
- ugrid_actor = vtk.vtkActor()
- ugrid_actor.SetMapper(ugrid_mapper)
- ugrid_actor.GetProperty().SetColor(colors.GetColor3d("Peacock"))
- ugrid_actor.GetProperty().EdgeVisibilityOn()
-
- renderer.AddActor(ugrid_actor)
- renderer.AddActor(point_actor)
- renderer.SetBackground(colors.GetColor3d("Beige"))
-
- renderer.ResetCamera()
- renderer.GetActiveCamera().Elevation(1.0)
- renderer.GetActiveCamera().Azimuth(0.01)
- renderer.GetActiveCamera().Dolly(0)
-
- ren_win.SetSize(640, 480)
-
- # Generate viewer
- ren_win.Render()
- iren.Start()
-
- def stretch_vector(self, vec1, vec2, scalar):
- """Extend a vector by scalar factor on both ends.
-
- Args:
- vec1 (list): root point coordinates
- vec2 (list): directional point coordinates
- scalar (float): scalar multiplier
-
- Returns:
- vec (list): vector from modified root to modified direction point
- """
- vec = [[], []]
-
- for vec1_ele, vec2_ele in zip(vec1, vec2):
- vec[0].append(vec1_ele - scalar * (vec2_ele - vec1_ele))
- vec[1].append(vec2_ele + scalar * (vec2_ele - vec1_ele))
-
- return vec
-
- def compute_distance(self, intersection_points, measured_points):
- """Find the furthest point for a set of intersection points.
-
- Args:
- intersection_points (list): intersection point coordinates
- measured_points (list): pair of points from monitor file
-
- Returns:
- distance (float): signed distance between root point and furthest outward
- intersection point; positive if in positive direction from root
- """
- distance = np.inf
-
- np1m = np.array(measured_points[0])
- np2m = np.array(measured_points[1])
-
- for p in intersection_points:
- npp = np.array(p)
- dist = np.linalg.norm(p - np1m, ord=2)
- if np.dot(np2m - np1m, npp - np1m) < 0:
- dist *= -1
- distance = min(distance, dist)
-
- return distance
-
- def deformed_grid(self, raw_data, time):
- """Read deformed grid from Ensight file at specified time.
-
- Initially, the undeformed grid is read from the Ensight file
- Afterward, *warpbyvector* applies the displacement of *structure* field at time *time*
- such that the final result is the deformed grid at the specified time.
-
- Args:
- raw_data (obj): Raw data from file
- time (float): Time value for data processing
-
- Returns:
- deformed_grid (vtkUnstructuredGrid): Deformed grid for given time
- """
- time_steps_in_ensight = raw_data.GetTimeSets()
-
- times_iter = time_steps_in_ensight.NewIterator()
- times_iter.GoToFirstItem()
-
- steps = np.array([])
-
- while not times_iter.IsDoneWithTraversal():
- curr = times_iter.GetCurrentObject()
- steps = np.append(steps, vtk_to_numpy(curr))
- times_iter.GoToNextItem()
-
- steps = np.unique(steps)
- idx = np.where(abs(steps - time) < self.time_tol)
- ensight_time = steps[idx]
-
- if len(ensight_time) > 1:
- raise ValueError(
- "point in time from *.monitor file used with time_tol is not unique in results"
- )
- if len(ensight_time) == 0:
- raise ValueError(
- "point in time from *.monitor file used with time_tol not existing in results"
- )
-
- raw_data.SetTimeValue(ensight_time)
- raw_data.Update()
-
- output = raw_data.GetOutput()
- number_of_blocks = output.GetNumberOfBlocks()
- if number_of_blocks != 1:
- raise ValueError(
- "ensight reader output has more or less than one block. This is not expected."
- "Investigate your data!"
- )
- block = output.GetBlock(0)
-
- block.GetPointData().SetActiveVectors(self.displacement_fields[0])
-
- vtk_warp_vector = vtk.vtkWarpVector()
- vtk_warp_vector.SetScaleFactor(1.0)
- vtk_warp_vector.SetInputData(block)
- vtk_warp_vector.Update()
- if len(self.displacement_fields) > 1:
- for i, field in enumerate(self.displacement_fields):
- if i > 0:
- second_block = vtk_warp_vector.GetOutput()
- second_block.GetPointData().SetActiveVectors(field)
- wvb = vtk.vtkWarpVector()
- wvb.SetScaleFactor(1.0)
- wvb.SetInputData(second_block)
- wvb.Update()
- vtk_warp_vector = wvb
-
- deformed_grid = vtk_warp_vector.GetUnstructuredGridOutput()
-
- return deformed_grid
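For reviewers of this removal: the deleted read_monitorfile parsed a plain-text monitor format with one `steps <n> npoints <m>` header line, then m dimension-spec lines (number of measured dimensions followed by the dimension ids), then one line per time step holding the time value and, for each point, an interface/domain point pair. A hypothetical 2D example with one point and two steps (values invented for illustration):

    # comment lines start with '#'
    steps 2 npoints 1
    2 0 1
    0.1  0.0 0.0  1.0 0.0
    0.2  0.0 0.0  1.2 0.0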
diff --git a/queens/data_processors/numpy.py b/queens/data_processors/numpy_file.py
similarity index 96%
rename from queens/data_processors/numpy.py
rename to queens/data_processors/numpy_file.py
index 2dab1a262..62b64ade1 100644
--- a/queens/data_processors/numpy.py
+++ b/queens/data_processors/numpy_file.py
@@ -18,13 +18,13 @@
import numpy as np
-from queens.data_processors.data_processor import DataProcessor
+from queens.data_processors._data_processor import DataProcessor
from queens.utils.logger_settings import log_init_args
_logger = logging.getLogger(__name__)
-class Numpy(DataProcessor):
+class NumpyFile(DataProcessor):
"""Class for extracting data from numpy binaries."""
@log_init_args
@@ -43,9 +43,6 @@ def __init__(
file_options_dict (dict): Dictionary with read-in options for the file
files_to_be_deleted_regex_lst (lst): List with paths to files that should be deleted.
The paths can contain regex expressions.
-
- Returns:
- Instance of Numpy class
"""
super().__init__(
file_name_identifier=file_name_identifier,
diff --git a/queens/data_processors/pvd.py b/queens/data_processors/pvd_file.py
similarity index 96%
rename from queens/data_processors/pvd.py
rename to queens/data_processors/pvd_file.py
index 0243a104f..b48009b94 100644
--- a/queens/data_processors/pvd.py
+++ b/queens/data_processors/pvd_file.py
@@ -19,13 +19,13 @@
import numpy as np
import pyvista as pv
-from queens.data_processors.data_processor import DataProcessor
+from queens.data_processors._data_processor import DataProcessor
from queens.utils.logger_settings import log_init_args
_logger = logging.getLogger(__name__)
-class Pvd(DataProcessor):
+class PvdFile(DataProcessor):
"""Class for extracting data from pvd.
Attributes:
@@ -60,9 +60,6 @@ def __init__(
block (int, optional): Considered block of MultiBlock data set (first block by default)
point_data (bool, optional): Whether to extract point data (True) or cell data (False).
Defaults to point data.
-
- Returns:
- Instance of Pvd class
"""
super().__init__(
file_name_identifier=file_name_identifier,
diff --git a/queens/data_processors/txt.py b/queens/data_processors/txt_file.py
similarity index 98%
rename from queens/data_processors/txt.py
rename to queens/data_processors/txt_file.py
index fbf6ea8bc..832fa07a9 100644
--- a/queens/data_processors/txt.py
+++ b/queens/data_processors/txt_file.py
@@ -18,13 +18,13 @@
import re
from pathlib import Path
-from queens.data_processors.data_processor import DataProcessor
+from queens.data_processors._data_processor import DataProcessor
from queens.utils.logger_settings import log_init_args
_logger = logging.getLogger(__name__)
-class Txt(DataProcessor):
+class TxtFile(DataProcessor):
"""Class for extracting data from txt files.
Provides basic functionality for extracting data from txt files,
@@ -72,9 +72,6 @@ def __init__(
max_file_size_in_mega_byte (int): Upper limit of the file size to be read into
memory in megabyte (MB). See comment above on
Potential Improvement.
-
- Returns:
- Instance of Txt class
"""
super().__init__(
file_name_identifier=file_name_identifier,
@@ -119,7 +116,7 @@ def get_raw_data_from_file(self, file_path):
def filter_and_manipulate_raw_data(self, raw_data):
"""Filter the raw data from the txt file.
- The Txt class provides some basic filtering functionality,
+ The TxtFile class provides some basic filtering functionality,
however it is up to the user to define the specifics of how the raw data
should be filtered.
diff --git a/queens/distributions/distribution.py b/queens/distributions/_distribution.py
similarity index 100%
rename from queens/distributions/distribution.py
rename to queens/distributions/_distribution.py
diff --git a/queens/distributions/beta.py b/queens/distributions/beta.py
index 403669492..24f1dc9d6 100644
--- a/queens/distributions/beta.py
+++ b/queens/distributions/beta.py
@@ -18,7 +18,7 @@
import scipy.linalg
import scipy.stats
-from queens.distributions.distribution import Continuous
+from queens.distributions._distribution import Continuous
from queens.utils.logger_settings import log_init_args
diff --git a/queens/distributions/categorical.py b/queens/distributions/categorical.py
index 863e27291..2fdbfae63 100644
--- a/queens/distributions/categorical.py
+++ b/queens/distributions/categorical.py
@@ -22,7 +22,7 @@
import numpy as np
-from queens.distributions.distribution import Distribution
+from queens.distributions._distribution import Distribution
from queens.utils.logger_settings import log_init_args
_logger = logging.getLogger(__name__)
diff --git a/queens/distributions/exponential.py b/queens/distributions/exponential.py
index 1e5c03a2b..65c7f3ec3 100644
--- a/queens/distributions/exponential.py
+++ b/queens/distributions/exponential.py
@@ -16,7 +16,7 @@
import numpy as np
-from queens.distributions.distribution import Continuous
+from queens.distributions._distribution import Continuous
from queens.utils.logger_settings import log_init_args
diff --git a/queens/distributions/free_variable.py b/queens/distributions/free_variable.py
index b2d29ae73..f8fad1c29 100644
--- a/queens/distributions/free_variable.py
+++ b/queens/distributions/free_variable.py
@@ -14,7 +14,7 @@
#
"""Free Variable."""
-from queens.distributions.distribution import Continuous
+from queens.distributions._distribution import Continuous
from queens.utils.logger_settings import log_init_args
diff --git a/queens/distributions/lognormal.py b/queens/distributions/lognormal.py
index 0d04bdfde..ca8f9d690 100644
--- a/queens/distributions/lognormal.py
+++ b/queens/distributions/lognormal.py
@@ -18,7 +18,7 @@
import scipy.linalg
import scipy.stats
-from queens.distributions.distribution import Continuous
+from queens.distributions._distribution import Continuous
from queens.distributions.normal import Normal
from queens.utils.logger_settings import log_init_args
diff --git a/queens/distributions/mean_field_normal.py b/queens/distributions/mean_field_normal.py
index dafecf51f..17a075bac 100644
--- a/queens/distributions/mean_field_normal.py
+++ b/queens/distributions/mean_field_normal.py
@@ -18,7 +18,7 @@
import scipy.stats
from scipy.special import erf # pylint:disable=no-name-in-module
-from queens.distributions.distribution import Continuous
+from queens.distributions._distribution import Continuous
from queens.utils.logger_settings import log_init_args
diff --git a/queens/distributions/mixture.py b/queens/distributions/mixture.py
index 591ae219a..8689ac967 100644
--- a/queens/distributions/mixture.py
+++ b/queens/distributions/mixture.py
@@ -20,7 +20,7 @@
from scipy.special import logsumexp
from queens.distributions import VALID_TYPES
-from queens.distributions.distribution import Continuous
+from queens.distributions._distribution import Continuous
from queens.utils.imports import get_module_class
from queens.utils.logger_settings import log_init_args
diff --git a/queens/distributions/multinomial.py b/queens/distributions/multinomial.py
index 6ef5ddcd2..18251caf5 100644
--- a/queens/distributions/multinomial.py
+++ b/queens/distributions/multinomial.py
@@ -17,7 +17,7 @@
import numpy as np
from scipy.stats import multinomial
-from queens.distributions.distribution import Discrete
+from queens.distributions._distribution import Discrete
from queens.utils.logger_settings import log_init_args
diff --git a/queens/distributions/normal.py b/queens/distributions/normal.py
index 211335f43..5c11d34f8 100644
--- a/queens/distributions/normal.py
+++ b/queens/distributions/normal.py
@@ -18,9 +18,10 @@
import scipy.linalg
import scipy.stats
-from queens.distributions.distribution import Continuous
-from queens.utils import numpy as numpy_utils
+from queens.distributions._distribution import Continuous
from queens.utils.logger_settings import log_init_args
+from queens.utils.numpy_array import at_least_2d
+from queens.utils.numpy_linalg import safe_cholesky
class Normal(Continuous):
@@ -41,7 +42,7 @@ def __init__(self, mean, covariance):
covariance (array_like): covariance of the distribution
"""
mean = np.array(mean).reshape(-1)
- covariance = numpy_utils.at_least_2d(np.array(covariance))
+ covariance = at_least_2d(np.array(covariance))
# sanity checks
dimension = covariance.shape[0]
@@ -177,7 +178,7 @@ def _calculate_distribution_parameters(covariance):
logpdf_const (float): Constant for evaluation of log pdf
"""
dimension = covariance.shape[0]
- low_chol = numpy_utils.safe_cholesky(covariance)
+ low_chol = safe_cholesky(covariance)
# precision matrix Q and determinant of cov matrix
chol_inv = np.linalg.inv(low_chol)
diff --git a/queens/distributions/particle.py b/queens/distributions/particle.py
index 626efe05e..b35babc4c 100644
--- a/queens/distributions/particle.py
+++ b/queens/distributions/particle.py
@@ -19,7 +19,7 @@
import numpy as np
-from queens.distributions.distribution import Discrete
+from queens.distributions._distribution import Discrete
_logger = logging.getLogger(__name__)
diff --git a/queens/distributions/uniform.py b/queens/distributions/uniform.py
index b47082e82..eb9b1295a 100644
--- a/queens/distributions/uniform.py
+++ b/queens/distributions/uniform.py
@@ -18,7 +18,7 @@
import scipy.linalg
import scipy.stats
-from queens.distributions.distribution import Continuous
+from queens.distributions._distribution import Continuous
from queens.utils.logger_settings import log_init_args
diff --git a/queens/drivers/driver.py b/queens/drivers/_driver.py
similarity index 100%
rename from queens/drivers/driver.py
rename to queens/drivers/_driver.py
diff --git a/queens/drivers/function.py b/queens/drivers/function.py
index d4195b10c..1a48c9fd0 100644
--- a/queens/drivers/function.py
+++ b/queens/drivers/function.py
@@ -19,7 +19,7 @@
import numpy as np
-from queens.drivers.driver import Driver
+from queens.drivers._driver import Driver
from queens.example_simulator_functions import example_simulator_function_by_name
from queens.utils.imports import get_module_attribute
from queens.utils.logger_settings import log_init_args
diff --git a/queens/drivers/jobscript.py b/queens/drivers/jobscript.py
index abab8a322..3add7fa71 100644
--- a/queens/drivers/jobscript.py
+++ b/queens/drivers/jobscript.py
@@ -19,7 +19,7 @@
from dataclasses import dataclass
from pathlib import Path
-from queens.drivers.driver import Driver
+from queens.drivers._driver import Driver
from queens.utils.exceptions import SubprocessError
from queens.utils.injector import inject, inject_in_template
from queens.utils.io import read_file
diff --git a/queens/external_geometries/external_geometry.py b/queens/external_geometries/_external_geometry.py
similarity index 100%
rename from queens/external_geometries/external_geometry.py
rename to queens/external_geometries/_external_geometry.py
diff --git a/queens/external_geometries/fourc_dat.py b/queens/external_geometries/fourc_dat.py
index b4fa59feb..6d7106f26 100644
--- a/queens/external_geometries/fourc_dat.py
+++ b/queens/external_geometries/fourc_dat.py
@@ -23,7 +23,7 @@
import numpy as np
-from queens.external_geometries.external_geometry import ExternalGeometry
+from queens.external_geometries._external_geometry import ExternalGeometry
from queens.utils.logger_settings import log_init_args
diff --git a/queens/global_settings.py b/queens/global_settings.py
index 4fc8f6cf1..13a6c8f26 100644
--- a/queens/global_settings.py
+++ b/queens/global_settings.py
@@ -21,7 +21,7 @@
import logging
from pathlib import Path
-from queens.schedulers.dask import SHUTDOWN_CLIENTS
+from queens.schedulers._dask import SHUTDOWN_CLIENTS
from queens.utils.ascii_art import print_banner_and_description
from queens.utils.logger_settings import reset_logging, setup_basic_logging
from queens.utils.path import PATH_TO_QUEENS
diff --git a/queens/iterators/iterator.py b/queens/iterators/_iterator.py
similarity index 100%
rename from queens/iterators/iterator.py
rename to queens/iterators/_iterator.py
diff --git a/queens/iterators/pymc.py b/queens/iterators/_pymc.py
similarity index 66%
rename from queens/iterators/pymc.py
rename to queens/iterators/_pymc.py
index a8f220a01..598eb174b 100644
--- a/queens/iterators/pymc.py
+++ b/queens/iterators/_pymc.py
@@ -16,16 +16,18 @@
import abc
import logging
+from typing import Union
import arviz as az
import matplotlib.pyplot as plt
import numpy as np
import pymc as pm
import pytensor.tensor as pt
+from pytensor import Variable
-from queens.iterators.iterator import Iterator
+from queens.distributions import beta, exponential, lognormal, mean_field_normal, normal, uniform
+from queens.iterators._iterator import Iterator
from queens.utils.process_outputs import process_outputs, write_results
-from queens.utils.pymc import PymcDistributionWrapper, from_config_create_pymc_distribution_dict
_logger = logging.getLogger(__name__)
@@ -370,3 +372,217 @@ def post_run(self):
plt.close("all")
_logger.info("MCMC by PyMC results finished")
+
+
+class PymcDistributionWrapper(pt.Op):
+    """Op class for data conversion.
+
+    This class wraps QUEENS log-pdf functions as pytensor Ops so that PyMC can use them.
+
+ Attributes:
+ logpdf (fun): The log-pdf function
+ logpdf_gradients (fun): The function to evaluate the gradient of the log-pdf
+ logpdf_grad (obj): Wrapper for the gradient function of the log-pdf
+ """
+
+ itypes = [pt.dmatrix]
+ otypes = [pt.dvector]
+
+ def __init__(self, logpdf, logpdf_gradients=None):
+        """Initialize the wrapper for the functions.
+
+ Args:
+ logpdf (fun): The log-pdf function
+            logpdf_gradients (fun): The function to evaluate the gradient of the log-pdf
+ """
+ self.logpdf = logpdf
+ self.logpdf_gradients = logpdf_gradients
+ self.logpdf_grad = PymcGradientWrapper(self.logpdf_gradients)
+
+ # pylint: disable-next=unused-argument
+ def perform(self, _node, inputs, output_storage, params=None):
+        """Evaluate the external log-pdf function."""
+ (sample,) = inputs
+
+ value = self.logpdf(sample)
+ output_storage[0][0] = np.array(value)
+
+ def grad(self, inputs, output_grads):
+ """Get gradient and multiply with upstream gradient."""
+ (sample,) = inputs
+ return [output_grads[0] * self.logpdf_grad(sample)]
+
+ def R_op(
+ self, inputs: list[Variable], eval_points: Union[Variable, list[Variable]]
+ ) -> list[Variable]:
+ """Construct a graph for the R-operator.
+
+ This method is primarily used by `Rop`.
+ For more information, see pymc documentation for the method.
+
+ Args:
+ inputs (list[Variable]): The input variables for the R operator.
+ eval_points (Union[Variable, list[Variable]]): Should have the same length as inputs.
+ Each element of `eval_points` specifies
+ the value of the corresponding input at
+ the point where the R-operator is to be
+ evaluated.
+
+ Returns:
+ list[Variable]
+ """
+ raise NotImplementedError
+
+
+class PymcGradientWrapper(pt.Op):
+    """Op class for data conversion.
+
+    This class wraps the gradient functions of the distributions in QUEENS.
+
+ Attributes:
+ gradient_func (fun): The function to evaluate the gradient of the pdf
+ """
+
+ itypes = [pt.dmatrix]
+ otypes = [pt.dmatrix]
+
+ def __init__(self, gradient_func):
+        """Initialize the wrapper for the functions.
+
+ Args:
+ gradient_func (fun): The function to evaluate the gradient of the pdf
+ """
+ self.gradient_func = gradient_func
+
+ def perform(self, _node, inputs, output_storage, _params=None):
+ """Evaluate the gradient."""
+ (sample,) = inputs
+ if self.gradient_func is not None:
+ grads = self.gradient_func(sample)
+ output_storage[0][0] = grads
+ else:
+ raise TypeError("Gradient function is not callable")
+
+ def R_op(
+ self, inputs: list[Variable], eval_points: Union[Variable, list[Variable]]
+ ) -> list[Variable]:
+ """Construct a graph for the R-operator.
+
+ This method is primarily used by `Rop`.
+ For more information, see pymc documentation for the method.
+
+ Args:
+ inputs (list[Variable]): The input variables for the R operator.
+ eval_points (Union[Variable, list[Variable]]): Should have the same length as inputs.
+ Each element of `eval_points` specifies
+ the value of the corresponding input at
+ the point where the R-operator is to be
+ evaluated.
+
+ Returns:
+ list[Variable]
+ """
+ raise NotImplementedError
+
+
+def from_config_create_pymc_distribution_dict(parameters, explicit_shape):
+ """Get random variables in pymc distribution format.
+
+    Args:
+        parameters (obj): Parameters object
+        explicit_shape (int): Explicit shape parameter for distribution dimension
+
+    Returns:
+        pymc_distribution_list (list): List of PyMC distribution objects
+    """
+ pymc_distribution_list = []
+
+ # loop over random_variables and create list
+ for name, distribution in zip(parameters.names, parameters.to_distribution_list()):
+ pymc_distribution_list.append(
+ from_config_create_pymc_distribution(distribution, name, explicit_shape)
+ )
+ # Pass the distribution list as arguments
+ return pymc_distribution_list
+
+
+def from_config_create_pymc_distribution(distribution, name, explicit_shape):
+ """Create PyMC distribution object from queens distribution.
+
+ Args:
+        distribution (obj): QUEENS distribution object
+        name (str): Name of the random variable
+        explicit_shape (int): Explicit shape parameter for distribution dimension
+
+ Returns:
+        random_variable (obj): Random variable, distribution object in PyMC format
+ """
+ shape = (explicit_shape, distribution.dimension)
+ if isinstance(distribution, normal.Normal):
+ random_variable = pm.MvNormal(
+ name,
+ mu=distribution.mean,
+ cov=distribution.covariance,
+ shape=shape,
+ )
+ elif isinstance(distribution, mean_field_normal.MeanFieldNormal):
+ random_variable = pm.Normal(
+ name,
+ mu=distribution.mean,
+ sigma=distribution.covariance,
+ shape=shape,
+ )
+
+ elif isinstance(distribution, uniform.Uniform):
+ if np.all(distribution.lower_bound == 0):
+ random_variable = pm.Uniform(
+ name,
+ lower=0,
+ upper=distribution.upper_bound,
+ shape=shape,
+ )
+
+ elif np.all(distribution.upper_bound == 0):
+ random_variable = pm.Uniform(
+ name,
+ lower=distribution.lower_bound,
+ upper=0,
+ shape=shape,
+ )
+ else:
+ random_variable = pm.Uniform(
+ name,
+ lower=distribution.lower_bound,
+ upper=distribution.upper_bound,
+ shape=shape,
+ )
+ elif isinstance(distribution, lognormal.LogNormal):
+ if distribution.covariance.size == 1:
+ std = distribution.covariance[0, 0] ** (1 / 2)
+ else:
+ raise NotImplementedError("Only 1D lognormals supported")
+
+ random_variable = pm.LogNormal(
+ name,
+ mu=distribution.mean,
+ sigma=std,
+ shape=shape,
+ )
+ elif isinstance(distribution, exponential.Exponential):
+ random_variable = pm.Exponential(
+ name,
+ lam=distribution.rate,
+ shape=shape,
+ )
+ elif isinstance(distribution, beta.Beta):
+ random_variable = pm.Beta(
+ name,
+ alpha=distribution.a,
+ beta=distribution.b,
+ shape=shape,
+ )
+ else:
+        raise NotImplementedError("Distribution not supported by QUEENS and/or PyMC")
+ return random_variable
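As a usage sketch of the helpers that now live in queens.iterators._pymc (the toy log-pdf and its gradient are assumptions; per its itypes/otypes, PymcDistributionWrapper takes a matrix input and returns a vector):

    import numpy as np
    import pymc as pm
    import pytensor.tensor as pt

    from queens.distributions.normal import Normal
    from queens.iterators._pymc import (
        PymcDistributionWrapper,
        from_config_create_pymc_distribution,
    )

    # Toy standard-normal log-pdf and gradient (illustrative assumptions).
    def logpdf(samples):  # (n, dim) -> (n,)
        return -0.5 * np.sum(samples**2, axis=1)

    def logpdf_grad(samples):  # (n, dim) -> (n, dim)
        return -samples

    wrapped = PymcDistributionWrapper(logpdf, logpdf_grad)
    x = pt.dmatrix("x")
    log_prob = wrapped(x)  # symbolic pytensor vector, e.g. for pm.Potential

    with pm.Model():
        # Maps a QUEENS Normal to pm.MvNormal with shape (explicit_shape, dim).
        theta = from_config_create_pymc_distribution(
            Normal(np.zeros(2), np.eye(2)), "theta", explicit_shape=1
        )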
diff --git a/queens/iterators/variational_inference.py b/queens/iterators/_variational_inference.py
similarity index 99%
rename from queens/iterators/variational_inference.py
rename to queens/iterators/_variational_inference.py
index b5e9649c7..0bbdf6e56 100644
--- a/queens/iterators/variational_inference.py
+++ b/queens/iterators/_variational_inference.py
@@ -20,7 +20,7 @@
import numpy as np
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.process_outputs import write_results
from queens.variational_distributions import FullRankNormal, MeanFieldNormal
from queens.visualization.variational_inference_visualization import VIVisualization
diff --git a/queens/iterators/adaptive_sampling.py b/queens/iterators/adaptive_sampling.py
index 2ccb358bd..904872c8a 100644
--- a/queens/iterators/adaptive_sampling.py
+++ b/queens/iterators/adaptive_sampling.py
@@ -24,8 +24,8 @@
from jax import jit
from particles.resampling import stratified
+from queens.iterators._iterator import Iterator
from queens.iterators.grid import Grid
-from queens.iterators.iterator import Iterator
from queens.iterators.metropolis_hastings import MetropolisHastings
from queens.iterators.sequential_monte_carlo_chopin import SequentialMonteCarloChopin
from queens.utils.io import load_result
diff --git a/queens/iterators/bbvi.py b/queens/iterators/bbvi.py
index 39e4b3a16..c599236d2 100644
--- a/queens/iterators/bbvi.py
+++ b/queens/iterators/bbvi.py
@@ -18,7 +18,7 @@
import numpy as np
-from queens.iterators.variational_inference import VALID_EXPORT_FIELDS, VariationalInference
+from queens.iterators._variational_inference import VALID_EXPORT_FIELDS, VariationalInference
from queens.utils.collection import CollectionObject
from queens.utils.logger_settings import log_init_args
from queens.utils.valid_options import check_if_valid_options
diff --git a/queens/iterators/bmfia.py b/queens/iterators/bmfia.py
index 3d67fbd58..37f01c818 100644
--- a/queens/iterators/bmfia.py
+++ b/queens/iterators/bmfia.py
@@ -19,7 +19,7 @@
import numpy as np
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.logger_settings import log_init_args
from queens.utils.sobol_sequence import sample_sobol_sequence
diff --git a/queens/iterators/bmfmc.py b/queens/iterators/bmfmc.py
index 9b192cc42..869c86e42 100644
--- a/queens/iterators/bmfmc.py
+++ b/queens/iterators/bmfmc.py
@@ -21,7 +21,7 @@
import pandas as pd
from diversipy import psa_select
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import process_outputs, write_results
from queens.visualization.bmfmc_visualization import BMFMCVisualization
diff --git a/queens/iterators/classification.py b/queens/iterators/classification.py
index dbca72527..7e6f8131e 100644
--- a/queens/iterators/classification.py
+++ b/queens/iterators/classification.py
@@ -23,7 +23,7 @@
import numpy as np
from skactiveml.utils import MISSING_LABEL
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.ascii_art import print_classification
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import write_results
diff --git a/queens/iterators/control_variates.py b/queens/iterators/control_variates.py
index ca9bc4965..9c151d48f 100644
--- a/queens/iterators/control_variates.py
+++ b/queens/iterators/control_variates.py
@@ -18,7 +18,7 @@
import numpy as np
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import write_results
diff --git a/queens/iterators/data.py b/queens/iterators/data.py
index f3514a437..b84ac253a 100644
--- a/queens/iterators/data.py
+++ b/queens/iterators/data.py
@@ -17,7 +17,7 @@
import logging
import pickle
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import process_outputs, write_results
diff --git a/queens/iterators/elementary_effects.py b/queens/iterators/elementary_effects.py
index ce684a02c..1416e5cba 100644
--- a/queens/iterators/elementary_effects.py
+++ b/queens/iterators/elementary_effects.py
@@ -25,7 +25,7 @@
from SALib.sample import morris
from queens.distributions.uniform import Uniform
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import write_results
from queens.visualization.sa_visualization import SAVisualization
diff --git a/queens/iterators/grid.py b/queens/iterators/grid.py
index 2a0498208..9853e36d3 100644
--- a/queens/iterators/grid.py
+++ b/queens/iterators/grid.py
@@ -20,7 +20,7 @@
from queens.utils.process_outputs import process_outputs, write_results
from queens.visualization.grid_iterator_visualization import GridIteratorVisualization
-from .iterator import Iterator
+from ._iterator import Iterator
class Grid(Iterator):
diff --git a/queens/iterators/hamiltonian_monte_carlo.py b/queens/iterators/hamiltonian_monte_carlo.py
index 9c1f2c3dd..0ccdf6500 100644
--- a/queens/iterators/hamiltonian_monte_carlo.py
+++ b/queens/iterators/hamiltonian_monte_carlo.py
@@ -22,7 +22,7 @@
import pymc as pm
-from queens.iterators.pymc import PyMC
+from queens.iterators._pymc import PyMC
from queens.utils.logger_settings import log_init_args
_logger = logging.getLogger(__name__)
diff --git a/queens/iterators/latin_hypercube_sampling.py b/queens/iterators/latin_hypercube_sampling.py
index 88c0a8a32..efdb8cf88 100644
--- a/queens/iterators/latin_hypercube_sampling.py
+++ b/queens/iterators/latin_hypercube_sampling.py
@@ -19,7 +19,7 @@
import numpy as np
from pyDOE import lhs
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import process_outputs, write_results
diff --git a/queens/iterators/metropolis_hastings.py b/queens/iterators/metropolis_hastings.py
index 8a956e249..fde0cbdda 100644
--- a/queens/iterators/metropolis_hastings.py
+++ b/queens/iterators/metropolis_hastings.py
@@ -30,7 +30,7 @@
from tqdm import tqdm
from queens.distributions.normal import Normal
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils import mcmc as mcmc_utils
from queens.utils import sequential_monte_carlo as smc_utils
from queens.utils.logger_settings import log_init_args
diff --git a/queens/iterators/metropolis_hastings_pymc.py b/queens/iterators/metropolis_hastings_pymc.py
index 527a27e8f..526a595ac 100644
--- a/queens/iterators/metropolis_hastings_pymc.py
+++ b/queens/iterators/metropolis_hastings_pymc.py
@@ -23,9 +23,8 @@
import numpy as np
import pymc as pm
-from queens.iterators.pymc import PyMC
+from queens.iterators._pymc import PyMC, PymcDistributionWrapper
from queens.utils.logger_settings import log_init_args
-from queens.utils.pymc import PymcDistributionWrapper
_logger = logging.getLogger(__name__)
diff --git a/queens/iterators/mlmc.py b/queens/iterators/mlmc.py
index a2b609aa5..6bdcb7b35 100644
--- a/queens/iterators/mlmc.py
+++ b/queens/iterators/mlmc.py
@@ -19,7 +19,7 @@
import numpy as np
from queens.distributions.uniform_discrete import UniformDiscrete
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import write_results
diff --git a/queens/iterators/monte_carlo.py b/queens/iterators/monte_carlo.py
index 4666fba1b..9deef2f06 100644
--- a/queens/iterators/monte_carlo.py
+++ b/queens/iterators/monte_carlo.py
@@ -19,7 +19,7 @@
import matplotlib.pyplot as plt
import numpy as np
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import process_outputs, write_results
diff --git a/queens/iterators/nuts.py b/queens/iterators/nuts.py
index 29a9b06bd..e8a67be20 100644
--- a/queens/iterators/nuts.py
+++ b/queens/iterators/nuts.py
@@ -23,7 +23,7 @@
import pymc as pm
-from queens.iterators.pymc import PyMC
+from queens.iterators._pymc import PyMC
from queens.utils.logger_settings import log_init_args
_logger = logging.getLogger(__name__)
diff --git a/queens/iterators/optimization.py b/queens/iterators/optimization.py
index 37a764a03..5d2d65ece 100644
--- a/queens/iterators/optimization.py
+++ b/queens/iterators/optimization.py
@@ -21,7 +21,7 @@
from scipy.optimize import Bounds, minimize
from scipy.optimize._numdiff import _prepare_bounds
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.fd_jacobian import fd_jacobian, get_positions
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import write_results
diff --git a/queens/iterators/points.py b/queens/iterators/points.py
index 73be079b8..90caff98d 100644
--- a/queens/iterators/points.py
+++ b/queens/iterators/points.py
@@ -19,7 +19,7 @@
import numpy as np
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.ascii_art import print_points_iterator
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import write_results
diff --git a/queens/iterators/polynomial_chaos.py b/queens/iterators/polynomial_chaos.py
index 0cc08915b..0be330b1a 100644
--- a/queens/iterators/polynomial_chaos.py
+++ b/queens/iterators/polynomial_chaos.py
@@ -27,7 +27,7 @@
from chaospy.quadrature.frontend import SHORT_NAME_TABLE as projection_node_location_rules
from queens.distributions import beta, lognormal, normal, uniform
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import write_results
from queens.utils.valid_options import get_option
diff --git a/queens/iterators/reparameteriztion_based_variational.py b/queens/iterators/reparameteriztion_based_variational.py
index 736c8a734..4332d5724 100644
--- a/queens/iterators/reparameteriztion_based_variational.py
+++ b/queens/iterators/reparameteriztion_based_variational.py
@@ -18,7 +18,7 @@
import numpy as np
-from queens.iterators.variational_inference import VALID_EXPORT_FIELDS, VariationalInference
+from queens.iterators._variational_inference import VALID_EXPORT_FIELDS, VariationalInference
from queens.utils.collection import CollectionObject
from queens.utils.logger_settings import log_init_args
from queens.utils.valid_options import check_if_valid_options
diff --git a/queens/iterators/sequential_monte_carlo.py b/queens/iterators/sequential_monte_carlo.py
index fab33684c..7a811814b 100644
--- a/queens/iterators/sequential_monte_carlo.py
+++ b/queens/iterators/sequential_monte_carlo.py
@@ -44,7 +44,7 @@
import numpy as np
import scipy
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.iterators.metropolis_hastings import MetropolisHastings
from queens.utils import sequential_monte_carlo as smc_utils
from queens.utils.logger_settings import log_init_args
diff --git a/queens/iterators/sequential_monte_carlo_chopin.py b/queens/iterators/sequential_monte_carlo_chopin.py
index 6d20f1717..b68703c2b 100644
--- a/queens/iterators/sequential_monte_carlo_chopin.py
+++ b/queens/iterators/sequential_monte_carlo_chopin.py
@@ -24,8 +24,8 @@
from particles import distributions as dists
from particles.smc_samplers import AdaptiveTempering
-from queens.distributions.distribution import Distribution
-from queens.iterators.iterator import Iterator
+from queens.distributions._distribution import Distribution
+from queens.iterators._iterator import Iterator
from queens.utils import sequential_monte_carlo as smc_utils
from queens.utils.logger_settings import log_init_args
from queens.utils.printing import get_str_table
diff --git a/queens/iterators/sobol_index.py b/queens/iterators/sobol_index.py
index ed1e9f4d8..168a943bd 100644
--- a/queens/iterators/sobol_index.py
+++ b/queens/iterators/sobol_index.py
@@ -23,7 +23,7 @@
from SALib.sample import saltelli
from queens.distributions import lognormal, normal, uniform
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import write_results
diff --git a/queens/iterators/sobol_index_gp_uncertainty.py b/queens/iterators/sobol_index_gp_uncertainty.py
index ad65b6358..ad6e92ba0 100644
--- a/queens/iterators/sobol_index_gp_uncertainty.py
+++ b/queens/iterators/sobol_index_gp_uncertainty.py
@@ -32,7 +32,7 @@
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import write_results
-from .iterator import Iterator
+from ._iterator import Iterator
logging.getLogger("matplotlib").setLevel(logging.CRITICAL)
_logger = logging.getLogger(__name__)
diff --git a/queens/iterators/sobol_sequence.py b/queens/iterators/sobol_sequence.py
index 84cd2bad5..f6134c3ec 100644
--- a/queens/iterators/sobol_sequence.py
+++ b/queens/iterators/sobol_sequence.py
@@ -16,7 +16,7 @@
import logging
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.utils.logger_settings import log_init_args
from queens.utils.process_outputs import process_outputs, write_results
from queens.utils.sobol_sequence import sample_sobol_sequence
diff --git a/queens/models/model.py b/queens/models/_model.py
similarity index 100%
rename from queens/models/model.py
rename to queens/models/_model.py
diff --git a/queens/models/bmfmc.py b/queens/models/bmfmc.py
index 6e8424554..cb27bd618 100644
--- a/queens/models/bmfmc.py
+++ b/queens/models/bmfmc.py
@@ -24,7 +24,7 @@
import queens.utils.pdf_estimation as est
from queens.iterators.data import Data
-from queens.models.model import Model
+from queens.models._model import Model
from queens.parameters.random_fields.karhunen_loeve import KarhunenLoeve as RandomField
from queens.utils.logger_settings import log_init_args
diff --git a/queens/models/likelihoods/likelihood.py b/queens/models/likelihoods/_likelihood.py
similarity index 98%
rename from queens/models/likelihoods/likelihood.py
rename to queens/models/likelihoods/_likelihood.py
index e38924204..0c1636fb6 100644
--- a/queens/models/likelihoods/likelihood.py
+++ b/queens/models/likelihoods/_likelihood.py
@@ -18,7 +18,7 @@
import numpy as np
-from queens.models.model import Model
+from queens.models._model import Model
class Likelihood(Model):
diff --git a/queens/models/likelihoods/bmf_gaussian.py b/queens/models/likelihoods/bmf_gaussian.py
index 65ef77ec9..61c25ae28 100644
--- a/queens/models/likelihoods/bmf_gaussian.py
+++ b/queens/models/likelihoods/bmf_gaussian.py
@@ -24,7 +24,7 @@
import tqdm
from queens.distributions.mean_field_normal import MeanFieldNormal
-from queens.models.likelihoods.likelihood import Likelihood
+from queens.models.likelihoods._likelihood import Likelihood
from queens.utils.ascii_art import print_bmfia_acceleration
from queens.utils.logger_settings import log_init_args
from queens.utils.valid_options import get_option
diff --git a/queens/models/likelihoods/gaussian.py b/queens/models/likelihoods/gaussian.py
index 9ddee3ad3..8ac13fcf1 100644
--- a/queens/models/likelihoods/gaussian.py
+++ b/queens/models/likelihoods/gaussian.py
@@ -19,10 +19,10 @@
import numpy as np
from queens.distributions.normal import Normal
-from queens.models.likelihoods.likelihood import Likelihood
+from queens.models.likelihoods._likelihood import Likelihood
from queens.utils.exceptions import InvalidOptionError
from queens.utils.logger_settings import log_init_args
-from queens.utils.numpy import add_nugget_to_diagonal
+from queens.utils.numpy_linalg import add_nugget_to_diagonal
class Gaussian(Likelihood):
diff --git a/queens/models/logpdf_gp.py b/queens/models/logpdf_gp.py
index d6d0cf227..369ad4f84 100644
--- a/queens/models/logpdf_gp.py
+++ b/queens/models/logpdf_gp.py
@@ -26,10 +26,10 @@
from jax import jit, vmap
from scipy import stats
-from queens.models.model import Model
+from queens.models._model import Model
from queens.utils import jax_minimize_wrapper
-from queens.utils.gpflow import init_scaler
-from queens.utils.numpy import safe_cholesky
+from queens.utils.gpflow_transformations import init_scaler
+from queens.utils.numpy_linalg import safe_cholesky
_logger = logging.getLogger(__name__)
jax.config.update("jax_enable_x64", True)
diff --git a/queens/models/simulation.py b/queens/models/simulation.py
index 7feaac7bf..d00478c5d 100644
--- a/queens/models/simulation.py
+++ b/queens/models/simulation.py
@@ -16,7 +16,7 @@
import numpy as np
-from queens.models.model import Model
+from queens.models._model import Model
from queens.utils.logger_settings import log_init_args
diff --git a/queens/models/surrogates/surrogate.py b/queens/models/surrogates/_surrogate.py
similarity index 99%
rename from queens/models/surrogates/surrogate.py
rename to queens/models/surrogates/_surrogate.py
index 85eb6ec96..acb563741 100644
--- a/queens/models/surrogates/surrogate.py
+++ b/queens/models/surrogates/_surrogate.py
@@ -20,7 +20,7 @@
import numpy as np
from sklearn.model_selection import KFold
-from queens.models.model import Model
+from queens.models._model import Model
from queens.visualization.surrogate_visualization import SurrogateVisualization
_logger = logging.getLogger(__name__)
diff --git a/queens/models/surrogates/bayesian_neural_network.py b/queens/models/surrogates/bayesian_neural_network.py
index 8ed45bf96..38fb5b193 100644
--- a/queens/models/surrogates/bayesian_neural_network.py
+++ b/queens/models/surrogates/bayesian_neural_network.py
@@ -20,9 +20,9 @@
import numpy as np
import tensorflow_probability as tfp
-from queens.models.surrogates.surrogate import Surrogate
+from queens.models.surrogates._surrogate import Surrogate
+from queens.utils.configure_tensorflow import configure_keras, configure_tensorflow
from queens.utils.logger_settings import log_init_args
-from queens.utils.tensorflow import configure_keras, configure_tensorflow
_logger = logging.getLogger(__name__)
diff --git a/queens/models/surrogates/gaussian_neural_network.py b/queens/models/surrogates/gaussian_neural_network.py
index d4abd3c18..683cc93eb 100644
--- a/queens/models/surrogates/gaussian_neural_network.py
+++ b/queens/models/surrogates/gaussian_neural_network.py
@@ -20,10 +20,10 @@
import numpy as np
import tensorflow_probability as tfp
-from queens.models.surrogates.surrogate import Surrogate
+from queens.models.surrogates._surrogate import Surrogate
+from queens.utils.configure_tensorflow import configure_keras, configure_tensorflow
from queens.utils.logger_settings import log_init_args
from queens.utils.random_process_scaler import VALID_SCALER
-from queens.utils.tensorflow import configure_keras, configure_tensorflow
from queens.utils.valid_options import get_option
from queens.visualization.gaussian_neural_network_vis import plot_loss
diff --git a/queens/models/surrogates/gaussian_process.py b/queens/models/surrogates/gaussian_process.py
index 5140a7430..daae1577c 100644
--- a/queens/models/surrogates/gaussian_process.py
+++ b/queens/models/surrogates/gaussian_process.py
@@ -20,10 +20,11 @@
import numpy as np
import tensorflow_probability as tfp
-from queens.models.surrogates.surrogate import Surrogate
-from queens.utils.gpflow import extract_block_diag, init_scaler, set_transform_function
+from queens.models.surrogates._surrogate import Surrogate
+from queens.utils.configure_tensorflow import configure_tensorflow
+from queens.utils.gpflow_transformations import init_scaler, set_transform_function
from queens.utils.logger_settings import log_init_args
-from queens.utils.tensorflow import configure_tensorflow
+from queens.utils.numpy_array import extract_block_diag
_logger = logging.getLogger(__name__)
diff --git a/queens/models/surrogates/heteroskedastic_gaussian_process.py b/queens/models/surrogates/heteroskedastic_gaussian_process.py
index c0c3ab9b8..a45098183 100644
--- a/queens/models/surrogates/heteroskedastic_gaussian_process.py
+++ b/queens/models/surrogates/heteroskedastic_gaussian_process.py
@@ -21,9 +21,9 @@
import tensorflow_probability as tfp
from sklearn.cluster import KMeans
-from queens.models.surrogates.surrogate import Surrogate
+from queens.models.surrogates._surrogate import Surrogate
+from queens.utils.configure_tensorflow import configure_keras, configure_tensorflow
from queens.utils.logger_settings import log_init_args
-from queens.utils.tensorflow import configure_keras, configure_tensorflow
_logger = logging.getLogger(__name__)
diff --git a/queens/models/surrogates/jitted_gaussian_process.py b/queens/models/surrogates/jitted_gaussian_process.py
index fc82e86eb..04e5f36fa 100644
--- a/queens/models/surrogates/jitted_gaussian_process.py
+++ b/queens/models/surrogates/jitted_gaussian_process.py
@@ -20,7 +20,7 @@
from scipy.linalg import cho_solve
import queens.models.surrogates.utils.kernel_jitted as utils_jitted
-from queens.models.surrogates.surrogate import Surrogate
+from queens.models.surrogates._surrogate import Surrogate
from queens.utils.logger_settings import log_init_args
from queens.utils.random_process_scaler import VALID_SCALER
from queens.utils.valid_options import get_option
diff --git a/queens/models/surrogates/variational_gaussian_process.py b/queens/models/surrogates/variational_gaussian_process.py
index e8e26f5e1..44d852865 100644
--- a/queens/models/surrogates/variational_gaussian_process.py
+++ b/queens/models/surrogates/variational_gaussian_process.py
@@ -35,10 +35,11 @@
import numpy as np
import tensorflow_probability as tfp
-from queens.models.surrogates.surrogate import Surrogate
-from queens.utils.gpflow import extract_block_diag, init_scaler, set_transform_function
+from queens.models.surrogates._surrogate import Surrogate
+from queens.utils.configure_tensorflow import configure_keras, configure_tensorflow
+from queens.utils.gpflow_transformations import init_scaler, set_transform_function
from queens.utils.logger_settings import log_init_args
-from queens.utils.tensorflow import configure_keras, configure_tensorflow
+from queens.utils.numpy_array import extract_block_diag
_logger = logging.getLogger(__name__)
diff --git a/queens/parameters/parameters.py b/queens/parameters/parameters.py
index d5f09c236..86da808bb 100644
--- a/queens/parameters/parameters.py
+++ b/queens/parameters/parameters.py
@@ -19,9 +19,9 @@
import numpy as np
from queens.distributions import VALID_TYPES as VALID_DISTRIBUTION_TYPES
-from queens.distributions.distribution import Continuous
+from queens.distributions._distribution import Continuous
from queens.parameters.random_fields import VALID_TYPES as VALID_FIELD_TYPES
-from queens.parameters.random_fields.random_field import RandomField
+from queens.parameters.random_fields._random_field import RandomField
from queens.utils.imports import get_module_class
from queens.utils.logger_settings import log_init_args
diff --git a/queens/parameters/random_fields/random_field.py b/queens/parameters/random_fields/_random_field.py
similarity index 98%
rename from queens/parameters/random_fields/random_field.py
rename to queens/parameters/random_fields/_random_field.py
index cfa781f59..68fad2cf5 100644
--- a/queens/parameters/random_fields/random_field.py
+++ b/queens/parameters/random_fields/_random_field.py
@@ -18,7 +18,7 @@
import numpy as np
-from queens.utils.numpy import at_least_2d
+from queens.utils.numpy_array import at_least_2d
class RandomField(metaclass=abc.ABCMeta):
diff --git a/queens/parameters/random_fields/fourier.py b/queens/parameters/random_fields/fourier.py
index dfcb01b49..8476c429f 100644
--- a/queens/parameters/random_fields/fourier.py
+++ b/queens/parameters/random_fields/fourier.py
@@ -19,7 +19,7 @@
from scipy.spatial.distance import pdist
from queens.distributions.mean_field_normal import MeanFieldNormal
-from queens.parameters.random_fields.random_field import RandomField
+from queens.parameters.random_fields._random_field import RandomField
class Fourier(RandomField):
diff --git a/queens/parameters/random_fields/karhunen_loeve.py b/queens/parameters/random_fields/karhunen_loeve.py
index f094f7b57..0fea87fa8 100644
--- a/queens/parameters/random_fields/karhunen_loeve.py
+++ b/queens/parameters/random_fields/karhunen_loeve.py
@@ -20,7 +20,7 @@
from scipy.spatial.distance import pdist, squareform
from queens.distributions.mean_field_normal import MeanFieldNormal
-from queens.parameters.random_fields.random_field import RandomField
+from queens.parameters.random_fields._random_field import RandomField
_logger = logging.getLogger(__name__)
diff --git a/queens/parameters/random_fields/piece_wise.py b/queens/parameters/random_fields/piece_wise.py
index 5a09eb25a..3fad8c95a 100644
--- a/queens/parameters/random_fields/piece_wise.py
+++ b/queens/parameters/random_fields/piece_wise.py
@@ -16,7 +16,7 @@
from queens.distributions import VALID_TYPES as distribution_types
from queens.distributions.mean_field_normal import MeanFieldNormal
-from queens.parameters.random_fields.random_field import RandomField
+from queens.parameters.random_fields._random_field import RandomField
from queens.utils.imports import get_module_class
diff --git a/queens/schedulers/__init__.py b/queens/schedulers/__init__.py
index 5a45f8905..0b3ad5fdd 100644
--- a/queens/schedulers/__init__.py
+++ b/queens/schedulers/__init__.py
@@ -17,9 +17,9 @@
Modules for scheduling and submitting computational jobs.
"""
+from queens.schedulers._scheduler import Scheduler
from queens.schedulers.cluster import Cluster
from queens.schedulers.local import Local
from queens.schedulers.pool import Pool
-from queens.schedulers.scheduler import Scheduler
VALID_TYPES = {"local": Local, "cluster": Cluster, "pool": Pool}
diff --git a/queens/schedulers/dask.py b/queens/schedulers/_dask.py
similarity index 99%
rename from queens/schedulers/dask.py
rename to queens/schedulers/_dask.py
index acb3fdc2c..35df2f3c8 100644
--- a/queens/schedulers/dask.py
+++ b/queens/schedulers/_dask.py
@@ -22,7 +22,7 @@
import tqdm
from dask.distributed import as_completed
-from queens.schedulers.scheduler import Scheduler
+from queens.schedulers._scheduler import Scheduler
from queens.utils.printing import get_str_table
_logger = logging.getLogger(__name__)
diff --git a/queens/schedulers/scheduler.py b/queens/schedulers/_scheduler.py
similarity index 100%
rename from queens/schedulers/scheduler.py
rename to queens/schedulers/_scheduler.py
diff --git a/queens/schedulers/cluster.py b/queens/schedulers/cluster.py
index 2393f4775..bc6278346 100644
--- a/queens/schedulers/cluster.py
+++ b/queens/schedulers/cluster.py
@@ -21,7 +21,7 @@
from dask.distributed import Client
from dask_jobqueue import PBSCluster, SLURMCluster
-from queens.schedulers.dask import Dask
+from queens.schedulers._dask import Dask
from queens.utils.config_directories import experiment_directory # Do not change this import!
from queens.utils.logger_settings import log_init_args
from queens.utils.valid_options import get_option
diff --git a/queens/schedulers/local.py b/queens/schedulers/local.py
index a62ab5284..557190cf0 100644
--- a/queens/schedulers/local.py
+++ b/queens/schedulers/local.py
@@ -18,7 +18,7 @@
from dask.distributed import Client, LocalCluster
-from queens.schedulers.dask import Dask
+from queens.schedulers._dask import Dask
from queens.utils.config_directories import experiment_directory
from queens.utils.logger_settings import log_init_args
diff --git a/queens/schedulers/pool.py b/queens/schedulers/pool.py
index 2258d05d4..8faf06edc 100644
--- a/queens/schedulers/pool.py
+++ b/queens/schedulers/pool.py
@@ -20,7 +20,7 @@
import numpy as np
from tqdm import tqdm
-from queens.schedulers.scheduler import Scheduler
+from queens.schedulers._scheduler import Scheduler
from queens.utils.config_directories import experiment_directory
from queens.utils.logger_settings import log_init_args
from queens.utils.pool import create_pool
diff --git a/queens/stochastic_optimizers/stochastic_optimizer.py b/queens/stochastic_optimizers/_stochastic_optimizer.py
similarity index 100%
rename from queens/stochastic_optimizers/stochastic_optimizer.py
rename to queens/stochastic_optimizers/_stochastic_optimizer.py
diff --git a/queens/stochastic_optimizers/adam.py b/queens/stochastic_optimizers/adam.py
index 48c98b46b..5646a6d48 100644
--- a/queens/stochastic_optimizers/adam.py
+++ b/queens/stochastic_optimizers/adam.py
@@ -18,7 +18,7 @@
import numpy as np
-from queens.stochastic_optimizers.stochastic_optimizer import StochasticOptimizer
+from queens.stochastic_optimizers._stochastic_optimizer import StochasticOptimizer
from queens.utils.iterative_averaging import ExponentialAveraging
_logger = logging.getLogger(__name__)
diff --git a/queens/stochastic_optimizers/adamax.py b/queens/stochastic_optimizers/adamax.py
index a4a1787bb..7d38ca3de 100644
--- a/queens/stochastic_optimizers/adamax.py
+++ b/queens/stochastic_optimizers/adamax.py
@@ -18,7 +18,7 @@
import numpy as np
-from queens.stochastic_optimizers.stochastic_optimizer import StochasticOptimizer
+from queens.stochastic_optimizers._stochastic_optimizer import StochasticOptimizer
from queens.utils.iterative_averaging import ExponentialAveraging
_logger = logging.getLogger(__name__)
diff --git a/queens/stochastic_optimizers/rms_prop.py b/queens/stochastic_optimizers/rms_prop.py
index e2875e954..6a97b9c60 100644
--- a/queens/stochastic_optimizers/rms_prop.py
+++ b/queens/stochastic_optimizers/rms_prop.py
@@ -18,7 +18,7 @@
import numpy as np
-from queens.stochastic_optimizers.stochastic_optimizer import StochasticOptimizer
+from queens.stochastic_optimizers._stochastic_optimizer import StochasticOptimizer
from queens.utils.iterative_averaging import ExponentialAveraging
_logger = logging.getLogger(__name__)
diff --git a/queens/stochastic_optimizers/sgd.py b/queens/stochastic_optimizers/sgd.py
index 240c33f86..dad6a2345 100644
--- a/queens/stochastic_optimizers/sgd.py
+++ b/queens/stochastic_optimizers/sgd.py
@@ -18,7 +18,7 @@
import numpy as np
-from queens.stochastic_optimizers.stochastic_optimizer import StochasticOptimizer
+from queens.stochastic_optimizers._stochastic_optimizer import StochasticOptimizer
_logger = logging.getLogger(__name__)
diff --git a/queens/utils/cli.py b/queens/utils/cli.py
index ae3cc3076..1483cfcdc 100644
--- a/queens/utils/cli.py
+++ b/queens/utils/cli.py
@@ -23,10 +23,10 @@
from queens.utils.exceptions import CLIError
from queens.utils.injector import inject
from queens.utils.input_to_script import create_script_from_input_file
+from queens.utils.io import print_pickled_data
from queens.utils.logger_settings import reset_logging, setup_cli_logging
from queens.utils.metadata import write_metadata_to_csv
from queens.utils.path import PATH_TO_QUEENS
-from queens.utils.pickle import print_pickled_data
from queens.utils.printing import get_str_table
from queens.utils.run_subprocess import run_subprocess
diff --git a/queens/utils/tensorflow.py b/queens/utils/configure_tensorflow.py
similarity index 100%
rename from queens/utils/tensorflow.py
rename to queens/utils/configure_tensorflow.py
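A pure rename; call sites only swap the import path, as the surrogate-model hunks above show:

    # old: from queens.utils.tensorflow import configure_keras, configure_tensorflow
    from queens.utils.configure_tensorflow import configure_keras, configure_tensorflow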
diff --git a/queens/utils/experimental_data_reader.py b/queens/utils/experimental_data_reader.py
index 2a87e793f..fffdd6db9 100644
--- a/queens/utils/experimental_data_reader.py
+++ b/queens/utils/experimental_data_reader.py
@@ -18,7 +18,7 @@
import numpy as np
-from queens.data_processors.csv import Csv
+from queens.data_processors.csv_file import CsvFile
from queens.utils.logger_settings import log_init_args
@@ -61,7 +61,7 @@ def __init__(
self.base_dir = Path(csv_data_base_dir)
if data_processor is None:
- self.data_processor = Csv(
+ self.data_processor = CsvFile(
file_name_identifier=self.file_name,
file_options_dict={
"header_row": 0,
diff --git a/queens/utils/from_config_create.py b/queens/utils/from_config_create.py
index fa9e2a47f..f518dc079 100644
--- a/queens/utils/from_config_create.py
+++ b/queens/utils/from_config_create.py
@@ -20,10 +20,10 @@
from queens.data_processors import VALID_TYPES as VALID_DATA_PROCESSOR_TYPES
from queens.distributions import VALID_TYPES as VALID_DISTRIBUTION_TYPES
from queens.drivers import VALID_TYPES as VALID_DRIVER_TYPES
-from queens.drivers.driver import Driver
+from queens.drivers._driver import Driver
from queens.external_geometries import VALID_TYPES as VALID_EXTERNAL_GEOMETRY_TYPES
from queens.iterators import VALID_TYPES as VALID_ITERATOR_TYPES
-from queens.iterators.iterator import Iterator
+from queens.iterators._iterator import Iterator
from queens.models import VALID_TYPES as VALID_MODEL_TYPES
from queens.models.bmfmc import BMFMC
from queens.parameters.parameters import from_config_create_parameters
diff --git a/queens/utils/gpflow.py b/queens/utils/gpflow_transformations.py
similarity index 76%
rename from queens/utils/gpflow.py
rename to queens/utils/gpflow_transformations.py
index e53e775fb..fb3ac94f6 100644
--- a/queens/utils/gpflow.py
+++ b/queens/utils/gpflow_transformations.py
@@ -16,7 +16,6 @@
from typing import TYPE_CHECKING
-import numpy as np
from sklearn.preprocessing import StandardScaler
# This allows autocomplete in the IDE
@@ -63,25 +62,3 @@ def set_transform_function(data, transform):
name=data.name.split(":")[0],
transform=transform,
)
-
-
-def extract_block_diag(array, block_size):
- """Extract block diagonals of square 2D Array.
-
- Args:
- array (np.ndarray): Square 2D array
- block_size (int): Block size
-
- Returns:
- 3D Array containing block diagonals
- """
- n_blocks = array.shape[0] // block_size
-
- new_shape = (n_blocks, block_size, block_size)
- new_strides = (
- block_size * array.strides[0] + block_size * array.strides[1],
- array.strides[0],
- array.strides[1],
- )
-
- return np.lib.stride_tricks.as_strided(array, new_shape, new_strides)
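extract_block_diag is not dropped here; it moves into the new queens.utils.numpy_array module introduced further below, so callers swap imports:

    # old: from queens.utils.gpflow import extract_block_diag
    from queens.utils.numpy_array import extract_block_diag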
diff --git a/queens/utils/input_to_script.py b/queens/utils/input_to_script.py
index e7a2729cc..89bc4a031 100644
--- a/queens/utils/input_to_script.py
+++ b/queens/utils/input_to_script.py
@@ -20,12 +20,12 @@
import black
-from queens.distributions.distribution import Continuous
-from queens.drivers.driver import Driver
-from queens.iterators.iterator import Iterator
+from queens.distributions._distribution import Continuous
+from queens.drivers._driver import Driver
+from queens.iterators._iterator import Iterator
from queens.models.bmfmc import BMFMC
-from queens.parameters.random_fields.random_field import RandomField
-from queens.schedulers.scheduler import Scheduler
+from queens.parameters.random_fields._random_field import RandomField
+from queens.schedulers._scheduler import Scheduler
from queens.utils.from_config_create import VALID_TYPES, check_for_reference
from queens.utils.imports import get_module_attribute, get_option
from queens.utils.io import load_input_file
diff --git a/queens/utils/io.py b/queens/utils/io.py
index 680aafb18..04a31e0dc 100644
--- a/queens/utils/io.py
+++ b/queens/utils/io.py
@@ -15,12 +15,13 @@
"""Utils for input/output handling."""
import csv
+import logging
+import pickle
from pathlib import Path
import yaml
from queens.utils.exceptions import FileTypeError
-from queens.utils.pickle import load_pickle
try:
import simplejson as json
@@ -28,6 +29,73 @@
import json
+_logger = logging.getLogger(__name__)
+
+
+def load_pickle(file_path):
+ """Load a pickle file directly from path.
+
+ Args:
+        file_path (Path): Path to the pickle file
+
+    Returns:
+        dict: Data contained in the pickle file
+ """
+ if not file_path.is_file():
+ raise FileNotFoundError(f"File {file_path} does not exist.")
+ try:
+        with file_path.open("rb") as pickle_file:
+            return pickle.load(pickle_file)
+ except Exception as exception:
+ raise IOError(f"Could not open the pickle file {file_path}") from exception
+
+
+def print_pickled_data(file_path):
+ """Print a table of the data within a pickle file.
+
+    Only goes one layer deep for dicts. This is similar to *python -m pickle file_path*, but as a
+    single command with nicely formatted output.
+
+ Args:
+        file_path (Path): Path to the pickle file
+ """
+ data = load_pickle(file_path)
+ _logger.info("\n\npickle file: %s", file_path)
+ for key, item in data.items():
+ item_type = type(item)
+ if isinstance(item, dict):
+ string = ""
+ for subkey, subitem in item.items():
+ string += (
+                    _create_single_item_string(subkey, subitem, type(subitem), separator="-") + "\n"
+ )
+ item = string.replace("\n", "\n ")
+ _logger.info(_create_single_item_string(key, item, item_type))
+ _logger.info(" ")
+
+
+def _create_single_item_string(key, item, item_type, separator="="):
+    """Create a table for a single item.
+
+    Args:
+        key (str): Key of the item
+        item (obj): Item value for the key
+        item_type (type): Type of the item value
+        separator (str, optional): Character used for the separator lines (default is "=")
+
+    Returns:
+        string: Table for this item.
+    """
+    string = (
+        separator * 60
+        + f"\nKey: {key}\n"
+        + f"Type: {item_type}\n"
+        + f"Value:\n{item}\n"
+        + separator * 60
+ )
+ return string
+
+
def load_input_file(input_file_path):
"""Load inputs from file by path.
diff --git a/queens/utils/numpy_array.py b/queens/utils/numpy_array.py
new file mode 100644
index 000000000..f264bec2b
--- /dev/null
+++ b/queens/utils/numpy_array.py
@@ -0,0 +1,73 @@
+#
+# SPDX-License-Identifier: LGPL-3.0-or-later
+# Copyright (c) 2024-2025, QUEENS contributors.
+#
+# This file is part of QUEENS.
+#
+# QUEENS is free software: you can redistribute it and/or modify it under the terms of the GNU
+# Lesser General Public License as published by the Free Software Foundation, either version 3 of
+# the License, or (at your option) any later version. QUEENS is distributed in the hope that it will
+# be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You
+# should have received a copy of the GNU Lesser General Public License along with QUEENS. If not,
+# see <https://www.gnu.org/licenses/>.
+#
+"""Numpy array utils."""
+
+import numpy as np
+
+
+def at_least_2d(arr):
+ """View input array as array with at least two dimensions.
+
+ Args:
+ arr (np.ndarray): Input array
+
+ Returns:
+ arr (np.ndarray): View of input array with at least two dimensions
+ """
+ if arr.ndim == 0:
+ return arr.reshape((1, 1))
+ if arr.ndim == 1:
+ return arr[:, np.newaxis]
+ return arr
+
+
+def at_least_3d(arr):
+ """View input array as array with at least three dimensions.
+
+ Args:
+ arr (np.ndarray): Input array
+
+ Returns:
+ arr (np.ndarray): View of input array with at least three dimensions
+ """
+ if arr.ndim == 0:
+ return arr.reshape((1, 1, 1))
+ if arr.ndim == 1:
+ return arr[:, np.newaxis, np.newaxis]
+ if arr.ndim == 2:
+ return arr[:, :, np.newaxis]
+ return arr
+
+
+def extract_block_diag(array, block_size):
+    """Extract block diagonals of a square 2D array.
+
+ Args:
+ array (np.ndarray): Square 2D array
+ block_size (int): Block size
+
+ Returns:
+        np.ndarray: 3D array containing the block diagonals
+ """
+ n_blocks = array.shape[0] // block_size
+
+ new_shape = (n_blocks, block_size, block_size)
+ new_strides = (
+ block_size * array.strides[0] + block_size * array.strides[1],
+ array.strides[0],
+ array.strides[1],
+ )
+
+ return np.lib.stride_tricks.as_strided(array, new_shape, new_strides)
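The three helpers bundled in queens.utils.numpy_array are pure reshapes and views; a short sketch of their behavior on small arrays (the shapes follow directly from the code above):

    import numpy as np

    from queens.utils.numpy_array import at_least_2d, at_least_3d, extract_block_diag

    vec = np.array([1.0, 2.0, 3.0])
    assert at_least_2d(vec).shape == (3, 1)  # 1D input gains a trailing axis
    assert at_least_3d(vec).shape == (3, 1, 1)

    # the two 2x2 blocks on the diagonal of a 4x4 matrix
    mat = np.arange(16.0).reshape(4, 4)
    assert extract_block_diag(mat, block_size=2).shape == (2, 2, 2)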
diff --git a/queens/utils/numpy.py b/queens/utils/numpy_linalg.py
similarity index 76%
rename from queens/utils/numpy.py
rename to queens/utils/numpy_linalg.py
index 332986941..8ecd849c1 100644
--- a/queens/utils/numpy.py
+++ b/queens/utils/numpy_linalg.py
@@ -12,7 +12,7 @@
# should have received a copy of the GNU Lesser General Public License along with QUEENS. If not,
# see <https://www.gnu.org/licenses/>.
#
-"""Numpy array utils."""
+"""Numpy linear algebra utils."""
import logging
@@ -21,40 +21,6 @@
_logger = logging.getLogger(__name__)
-def at_least_2d(arr):
- """View input array as array with at least two dimensions.
-
- Args:
- arr (np.ndarray): Input array
-
- Returns:
- arr (np.ndarray): View of input array with at least two dimensions
- """
- if arr.ndim == 0:
- return arr.reshape((1, 1))
- if arr.ndim == 1:
- return arr[:, np.newaxis]
- return arr
-
-
-def at_least_3d(arr):
- """View input array as array with at least three dimensions.
-
- Args:
- arr (np.ndarray): Input array
-
- Returns:
- arr (np.ndarray): View of input array with at least three dimensions
- """
- if arr.ndim == 0:
- return arr.reshape((1, 1, 1))
- if arr.ndim == 1:
- return arr[:, np.newaxis, np.newaxis]
- if arr.ndim == 2:
- return arr[:, :, np.newaxis]
- return arr
-
-
def safe_cholesky(matrix, jitter_start_value=1e-10):
"""Numerically stable Cholesky decomposition.
diff --git a/queens/utils/pickle.py b/queens/utils/pickle.py
deleted file mode 100644
index 2bac6bff2..000000000
--- a/queens/utils/pickle.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#
-# SPDX-License-Identifier: LGPL-3.0-or-later
-# Copyright (c) 2024-2025, QUEENS contributors.
-#
-# This file is part of QUEENS.
-#
-# QUEENS is free software: you can redistribute it and/or modify it under the terms of the GNU
-# Lesser General Public License as published by the Free Software Foundation, either version 3 of
-# the License, or (at your option) any later version. QUEENS is distributed in the hope that it will
-# be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You
-# should have received a copy of the GNU Lesser General Public License along with QUEENS. If not,
-# see <https://www.gnu.org/licenses/>.
-#
-"""Utils to handle pickle files."""
-
-import logging
-import pickle
-
-_logger = logging.getLogger(__name__)
-
-
-def load_pickle(file_path):
- """Load a pickle file directly from path.
-
- Args:
- file_path (Path): Path to pickle-file
-
- Returns:
- Data (dict) in the pickle file
- """
- if not file_path.is_file():
- raise FileNotFoundError(f"File {file_path} does not exist.")
- try:
- data = pickle.load(file_path.open("rb"))
- return data
- except Exception as exception:
- raise IOError(f"Could not open the pickle file {file_path}") from exception
-
-
-def print_pickled_data(file_path):
- """Print a table of the data within a pickle file.
-
- Only goes one layer deep for dicts. This is similar to *python -m pickle file_path* but makes
- it a single command and fancy prints.
-
- Args:
- file_path (Path): Path to pickle-file
- """
- data = load_pickle(file_path)
- _logger.info("\n\npickle file: %s", file_path)
- for key, item in data.items():
- item_type = type(item)
- if isinstance(item, dict):
- string = ""
- for subkey, subitem in item.items():
- string += (
- _create_single_item_string(subkey, subitem, type(subitem), seperator="-") + "\n"
- )
- item = string.replace("\n", "\n ")
- _logger.info(_create_single_item_string(key, item, item_type))
- _logger.info(" ")
-
-
-def _create_single_item_string(key, item, item_type, seperator="="):
- """Create a table for a single item.
-
- Args:
- key (str): Key of the item
- item (obj): Item value for the key
- item_type (str): Type of the item value
- seperator (str, optional): Create seperator line (default is "=")
-
- Returns:
- string: table for this item.
- """
- string = (
- seperator * 60
- + f"\nKey: {key}\n"
- + f"Type: {item_type}\n"
- + f"Value:\n{item}\n"
- + seperator * 60
- )
- return string
diff --git a/queens/utils/pymc.py b/queens/utils/pymc.py
deleted file mode 100644
index d922c1a03..000000000
--- a/queens/utils/pymc.py
+++ /dev/null
@@ -1,238 +0,0 @@
-#
-# SPDX-License-Identifier: LGPL-3.0-or-later
-# Copyright (c) 2024-2025, QUEENS contributors.
-#
-# This file is part of QUEENS.
-#
-# QUEENS is free software: you can redistribute it and/or modify it under the terms of the GNU
-# Lesser General Public License as published by the Free Software Foundation, either version 3 of
-# the License, or (at your option) any later version. QUEENS is distributed in the hope that it will
-# be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You
-# should have received a copy of the GNU Lesser General Public License along with QUEENS. If not,
-# see <https://www.gnu.org/licenses/>.
-#
-"""Collection of utility functions and classes for PyMC."""
-
-from typing import Union
-
-import numpy as np
-import pymc as pm
-import pytensor.tensor as pt
-from pytensor import Variable
-
-from queens.distributions import beta, exponential, lognormal, mean_field_normal, normal, uniform
-
-
-class PymcDistributionWrapper(pt.Op):
- """Op class for Data conversion.
-
- This PymcDistributionWrapper class is a wrapper for PyMC Distributions in QUEENS.
-
- Attributes:
- logpdf (fun): The log-pdf function
- logpdf_gradients (fun): The function to evaluate the gradient of the log-pdf
- logpdf_grad (obj): Wrapper for the gradient function of the log-pdf
- """
-
- itypes = [pt.dmatrix]
- otypes = [pt.dvector]
-
- def __init__(self, logpdf, logpdf_gradients=None):
- """Initzialise the wrapper for the functions.
-
- Args:
- logpdf (fun): The log-pdf function
- logpdf_gradients (fun): The function to evaluate the gradient of the pdf
- """
- self.logpdf = logpdf
- self.logpdf_gradients = logpdf_gradients
- self.logpdf_grad = PymcGradientWrapper(self.logpdf_gradients)
-
- # pylint: disable-next=unused-argument
- def perform(self, _node, inputs, output_storage, params=None):
- """Call outside pdf function."""
- (sample,) = inputs
-
- value = self.logpdf(sample)
- output_storage[0][0] = np.array(value)
-
- def grad(self, inputs, output_grads):
- """Get gradient and multiply with upstream gradient."""
- (sample,) = inputs
- return [output_grads[0] * self.logpdf_grad(sample)]
-
- def R_op(
- self, inputs: list[Variable], eval_points: Union[Variable, list[Variable]]
- ) -> list[Variable]:
- """Construct a graph for the R-operator.
-
- This method is primarily used by `Rop`.
- For more information, see pymc documentation for the method.
-
- Args:
- inputs (list[Variable]): The input variables for the R operator.
- eval_points (Union[Variable, list[Variable]]): Should have the same length as inputs.
- Each element of `eval_points` specifies
- the value of the corresponding input at
- the point where the R-operator is to be
- evaluated.
-
- Returns:
- list[Variable]
- """
- raise NotImplementedError
-
-
-class PymcGradientWrapper(pt.Op):
- """Op class for Data conversion.
-
- This Class is a wrapper for the gradient of the distributions in QUEENS.
-
- Attributes:
- gradient_func (fun): The function to evaluate the gradient of the pdf
- """
-
- itypes = [pt.dmatrix]
- otypes = [pt.dmatrix]
-
- def __init__(self, gradient_func):
- """Initzialise the wrapper for the functions.
-
- Args:
- gradient_func (fun): The function to evaluate the gradient of the pdf
- """
- self.gradient_func = gradient_func
-
- def perform(self, _node, inputs, output_storage, _params=None):
- """Evaluate the gradient."""
- (sample,) = inputs
- if self.gradient_func is not None:
- grads = self.gradient_func(sample)
- output_storage[0][0] = grads
- else:
- raise TypeError("Gradient function is not callable")
-
- def R_op(
- self, inputs: list[Variable], eval_points: Union[Variable, list[Variable]]
- ) -> list[Variable]:
- """Construct a graph for the R-operator.
-
- This method is primarily used by `Rop`.
- For more information, see pymc documentation for the method.
-
- Args:
- inputs (list[Variable]): The input variables for the R operator.
- eval_points (Union[Variable, list[Variable]]): Should have the same length as inputs.
- Each element of `eval_points` specifies
- the value of the corresponding input at
- the point where the R-operator is to be
- evaluated.
-
- Returns:
- list[Variable]
- """
- raise NotImplementedError
-
-
-def from_config_create_pymc_distribution_dict(parameters, explicit_shape):
- """Get random variables in pymc distribution format.
-
- Args:
- parameters (obj): Parameters object
-
- explicit_shape (int): Explicit shape parameter for distribution dimension
-
- Returns:
- pymc distribution list
- """
- pymc_distribution_list = []
-
- # loop over random_variables and create list
- for name, distribution in zip(parameters.names, parameters.to_distribution_list()):
- pymc_distribution_list.append(
- from_config_create_pymc_distribution(distribution, name, explicit_shape)
- )
- # Pass the distribution list as arguments
- return pymc_distribution_list
-
-
-def from_config_create_pymc_distribution(distribution, name, explicit_shape):
- """Create PyMC distribution object from queens distribution.
-
- Args:
- distribution (obj): Queens distribution object
-
- name (str): name of random variable
- explicit_shape (int): Explicit shape parameter for distribution dimension
-
- Returns:
- random_variable: Random variable, distribution object in pymc format
- """
- shape = (explicit_shape, distribution.dimension)
- if isinstance(distribution, normal.Normal):
- random_variable = pm.MvNormal(
- name,
- mu=distribution.mean,
- cov=distribution.covariance,
- shape=shape,
- )
- elif isinstance(distribution, mean_field_normal.MeanFieldNormal):
- random_variable = pm.Normal(
- name,
- mu=distribution.mean,
- sigma=distribution.covariance,
- shape=shape,
- )
-
- elif isinstance(distribution, uniform.Uniform):
- if np.all(distribution.lower_bound == 0):
- random_variable = pm.Uniform(
- name,
- lower=0,
- upper=distribution.upper_bound,
- shape=shape,
- )
-
- elif np.all(distribution.upper_bound == 0):
- random_variable = pm.Uniform(
- name,
- lower=distribution.lower_bound,
- upper=0,
- shape=shape,
- )
- else:
- random_variable = pm.Uniform(
- name,
- lower=distribution.lower_bound,
- upper=distribution.upper_bound,
- shape=shape,
- )
- elif isinstance(distribution, lognormal.LogNormal):
- if distribution.covariance.size == 1:
- std = distribution.covariance[0, 0] ** (1 / 2)
- else:
- raise NotImplementedError("Only 1D lognormals supported")
-
- random_variable = pm.LogNormal(
- name,
- mu=distribution.mean,
- sigma=std,
- shape=shape,
- )
- elif isinstance(distribution, exponential.Exponential):
- random_variable = pm.Exponential(
- name,
- lam=distribution.rate,
- shape=shape,
- )
- elif isinstance(distribution, beta.Beta):
- random_variable = pm.Beta(
- name,
- alpha=distribution.a,
- beta=distribution.b,
- shape=shape,
- )
- else:
- raise NotImplementedError("Not supported distriubtion by QUEENS and/or PyMC")
- return random_variable
diff --git a/queens/variational_distributions/variational_distribution.py b/queens/variational_distributions/_variational_distribution.py
similarity index 100%
rename from queens/variational_distributions/variational_distribution.py
rename to queens/variational_distributions/_variational_distribution.py
diff --git a/queens/variational_distributions/full_rank_normal.py b/queens/variational_distributions/full_rank_normal.py
index 2c7abd537..e0006df10 100644
--- a/queens/variational_distributions/full_rank_normal.py
+++ b/queens/variational_distributions/full_rank_normal.py
@@ -19,7 +19,7 @@
from numba import njit
from queens.utils.logger_settings import log_init_args
-from queens.variational_distributions.variational_distribution import Variational
+from queens.variational_distributions._variational_distribution import Variational
class FullRankNormal(Variational):
diff --git a/queens/variational_distributions/joint.py b/queens/variational_distributions/joint.py
index 1dd5770b1..7ec2caa51 100644
--- a/queens/variational_distributions/joint.py
+++ b/queens/variational_distributions/joint.py
@@ -17,7 +17,7 @@
import numpy as np
import scipy
-from queens.variational_distributions.variational_distribution import Variational
+from queens.variational_distributions._variational_distribution import Variational
class Joint(Variational):
diff --git a/queens/variational_distributions/mean_field_normal.py b/queens/variational_distributions/mean_field_normal.py
index 743e81eb1..315e88ab7 100644
--- a/queens/variational_distributions/mean_field_normal.py
+++ b/queens/variational_distributions/mean_field_normal.py
@@ -17,7 +17,7 @@
import numpy as np
from queens.utils.logger_settings import log_init_args
-from queens.variational_distributions.variational_distribution import Variational
+from queens.variational_distributions._variational_distribution import Variational
class MeanFieldNormal(Variational):
diff --git a/queens/variational_distributions/mixture_model.py b/queens/variational_distributions/mixture_model.py
index 2568ed77b..75a3174f4 100644
--- a/queens/variational_distributions/mixture_model.py
+++ b/queens/variational_distributions/mixture_model.py
@@ -16,7 +16,7 @@
import numpy as np
-from queens.variational_distributions.variational_distribution import Variational
+from queens.variational_distributions._variational_distribution import Variational
class MixtureModel(Variational):
diff --git a/queens/variational_distributions/particle.py b/queens/variational_distributions/particle.py
index 96b89564a..cb2b89559 100644
--- a/queens/variational_distributions/particle.py
+++ b/queens/variational_distributions/particle.py
@@ -17,7 +17,7 @@
import numpy as np
from queens.distributions.particle import Particle as ParticleDistribution
-from queens.variational_distributions.variational_distribution import Variational
+from queens.variational_distributions._variational_distribution import Variational
class Particle(Variational):
diff --git a/tests/integration_tests/cluster/test_fourc_mc_cluster.py b/tests/integration_tests/cluster/test_fourc_mc_cluster.py
index bf680025d..3deed6049 100644
--- a/tests/integration_tests/cluster/test_fourc_mc_cluster.py
+++ b/tests/integration_tests/cluster/test_fourc_mc_cluster.py
@@ -21,7 +21,7 @@
import pytest
import queens.schedulers.cluster as cluster_scheduler # pylint: disable=consider-using-from-import
-from queens.data_processors.pvd import Pvd
+from queens.data_processors.pvd_file import PvdFile
from queens.distributions.uniform import Uniform
from queens.drivers import Jobscript
from queens.iterators.monte_carlo import MonteCarlo
@@ -127,7 +127,7 @@ def test_fourc_mc_cluster(
parameter_2 = Uniform(lower_bound=0.0, upper_bound=1.0)
parameters = Parameters(parameter_1=parameter_1, parameter_2=parameter_2)
- data_processor = Pvd(
+ data_processor = PvdFile(
field_name="displacement",
file_name_identifier="*.pvd",
file_options_dict={},
diff --git a/tests/integration_tests/fourc/test_fourc_mc.py b/tests/integration_tests/fourc/test_fourc_mc.py
index 3386692d8..ef9529e4c 100644
--- a/tests/integration_tests/fourc/test_fourc_mc.py
+++ b/tests/integration_tests/fourc/test_fourc_mc.py
@@ -18,7 +18,7 @@
import numpy as np
-from queens.data_processors.pvd import Pvd
+from queens.data_processors.pvd_file import PvdFile
from queens.distributions.uniform import Uniform
from queens.drivers.fourc import Fourc
from queens.iterators.monte_carlo import MonteCarlo
@@ -48,7 +48,7 @@ def test_fourc_mc(
parameter_2 = Uniform(lower_bound=0.0, upper_bound=1.0)
parameters = Parameters(parameter_1=parameter_1, parameter_2=parameter_2)
- data_processor = Pvd(
+ data_processor = PvdFile(
field_name="displacement",
file_name_identifier=f"{global_settings.experiment_name}_*.pvd",
file_options_dict={},
diff --git a/tests/integration_tests/fourc/test_fourc_mc_random_field_ensight.py b/tests/integration_tests/fourc/test_fourc_mc_random_field_ensight.py
index 9173cc634..fe4197429 100644
--- a/tests/integration_tests/fourc/test_fourc_mc_random_field_ensight.py
+++ b/tests/integration_tests/fourc/test_fourc_mc_random_field_ensight.py
@@ -19,7 +19,7 @@
import numpy as np
import pytest
-from queens.data_processors.ensight import Ensight
+from queens.data_processors.ensight_file import EnsightFile
from queens.drivers.fourc import Fourc
from queens.external_geometries.fourc_dat import FourcDat
from queens.iterators.monte_carlo import MonteCarlo
@@ -90,7 +90,7 @@ def test_write_random_material_to_dat(
list_geometric_sets=["DSURFACE 1"],
input_template=fourc_input_preprocessed,
)
- data_processor = Ensight(
+ data_processor = EnsightFile(
file_name_identifier="*_structure.case",
file_options_dict={
"delete_field_data": False,
diff --git a/tests/integration_tests/python/test_rpvi_iterator_exe_park91a_hifi_provided_gradient.py b/tests/integration_tests/python/test_rpvi_iterator_exe_park91a_hifi_provided_gradient.py
index cfacb82f0..abf59022c 100644
--- a/tests/integration_tests/python/test_rpvi_iterator_exe_park91a_hifi_provided_gradient.py
+++ b/tests/integration_tests/python/test_rpvi_iterator_exe_park91a_hifi_provided_gradient.py
@@ -17,7 +17,7 @@
import numpy as np
import pytest
-from queens.data_processors import Csv
+from queens.data_processors import CsvFile
from queens.distributions import Normal
from queens.drivers import Mpi
from queens.iterators import RPVI
@@ -94,14 +94,14 @@ def test_rpvi_iterator_exe_park91a_hifi_provided_gradient(
num_jobs=1,
experiment_name=global_settings.experiment_name,
)
- data_processor = Csv(
+ data_processor = CsvFile(
file_name_identifier="*_output.csv",
file_options_dict={
"delete_field_data": False,
"filter": {"type": "entire_file"},
},
)
- gradient_data_processor = Csv(
+ gradient_data_processor = CsvFile(
file_name_identifier="*_gradient.csv",
file_options_dict={
"delete_field_data": False,
@@ -207,7 +207,7 @@ def test_rpvi_iterator_exe_park91a_hifi_finite_differences_gradient(
num_jobs=1,
experiment_name=global_settings.experiment_name,
)
- data_processor = Csv(
+ data_processor = CsvFile(
file_name_identifier="*_output.csv",
file_options_dict={
"delete_field_data": False,
@@ -319,7 +319,7 @@ def test_rpvi_iterator_exe_park91a_hifi_adjoint_gradient(
num_jobs=1,
experiment_name=global_settings.experiment_name,
)
- data_processor = Csv(
+ data_processor = CsvFile(
file_name_identifier="*_output.csv",
file_options_dict={
"delete_field_data": False,
@@ -333,7 +333,7 @@ def test_rpvi_iterator_exe_park91a_hifi_adjoint_gradient(
data_processor=data_processor,
mpi_cmd=mpi_command,
)
- gradient_data_processor = Csv(
+ gradient_data_processor = CsvFile(
file_name_identifier="*_gradient.csv",
file_options_dict={
"delete_field_data": False,
diff --git a/tests/unit_tests/data_processors/test_csv.py b/tests/unit_tests/data_processors/test_csv.py
index 074e4180f..c86142bfe 100644
--- a/tests/unit_tests/data_processors/test_csv.py
+++ b/tests/unit_tests/data_processors/test_csv.py
@@ -20,8 +20,8 @@
import pandas as pd
import pytest
-import queens.data_processors.csv
-from queens.data_processors.csv import Csv
+import queens.data_processors.csv_file
+from queens.data_processors.csv_file import CsvFile
@pytest.fixture(name="dummy_csv_file", scope="session")
@@ -113,10 +113,10 @@ def fixture_default_data_processor(mocker):
}
mocker.patch(
- ("queens.data_processors.csv.Csv.check_valid_filter_options"),
+ ("queens.data_processors.csv_file.CsvFile.check_valid_filter_options"),
return_value=None,
)
- csv_instance = Csv(
+ csv_instance = CsvFile(
file_name_identifier,
file_options_dict,
files_to_be_deleted_regex_lst,
@@ -155,11 +155,11 @@ def test_init(mocker):
}
mp = mocker.patch(
- ("queens.data_processors.csv.Csv.check_valid_filter_options"),
+ ("queens.data_processors.csv_file.CsvFile.check_valid_filter_options"),
return_value=None,
)
- my_data_processor = Csv(
+ my_data_processor = CsvFile(
file_name_identifier,
file_options_dict,
files_to_be_deleted_regex_lst,
@@ -183,55 +183,57 @@ def test_init(mocker):
def test_check_valid_filter_options_entire_file():
"""Test checking of valid filter options."""
- Csv.check_valid_filter_options({"type": "entire_file"})
+ CsvFile.check_valid_filter_options({"type": "entire_file"})
with pytest.raises(
TypeError,
match="For the filter type `entire_file`, you have to provide a dictionary of type "
- f"{Csv.expected_filter_entire_file}.",
+ f"{CsvFile.expected_filter_entire_file}.",
):
- Csv.check_valid_filter_options({"type": "entire_file", "tolerance": 0})
+ CsvFile.check_valid_filter_options({"type": "entire_file", "tolerance": 0})
def test_check_valid_filter_options_by_range():
"""Test checking of valid filter by range options."""
- Csv.check_valid_filter_options({"type": "by_range", "range": [1.0, 2.0], "tolerance": 1.0})
+ CsvFile.check_valid_filter_options({"type": "by_range", "range": [1.0, 2.0], "tolerance": 1.0})
with pytest.raises(
TypeError,
match=re.escape(
"For the filter type `by_range`, you have to provide "
- f"a dictionary of type {Csv.expected_filter_by_range}."
+ f"a dictionary of type {CsvFile.expected_filter_by_range}."
),
):
- Csv.check_valid_filter_options({"type": "by_range", "range": [1.0, 2.0]})
+ CsvFile.check_valid_filter_options({"type": "by_range", "range": [1.0, 2.0]})
def test_check_valid_filter_options_by_row_index():
"""Test checking of valid filter by row index options."""
- Csv.check_valid_filter_options({"type": "by_row_index", "rows": [1, 2]})
+ CsvFile.check_valid_filter_options({"type": "by_row_index", "rows": [1, 2]})
with pytest.raises(
TypeError,
match=re.escape(
"For the filter type `by_row_index`, you have to provide "
- f"a dictionary of type {Csv.expected_filter_by_row_index}."
+ f"a dictionary of type {CsvFile.expected_filter_by_row_index}."
),
):
- Csv.check_valid_filter_options({"type": "by_row_index", "rows": [1, 2], "tolerance": 1.0})
+ CsvFile.check_valid_filter_options(
+ {"type": "by_row_index", "rows": [1, 2], "tolerance": 1.0}
+ )
def test_check_valid_filter_options_by_target_values():
"""Test checking of valid filter by target values."""
- Csv.check_valid_filter_options(
+ CsvFile.check_valid_filter_options(
{"type": "by_target_values", "target_values": [1.0, 2.0, 3.0], "tolerance": 1.0}
)
with pytest.raises(
TypeError,
match=re.escape(
"For the filter type `by_target_values`, you have to provide "
- f"a dictionary of type {Csv.expected_filter_by_target_values}."
+ f"a dictionary of type {CsvFile.expected_filter_by_target_values}."
),
):
- Csv.check_valid_filter_options(
+ CsvFile.check_valid_filter_options(
{"type": "by_target_values", "target_values": [1.0, 2.0, 3.0]}
)
diff --git a/tests/unit_tests/data_processors/test_ensight_interface.py b/tests/unit_tests/data_processors/test_ensight_interface.py
deleted file mode 100644
index 6d2740aef..000000000
--- a/tests/unit_tests/data_processors/test_ensight_interface.py
+++ /dev/null
@@ -1,202 +0,0 @@
-#
-# SPDX-License-Identifier: LGPL-3.0-or-later
-# Copyright (c) 2024-2025, QUEENS contributors.
-#
-# This file is part of QUEENS.
-#
-# QUEENS is free software: you can redistribute it and/or modify it under the terms of the GNU
-# Lesser General Public License as published by the Free Software Foundation, either version 3 of
-# the License, or (at your option) any later version. QUEENS is distributed in the hope that it will
-# be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You
-# should have received a copy of the GNU Lesser General Public License along with QUEENS. If not,
-# see <https://www.gnu.org/licenses/>.
-#
-"""Tests for distance to surface measurement data_processors evaluation."""
-
-import numpy as np
-import pytest
-
-from queens.data_processors.ensight_interface import EnsightInterfaceDiscrepancy
-
-
-############## fixtures
-@pytest.fixture(name="all_dimensions", scope="module", params=["2d", "3d"])
-def fixture_all_dimensions(request):
- """Parameterized fixture to select problem dimension."""
- return request.param
-
-
-@pytest.fixture(name="default_data_processor")
-def fixture_default_data_processor(mocker):
- """Default ensight class for upcoming tests."""
- file_name_identifier = "dummy_prefix*dummyfix"
- file_options_dict = {
- "path_to_ref_data": "dummy_path",
- "time_tol": 1e-03,
- "visualization": False,
- "displacement_fields": ["first_disp", "second_disp"],
- "problem_dimension": "5d",
- }
- file_to_be_deleted_regex_lst = []
-
- mocker.patch(
- "queens.data_processors.ensight_interface.EnsightInterfaceDiscrepancy.read_monitorfile",
- return_value="None",
- )
- pp = EnsightInterfaceDiscrepancy(
- file_name_identifier,
- file_options_dict,
- file_to_be_deleted_regex_lst,
- )
- return pp
-
-
-# --------------- actual tests -------------------------
-
-
-def test_init(mocker):
- """Test the init method."""
- experimental_ref_data = "dummy_data"
- displacement_fields = ["first_disp", "second_disp"]
- time_tol = 1e-03
- visualization_bool = False
- files_to_be_deleted_regex_lst = []
- problem_dim = "5d"
-
- file_name_identifier = "dummy_prefix*dummyfix"
- file_options_dict = {
- "path_to_ref_data": "dummy_path",
- "time_tol": time_tol,
- "visualization": visualization_bool,
- "displacement_fields": displacement_fields,
- "problem_dimension": problem_dim,
- }
-
- mocker.patch(
- "queens.data_processors.ensight_interface.EnsightInterfaceDiscrepancy.read_monitorfile",
- return_value="dummy_data",
- )
- my_data_processor = EnsightInterfaceDiscrepancy(
- file_name_identifier,
- file_options_dict,
- files_to_be_deleted_regex_lst,
- )
-
- assert my_data_processor.time_tol == time_tol
- assert my_data_processor.visualization_bool is visualization_bool
- assert my_data_processor.displacement_fields == displacement_fields
- assert my_data_processor.problem_dimension == problem_dim
- assert my_data_processor.experimental_ref_data_lst == experimental_ref_data
-
- assert my_data_processor.files_to_be_deleted_regex_lst == files_to_be_deleted_regex_lst
- assert my_data_processor.file_options_dict == file_options_dict
- assert my_data_processor.file_name_identifier == file_name_identifier
-
-
-def test_from_config_create_data_processor(mocker):
- """Test the config method."""
- experimental_ref_data = np.array([[1, 2], [3, 4]])
- mp = mocker.patch(
- "queens.data_processors.ensight_interface.EnsightInterfaceDiscrepancy.__init__",
- return_value=None,
- )
-
- mocker.patch(
- "queens.data_processors.ensight_interface.EnsightInterfaceDiscrepancy.read_monitorfile",
- return_value=experimental_ref_data,
- )
- file_name_identifier = "dummyprefix*dummy.case"
- time_tol = 1e-03
- visualization_bool = False
- displacement_fields = ["first_disp", "second_disp"]
- delete_field_data = False
- problem_dimension = "5d"
- path_to_ref_data = "some_path"
- files_to_be_deleted_regex_lst = []
-
- file_options_dict = {
- "time_tol": time_tol,
- "visualization_bool": visualization_bool,
- "displacement_fields": displacement_fields,
- "delete_field_data": delete_field_data,
- "problem_dimension": problem_dimension,
- "path_to_ref_data": path_to_ref_data,
- }
-
- EnsightInterfaceDiscrepancy(
- file_name_identifier=file_name_identifier,
- file_options_dict=file_options_dict,
- files_to_be_deleted_regex_lst=files_to_be_deleted_regex_lst,
- )
- mp.assert_called_once_with(
- file_name_identifier=file_name_identifier,
- file_options_dict=file_options_dict,
- files_to_be_deleted_regex_lst=files_to_be_deleted_regex_lst,
- )
-
-
-def test_read_monitorfile(mocker):
- """Test reading of monitor file."""
- # monitor_string will be used to mock the content of a monitor file that is linked at
- # path_to_ref_data whereas the indentation is compulsory
- monitor_string = """#somecomment
-steps 2 npoints 4
-2 0 1
-2 0 2
-2 1 2
-3 0 1 2
-#comments here and in following lines
-#lines above: #number of dimensions for point pairs #ID of coordinate directions
-# following lines in scheme seperated by arbitrary number of spaces
-# (first time point) x1 y1 x1' y1' x2 y2 x2' y2' x3 y3 x3' y3' x4 y4 x4' y4' x5 y5 x5' y5'
-# (x y) is a location of the interface (x' y') is a point that is associated with
-# the direction in which the distance to the interface is measured
-# the vectors (x y)->(x' y') should point towards the interface
-4.0e+00 1.0 1.0 1.0 1.0 2.0 2.0 2.0 2.0 3.0 3.0 3.0 3.0 1.0 1.0 1.0 1.0 1.0 1.0
-8.0e+00 5.0 5.0 5.0 5.0 6.0 6.0 6.0 6.0 7.0 7.0 7.0 7.0 5.0 5.0 5.0 5.0 5.0 5.0"""
-
- mp = mocker.patch("builtins.open", mocker.mock_open(read_data=monitor_string))
- data = EnsightInterfaceDiscrepancy.read_monitorfile("dummy_path")
- mp.assert_called_once()
-
- assert data == [
- [
- 4.0,
- [
- [[1.0, 1.0, 0], [1.0, 1.0, 0]],
- [[2.0, 0, 2.0], [2.0, 0, 2.0]],
- [[0, 3.0, 3.0], [0, 3.0, 3.0]],
- [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],
- ],
- ],
- [
- 8.0,
- [
- [[5.0, 5.0, 0], [5.0, 5.0, 0]],
- [[6.0, 0, 6.0], [6.0, 0, 6.0]],
- [[0, 7.0, 7.0], [0, 7.0, 7.0]],
- [[5.0, 5.0, 5.0], [5.0, 5.0, 5.0]],
- ],
- ],
- ]
-
- monitor_string = """something wrong"""
- mocker.patch("builtins.open", mocker.mock_open(read_data=monitor_string))
- with pytest.raises(ValueError):
- EnsightInterfaceDiscrepancy.read_monitorfile("some_path")
-
-
-def test_stretch_vector(default_data_processor):
- """Test for stretch vector helpre method."""
- assert default_data_processor.stretch_vector([1, 2, 3], [2, 4, 6], 2) == [
- [-1, -2, -3],
- [4, 8, 12],
- ]
-
-
-def test_compute_distance(default_data_processor):
- """Test for distance computation."""
- assert default_data_processor.compute_distance(
- [[2, 4, 6], [1, 2, 3], [3, 6, 9]], [[0, 0, 0], [0.1, 0.2, 0.3]]
- ) == pytest.approx(np.sqrt(14), abs=10e-12)
diff --git a/tests/unit_tests/data_processors/test_npy.py b/tests/unit_tests/data_processors/test_npy.py
index 466a3b8cb..a2e3d6356 100644
--- a/tests/unit_tests/data_processors/test_npy.py
+++ b/tests/unit_tests/data_processors/test_npy.py
@@ -19,7 +19,7 @@
import numpy as np
import pytest
-from queens.data_processors.numpy import Numpy
+from queens.data_processors.numpy_file import NumpyFile
# ------ fixtures ----------
@@ -65,7 +65,7 @@ def fixture_default_data_processor_npy():
file_options_dict = {}
files_to_be_deleted_regex_lst = []
- data_processor = Numpy(
+ data_processor = NumpyFile(
file_name_identifier,
file_options_dict,
files_to_be_deleted_regex_lst,
@@ -81,7 +81,7 @@ def test_init():
file_options_dict = {"dummy": "dummy"}
files_to_be_deleted_regex_lst = ["abc"]
- data_processor = Numpy(
+ data_processor = NumpyFile(
file_name_identifier,
file_options_dict,
files_to_be_deleted_regex_lst,
diff --git a/tests/unit_tests/data_processors/test_txt.py b/tests/unit_tests/data_processors/test_txt.py
index a3fba4529..828731703 100644
--- a/tests/unit_tests/data_processors/test_txt.py
+++ b/tests/unit_tests/data_processors/test_txt.py
@@ -16,7 +16,7 @@
import pytest
-from queens.data_processors.txt import Txt
+from queens.data_processors.txt_file import TxtFile
from queens.utils.path import relative_path_from_queens
@@ -37,7 +37,7 @@ def fixture_default_data_processor():
file_options_dict = {}
- txt_instance = Txt(
+ txt_instance = TxtFile(
file_name_identifier,
file_options_dict,
files_to_be_deleted_regex_lst,
diff --git a/tests/unit_tests/drivers/test_jobscript.py b/tests/unit_tests/drivers/test_jobscript.py
index 5060ed6f0..3c1bb5539 100644
--- a/tests/unit_tests/drivers/test_jobscript.py
+++ b/tests/unit_tests/drivers/test_jobscript.py
@@ -21,7 +21,7 @@
import pytest
import yaml
-from queens.data_processors import Numpy, Txt
+from queens.data_processors import NumpyFile, TxtFile
from queens.distributions import FreeVariable
from queens.drivers.jobscript import JobOptions, Jobscript
from queens.parameters import Parameters
@@ -95,7 +95,7 @@ def fixture_executable(tmp_path):
@pytest.fixture(name="data_processor")
def fixture_data_processor():
"""Dummy data processor."""
- return Numpy(
+ return NumpyFile(
file_name_identifier="dummy.npy",
file_options_dict={},
)
@@ -104,7 +104,7 @@ def fixture_data_processor():
@pytest.fixture(name="gradient_data_processor")
def fixture_gradient_data_processor():
"""Dummy gradient data processor."""
- return Txt(file_name_identifier="dummy.txt", file_options_dict={})
+ return TxtFile(file_name_identifier="dummy.txt", file_options_dict={})
@pytest.fixture(name="jobscript_file_name")
diff --git a/tests/unit_tests/external_geometries/test_fourc_dat.py b/tests/unit_tests/external_geometries/test_fourc_dat.py
index 5db8c3e01..c82d3816b 100644
--- a/tests/unit_tests/external_geometries/test_fourc_dat.py
+++ b/tests/unit_tests/external_geometries/test_fourc_dat.py
@@ -202,7 +202,7 @@ def test_init(mocker, tmp_path):
surface_topology = [{"node_mesh": [], "surface_topology": [], "topology_name": ""}]
volume_topology = [{"node_mesh": [], "volume_topology": [], "topology_name": ""}]
node_coordinates = {"node_mesh": [], "coordinates": []}
- mp = mocker.patch("queens.external_geometries.external_geometry.ExternalGeometry.__init__")
+ mp = mocker.patch("queens.external_geometries._external_geometry.ExternalGeometry.__init__")
path_to_preprocessed_dat_file = tmp_path / "preprocessed"
random_fields = (
diff --git a/tests/unit_tests/iterators/test_grid.py b/tests/unit_tests/iterators/test_grid.py
index 2d89f0031..21688f464 100644
--- a/tests/unit_tests/iterators/test_grid.py
+++ b/tests/unit_tests/iterators/test_grid.py
@@ -137,7 +137,7 @@ def test_init(
"""Test the initialization of the Grid class."""
# some default input for testing
num_parameters = 2
- mp = mocker.patch("queens.iterators.iterator.Iterator.__init__")
+ mp = mocker.patch("queens.iterators._iterator.Iterator.__init__")
Grid.parameters = Mock()
Grid.parameters.num_parameters = num_parameters
diff --git a/tests/unit_tests/models/test_bmfmc.py b/tests/unit_tests/models/test_bmfmc.py
index 173aba1e6..3c4528826 100644
--- a/tests/unit_tests/models/test_bmfmc.py
+++ b/tests/unit_tests/models/test_bmfmc.py
@@ -153,7 +153,7 @@ def test_init(global_settings, mocker, settings_probab_mapping, parameters):
"""Test initialization."""
y_pdf_support = np.linspace(-1, 1, 200)
- mp1 = mocker.patch("queens.models.model.Model.__init__")
+ mp1 = mocker.patch("queens.models._model.Model.__init__")
mp2 = mocker.patch("queens.models.bmfmc.BmfmcInterface.__init__", return_value=None)
approx = "dummy_approx"
model = BMFMC(
diff --git a/tests/unit_tests/models/test_differentiable_fd.py b/tests/unit_tests/models/test_differentiable_fd.py
index 98ec7d5f2..264d0eb9f 100644
--- a/tests/unit_tests/models/test_differentiable_fd.py
+++ b/tests/unit_tests/models/test_differentiable_fd.py
@@ -18,8 +18,8 @@
import pytest
from mock import Mock
+from queens.models._model import Model
from queens.models.finite_difference import FiniteDifference
-from queens.models.model import Model
from queens.utils.valid_options import InvalidOptionError
diff --git a/tests/unit_tests/models/test_model.py b/tests/unit_tests/models/test_model.py
index 1f9a4869b..d4074375b 100644
--- a/tests/unit_tests/models/test_model.py
+++ b/tests/unit_tests/models/test_model.py
@@ -19,7 +19,7 @@
import numpy as np
import pytest
-from queens.models.model import Model
+from queens.models._model import Model
class DummyModel(Model):
diff --git a/tests/unit_tests/utils/test_numpy_utils.py b/tests/unit_tests/utils/test_numpy_utils.py
index b76d99264..89e00e62d 100644
--- a/tests/unit_tests/utils/test_numpy_utils.py
+++ b/tests/unit_tests/utils/test_numpy_utils.py
@@ -17,7 +17,7 @@
import numpy as np
import pytest
-from queens.utils.numpy import at_least_2d, at_least_3d
+from queens.utils.numpy_array import at_least_2d, at_least_3d
@pytest.fixture(name="arr_0d", scope="module")