Skip to content

Commit d843a3c

Browse files
authored
Merge pull request #191 from MilesCranmer/backend-update
Update backend: `x^y` instead of `abs(x)^y`, and high-precision constants
2 parents 1e3fd5e + 1f3aace commit d843a3c

File tree

3 files changed

+31
-4
lines changed

3 files changed

+31
-4
lines changed

pysr/sr.py

+7-2
Original file line numberDiff line numberDiff line change
@@ -41,13 +41,14 @@
4141
sympy_mappings = {
4242
"div": lambda x, y: x / y,
4343
"mult": lambda x, y: x * y,
44+
"sqrt": lambda x: sympy.sqrt(x),
4445
"sqrt_abs": lambda x: sympy.sqrt(abs(x)),
4546
"square": lambda x: x**2,
4647
"cube": lambda x: x**3,
4748
"plus": lambda x, y: x + y,
4849
"sub": lambda x, y: x - y,
4950
"neg": lambda x: -x,
50-
"pow": lambda x, y: sympy.Function("unimplemented_pow")(x, y),
51+
"pow": lambda x, y: x**y,
5152
"pow_abs": lambda x, y: abs(x) ** y,
5253
"cos": sympy.cos,
5354
"sin": sympy.sin,
@@ -59,7 +60,7 @@
5960
"acos": sympy.acos,
6061
"asin": sympy.asin,
6162
"atan": sympy.atan,
62-
"acosh": lambda x: sympy.acosh(abs(x) + 1),
63+
"acosh": lambda x: sympy.acosh(x),
6364
"acosh_abs": lambda x: sympy.acosh(abs(x) + 1),
6465
"asinh": sympy.asinh,
6566
"atanh": lambda x: sympy.atanh(sympy.Mod(x + 1, 2) - 1),
@@ -68,6 +69,10 @@
6869
"mod": sympy.Mod,
6970
"erf": sympy.erf,
7071
"erfc": sympy.erfc,
72+
"log": lambda x: sympy.log(x),
73+
"log10": lambda x: sympy.log(x, 10),
74+
"log2": lambda x: sympy.log(x, 2),
75+
"log1p": lambda x: sympy.log(x + 1),
7176
"log_abs": lambda x: sympy.log(abs(x)),
7277
"log10_abs": lambda x: sympy.log(abs(x), 10),
7378
"log2_abs": lambda x: sympy.log(abs(x), 2),

pysr/version.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
1-
__version__ = "0.10.4-1"
2-
__symbolic_regression_jl_version__ = "0.10.1"
1+
__version__ = "0.11.0"
2+
__symbolic_regression_jl_version__ = "0.12.0"

test/test.py

+22
Original file line numberDiff line numberDiff line change
@@ -82,6 +82,22 @@ def test_multiprocessing(self):
8282
print(model.equations_)
8383
self.assertLessEqual(model.equations_.iloc[-1]["loss"], 1e-4)
8484

85+
def test_high_precision_search(self):
86+
y = 1.23456789 * self.X[:, 0]
87+
model = PySRRegressor(
88+
**self.default_test_kwargs,
89+
early_stop_condition="stop_if(loss, complexity) = loss < 1e-4 && complexity == 3",
90+
precision=64,
91+
parsimony=0.01,
92+
warm_start=True,
93+
)
94+
model.fit(self.X, y)
95+
from pysr.sr import Main
96+
97+
# We should have that the model state is now a Float64 hof:
98+
Main.test_state = model.raw_julia_state_
99+
self.assertTrue(Main.eval("typeof(test_state[2]).parameters[1] == Float64"))
100+
85101
def test_multioutput_custom_operator_quiet_custom_complexity(self):
86102
y = self.X[:, [0, 1]] ** 2
87103
model = PySRRegressor(
@@ -182,6 +198,12 @@ def test_empty_operators_single_input_warm_start(self):
182198
warm_start=True,
183199
early_stop_condition=None,
184200
)
201+
# Check that the julia state is saved:
202+
from pysr.sr import Main
203+
204+
# We should have that the model state is now a Float32 hof:
205+
Main.test_state = regressor.raw_julia_state_
206+
self.assertTrue(Main.eval("typeof(test_state[2]).parameters[1] == Float32"))
185207
# This should exit almost immediately, and use the old equations
186208
regressor.fit(X, y)
187209

0 commit comments

Comments
 (0)