Diffstat (limited to 'test/test_parameters.py')
-rwxr-xr-x  test/test_parameters.py  236
1 file changed, 236 insertions, 0 deletions
diff --git a/test/test_parameters.py b/test/test_parameters.py
new file mode 100755
index 0000000..e36b1a1
--- /dev/null
+++ b/test/test_parameters.py
@@ -0,0 +1,236 @@
+#!/usr/bin/env python3
+
+from dfatool import parameters
+from dfatool.utils import by_name_to_by_param
+from dfatool.functions import analytic
+from dfatool.model import ParallelParamFit
+import unittest
+
+import numpy as np
+
+
+class TestModels(unittest.TestCase):
+    def test_distinct_param_values(self):
+        X = np.arange(35)
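+        # 35 == 5 * 7, so every (x % 5, x % 7) combination occurs exactly once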
+        by_name = {
+            "TX": {
+                "param": [(x % 5, x % 7) for x in X],
+                "power": X,
+                "attributes": ["power"],
+            }
+        }
+        self.assertEqual(
+            parameters.distinct_param_values(by_name, "TX"),
+            [list(range(5)), list(range(7))],
+        )
+
+    def test_parameter_detection_linear(self):
+        # rng = np.random.default_rng(seed=1312)  # requires NumPy >= 1.17
+        np.random.seed(1312)
+        X = np.arange(200) % 50
+        # Y = X + rng.normal(size=X.size)  # requires NumPy >= 1.17
+        Y = X + np.random.normal(size=X.size)
+        parameter_names = ["p_mod5", "p_linear"]
+
+        # Test input data:
+        # * param[0] ("p_mod5") == X % 5 (bogus data to test detection of non-influence)
+        # * param[1] ("p_linear") == X
+        # * TX power == X ± Gaussian noise
+        # -> TX power depends linearly on "p_linear"
+        by_name = {
+            "TX": {
+                "param": [(x % 5, x) for x in X],
+                "power": Y,
+                "attributes": ["power"],
+            }
+        }
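+        # group measurements by parameter tuple, then compute dependency statistics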
+        by_param = by_name_to_by_param(by_name)
+        stats = parameters.ParamStats(by_name, by_param, parameter_names, dict())
+
+        self.assertEqual(stats.depends_on_param("TX", "power", "p_mod5"), False)
+        self.assertEqual(stats.depends_on_param("TX", "power", "p_linear"), True)
+
+        # Fit individual functions for each parameter (only "p_linear" in this case)
+
+        paramfit = ParallelParamFit(by_param)
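+        # enqueue arguments: name, attribute, parameter index, parameter name ("p_linear" is parameter 1)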
+        paramfit.enqueue("TX", "power", 1, "p_linear")
+        paramfit.fit()
+
+        fit_result = paramfit.get_result("TX", "power")
+        self.assertEqual(fit_result["p_linear"]["best"], "linear")
+        self.assertEqual("p_mod5" not in fit_result, True)
+
+        # Fit a single function for all parameters (still only "p_linear" in this case)
+
+        combined_fit = analytic.function_powerset(fit_result, parameter_names, 0)
+
+        self.assertEqual(
+            combined_fit.model_function,
+            "0 + regression_arg(0) + regression_arg(1) * parameter(p_linear)",
+        )
+        self.assertEqual(
+            combined_fit._function_str,
+            "0 + reg_param[0] + reg_param[1] * model_param[1]",
+        )
+
+        combined_fit.fit(by_param, "TX", "power")
+
+        self.assertEqual(combined_fit.fit_success, True)
+
+        self.assertEqual(combined_fit.is_predictable([None, None]), False)
+        self.assertEqual(combined_fit.is_predictable([None, 0]), True)
+        self.assertEqual(combined_fit.is_predictable([None, 50]), True)
+        self.assertEqual(combined_fit.is_predictable([0, None]), False)
+        self.assertEqual(combined_fit.is_predictable([50, None]), False)
+        self.assertEqual(combined_fit.is_predictable([0, 0]), True)
+        self.assertEqual(combined_fit.is_predictable([0, 50]), True)
+        self.assertEqual(combined_fit.is_predictable([50, 0]), True)
+        self.assertEqual(combined_fit.is_predictable([50, 50]), True)
+
+        # The function should be linear without offset or skew
+        for i in range(100):
+            self.assertAlmostEqual(combined_fit.eval([None, i]), i, places=0)
+
+    def test_parameter_detection_multi_dimensional(self):
+        # rng = np.random.default_rng(seed=1312)  # requires NumPy >= 1.17
+        np.random.seed(1312)
+        # vary each parameter from 1 to 10
+        Xi = (np.arange(50) % 10) + 1
+        # Three parameters -> Build input array [[1, 1, 1], [1, 1, 2], ..., [10, 10, 10]]
+        X = np.array(np.meshgrid(Xi, Xi, Xi)).T.reshape(-1, 3)
+
+        f_lls = np.vectorize(
+            lambda x: 42 + 7 * x[0] + 10 * np.log(x[1]) - 0.5 * x[2] * x[2],
+            signature="(n)->()",
+        )
+        f_ll = np.vectorize(
+            lambda x: 23 + 5 * x[0] - 3 * x[0] / x[1], signature="(n)->()"
+        )
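+        # f_ll deliberately ignores x[2], so "square_none" should be detected as non-influential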
+
+        # Y_lls = f_lls(X) + rng.normal(size=X.shape[0])  # requires NumPy >= 1.17
+        # Y_ll = f_ll(X) + rng.normal(size=X.shape[0])  # requires NumPy >= 1.17
+        Y_lls = f_lls(X) + np.random.normal(size=X.shape[0])
+        Y_ll = f_ll(X) + np.random.normal(size=X.shape[0])
+
+        parameter_names = ["lin_lin", "log_inv", "square_none"]
+
+        by_name = {
+            "someKey": {
+                "param": X,
+                "lls": Y_lls,
+                "ll": Y_ll,
+                "attributes": ["lls", "ll"],
+            }
+        }
+        by_param = by_name_to_by_param(by_name)
+        stats = parameters.ParamStats(by_name, by_param, parameter_names, dict())
+
+        self.assertEqual(stats.depends_on_param("someKey", "lls", "lin_lin"), True)
+        self.assertEqual(stats.depends_on_param("someKey", "lls", "log_inv"), True)
+        self.assertEqual(stats.depends_on_param("someKey", "lls", "square_none"), True)
+
+        self.assertEqual(stats.depends_on_param("someKey", "ll", "lin_lin"), True)
+        self.assertEqual(stats.depends_on_param("someKey", "ll", "log_inv"), True)
+        self.assertEqual(stats.depends_on_param("someKey", "ll", "square_none"), False)
+
+        paramfit = ParallelParamFit(by_param)
+        paramfit.enqueue("someKey", "lls", 0, "lin_lin")
+        paramfit.enqueue("someKey", "lls", 1, "log_inv")
+        paramfit.enqueue("someKey", "lls", 2, "square_none")
+        paramfit.enqueue("someKey", "ll", 0, "lin_lin")
+        paramfit.enqueue("someKey", "ll", 1, "log_inv")
+        paramfit.fit()
+
+        fit_lls = paramfit.get_result("someKey", "lls")
+        self.assertEqual(fit_lls["lin_lin"]["best"], "linear")
+        self.assertEqual(fit_lls["log_inv"]["best"], "logarithmic")
+        self.assertEqual(fit_lls["square_none"]["best"], "square")
+
+        combined_fit_lls = analytic.function_powerset(fit_lls, parameter_names, 0)
+
+        self.assertEqual(
+            combined_fit_lls.model_function,
+            "0 + regression_arg(0) + regression_arg(1) * parameter(lin_lin)"
+            " + regression_arg(2) * np.log(parameter(log_inv))"
+            " + regression_arg(3) * (parameter(square_none))**2"
+            " + regression_arg(4) * parameter(lin_lin) * np.log(parameter(log_inv))"
+            " + regression_arg(5) * parameter(lin_lin) * (parameter(square_none))**2"
+            " + regression_arg(6) * np.log(parameter(log_inv)) * (parameter(square_none))**2"
+            " + regression_arg(7) * parameter(lin_lin) * np.log(parameter(log_inv)) * (parameter(square_none))**2",
+        )
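+        # three influential parameters -> 2^3 == 8 regression args (constant, single, and interaction terms)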
+
+        combined_fit_lls.fit(by_param, "someKey", "lls")
+
+        self.assertEqual(combined_fit_lls.fit_success, True)
+
+        # Verify that f_lls parameters have been found
+        self.assertAlmostEqual(combined_fit_lls.model_args[0], 42, places=0)
+        self.assertAlmostEqual(combined_fit_lls.model_args[1], 7, places=0)
+        self.assertAlmostEqual(combined_fit_lls.model_args[2], 10, places=0)
+        self.assertAlmostEqual(combined_fit_lls.model_args[3], -0.5, places=1)
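+        # f_lls is purely additive, so the interaction coefficients (args 4-7) must vanish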
+        self.assertAlmostEqual(combined_fit_lls.model_args[4], 0, places=2)
+        self.assertAlmostEqual(combined_fit_lls.model_args[5], 0, places=2)
+        self.assertAlmostEqual(combined_fit_lls.model_args[6], 0, places=2)
+        self.assertAlmostEqual(combined_fit_lls.model_args[7], 0, places=2)
+
+        self.assertEqual(combined_fit_lls.is_predictable([None, None, None]), False)
+        self.assertEqual(combined_fit_lls.is_predictable([None, None, 11]), False)
+        self.assertEqual(combined_fit_lls.is_predictable([None, 11, None]), False)
+        self.assertEqual(combined_fit_lls.is_predictable([None, 11, 11]), False)
+        self.assertEqual(combined_fit_lls.is_predictable([11, None, None]), False)
+        self.assertEqual(combined_fit_lls.is_predictable([11, None, 11]), False)
+        self.assertEqual(combined_fit_lls.is_predictable([11, 11, None]), False)
+        self.assertEqual(combined_fit_lls.is_predictable([11, 11, 11]), True)
+
+        # Verify that fitted function behaves like input function
+        for i, x in enumerate(X):
+            self.assertAlmostEqual(combined_fit_lls.eval(x), f_lls(x), places=0)
+
+        fit_ll = paramfit.get_result("someKey", "ll")
+        self.assertEqual(fit_ll["lin_lin"]["best"], "linear")
+        self.assertEqual(fit_ll["log_inv"]["best"], "inverse")
+        self.assertEqual("square_none" not in fit_ll, True)
+
+        combined_fit_ll = analytic.function_powerset(fit_ll, parameter_names, 0)
+
+        self.assertEqual(
+            combined_fit_ll.model_function,
+            "0 + regression_arg(0) + regression_arg(1) * parameter(lin_lin)"
+            " + regression_arg(2) * 1/(parameter(log_inv))"
+            " + regression_arg(3) * parameter(lin_lin) * 1/(parameter(log_inv))",
+        )
+
+        combined_fit_ll.fit(by_param, "someKey", "ll")
+
+        self.assertEqual(combined_fit_ll.fit_success, True)
+
+        # Verify that f_ll parameters have been found
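+        # f_ll == 23 + 5 * x[0] - 3 * x[0] / x[1]: there is no pure inverse term, so arg 2 must vanish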
+        self.assertAlmostEqual(combined_fit_ll.model_args[0], 23, places=0)
+        self.assertAlmostEqual(combined_fit_ll.model_args[1], 5, places=0)
+        self.assertAlmostEqual(combined_fit_ll.model_args[2], 0, places=1)
+        self.assertAlmostEqual(combined_fit_ll.model_args[3], -3, places=0)
+
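+        # "square_none" does not appear in the ll model, so it may remain unset (None)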
+        self.assertEqual(combined_fit_ll.is_predictable([None, None, None]), False)
+        self.assertEqual(combined_fit_ll.is_predictable([None, None, 11]), False)
+        self.assertEqual(combined_fit_ll.is_predictable([None, 11, None]), False)
+        self.assertEqual(combined_fit_ll.is_predictable([None, 11, 11]), False)
+        self.assertEqual(combined_fit_ll.is_predictable([11, None, None]), False)
+        self.assertEqual(combined_fit_ll.is_predictable([11, None, 11]), False)
+        self.assertEqual(combined_fit_ll.is_predictable([11, 11, None]), True)
+        self.assertEqual(combined_fit_ll.is_predictable([11, 11, 11]), True)
+
+        # Verify that fitted function behaves like input function
+        for i, x in enumerate(X):
+            self.assertAlmostEqual(combined_fit_ll.eval(x), f_ll(x), places=0)
+
+
+if __name__ == "__main__":
+    unittest.main()