author    | Daniel Friesel <derf@finalrewind.org> | 2018-03-15 16:54:28 +0100
committer | Daniel Friesel <derf@finalrewind.org> | 2018-03-15 16:54:28 +0100
commit    | 862945aa0c6b9eaadd06662dd67bf80f3812b2ec (patch)
tree      | 29ae9321c4769f6fa73e7b8d3a4ca2765a9e933d
parent    | c00fb33e29548fab04b648b0dcef481f7d6bc58e (diff)
allow inferred functions to be overridden manually
-rwxr-xr-x | bin/analyze-archive.py | 11
-rwxr-xr-x | lib/dfatool.py         | 27
2 files changed, 33 insertions, 5 deletions
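
The new `--function-override` option accepts a semicolon-separated list of entries of the form `<state-or-transition> <attribute> <function string>`. As a minimal sketch of how the parsing added in `bin/analyze-archive.py` turns such a value into the `function_override` dictionary; the `TX`/`duration` entry and the `txbytes` parameter below are hypothetical examples, not taken from the commit:

```python
# Hypothetical value for --function-override; entries are separated by ';',
# the first two tokens of each entry name the state/transition and the model
# attribute, and the remaining tokens form the function string.
override_arg = 'TX duration regression_arg(0) + regression_arg(1) * parameter(txbytes)'

function_override = {}
for function_desc in override_arg.split(';'):
    state_or_tran, attribute, *function_str = function_desc.split(' ')
    function_override[(state_or_tran, attribute)] = ' '.join(function_str)

print(function_override)
# {('TX', 'duration'): 'regression_arg(0) + regression_arg(1) * parameter(txbytes)'}
```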
diff --git a/bin/analyze-archive.py b/bin/analyze-archive.py
index a9ef3bc..0882883 100755
--- a/bin/analyze-archive.py
+++ b/bin/analyze-archive.py
@@ -41,10 +41,11 @@ if __name__ == '__main__':
 
     ignored_trace_indexes = None
     discard_outliers = None
+    function_override = {}
 
     try:
         raw_opts, args = getopt.getopt(sys.argv[1:], "",
-            'plot ignored-trace-indexes= discard-outliers='.split(' '))
+            'plot ignored-trace-indexes= discard-outliers= function-override='.split(' '))
 
         for option, parameter in raw_opts:
             optname = re.sub(r'^--', '', option)
@@ -58,6 +59,11 @@ if __name__ == '__main__':
         if 'discard-outliers' in opts:
             discard_outliers = float(opts['discard-outliers'])
 
+        if 'function-override' in opts:
+            for function_desc in opts['function-override'].split(';'):
+                state_or_tran, attribute, *function_str = function_desc.split(' ')
+                function_override[(state_or_tran, attribute)] = ' '.join(function_str)
+
     except getopt.GetoptError as err:
         print(err)
         sys.exit(2)
@@ -67,7 +73,8 @@ if __name__ == '__main__':
     preprocessed_data = raw_data.get_preprocessed_data()
     model = EnergyModel(preprocessed_data,
         ignore_trace_indexes = ignored_trace_indexes,
-        discard_outliers = discard_outliers)
+        discard_outliers = discard_outliers,
+        function_override = function_override)
 
     print('--- simple static model ---')
     static_model = model.get_static()
diff --git a/lib/dfatool.py b/lib/dfatool.py
index e6d4315..152b25b 100755
--- a/lib/dfatool.py
+++ b/lib/dfatool.py
@@ -6,6 +6,7 @@ import io
 import json
 import numpy as np
 import os
+import re
 from scipy import optimize
 from gplearn.genetic import SymbolicRegressor
 from sklearn.metrics import r2_score
@@ -45,6 +46,12 @@ def float_or_nan(n):
     except ValueError:
         return np.nan
 
+def vprint(verbose, string):
+    if verbose:
+        print(string)
+
+# TODO function override per argument, e.g. for CC1200 send.duration
+
 def _elem_param_and_arg_list(elem):
     param_dict = elem['parameter']
     paramkeys = sorted(param_dict.keys())
@@ -419,13 +426,14 @@ class ParamFunction:
 
 class AnalyticFunction:
 
-    def __init__(self, function_str, num_vars, parameters, num_args):
+    def __init__(self, function_str, num_vars, parameters, num_args, verbose = True):
         self._parameter_names = parameters
         self._num_args = num_args
         self._model_str = function_str
         rawfunction = function_str
         self._dependson = [False] * (len(parameters) + num_args)
         self.fit_success = False
+        self.verbose = verbose
 
         for i in range(len(parameters)):
             if rawfunction.find('parameter({})'.format(parameters[i])) >= 0:
@@ -739,7 +747,7 @@ def _mean_std_by_param(by_param, state_or_tran, key, param_index):
 
 class EnergyModel:
 
-    def __init__(self, preprocessed_data, ignore_trace_indexes = None, discard_outliers = None):
+    def __init__(self, preprocessed_data, ignore_trace_indexes = None, discard_outliers = None, function_override = {}):
         self.traces = preprocessed_data
         self.by_name = {}
         self.by_param = {}
@@ -749,6 +757,7 @@ class EnergyModel:
         self._parameter_names = sorted(self.traces[0]['trace'][0]['parameter'].keys())
         self._num_args = {}
         self._outlier_threshold = discard_outliers
+        self.function_override = function_override
         if discard_outliers != None:
             self._compute_outlier_stats(ignore_trace_indexes, discard_outliers)
         for run in self.traces:
@@ -977,7 +986,19 @@ class EnergyModel:
                     else:
                         fit_results[result['key'][2]] = fit_result
 
-            if len(fit_results.keys()):
+            if (state_or_tran, model_attribute) in self.function_override:
+                function_str = self.function_override[(state_or_tran, model_attribute)]
+                var_re = re.compile(r'regression_arg\(([0-9]*)\)')
+                var_count = max(map(int, var_re.findall(function_str))) + 1
+                x = AnalyticFunction(function_str,
+                    var_count, self._parameter_names, num_args)
+                x.fit(self.by_param, state_or_tran, model_attribute)
+                if x.fit_success:
+                    param_model[state_or_tran][model_attribute] = {
+                        'fit_result': fit_results,
+                        'function' : x
+                    }
+            elif len(fit_results.keys()):
                 x = analytic.function_powerset(fit_results, self._parameter_names, num_args)
                 x.fit(self.by_param, state_or_tran, model_attribute)
                 if x.fit_success:
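
The override branch added in `lib/dfatool.py` (last hunk above) determines the number of free regression coefficients by looking for the highest `regression_arg(N)` index in the override string before constructing an `AnalyticFunction` and fitting it. A standalone sketch of that counting step, using a hypothetical function string with a `datarate` parameter:

```python
import re

# Hypothetical override string; regression_arg(0) and regression_arg(1) are the
# coefficients to be fitted, parameter(datarate) is a model parameter.
function_str = 'regression_arg(0) + regression_arg(1) * parameter(datarate)'

# Same counting logic as in the commit: the highest regression_arg index
# plus one gives the number of variables AnalyticFunction has to fit.
var_re = re.compile(r'regression_arg\(([0-9]*)\)')
var_count = max(map(int, var_re.findall(function_str))) + 1
print(var_count)  # 2
```

If the fit of the overridden function succeeds, it replaces the automatically inferred powerset function for that state/transition attribute; since the original `if len(fit_results.keys())` branch becomes an `elif`, the automatic inference is skipped entirely whenever an override is given.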