Diffstat (limited to 'lib/functions.py')
-rw-r--r--  lib/functions.py  177
1 file changed, 175 insertions, 2 deletions
diff --git a/lib/functions.py b/lib/functions.py
index fd9063f..cc97f9a 100644
--- a/lib/functions.py
+++ b/lib/functions.py
@@ -1,3 +1,9 @@
+"""
+Utilities for analytic description of parameter-dependent model attributes.
+
+This module provides classes and helper functions useful for least-squares
+regression and general handling of model functions.
+"""
from itertools import chain, combinations
import numpy as np
import re
@@ -7,28 +13,110 @@ from utils import is_numeric
arg_support_enabled = True
def powerset(iterable):
+ """
+ Calculate powerset of given items.
+
+ Returns an iterable containing one tuple for each powerset element.
+
+ Example: powerset([1, 2]) -> [(), (1,), (2,), (1, 2)]
+ """
s = list(iterable)
return chain.from_iterable(combinations(s, r) for r in range(len(s)+1))
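For illustration, a standalone sketch of the helper and its output (note that one-element powerset members are one-element tuples):

from itertools import chain, combinations

def powerset(iterable):
    # mirrors the definition above
    s = list(iterable)
    return chain.from_iterable(combinations(s, r) for r in range(len(s)+1))

print(list(powerset([1, 2])))
# -> [(), (1,), (2,), (1, 2)]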
class ParamFunction:
+ """
+ A one-dimensional model function, ready for least squares optimization and similar regression tasks.
+
+ Supports validity checks (e.g. if it is undefined for x <= 0) and an
+ error measure.
+ """
def __init__(self, param_function, validation_function, num_vars):
+ """
+ Create function object suitable for regression analysis.
+
+ This documentation assumes that 1-dimensional functions
+ (-> single float as model input) are used. However, n-dimensional
+ functions (-> list of float as model input) are also supported.
+
+ arguments:
+ param_function -- regression function. Must have the signature
+ (reg_param, model_param) -> float.
+ reg_param is a list of regression variable values,
+ model_param is the model input value (float).
+ Example: lambda rp, mp: rp[0] + rp[1] * mp
+ validation_function -- function used to check whether param_function
+ is defined for a given model_param. Signature:
+ model_param -> bool
+ Example: lambda mp: mp > 0
+ num_vars -- How many regression variables are used by this function,
+ i.e., the length of param_function's reg_param argument.
+ """
self._param_function = param_function
self._validation_function = validation_function
self._num_variables = num_vars
def is_valid(self, arg):
+ """
+ Check whether the regression function is defined for the given argument.
+
+ Returns bool.
+ """
return self._validation_function(arg)
def eval(self, param, args):
+ """
+ Evaluate regression function.
+
+ arguments:
+ param -- regression variable values (list of float)
+ args -- model input (float)
+ """
return self._param_function(param, args)
def error_function(self, P, X, y):
+ """
+ Calculate model error.
+
+ arguments:
+ P -- optimized regression variables (list of float)
+ X -- model input (float)
+ y -- expected output from ground truth (float)
+
+ Returns deviation between model and ground truth (float).
+ """
return self._param_function(P, X) - y
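A minimal usage sketch for ParamFunction, following the linear example from the constructor docstring (the import path is an assumption; adjust to however lib/functions.py is loaded):

from functions import ParamFunction  # assumed import path

# Linear one-dimensional model: f(x) = rp[0] + rp[1] * x, defined everywhere.
linear = ParamFunction(
    lambda reg_param, model_param: reg_param[0] + reg_param[1] * model_param,
    lambda model_param: True,
    2)

print(linear.is_valid(10))                          # True
print(linear.eval([5.0, 0.5], 10))                  # 5.0 + 0.5 * 10 = 10.0
print(linear.error_function([5.0, 0.5], 10, 12.0))  # 10.0 - 12.0 = -2.0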
class AnalyticFunction:
+ """
+ A multi-dimensional model function, generated from a string, which can be optimized using regression.
+
+ The function describes a single model attribute (e.g. TX duration or send(...) energy)
+ and how it is influenced by model parameters such as configured bit rate or
+ packet length.
+ """
def __init__(self, function_str, parameters, num_args, verbose = True, regression_args = None):
+ """
+ Create a new AnalyticFunction object from a function string.
+
+ arguments:
+ function_str -- the function.
+ Refer to regression variables using regression_arg(123),
+ to parameters using parameter(name),
+ and to function arguments (if any) using function_arg(123).
+ Example: "regression_arg(0) + regression_arg(1) * parameter(txbytes)"
+ parameters -- list containing the names of all model parameters,
+ including those not used in function_str, sorted lexically.
+ Sorting is mandatory, as parameter indexes (and not names) are used internally.
+ num_args -- number of local function arguments, if any. Set to 0 if
+ the model attribute does not belong to a function or if function
+ arguments are not included in the model.
+ verbose -- if True, print warnings about unusual events (e.g. a failed fit)
+ regression_args -- Initial regression variable values,
+ both for function usage and least squares optimization.
+ If unset, defaults to [1, 1, 1, ...]
+ """
self._parameter_names = parameters
self._num_args = num_args
self._model_str = function_str
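A hedged construction sketch, using the example function string and illustrative parameter names from the docstring above:

from functions import AnalyticFunction  # assumed import path

# 'bitrate' is listed (sorted lexically) even though the function string
# only refers to parameter(txbytes); num_args is 0 as no function arguments
# are modelled here.
f = AnalyticFunction(
    'regression_arg(0) + regression_arg(1) * parameter(txbytes)',
    ['bitrate', 'txbytes'],
    0)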
@@ -65,6 +153,23 @@ class AnalyticFunction:
self._regression_args = []
def get_fit_data(self, by_param, state_or_tran, model_attribute):
+ """
+ Return training data suitable for scipy.optimize.least_squares.
+
+ arguments:
+ by_param -- measurement data, partitioned by state/transition name and parameter/arg values
+ state_or_tran -- state or transition name, e.g. "TX" or "send"
+ model_attribute -- model attribute name, e.g. "power" or "duration"
+
+ returns (X, Y, num_valid, num_total):
+ X -- 2-D NumPy array of parameter combinations (model input).
+ First dimension is the parameter/argument index, the second
+ dimension contains its values.
+ Example: X[0] contains the first parameter's values.
+ Y -- 1-D NumPy array of training data (desired model output).
+ num_valid -- number of distinct parameter value combinations suitable for optimization
+ num_total -- total number of distinct parameter value combinations
+ """
dimension = len(self._parameter_names) + self._num_args
X = [[] for i in range(dimension)]
Y = []
@@ -95,6 +200,14 @@ class AnalyticFunction:
return X, Y, num_valid, num_total
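The layout of by_param is not visible in this hunk; the sketch below assumes a dict keyed by (state/transition name, tuple of parameter values), with one measurement list per model attribute. It continues with f from the construction sketch above:

# Hypothetical measurement data (layout assumed, not shown in this diff):
by_param = {
    ('TX', (1000, 100)): {'power': [10.2, 10.3]},
    ('TX', (1000, 200)): {'power': [13.9, 14.1]},
    ('TX', (1000, 300)): {'power': [17.4, 17.8]},
}

X, Y, num_valid, num_total = f.get_fit_data(by_param, 'TX', 'power')
# X[0] -> bitrate values, X[1] -> txbytes values, Y -> 'power' measurements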
def fit(self, by_param, state_or_tran, model_attribute):
+ """
+ Fit the function on measurements via least squares regression.
+
+ arguments:
+ by_param -- measurement data, partitioned by state/transition name and parameter/arg values
+ state_or_tran -- state or transition name, e.g. "TX" or "send"
+ model_attribute -- model attribute name, e.g. "power" or "duration"
+ """
X, Y, num_valid, num_total = self.get_fit_data(by_param, state_or_tran, model_attribute)
if num_valid > 2:
error_function = lambda P, X, y: self._function(P, X) - y
@@ -112,17 +225,42 @@ class AnalyticFunction:
vprint(self.verbose, '[W] Insufficient amount of valid parameter keys, cannot fit {}/{}'.format(state_or_tran, model_attribute))
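Continuing the sketch, a fit call under the same assumptions about by_param (the three distinct parameter combinations above satisfy num_valid > 2):

f.fit(by_param, 'TX', 'power')
# On success, the optimized regression variable values are stored in the
# object and used by later eval() calls; otherwise the warning above is printed.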
def is_predictable(self, param_list):
+ """
+ Return whether the model function can be evaluated on the given parameter values.
+
+ The first value corresponds to the lexically first model parameter, etc.
+ All parameters must be set, not just the ones this function depends on.
+
+ Returns False iff a parameter the function depends on is not numeric
+ (e.g. None).
+ """
for i, param in enumerate(param_list):
if self._dependson[i] and not is_numeric(param):
return False
return True
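A small behaviour sketch, again assuming parameters ['bitrate', 'txbytes'] and a function string that only uses parameter(txbytes):

print(f.is_predictable([None, 100]))   # True: bitrate is unused, txbytes is numeric
print(f.is_predictable([1000, None]))  # False: txbytes is required but not numeric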
def eval(self, param_list, arg_list = []):
+ """
+ Evaluate model function with specified param/arg values.
+
+ arguments:
+ param_list -- parameter values (list of float). First item
+ corresponds to lexically first parameter, etc.
+ arg_list -- argument values (list of float), if arguments are used.
+ """
if len(self._regression_args) == 0:
return self._function(param_list, arg_list)
return self._function(self._regression_args, param_list)
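Continuing the sketch, evaluation after a successful fit only needs the parameter values:

predicted_power = f.eval([1000, 100])
# Uses the regression variable values determined by fit().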
class analytic:
+ """
+ Utilities for analytic description of parameter-dependent model attributes and regression analysis.
+
+ provided functions:
+ functions -- retrieve pre-defined set of regression function candidates
+ function_powerset -- combine several per-parameter functions into a single AnalyticFunction
+ """
+
_num0_8 = np.vectorize(lambda x: 8 - bin(int(x)).count("1"))
_num0_16 = np.vectorize(lambda x: 16 - bin(int(x)).count("1"))
_num1 = np.vectorize(lambda x: bin(int(x)).count("1"))
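These vectorized helpers count set and unset bits of their argument; a standalone sketch of their behaviour:

import numpy as np

_num1 = np.vectorize(lambda x: bin(int(x)).count("1"))
_num0_8 = np.vectorize(lambda x: 8 - bin(int(x)).count("1"))

print(_num1(0b1101))    # 3 -- three bits are set
print(_num0_8(0b1101))  # 5 -- five of the lower eight bits are zero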
@@ -147,6 +285,21 @@ class analytic:
}
def functions(safe_functions_enabled = False):
+ """
+ Retrieve pre-defined set of regression function candidates.
+
+ Returns a dict of functions which are typical for energy/timing
+ behaviour of embedded hardware, e.g. linear, exponential or inverse
+ dependency on a configuration setting/runtime variable.
+
+ arguments:
+ safe_functions_enabled -- Include "safe" variants of functions with
+ limited argument range, e.g. a safe
+ inverse which returns 1 when dividing by 0.
+
+ Each function is a ParamFunction object. In most cases, two regression
+ variables are expected.
+ """
functions = {
'linear' : ParamFunction(
lambda reg_param, model_param: reg_param[0] + reg_param[1] * model_param,
@@ -221,6 +374,7 @@ class analytic:
return functions
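A short usage sketch; the import path is an assumption, and the 'linear' candidate is evaluated with hand-picked regression variables:

from functions import analytic  # assumed import path

candidates = analytic.functions()
linear = candidates['linear']
print(linear.eval([2.0, 0.5], 10))  # 2.0 + 0.5 * 10 = 7.0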
def _fmap(reference_type, reference_name, function_type):
+ """Map arg/parameter name and best-fit function name to function text suitable for AnalyticFunction."""
ref_str = '{}({})'.format(reference_type,reference_name)
if function_type == 'linear':
return ref_str
@@ -240,10 +394,29 @@ class analytic:
return 'np.sqrt({})'.format(ref_str)
return 'analytic._{}({})'.format(function_type, ref_str)
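Based on the branches visible above, the mapping behaves roughly as follows (a sketch; _fmap is an internal helper, and the second call assumes 'num1' is not matched by one of the elided branches):

analytic._fmap('parameter', 'txbytes', 'linear')
# -> 'parameter(txbytes)'
analytic._fmap('function_arg', 0, 'num1')
# -> 'analytic._num1(function_arg(0))'  (fallback branch shown above)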
- def function_powerset(function_descriptions, parameter_names, num_args):
+ def function_powerset(fit_results, parameter_names, num_args):
+ """
+ Combine per-parameter regression results into a single multi-dimensional function.
+
+ arguments:
+ fit_results -- results dict. One element per parameter, each containing
+ a dict of the form {'best' : name of function with best fit}.
+ Must not include parameters which do not influence the model attribute.
+ Example: {'txpower' : {'best': 'exponential'}}
+ parameter_names -- Parameter names, including those left
+ out in fit_results because they do not influence the model attribute.
+ Must be sorted lexically.
+ Example: ['bitrate', 'txpower']
+ num_args -- number of local function arguments, if any. Set to 0 if
+ the model attribute does not belong to a function or if function
+ arguments are not included in the model.
+
+ Returns an AnalyticFunction instance corresponding to the combined
+ function.
+ """
buf = '0'
arg_idx = 0
- for combination in powerset(function_descriptions.items()):
+ for combination in powerset(fit_results.items()):
buf += ' + regression_arg({:d})'.format(arg_idx)
arg_idx += 1
for function_item in combination:
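A usage sketch based on the docstring above: combine per-parameter fit results into a single AnalyticFunction, which can then be fitted and evaluated as in the earlier sketches:

fit_results = {'txpower': {'best': 'exponential'}}
combined = analytic.function_powerset(fit_results, ['bitrate', 'txpower'], 0)
# combined is an AnalyticFunction; use combined.fit(by_param, ...) and
# combined.eval([...]) as shown earlier.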