author     Birte Kristina Friesel <birte.friesel@uos.de>    2024-03-11 14:08:21 +0100
committer  Birte Kristina Friesel <birte.friesel@uos.de>    2024-03-11 14:08:21 +0100
commit     1e29075c9f625947a82c86d7304ed20eb6f06e64 (patch)
tree       789b3fe8b253c0069b97d5c47e625513ee37a32c /lib/loader
parent     3a6089bc4258e884b5ce909538b2f49a3dc87e78 (diff)
Move Logfile and CSVfile from utils to a new loader class
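
The new loader classes target two plain-text formats. Logfile reads and writes
benchmark logs in which each observation is a single "[::]"-prefixed line of
the form "name | key=value parameters | key=value attributes", for example
(names and values below are illustrative, not taken from an actual run):

    [::] TX | datarate=250 txpower=3 | power_mW=31.5 duration_us=180

CSVfile reads tables whose header row names the parameters in the middle
columns and the measured attribute in the last column; the first column is
ignored, and each subsequent row holds the corresponding values.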
Diffstat (limited to 'lib/loader')
-rw-r--r--   lib/loader/__init__.py   13
-rw-r--r--   lib/loader/plain.py      97
2 files changed, 104 insertions, 6 deletions
diff --git a/lib/loader/__init__.py b/lib/loader/__init__.py
index caa2212..9dc83e3 100644
--- a/lib/loader/__init__.py
+++ b/lib/loader/__init__.py
@@ -20,6 +20,7 @@ from .energytrace import (
     EnergyTraceWithTimer,
 )
 from .keysight import DLog, KeysightCSV
+from .plain import Logfile, CSVfile
 from .mimosa import MIMOSA
 
 logger = logging.getLogger(__name__)
@@ -667,9 +668,9 @@ class RawData:
                         "offline_aggregates", None
                     )
                     if offline_aggregates:
-                        state_or_transition[
-                            "online_aggregates"
-                        ] = offline_aggregates
+                        state_or_transition["online_aggregates"] = (
+                            offline_aggregates
+                        )
 
             for j, traces in enumerate(ptalog["traces"]):
                 self.filenames.append("{}#{}".format(filename, j))
@@ -736,9 +737,9 @@ class RawData:
                         "offline_aggregates", None
                     )
                     if offline_aggregates:
-                        state_or_transition[
-                            "online_aggregates"
-                        ] = offline_aggregates
+                        state_or_transition["online_aggregates"] = (
+                            offline_aggregates
+                        )
             for j, traces in enumerate(ptalog["traces"]):
                 self.filenames.append("{}#{}".format(filename, j))
                 self.traces_by_fileno.append(traces)
diff --git a/lib/loader/plain.py b/lib/loader/plain.py
new file mode 100644
index 0000000..5aa4293
--- /dev/null
+++ b/lib/loader/plain.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python3
+
+from ..utils import soft_cast_int, soft_cast_float
+import re
+
+
+class CSVfile:
+    def __init__(self):
+        pass
+
+    def load(self, f):
+        observations = list()
+        for lineno, line in enumerate(f):
+            if lineno == 0:
+                param_names = line.split(",")[1:-1]
+                attr_names = line.removesuffix("\n").split(",")[-1:]
+            else:
+                param_values = list(map(soft_cast_int, line.split(",")[1:-1]))
+                attr_values = list(
+                    map(soft_cast_float, line.removesuffix("\n").split(",")[-1:])
+                )
+                observations.append(
+                    {
+                        "name": "CSVFile",
+                        "param": dict(zip(param_names, param_values)),
+                        "attribute": dict(zip(attr_names, attr_values)),
+                    }
+                )
+        return observations
+
+
+class Logfile:
+    def __init__(self):
+        pass
+
+    def kv_to_param(self, kv_str, cast):
+        try:
+            key, value = kv_str.split("=")
+            value = cast(value)
+            return key, value
+        except ValueError:
+            logger.warning(f"Invalid key-value pair: {kv_str}")
+            raise
+
+    def kv_to_param_f(self, kv_str):
+        return self.kv_to_param(kv_str, soft_cast_float)
+
+    def kv_to_param_i(self, kv_str):
+        return self.kv_to_param(kv_str, soft_cast_int)
+
+    def load(self, f):
+        observations = list()
+        for lineno, line in enumerate(f):
+            m = re.search(r"\[::\] *([^|]*?) *[|] *([^|]*?) *[|] *(.*)", line)
+            if m:
+                name_str = m.group(1)
+                param_str = m.group(2)
+                attr_str = m.group(3)
+                try:
+                    param = dict(map(self.kv_to_param_i, param_str.split()))
+                    attr = dict(map(self.kv_to_param_f, attr_str.split()))
+                    observations.append(
+                        {
+                            "name": name_str,
+                            "param": param,
+                            "attribute": attr,
+                        }
+                    )
+                except ValueError:
+                    logger.warning(
+                        f"Error parsing {f}: invalid key-value pair in line {lineno+1}"
+                    )
+                    logger.warning(f"Offending entry:\n{line}")
+                    raise
+
+        return observations
+
+    def dump(self, observations, f):
+        for observation in observations:
+            name = observation["name"]
+            param = observation["param"]
+            attr = observation["attribute"]
+
+            param_str = " ".join(
+                map(
+                    lambda kv: f"{kv[0]}={kv[1]}",
+                    sorted(param.items(), key=lambda kv: kv[0]),
+                )
+            )
+            attr_str = " ".join(
+                map(
+                    lambda kv: f"{kv[0]}={kv[1]}",
+                    sorted(attr.items(), key=lambda kv: kv[0]),
+                )
+            )
+
+            print(f"[::] {name} | {param_str} | {attr_str}", file=f)
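
A minimal usage sketch for the new classes follows. It assumes the lib/
directory is importable as a top-level package named dfatool and that a log
file named benchmark.log exists; both names are placeholders, not part of
this commit.

    from dfatool.loader.plain import Logfile

    with open("benchmark.log") as f:
        observations = Logfile().load(f)

    # Each observation is a dict with "name", "param", and "attribute" keys.
    for obs in observations:
        print(obs["name"], obs["param"], obs["attribute"])

    # NB: plain.py calls logger.warning without defining logger, so a
    # malformed key=value pair raises NameError rather than logging a warning.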