From 81082d00444d3bfd644f2417ef5c1a34d9569a28 Mon Sep 17 00:00:00 2001 From: Daniel Friesel Date: Fri, 16 Oct 2020 13:58:20 +0200 Subject: Code aus Lennarts BA-repo --- lib/lennart/DataProcessor.py | 374 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 374 insertions(+) create mode 100644 lib/lennart/DataProcessor.py (limited to 'lib/lennart/DataProcessor.py') diff --git a/lib/lennart/DataProcessor.py b/lib/lennart/DataProcessor.py new file mode 100644 index 0000000..df3f41c --- /dev/null +++ b/lib/lennart/DataProcessor.py @@ -0,0 +1,374 @@ +class DataProcessor: + def __init__(self, sync_data, energy_data): + """ + Creates DataProcessor object. + + :param sync_data: input timestamps (SigrokResult) + :param energy_data: List of EnergyTrace datapoints + """ + self.reduced_timestamps = [] + self.modified_timestamps = [] + self.plot_data_x = [] + self.plot_data_y = [] + self.sync_data = sync_data + self.energy_data = energy_data + self.start_offset = 0 + + self.power_sync_watt = 0.011 + self.power_sync_len = 0.7 + self.power_sync_max_outliers = 2 + + def run(self): + """ + Main Function to remove unwanted data, get synchronization points, add the offset and add drift. 
+ :return: None + """ + # remove Dirty Data from previously running program (happens if logic Analyzer Measurement starts earlier than + # the HW Reset from energytrace) + use_data_after_index = 0 + for x in range(1, len(self.sync_data.timestamps)): + if self.sync_data.timestamps[x] - self.sync_data.timestamps[x - 1] > 1.3: + use_data_after_index = x + break + + time_stamp_data = self.sync_data.timestamps[use_data_after_index:] + + last_data = [0, 0, 0, 0] + + # clean timestamp data, if at the end strange ts got added somehow + time_stamp_data = self.removeTooFarDatasets(time_stamp_data) + + self.reduced_timestamps = time_stamp_data + + # NEW + datasync_timestamps = [] + sync_start = 0 + outliers = 0 + pre_outliers_ts = None + for i, energytrace_dataset in enumerate(self.energy_data): + usedtime = energytrace_dataset[0] - last_data[0] # in microseconds + timestamp = energytrace_dataset[0] + usedenergy = energytrace_dataset[3] - last_data[3] + power = usedenergy / usedtime * 10 ** -3 # in watts + if power > 0: + if power > self.power_sync_watt: + if sync_start is None: + sync_start = timestamp + outliers = 0 + else: + # Sync point over or outliers + if outliers == 0: + pre_outliers_ts = timestamp + outliers += 1 + if outliers > self.power_sync_max_outliers: + if sync_start is not None: + if ( + pre_outliers_ts - sync_start + ) / 1_000_000 > self.power_sync_len: + datasync_timestamps.append( + ( + sync_start / 1_000_000, + pre_outliers_ts / 1_000_000, + ) + ) + sync_start = None + + last_data = energytrace_dataset + + self.plot_data_x.append(energytrace_dataset[0] / 1_000_000) + self.plot_data_y.append(power) + + if power > self.power_sync_watt: + if (self.energy_data[-1][0] - sync_start) / 1_000_000 > self.power_sync_len: + datasync_timestamps.append( + (sync_start / 1_000_000, pre_outliers_ts / 1_000_000) + ) + + # print("SYNC SPOTS: ", datasync_timestamps) + # print(time_stamp_data[2]) + + start_offset = datasync_timestamps[0][1] - time_stamp_data[2] + 
start_timestamp = datasync_timestamps[0][1] + + end_offset = datasync_timestamps[-2][0] - (time_stamp_data[-8] + start_offset) + end_timestamp = datasync_timestamps[-2][0] + print(start_timestamp, end_timestamp) + + # print(start_offset, start_timestamp, end_offset, end_timestamp) + + with_offset = self.addOffset(time_stamp_data, start_offset) + + with_drift = self.addDrift( + with_offset, end_timestamp, end_offset, start_timestamp + ) + + self.modified_timestamps = with_drift + + def addOffset(self, input_timestamps, start_offset): + """ + Add begin offset at start + + :param input_timestamps: List of timestamps (float list) + :param start_offset: Timestamp of last EnergyTrace datapoint at the first sync point + :return: List of modified timestamps (float list) + """ + modified_timestamps_with_offset = [] + for x in input_timestamps: + if x + start_offset >= 0: + modified_timestamps_with_offset.append(x + start_offset) + return modified_timestamps_with_offset + + def removeTooFarDatasets(self, input_timestamps): + """ + Removing datasets, that are to far away at ethe end + + :param input_timestamps: List of timestamps (float list) + :return: List of modified timestamps (float list) + """ + modified_timestamps = [] + for i, x in enumerate(input_timestamps): + # print(x - input_timestamps[i - 1], x - input_timestamps[i - 1] < 2.5) + if x - input_timestamps[i - 1] < 1.6: + modified_timestamps.append(x) + else: + break + return modified_timestamps + + def addDrift(self, input_timestamps, end_timestamp, end_offset, start_timestamp): + """ + Add drift to datapoints + + :param input_timestamps: List of timestamps (float list) + :param end_timestamp: Timestamp of first EnergyTrace datapoint at the second last sync point + :param end_offset: the time between end_timestamp and the timestamp of synchronisation signal + :param start_timestamp: Timestamp of last EnergyTrace datapoint at the first sync point + :return: List of modified timestamps (float list) + """ + endFactor 
= (end_timestamp + end_offset - start_timestamp) / ( + end_timestamp - start_timestamp + ) + modified_timestamps_with_drift = [] + for x in input_timestamps: + modified_timestamps_with_drift.append( + ((x - start_timestamp) * endFactor) + start_timestamp + ) + + return modified_timestamps_with_drift + + def plot(self, annotateData=None): + """ + Plots the power usage and the timestamps by logic analyzer + + :param annotateData: List of Strings with labels, only needed if annotated plots are wished + :return: None + """ + + def calculateRectangleCurve(timestamps, min_value=0, max_value=0.160): + import numpy as np + + data = [] + for ts in timestamps: + data.append(ts) + data.append(ts) + + a = np.empty((len(data),)) + a[1::4] = max_value + a[2::4] = max_value + a[3::4] = min_value + a[4::4] = min_value + return data, a # plotting by columns + + import matplotlib.pyplot as plt + + fig, ax = plt.subplots() + + if annotateData: + annot = ax.annotate( + "", + xy=(0, 0), + xytext=(20, 20), + textcoords="offset points", + bbox=dict(boxstyle="round", fc="w"), + arrowprops=dict(arrowstyle="->"), + ) + annot.set_visible(True) + + rectCurve_with_drift = calculateRectangleCurve( + self.modified_timestamps, max_value=max(self.plot_data_y) + ) + + plt.plot(self.plot_data_x, self.plot_data_y, label="Leistung") + + plt.plot( + rectCurve_with_drift[0], + rectCurve_with_drift[1], + "-g", + label="Synchronisationsignale mit Driftfaktor", + ) + + plt.xlabel("Zeit [s]") + plt.ylabel("Leistung [W]") + leg = plt.legend() + + def getDataText(x): + # print(x) + for i, xt in enumerate(self.modified_timestamps): + if xt > x: + return "Value: %s" % annotateData[i - 5] + + def update_annot(x, y, name): + annot.xy = (x, y) + text = name + + annot.set_text(text) + annot.get_bbox_patch().set_alpha(0.4) + + def hover(event): + if event.xdata and event.ydata: + annot.set_visible(False) + update_annot(event.xdata, event.ydata, getDataText(event.xdata)) + annot.set_visible(True) + 
fig.canvas.draw_idle() + + if annotateData: + fig.canvas.mpl_connect("motion_notify_event", hover) + + plt.show() + + def getPowerBetween(self, start, end, state_sleep): # 0.001469 + """ + calculates the average powerusage in interval + NOT SIDE EFFECT FREE, DON'T USE IT EVERYWHERE + + :param start: Start timestamp of interval + :param end: End timestamp of interval + :param state_sleep: Length in seconds of one state, needed for cutting out the UART Sending cycle + :return: float with average power usage + """ + first_index = 0 + all_power = [] + for ind in range(self.start_offset, len(self.plot_data_x)): + first_index = ind + if self.plot_data_x[ind] > start: + break + + nextIndAfterIndex = None + for ind in range(first_index, len(self.plot_data_x)): + nextIndAfterIndex = ind + if ( + self.plot_data_x[ind] > end + or self.plot_data_x[ind] > start + state_sleep + ): + self.start_offset = ind - 1 + break + all_power.append(self.plot_data_y[ind]) + + # TODO Idea remove datapoints that are too far away + def removeSD_Mean_Values(arr): + import numpy + + elements = numpy.array(arr) + + mean = numpy.mean(elements, axis=0) + sd = numpy.std(elements, axis=0) + + return [x for x in arr if (mean - 1 * sd < x < mean + 1.5 * sd)] + + if len(all_power) > 10: + # all_power = removeSD_Mean_Values(all_power) + pass + # TODO algorithm relocate datapoint + + pre_fix_len = len(all_power) + if len(all_power) == 0: + # print("PROBLEM") + all_power.append(self.plot_data_y[nextIndAfterIndex]) + elif len(all_power) == 1: + # print("OKAY") + pass + return pre_fix_len, sum(all_power) / len(all_power) + + def getStatesdfatool(self, state_sleep, algorithm=False): + """ + Calculates the length and energy usage of the states + + :param state_sleep: Length in seconds of one state, needed for cutting out the UART Sending cycle + :param algorithm: possible usage of accuracy algorithm / not implemented yet + :returns: returns list of states and transitions, starting with a transition and ending 
with astate + Each element is a dict containing: + * `isa`: 'state' or 'transition' + * `W_mean`: Mittelwert der Leistungsaufnahme + * `W_std`: Standardabweichung der Leistungsaufnahme + * `s`: Dauer + """ + if algorithm: + raise NotImplementedError + end_transition_ts = None + timestamps_sync_start = 0 + energy_trace_new = list() + + for ts_index in range( + 0 + timestamps_sync_start, int(len(self.modified_timestamps) / 2) + ): + start_transition_ts = self.modified_timestamps[ts_index * 2] + start_transition_ts_timing = self.reduced_timestamps[ts_index * 2] + + if end_transition_ts is not None: + count_dp, power = self.getPowerBetween( + end_transition_ts, start_transition_ts, state_sleep + ) + + # print("STATE", end_transition_ts * 10 ** 6, start_transition_ts * 10 ** 6, (start_transition_ts - end_transition_ts) * 10 ** 6, power) + if ( + (start_transition_ts - end_transition_ts) * 10 ** 6 > 900_000 + and power > self.power_sync_watt * 0.9 + and ts_index > 10 + ): + # remove last transition and stop (upcoming data only sync) + del energy_trace_new[-1] + break + pass + + state = { + "isa": "state", + "W_mean": power, + "W_std": 0.0001, + "s": ( + start_transition_ts_timing - end_transition_ts_timing + ), # * 10 ** 6, + } + energy_trace_new.append(state) + + energy_trace_new[-2]["W_mean_delta_next"] = ( + energy_trace_new[-2]["W_mean"] - energy_trace_new[-1]["W_mean"] + ) + + # get energy end_transition_ts + end_transition_ts = self.modified_timestamps[ts_index * 2 + 1] + count_dp, power = self.getPowerBetween( + start_transition_ts, end_transition_ts, state_sleep + ) + + # print("TRANS", start_transition_ts * 10 ** 6, end_transition_ts * 10 ** 6, (end_transition_ts - start_transition_ts) * 10 ** 6, power) + end_transition_ts_timing = self.reduced_timestamps[ts_index * 2 + 1] + + transition = { + "isa": "transition", + "W_mean": power, + "W_std": 0.0001, + "s": ( + end_transition_ts_timing - start_transition_ts_timing + ), # * 10 ** 6, + "count_dp": count_dp, + } + 
+ if (end_transition_ts - start_transition_ts) * 10 ** 6 > 2_000_000: + # TODO Last data set corrupted? HOT FIX!!!!!!!!!!!! REMOVE LATER + # for x in range(4): + # del energy_trace_new[-1] + # break + pass + + energy_trace_new.append(transition) + # print(start_transition_ts, "-", end_transition_ts, "-", end_transition_ts - start_transition_ts) + return energy_trace_new -- cgit v1.2.3 From f308a519edecd8ee92f2fe18552620a569f48d3b Mon Sep 17 00:00:00 2001 From: Daniel Friesel Date: Mon, 19 Oct 2020 11:47:53 +0200 Subject: debug log --- lib/lennart/DataProcessor.py | 17 ++++++++++++++--- lib/lennart/EnergyInterface.py | 3 +++ lib/lennart/SigrokAPIInterface.py | 3 +++ lib/lennart/SigrokCLIInterface.py | 3 +++ lib/lennart/SigrokInterface.py | 3 +++ 5 files changed, 26 insertions(+), 3 deletions(-) (limited to 'lib/lennart/DataProcessor.py') diff --git a/lib/lennart/DataProcessor.py b/lib/lennart/DataProcessor.py index df3f41c..58cc705 100644 --- a/lib/lennart/DataProcessor.py +++ b/lib/lennart/DataProcessor.py @@ -1,3 +1,8 @@ +import logging + +logger = logging.getLogger(__name__) + + class DataProcessor: def __init__(self, sync_data, energy_data): """ @@ -84,7 +89,7 @@ class DataProcessor: (sync_start / 1_000_000, pre_outliers_ts / 1_000_000) ) - # print("SYNC SPOTS: ", datasync_timestamps) + logger.debug(f"Synchronization areas: {datasync_timestamps}") # print(time_stamp_data[2]) start_offset = datasync_timestamps[0][1] - time_stamp_data[2] @@ -92,9 +97,15 @@ class DataProcessor: end_offset = datasync_timestamps[-2][0] - (time_stamp_data[-8] + start_offset) end_timestamp = datasync_timestamps[-2][0] - print(start_timestamp, end_timestamp) + logger.debug( + f"Measurement area: LA timestamp range [{start_timestamp}, {end_timestamp}]" + ) + logger.debug(f"Start/End offsets: {start_offset} / {end_offset}") - # print(start_offset, start_timestamp, end_offset, end_timestamp) + if end_offset > 10: + logger.warning( + f"synchronization end_offset == {end_offset}. 
It should be no more than a few seconds." + ) with_offset = self.addOffset(time_stamp_data, start_offset) diff --git a/lib/lennart/EnergyInterface.py b/lib/lennart/EnergyInterface.py index 2b23667..19aae84 100644 --- a/lib/lennart/EnergyInterface.py +++ b/lib/lennart/EnergyInterface.py @@ -2,6 +2,9 @@ import re import subprocess from dfatool.lennart.DataInterface import DataInterface +import logging + +logger = logging.getLogger(__name__) class EnergyInterface(DataInterface): diff --git a/lib/lennart/SigrokAPIInterface.py b/lib/lennart/SigrokAPIInterface.py index a2c087a..44da678 100644 --- a/lib/lennart/SigrokAPIInterface.py +++ b/lib/lennart/SigrokAPIInterface.py @@ -6,6 +6,9 @@ import sigrok.core as sr from sigrok.core.classes import * from util.ByteHelper import ByteHelper +import logging + +logger = logging.getLogger(__name__) class SigrokAPIInterface(SigrokInterface): diff --git a/lib/lennart/SigrokCLIInterface.py b/lib/lennart/SigrokCLIInterface.py index d7347ca..b28a8a9 100644 --- a/lib/lennart/SigrokCLIInterface.py +++ b/lib/lennart/SigrokCLIInterface.py @@ -2,6 +2,9 @@ import subprocess import time from dfatool.lennart.SigrokInterface import SigrokInterface +import logging + +logger = logging.getLogger(__name__) class SigrokCLIInterface(SigrokInterface): diff --git a/lib/lennart/SigrokInterface.py b/lib/lennart/SigrokInterface.py index a5eaffc..1733b68 100644 --- a/lib/lennart/SigrokInterface.py +++ b/lib/lennart/SigrokInterface.py @@ -1,6 +1,9 @@ import json from dfatool.lennart.DataInterface import DataInterface +import logging + +logger = logging.getLogger(__name__) # Adding additional parsing functionality -- cgit v1.2.3 From db41acafd2abeca153c9f552e82cd26968092cb3 Mon Sep 17 00:00:00 2001 From: Daniel Friesel Date: Mon, 19 Oct 2020 14:28:43 +0200 Subject: ET+LA: add --plot-traces support, actually calculate standard deviation. Jungejungejungejungejunge... 
--- lib/lennart/DataProcessor.py | 27 +++++++++++++++------------ lib/loader.py | 10 ++++++---- 2 files changed, 21 insertions(+), 16 deletions(-) (limited to 'lib/lennart/DataProcessor.py') diff --git a/lib/lennart/DataProcessor.py b/lib/lennart/DataProcessor.py index 58cc705..27005b1 100644 --- a/lib/lennart/DataProcessor.py +++ b/lib/lennart/DataProcessor.py @@ -1,3 +1,4 @@ +import numpy as np import logging logger = logging.getLogger(__name__) @@ -248,13 +249,13 @@ class DataProcessor: def getPowerBetween(self, start, end, state_sleep): # 0.001469 """ - calculates the average powerusage in interval + calculates the powerusage in interval NOT SIDE EFFECT FREE, DON'T USE IT EVERYWHERE :param start: Start timestamp of interval :param end: End timestamp of interval :param state_sleep: Length in seconds of one state, needed for cutting out the UART Sending cycle - :return: float with average power usage + :return: power measurements in W """ first_index = 0 all_power = [] @@ -297,9 +298,9 @@ class DataProcessor: elif len(all_power) == 1: # print("OKAY") pass - return pre_fix_len, sum(all_power) / len(all_power) + return np.array(all_power) - def getStatesdfatool(self, state_sleep, algorithm=False): + def getStatesdfatool(self, state_sleep, with_traces=False, algorithm=False): """ Calculates the length and energy usage of the states @@ -325,14 +326,14 @@ class DataProcessor: start_transition_ts_timing = self.reduced_timestamps[ts_index * 2] if end_transition_ts is not None: - count_dp, power = self.getPowerBetween( + power = self.getPowerBetween( end_transition_ts, start_transition_ts, state_sleep ) # print("STATE", end_transition_ts * 10 ** 6, start_transition_ts * 10 ** 6, (start_transition_ts - end_transition_ts) * 10 ** 6, power) if ( (start_transition_ts - end_transition_ts) * 10 ** 6 > 900_000 - and power > self.power_sync_watt * 0.9 + and np.mean(power) > self.power_sync_watt * 0.9 and ts_index > 10 ): # remove last transition and stop (upcoming data only 
sync) @@ -342,8 +343,9 @@ class DataProcessor: state = { "isa": "state", - "W_mean": power, - "W_std": 0.0001, + "W_mean": np.mean(power), + "W_std": np.std(power), + "uW": power * 1e6, "s": ( start_transition_ts_timing - end_transition_ts_timing ), # * 10 ** 6, @@ -356,7 +358,7 @@ class DataProcessor: # get energy end_transition_ts end_transition_ts = self.modified_timestamps[ts_index * 2 + 1] - count_dp, power = self.getPowerBetween( + power = self.getPowerBetween( start_transition_ts, end_transition_ts, state_sleep ) @@ -365,12 +367,13 @@ class DataProcessor: transition = { "isa": "transition", - "W_mean": power, - "W_std": 0.0001, + "W_mean": np.mean(power), + "W_std": np.std(power), + "uW": power * 1e6, "s": ( end_transition_ts_timing - start_transition_ts_timing ), # * 10 ** 6, - "count_dp": count_dp, + "count_dp": len(power), } if (end_transition_ts - start_transition_ts) * 10 ** 6 > 2_000_000: diff --git a/lib/loader.py b/lib/loader.py index e004fe1..94a76b3 100644 --- a/lib/loader.py +++ b/lib/loader.py @@ -1656,8 +1656,6 @@ class EnergyTraceWithLogicAnalyzer: self.sync_data = SigrokResult.fromString(log_data[0]) self.energy_data = EnergyInterface.getDataFromString(str(log_data[1])) - pass - def analyze_states(self, traces, offline_index: int): """ Split log data into states and transitions and return duration, energy, and mean power for each element. 
@@ -1693,9 +1691,13 @@ class EnergyTraceWithLogicAnalyzer: dp = DataProcessor(sync_data=self.sync_data, energy_data=self.energy_data) dp.run() energy_trace_new = list() - energy_trace_new.extend(dp.getStatesdfatool(state_sleep=self.state_duration)) + energy_trace_new.extend( + dp.getStatesdfatool( + state_sleep=self.state_duration, with_traces=self.with_traces + ) + ) # Uncomment to plot traces - # dp.plot() # <- plot traces with sync annotatons + # dp.plot() # <- plot traces with sync annotatons # dp.plot(names) # <- plot annotated traces (with state/transition names) energy_trace_new = energy_trace_new[4:] -- cgit v1.2.3 From 7933d189e7aa6d1dfe857e1fd1bba70147380747 Mon Sep 17 00:00:00 2001 From: Daniel Friesel Date: Tue, 20 Oct 2020 11:39:35 +0200 Subject: DataProcessor: improve drift compensation it still isn't satisfactory --- lib/lennart/DataProcessor.py | 57 +++++++++++++++++++++----------------------- 1 file changed, 27 insertions(+), 30 deletions(-) (limited to 'lib/lennart/DataProcessor.py') diff --git a/lib/lennart/DataProcessor.py b/lib/lennart/DataProcessor.py index 27005b1..44fef0a 100644 --- a/lib/lennart/DataProcessor.py +++ b/lib/lennart/DataProcessor.py @@ -51,11 +51,12 @@ class DataProcessor: sync_start = 0 outliers = 0 pre_outliers_ts = None + # TODO only consider the first few and the last few seconds for sync points for i, energytrace_dataset in enumerate(self.energy_data): usedtime = energytrace_dataset[0] - last_data[0] # in microseconds timestamp = energytrace_dataset[0] usedenergy = energytrace_dataset[3] - last_data[3] - power = usedenergy / usedtime * 10 ** -3 # in watts + power = usedenergy / usedtime * 1e-3 # in watts if power > 0: if power > self.power_sync_watt: if sync_start is None: @@ -81,7 +82,7 @@ class DataProcessor: last_data = energytrace_dataset - self.plot_data_x.append(energytrace_dataset[0] / 1_000_000) + self.plot_data_x.append(timestamp / 1_000_000) self.plot_data_y.append(power) if power > self.power_sync_watt: @@ 
-90,8 +91,11 @@ class DataProcessor: (sync_start / 1_000_000, pre_outliers_ts / 1_000_000) ) - logger.debug(f"Synchronization areas: {datasync_timestamps}") - # print(time_stamp_data[2]) + # print(datasync_timestamps) + + # time_stamp_data contains an entry for each level change on the Logic Analyzer input. + # So, time_stamp_data[0] is the first low-to-high transition, time_stamp_data[2] the second, etc. + # -> time_stamp_data[-8] is the low-to-high transition indicating the first after-measurement sync pulse start_offset = datasync_timestamps[0][1] - time_stamp_data[2] start_timestamp = datasync_timestamps[0][1] @@ -99,7 +103,10 @@ class DataProcessor: end_offset = datasync_timestamps[-2][0] - (time_stamp_data[-8] + start_offset) end_timestamp = datasync_timestamps[-2][0] logger.debug( - f"Measurement area: LA timestamp range [{start_timestamp}, {end_timestamp}]" + f"Measurement area: ET timestamp range [{start_timestamp}, {end_timestamp}]" + ) + logger.debug( + f"Measurement area: LA timestamp range [{time_stamp_data[2]}, {time_stamp_data[-8]}]" ) logger.debug(f"Start/End offsets: {start_offset} / {end_offset}") @@ -108,7 +115,7 @@ class DataProcessor: f"synchronization end_offset == {end_offset}. It should be no more than a few seconds." 
) - with_offset = self.addOffset(time_stamp_data, start_offset) + with_offset = np.array(time_stamp_data) + start_offset with_drift = self.addDrift( with_offset, end_timestamp, end_offset, start_timestamp @@ -116,20 +123,6 @@ class DataProcessor: self.modified_timestamps = with_drift - def addOffset(self, input_timestamps, start_offset): - """ - Add begin offset at start - - :param input_timestamps: List of timestamps (float list) - :param start_offset: Timestamp of last EnergyTrace datapoint at the first sync point - :return: List of modified timestamps (float list) - """ - modified_timestamps_with_offset = [] - for x in input_timestamps: - if x + start_offset >= 0: - modified_timestamps_with_offset.append(x + start_offset) - return modified_timestamps_with_offset - def removeTooFarDatasets(self, input_timestamps): """ Removing datasets, that are to far away at ethe end @@ -151,20 +144,22 @@ class DataProcessor: Add drift to datapoints :param input_timestamps: List of timestamps (float list) - :param end_timestamp: Timestamp of first EnergyTrace datapoint at the second last sync point + :param end_timestamp: Timestamp of first EnergyTrace datapoint at the second-to-last sync point :param end_offset: the time between end_timestamp and the timestamp of synchronisation signal :param start_timestamp: Timestamp of last EnergyTrace datapoint at the first sync point :return: List of modified timestamps (float list) """ endFactor = (end_timestamp + end_offset - start_timestamp) / ( end_timestamp - start_timestamp - ) - modified_timestamps_with_drift = [] - for x in input_timestamps: - modified_timestamps_with_drift.append( - ((x - start_timestamp) * endFactor) + start_timestamp - ) - + ) + 0.0001 + # print( + # f"({end_timestamp} + {end_offset} - {start_timestamp}) / ({end_timestamp} - {start_timestamp}) == {endFactor}" + # ) + # Manuelles endFactor += 0.0001 macht es merklich besser + # print(f"endFactor = {endFactor}") + modified_timestamps_with_drift = ( + 
(input_timestamps - start_timestamp) * endFactor + ) + start_timestamp return modified_timestamps_with_drift def plot(self, annotateData=None): @@ -345,11 +340,12 @@ class DataProcessor: "isa": "state", "W_mean": np.mean(power), "W_std": np.std(power), - "uW": power * 1e6, "s": ( start_transition_ts_timing - end_transition_ts_timing ), # * 10 ** 6, } + if with_traces: + state["uW"] = power * 1e6 energy_trace_new.append(state) energy_trace_new[-2]["W_mean_delta_next"] = ( @@ -369,12 +365,13 @@ class DataProcessor: "isa": "transition", "W_mean": np.mean(power), "W_std": np.std(power), - "uW": power * 1e6, "s": ( end_transition_ts_timing - start_transition_ts_timing ), # * 10 ** 6, "count_dp": len(power), } + if with_traces: + transition["uW"] = power * 1e6 if (end_transition_ts - start_transition_ts) * 10 ** 6 > 2_000_000: # TODO Last data set corrupted? HOT FIX!!!!!!!!!!!! REMOVE LATER -- cgit v1.2.3 From 7382103823962305df09b7ed1913597602a175e2 Mon Sep 17 00:00:00 2001 From: Daniel Friesel Date: Tue, 20 Oct 2020 14:58:44 +0200 Subject: DataProcessor: comments --- lib/lennart/DataProcessor.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) (limited to 'lib/lennart/DataProcessor.py') diff --git a/lib/lennart/DataProcessor.py b/lib/lennart/DataProcessor.py index 44fef0a..8373e75 100644 --- a/lib/lennart/DataProcessor.py +++ b/lib/lennart/DataProcessor.py @@ -42,7 +42,7 @@ class DataProcessor: last_data = [0, 0, 0, 0] # clean timestamp data, if at the end strange ts got added somehow - time_stamp_data = self.removeTooFarDatasets(time_stamp_data) + # time_stamp_data = self.removeTooFarDatasets(time_stamp_data) self.reduced_timestamps = time_stamp_data @@ -95,13 +95,14 @@ class DataProcessor: # time_stamp_data contains an entry for each level change on the Logic Analyzer input. # So, time_stamp_data[0] is the first low-to-high transition, time_stamp_data[2] the second, etc. 
- # -> time_stamp_data[-8] is the low-to-high transition indicating the first after-measurement sync pulse + # -> time_stamp_data[2] is the low-to-high transition indicating the end of the first sync pulse + # -> time_stamp_data[-8] is the low-to-high transition indicating the start of the first after-measurement sync pulse - start_offset = datasync_timestamps[0][1] - time_stamp_data[2] start_timestamp = datasync_timestamps[0][1] + start_offset = start_timestamp - time_stamp_data[2] - end_offset = datasync_timestamps[-2][0] - (time_stamp_data[-8] + start_offset) end_timestamp = datasync_timestamps[-2][0] + end_offset = end_timestamp - (time_stamp_data[-8] + start_offset) logger.debug( f"Measurement area: ET timestamp range [{start_timestamp}, {end_timestamp}]" ) @@ -149,17 +150,19 @@ class DataProcessor: :param start_timestamp: Timestamp of last EnergyTrace datapoint at the first sync point :return: List of modified timestamps (float list) """ - endFactor = (end_timestamp + end_offset - start_timestamp) / ( - end_timestamp - start_timestamp - ) + 0.0001 + endFactor = 1 + (end_offset / (end_timestamp - start_timestamp)) # print( - # f"({end_timestamp} + {end_offset} - {start_timestamp}) / ({end_timestamp} - {start_timestamp}) == {endFactor}" + # f"({end_timestamp} + {end_offset} - {start_timestamp}) / ({end_timestamp} - {start_timestamp}) == {endFactor}" # ) # Manuelles endFactor += 0.0001 macht es merklich besser # print(f"endFactor = {endFactor}") + # endFactor assumes that the end of the first sync pulse is at timestamp 0. + # Then, timestamps with drift := timestamps * endFactor. + # As this is not the case (the first sync pulse ends at start_timestamp > 0), we shift the data by first + # removing start_timestamp, then multiplying with endFactor, and then re-adding the start_timestamp. 
modified_timestamps_with_drift = ( - (input_timestamps - start_timestamp) * endFactor - ) + start_timestamp + input_timestamps - start_timestamp + ) * endFactor + start_timestamp return modified_timestamps_with_drift def plot(self, annotateData=None): -- cgit v1.2.3 From 993a3bae7f5c560d8c9c601c9a6b423e9f507785 Mon Sep 17 00:00:00 2001 From: Daniel Friesel Date: Wed, 21 Oct 2020 12:35:59 +0200 Subject: --plot-traces: use the correct time base for each backend --- bin/analyze-archive.py | 21 +++++++++++++++------ lib/lennart/DataProcessor.py | 15 +++++++++------ lib/loader.py | 32 ++++++++++++++++++++++---------- lib/plotter.py | 6 +++++- 4 files changed, 51 insertions(+), 23 deletions(-) (limited to 'lib/lennart/DataProcessor.py') diff --git a/bin/analyze-archive.py b/bin/analyze-archive.py index bd1c824..5d4411b 100755 --- a/bin/analyze-archive.py +++ b/bin/analyze-archive.py @@ -226,10 +226,16 @@ def print_html_model_data(model, pm, pq, lm, lq, am, ai, aq): def plot_traces(preprocessed_data, sot_name): traces = list() + timestamps = list() for trace in preprocessed_data: for state_or_transition in trace["trace"]: if state_or_transition["name"] == sot_name: - traces.extend(map(lambda x: x["uW"], state_or_transition["offline"])) + timestamps.extend( + map(lambda x: x["plot"][0], state_or_transition["offline"]) + ) + traces.extend( + map(lambda x: x["plot"][1], state_or_transition["offline"]) + ) if len(traces) == 0: print( f"""Did not find traces for state or transition {sot_name}. 
Abort.""", @@ -239,12 +245,15 @@ def plot_traces(preprocessed_data, sot_name): if len(traces) > 40: print(f"""Truncating plot to 40 of {len(traces)} traces (random sample)""") - traces = random.sample(traces, 40) + indexes = random.sample(range(len(traces)), 40) + timestamps = [timestamps[i] for i in indexes] + traces = [traces[i] for i in indexes] - plotter.plot_y( + plotter.plot_xy( + timestamps, traces, - xlabel="t [1e-5 s]", - ylabel="P [uW]", + xlabel="t [s]", + ylabel="P [W]", title=sot_name, family=True, ) @@ -463,7 +472,7 @@ if __name__ == "__main__": if name not in uw_per_sot: uw_per_sot[name] = list() for elem in state_or_transition["offline"]: - elem["uW"] = list(elem["uW"]) + elem["plot"] = list(elem["plot"]) uw_per_sot[name].append(state_or_transition) for name, data in uw_per_sot.items(): target = f"{args.export_traces}/{name}.json" diff --git a/lib/lennart/DataProcessor.py b/lib/lennart/DataProcessor.py index 8373e75..7546128 100644 --- a/lib/lennart/DataProcessor.py +++ b/lib/lennart/DataProcessor.py @@ -256,7 +256,8 @@ class DataProcessor: :return: power measurements in W """ first_index = 0 - all_power = [] + all_power = list() + all_ts = list() for ind in range(self.start_offset, len(self.plot_data_x)): first_index = ind if self.plot_data_x[ind] > start: @@ -272,6 +273,7 @@ class DataProcessor: self.start_offset = ind - 1 break all_power.append(self.plot_data_y[ind]) + all_ts.append(self.plot_data_x[ind]) # TODO Idea remove datapoints that are too far away def removeSD_Mean_Values(arr): @@ -293,10 +295,11 @@ class DataProcessor: if len(all_power) == 0: # print("PROBLEM") all_power.append(self.plot_data_y[nextIndAfterIndex]) + all_ts.append(0) elif len(all_power) == 1: # print("OKAY") pass - return np.array(all_power) + return np.array(all_power), np.array(all_ts) def getStatesdfatool(self, state_sleep, with_traces=False, algorithm=False): """ @@ -324,7 +327,7 @@ class DataProcessor: start_transition_ts_timing = self.reduced_timestamps[ts_index * 
2] if end_transition_ts is not None: - power = self.getPowerBetween( + power, timestamps = self.getPowerBetween( end_transition_ts, start_transition_ts, state_sleep ) @@ -348,7 +351,7 @@ class DataProcessor: ), # * 10 ** 6, } if with_traces: - state["uW"] = power * 1e6 + state["plot"] = (timestamps - timestamps[0], power) energy_trace_new.append(state) energy_trace_new[-2]["W_mean_delta_next"] = ( @@ -357,7 +360,7 @@ class DataProcessor: # get energy end_transition_ts end_transition_ts = self.modified_timestamps[ts_index * 2 + 1] - power = self.getPowerBetween( + power, timestamps = self.getPowerBetween( start_transition_ts, end_transition_ts, state_sleep ) @@ -374,7 +377,7 @@ class DataProcessor: "count_dp": len(power), } if with_traces: - transition["uW"] = power * 1e6 + transition["plot"] = (timestamps - timestamps[0], power) if (end_transition_ts - start_transition_ts) * 10 ** 6 > 2_000_000: # TODO Last data set corrupted? HOT FIX!!!!!!!!!!!! REMOVE LATER diff --git a/lib/loader.py b/lib/loader.py index 94a76b3..c981ef7 100644 --- a/lib/loader.py +++ b/lib/loader.py @@ -1431,7 +1431,13 @@ class EnergyTraceWithBarcode: } if self.with_traces: - transition["uW"] = transition_power_W * 1e6 + timestamps = ( + self.interval_start_timestamp[ + transition_start_index:transition_done_index + ] + - self.interval_start_timestamp[transition_start_index] + ) + transition["plot"] = (timestamps, transition_power_W) energy_trace.append(transition) @@ -1451,7 +1457,11 @@ class EnergyTraceWithBarcode: } if self.with_traces: - state["uW"] = state_power_W * 1e6 + timestamps = ( + self.interval_start_timestamp[state_start_index:state_done_index] + - self.interval_start_timestamp[state_start_index] + ) + state["plot"] = (timestamps, state_power_W) energy_trace.append(state) @@ -1690,15 +1700,14 @@ class EnergyTraceWithLogicAnalyzer: dp = DataProcessor(sync_data=self.sync_data, energy_data=self.energy_data) dp.run() - energy_trace_new = list() - energy_trace_new.extend( - 
dp.getStatesdfatool( - state_sleep=self.state_duration, with_traces=self.with_traces - ) + energy_trace_new = dp.getStatesdfatool( + state_sleep=self.state_duration, with_traces=self.with_traces ) # Uncomment to plot traces - # dp.plot() # <- plot traces with sync annotatons - # dp.plot(names) # <- plot annotated traces (with state/transition names) + if offline_index == 0: + # dp.plot() # <- plot traces with sync annotatons + # dp.plot(names) # <- plot annotated traces (with state/transition names) + pass energy_trace_new = energy_trace_new[4:] energy_trace = list() @@ -2091,7 +2100,10 @@ class MIMOSA: } if self.with_traces: - data["uW"] = range_ua * self.voltage + data["plot"] = ( + np.arange(len(range_ua)) * 1e-5, + range_ua * self.voltage * 1e-6, + ) if isa == "transition": # subtract average power of previous state diff --git a/lib/plotter.py b/lib/plotter.py index 16c0145..929ceb9 100755 --- a/lib/plotter.py +++ b/lib/plotter.py @@ -136,7 +136,11 @@ def plot_xy(X, Y, xlabel=None, ylabel=None, title=None, output=None, family=Fals if family: cm = plt.get_cmap("brg", len(Y)) for i, YY in enumerate(Y): - plt.plot(np.arange(len(YY)), YY, "-", markersize=2, color=cm(i)) + if X: + XX = X[i] + else: + XX = np.arange(len(YY)) + plt.plot(XX, YY, "-", markersize=2, color=cm(i)) else: plt.plot(X, Y, "bo", markersize=2) if output: -- cgit v1.2.3 From 9f85fc2f2678c491d1ae8d42995fae0fb931f1fe Mon Sep 17 00:00:00 2001 From: Daniel Friesel Date: Thu, 22 Oct 2020 11:18:05 +0200 Subject: unfuck LA<->ET drift calculation --- lib/lennart/DataProcessor.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) (limited to 'lib/lennart/DataProcessor.py') diff --git a/lib/lennart/DataProcessor.py b/lib/lennart/DataProcessor.py index 7546128..8d762e7 100644 --- a/lib/lennart/DataProcessor.py +++ b/lib/lennart/DataProcessor.py @@ -117,10 +117,16 @@ class DataProcessor: ) with_offset = np.array(time_stamp_data) + start_offset + logger.debug( + f"Measurement area with offset: LA 
timestamp range [{with_offset[2]}, {with_offset[-8]}]" + ) with_drift = self.addDrift( with_offset, end_timestamp, end_offset, start_timestamp ) + logger.debug( + f"Measurement area with drift: LA timestamp range [{with_drift[2]}, {with_drift[-8]}]" + ) self.modified_timestamps = with_drift @@ -150,7 +156,7 @@ class DataProcessor: :param start_timestamp: Timestamp of last EnergyTrace datapoint at the first sync point :return: List of modified timestamps (float list) """ - endFactor = 1 + (end_offset / (end_timestamp - start_timestamp)) + endFactor = 1 + (end_offset / ((end_timestamp - end_offset) - start_timestamp)) # print( # f"({end_timestamp} + {end_offset} - {start_timestamp}) / ({end_timestamp} - {start_timestamp}) == {endFactor}" # ) @@ -216,7 +222,7 @@ class DataProcessor: label="Synchronisationsignale mit Driftfaktor", ) - plt.xlabel("Zeit [s]") + plt.xlabel("Zeit von EnergyTrace [s]") plt.ylabel("Leistung [W]") leg = plt.legend() -- cgit v1.2.3 From b7e25c6ca7746e86ef201b9be7ecec57bf5b2d2a Mon Sep 17 00:00:00 2001 From: Daniel Friesel Date: Thu, 22 Oct 2020 15:05:09 +0200 Subject: Improve sync=la timing restoration. There's still something fishy though... 
--- bin/generate-dfa-benchmark.py | 1 + lib/harness.py | 23 ++++++++++++++-------- lib/lennart/DataProcessor.py | 5 +++-- lib/lennart/SigrokInterface.py | 43 +----------------------------------------- lib/loader.py | 24 ++++++++++++++++++++++- 5 files changed, 43 insertions(+), 53 deletions(-) (limited to 'lib/lennart/DataProcessor.py') diff --git a/bin/generate-dfa-benchmark.py b/bin/generate-dfa-benchmark.py index c8681c5..98c3602 100755 --- a/bin/generate-dfa-benchmark.py +++ b/bin/generate-dfa-benchmark.py @@ -647,6 +647,7 @@ if __name__ == "__main__": log_return_values=need_return_values, repeat=1, energytrace_sync=energytrace_sync, + remove_nop_from_timings=False, # kein einfluss auf ungenauigkeiten ) elif "timing" in opt: harness = OnboardTimerHarness( diff --git a/lib/harness.py b/lib/harness.py index 51013e1..04b14eb 100644 --- a/lib/harness.py +++ b/lib/harness.py @@ -355,10 +355,14 @@ class OnboardTimerHarness(TransitionHarness): the dict `offline_aggregates` with the member `duration`. It contains a list of durations (in us) of the corresponding state/transition for each benchmark iteration. I.e. `.traces[*]['trace'][*]['offline_aggregates']['duration'] = [..., ...]` + :param remove_nop_from_timings: If true, remove the nop duration from reported timings + (i.e., reported timings reflect the estimated transition/state duration with the timer call overhead removed). + If false, do not remove nop durations, so the timings more accurately reflect the elapsed wall-clock time during the benchmark. 
""" - def __init__(self, counter_limits, **kwargs): + def __init__(self, counter_limits, remove_nop_from_timings=True, **kwargs): super().__init__(**kwargs) + self.remove_nop_from_timings = remove_nop_from_timings self.trace_length = 0 ( self.one_cycle_in_us, @@ -422,7 +426,6 @@ class OnboardTimerHarness(TransitionHarness): gpio.led_toggle(1); ptalog.stopTransition(); // ======================= LED SYNC ================================ - arch.sleep_ms(250); }\n\n""" return ret @@ -431,14 +434,17 @@ class OnboardTimerHarness(TransitionHarness): if self.energytrace_sync == "led": ret += "runLASync();\n" ret += "ptalog.passNop();\n" + if self.energytrace_sync == "led": + ret += "arch.sleep_ms(250);\n" ret += super().start_benchmark(benchmark_id) return ret def stop_benchmark(self): ret = "" + ret += super().stop_benchmark() if self.energytrace_sync == "led": ret += "runLASync();\n" - ret += super().stop_benchmark() + ret += "arch.sleep_ms(250);\n" return ret def pass_transition( @@ -498,8 +504,9 @@ class OnboardTimerHarness(TransitionHarness): prev_state_duration_us = ( prev_state_cycles * self.one_cycle_in_us + prev_state_overflow * self.one_overflow_in_us - - self.nop_cycles * self.one_cycle_in_us ) + if self.remove_nop_from_timings: + prev_state_duration_us -= self.nop_cycles * self.one_cycle_in_us final_state = self.traces[self.trace_id]["trace"][-1] if "offline_aggregates" not in final_state: final_state["offline_aggregates"] = {"duration": list()} @@ -561,15 +568,15 @@ class OnboardTimerHarness(TransitionHarness): ) ) duration_us = ( - cycles * self.one_cycle_in_us - + overflow * self.one_overflow_in_us - - self.nop_cycles * self.one_cycle_in_us + cycles * self.one_cycle_in_us + overflow * self.one_overflow_in_us ) prev_state_duration_us = ( prev_state_cycles * self.one_cycle_in_us + prev_state_overflow * self.one_overflow_in_us - - self.nop_cycles * self.one_cycle_in_us ) + if self.remove_nop_from_timings: + duration_us -= self.nop_cycles * self.one_cycle_in_us 
+ prev_state_duration_us -= self.nop_cycles * self.one_cycle_in_us if duration_us < 0: duration_us = 0 # self.traces contains transitions and states, UART output only contains transitions -> use index * 2 diff --git a/lib/lennart/DataProcessor.py b/lib/lennart/DataProcessor.py index 8d762e7..90cc54d 100644 --- a/lib/lennart/DataProcessor.py +++ b/lib/lennart/DataProcessor.py @@ -228,9 +228,10 @@ class DataProcessor: def getDataText(x): # print(x) + dl = len(annotateData) for i, xt in enumerate(self.modified_timestamps): - if xt > x: - return "Value: %s" % annotateData[i - 5] + if xt > x and i >= 4 and i - 5 < dl: + return f"SoT: {annotateData[i - 5]}" def update_annot(x, y, name): annot.xy = (x, y) diff --git a/lib/lennart/SigrokInterface.py b/lib/lennart/SigrokInterface.py index 1733b68..32e8fe2 100644 --- a/lib/lennart/SigrokInterface.py +++ b/lib/lennart/SigrokInterface.py @@ -1,4 +1,5 @@ import json +import numpy as np from dfatool.lennart.DataInterface import DataInterface import logging @@ -62,48 +63,6 @@ class SigrokResult: return SigrokResult(data["timestamps"], data["onBeforeFirstChange"]) pass - @classmethod - def fromTraces(cls, traces): - """ - Generates SigrokResult from ptalog.json traces - - :param traces: traces from dfatool ptalog.json - :return: SigrokResult object - """ - timestamps = [0] - for tr in traces: - for t in tr["trace"]: - # print(t['online_aggregates']['duration'][0]) - timestamps.append( - timestamps[-1] + (t["online_aggregates"]["duration"][0] * 10 ** -6) - ) - - # print(timestamps) - # prepend FAKE Sync point - t_neu = [0.0, 0.0000001, 1.0, 1.00000001] - for i, x in enumerate(timestamps): - t_neu.append( - round(float(x) + t_neu[3] + 0.20, 6) - ) # list(map(float, t_ist.split(",")[:i+1])) - - # append FAKE Sync point / eine überschneidung - # [30.403632, 30.403639, 31.407265, 31.407271] - # appendData = [29.144855,30.148495,30.148502,30.403632,30.403639,31.407265,31.407271,] - appendData = [0, 1.000001, 1.000002, 1.25, 1.2500001] - 
- # TODO future work here, why does the sync not work completely - t_neu[-1] = ( - t_neu[-2] + (t_neu[-1] - t_neu[-2]) * 0.9 - ) # Weird offset failure with UART stuff - - offset = t_neu[-1] - appendData[0] - for x in appendData: - t_neu.append(x + offset) - - # print(t_neu) - print(len(t_neu)) - return SigrokResult(t_neu, False) - class SigrokInterface(DataInterface): def __init__(self, sample_rate, driver="fx2lafw", filename="temp/sigrok.log"): diff --git a/lib/loader.py b/lib/loader.py index 3f6b5ec..2f8e603 100644 --- a/lib/loader.py +++ b/lib/loader.py @@ -1780,9 +1780,31 @@ class EnergyTraceWithTimer(EnergyTraceWithLogicAnalyzer): pass def analyze_states(self, traces, offline_index: int): + + # Start "Synchronization pulse" + timestamps = [0, 10, 1e6, 1e6 + 10] + + # 250ms zwischen Ende der LASync und Beginn der Messungen + # (wegen sleep(250) in der generierten multipass-runLASync-Funktion) + timestamps.append(timestamps[-1] + 240e3) + for tr in traces: + for t in tr["trace"]: + # print(t['online_aggregates']['duration'][0]) + timestamps.append( + timestamps[-1] + t["online_aggregates"]["duration"][offline_index] + ) + + print(timestamps) + + # Stop "Synchronization pulses". The first one has already started. 
+ timestamps.extend(np.array([10, 1e6, 1e6 + 10]) + timestamps[-1]) + timestamps.extend(np.array([0, 10, 1e6, 1e6 + 10]) + 250e3 + timestamps[-1]) + + timestamps = list(np.array(timestamps) * 1e-6) + from dfatool.lennart.SigrokInterface import SigrokResult - self.sync_data = SigrokResult.fromTraces(traces) + self.sync_data = SigrokResult(timestamps, False) return super().analyze_states(traces, offline_index) -- cgit v1.2.3 From 34b1b9466af7501e651cf3664d96e12c8126f116 Mon Sep 17 00:00:00 2001 From: Daniel Friesel Date: Fri, 23 Oct 2020 08:00:30 +0200 Subject: minor refactoring --- lib/lennart/DataProcessor.py | 2 +- lib/loader.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'lib/lennart/DataProcessor.py') diff --git a/lib/lennart/DataProcessor.py b/lib/lennart/DataProcessor.py index 90cc54d..07d2a2b 100644 --- a/lib/lennart/DataProcessor.py +++ b/lib/lennart/DataProcessor.py @@ -188,10 +188,10 @@ class DataProcessor: data.append(ts) a = np.empty((len(data),)) + a[0::4] = min_value a[1::4] = max_value a[2::4] = max_value a[3::4] = min_value - a[4::4] = min_value return data, a # plotting by columns import matplotlib.pyplot as plt diff --git a/lib/loader.py b/lib/loader.py index 71183c5..0e46755 100644 --- a/lib/loader.py +++ b/lib/loader.py @@ -578,7 +578,7 @@ class RawData: # TODO es gibt next_transitions ohne 'plan' return True - def _merge_online_and_offline(self, measurement): + def _merge_online_and_mimosa(self, measurement): # Edits self.traces_by_fileno[measurement['fileno']][*]['trace'][*]['offline'] # and self.traces_by_fileno[measurement['fileno']][*]['trace'][*]['offline_aggregates'] in place # (appends data from measurement['energy_trace']) @@ -1050,7 +1050,7 @@ class RawData: if version == 0 or version == 1: if self._measurement_is_valid_01(measurement): - self._merge_online_and_offline(measurement) + self._merge_online_and_mimosa(measurement) num_valid += 1 else: logger.warning( -- cgit v1.2.3 From 
df04f15d9132ec6b2781edfccc5ad8d33dd3cdd9 Mon Sep 17 00:00:00 2001 From: Daniel Friesel Date: Fri, 30 Oct 2020 11:15:45 +0100 Subject: Add DFATOOL_EXPORT_LASYNC variable for ET+LA / ET+Timer sync eval --- lib/lennart/DataProcessor.py | 17 +++++++++++++++++ lib/loader.py | 7 ++++++- lib/utils.py | 13 +++++++++++++ 3 files changed, 36 insertions(+), 1 deletion(-) (limited to 'lib/lennart/DataProcessor.py') diff --git a/lib/lennart/DataProcessor.py b/lib/lennart/DataProcessor.py index 07d2a2b..b46315a 100644 --- a/lib/lennart/DataProcessor.py +++ b/lib/lennart/DataProcessor.py @@ -171,6 +171,23 @@ class DataProcessor: ) * endFactor + start_timestamp return modified_timestamps_with_drift + def export_sync(self): + # [1st trans start, 1st trans stop, 2nd trans start, 2nd trans stop, ...] + sync_timestamps = list() + + for i in range(4, len(self.modified_timestamps) - 8, 2): + sync_timestamps.append( + (self.modified_timestamps[i], self.modified_timestamps[i + 1]) + ) + + # EnergyTrace timestamps + timestamps = self.plot_data_x + + # EnergyTrace power values + power = self.plot_data_y + + return {"sync": sync_timestamps, "timestamps": timestamps, "power": power} + def plot(self, annotateData=None): """ Plots the power usage and the timestamps by logic analyzer diff --git a/lib/loader.py b/lib/loader.py index 0e1e8c9..0c7ad91 100644 --- a/lib/loader.py +++ b/lib/loader.py @@ -12,7 +12,7 @@ import tarfile import hashlib from multiprocessing import Pool -from .utils import running_mean, soft_cast_int +from .utils import NpEncoder, running_mean, soft_cast_int logger = logging.getLogger(__name__) @@ -1714,6 +1714,11 @@ class EnergyTraceWithLogicAnalyzer: dp.plot() # <- plot traces with sync annotatons # dp.plot(names) # <- plot annotated traces (with state/transition names) pass + if os.getenv("DFATOOL_EXPORT_LASYNC") is not None: + filename = os.getenv("DFATOOL_EXPORT_LASYNC") + f"_{offline_index}.json" + with open(filename, "w") as f: + json.dump(dp.export_sync(), f, 
cls=NpEncoder) + logger.info(f"Exported data and LA sync timestamps to {filename}") energy_trace_new = energy_trace_new[4:] energy_trace = list() diff --git a/lib/utils.py b/lib/utils.py index d28ecda..adcb534 100644 --- a/lib/utils.py +++ b/lib/utils.py @@ -1,3 +1,4 @@ +import json import numpy as np import re import logging @@ -6,6 +7,18 @@ arg_support_enabled = True logger = logging.getLogger(__name__) +class NpEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, np.integer): + return int(obj) + elif isinstance(obj, np.floating): + return float(obj) + elif isinstance(obj, np.ndarray): + return obj.tolist() + else: + return super(NpEncoder, self).default(obj) + + def running_mean(x: np.ndarray, N: int) -> np.ndarray: """ Compute `N` elements wide running average over `x`. -- cgit v1.2.3