author     Daniel Friesel <derf@finalrewind.org>  2019-12-13 14:19:16 +0100
committer  Daniel Friesel <derf@finalrewind.org>  2019-12-13 14:19:16 +0100
commit     6dea5e30f9023b7748d9249f80aaeeded6011bc1 (patch)
tree       1e4c41363c150b2862274493a952816bba3db9d9 /lib
parent     de382bd34807a32af948a017164f0fb91cb2e6cb (diff)
Add Parser and Lexer for timed sequences (words with loops); use them in workload
Diffstat (limited to 'lib')
-rwxr-xr-x  lib/automata.py   26
-rw-r--r--  lib/lex.py       156
2 files changed, 172 insertions, 10 deletions
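
The commit adds two lexer/parser pairs in lib/lex.py: TimedWordLexer/TimedWordParser for plain timed words (semicolon-terminated calls) and TimedSequenceLexer/TimedSequenceParser for timed sequences, i.e. timed words interleaved with "cycle(...) { ... }" loops. A sketch of the input format: the first line is the example formerly in lex.py's __main__ block, the second follows the grammar added below with purely illustrative identifiers and arguments.

    init(); sleep(12345); foo(_, 7);
    init(); cycle(c1) { poll(); sleep(250); } shutdown();
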
diff --git a/lib/automata.py b/lib/automata.py
index 39ac5df..02dbfb6 100755
--- a/lib/automata.py
+++ b/lib/automata.py
@@ -14,7 +14,31 @@ def _dict_to_list(input_dict: dict) -> list:
class SimulationResult:
+ """
+ Duration, Energy, and state/parameter results from PTA.simulate on a single run.
+
+ :param duration: run duration in s
+ :param duration_mae: Mean Absolute Error of duration, assuming cycle-perfect delay/sleep calls
+ :param duration_mape: Mean Absolute Percentage Error of duration, assuming cycle-perfect delay/sleep calls
+ :param energy: run energy in J
+ :param energy_mae: Mean Absolute Error of energy
+ :param energy_mape: Mean Absolute Percentage Error of energy
+ :param end_state: Final `State` of run
+ :param parameters: Final parameters of run
+ :param mean_power: mean power during run in W
+ """
+
def __init__(self, duration: float, energy: float, end_state, parameters, duration_mae: float = None, energy_mae: float = None):
+ u"""
+ Create a new SimulationResult.
+
+ :param duration: run duration in µs
+ :param duration_mae: Mean Absolute Error of duration in µs, default None
+ :param energy: run energy in pJ
+ :param energy_mae: Mean Absolute Error of energy in pJ, default None
+ :param end_state: Final `State` after simulation run
+ :param parameters: Parameter values after simulation run
+ """
self.duration = duration * 1e-6
self.duration_mae = duration_mae * 1e-6
self.duration_mape = self.duration_mae * 100 / self.duration
@@ -968,7 +992,7 @@ class PTA:
else:
function_name = function[0]
function_args = function[1:]
- if function_name is None:
+ if function_name is None or function_name == '_':
duration = function_args[0]
total_energy += state.get_energy(duration, param_dict)
if state.power.value_error is not None:
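
The only behavioural change in automata.py is that a timed-word entry whose name is '_' is now handled like an entry with name None, i.e. as a plain sleep whose first argument is the duration rather than a driver function call. A minimal sketch of the two equivalent entry forms PTA.simulate now accepts (names and values are made up; the duration unit follows the SimulationResult docstring, µs):

    # both entries mean "remain in the current state for 12345 µs"
    word = [
        (None, 12345),  # accepted before this commit
        ('_', 12345),   # accepted now; '_' is what the new timed-word syntax can produce
    ]
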
diff --git a/lib/lex.py b/lib/lex.py
index c0323fa..4388162 100644
--- a/lib/lex.py
+++ b/lib/lex.py
@@ -13,6 +13,28 @@ class TimedWordLexer(Lexer):
FUNCTIONSEP = r';'
+class TimedSequenceLexer(Lexer):
+ tokens = {LPAREN, RPAREN, LBRACE, RBRACE, CYCLE, IDENTIFIER, NUMBER, ARGSEP, FUNCTIONSEP}
+ ignore = ' \t'
+
+ LPAREN = r'\('
+ RPAREN = r'\)'
+ LBRACE = r'\{'
+ RBRACE = r'\}'
+ CYCLE = r'cycle'
+ IDENTIFIER = r'[a-zA-Z_][a-zA-Z0-9_]*'
+ NUMBER = r'[0-9e.]+'
+ ARGSEP = r','
+ FUNCTIONSEP = r';'
+
+ def error(self, t):
+ print("Illegal character '%s'" % t.value[0])
+ if t.value[0] == '{' and t.value.find('}'):
+ self.index += 1 + t.value.find('}')
+ else:
+ self.index += 1
+
+
class TimedWordParser(Parser):
tokens = TimedWordLexer.tokens
@@ -46,17 +68,133 @@ class TimedWordParser(Parser):
@_('NUMBER')
def arg(self, p):
- return [float(p.NUMBER)]
+ return float(p.NUMBER)
@_('IDENTIFIER')
def arg(self, p):
- return [p.IDENTIFIER]
+ return p.IDENTIFIER
+
+
+class TimedSequenceParser(Parser):
+ tokens = TimedSequenceLexer.tokens
+
+ @_('timedSequenceL', 'timedSequenceW')
+ def timedSequence(self, p):
+ return p[0]
+
+ @_('loop')
+ def timedSequenceL(self, p):
+ return [p.loop]
+
+ @_('loop timedSequenceW')
+ def timedSequenceL(self, p):
+ ret = [p.loop]
+ ret.extend(p.timedSequenceW)
+ return ret
+
+ @_('timedWord')
+ def timedSequenceW(self, p):
+ return [p.timedWord]
+
+ @_('timedWord timedSequenceL')
+ def timedSequenceW(self, p):
+ ret = [p.timedWord]
+ ret.extend(p.timedSequenceL)
+ return ret
+
+ @_('timedSymbol FUNCTIONSEP timedWord')
+ def timedWord(self, p):
+ p.timedWord.word.insert(0, p.timedSymbol)
+ return p.timedWord
+
+ @_('timedSymbol FUNCTIONSEP')
+ def timedWord(self, p):
+ return TimedWord(word=[p.timedSymbol])
+
+ @_('CYCLE LPAREN IDENTIFIER RPAREN LBRACE timedWord RBRACE')
+ def loop(self, p):
+ return Workload(p.IDENTIFIER, p.timedWord)
+
+ @_('IDENTIFIER', 'IDENTIFIER LPAREN RPAREN')
+ def timedSymbol(self, p):
+ return (p.IDENTIFIER,)
+
+ @_('IDENTIFIER LPAREN args RPAREN')
+ def timedSymbol(self, p):
+ return (p.IDENTIFIER, *p.args)
+
+ @_('arg ARGSEP args')
+ def args(self, p):
+ ret = [p.arg]
+ ret.extend(p.args)
+ return ret
+
+ @_('arg')
+ def args(self, p):
+ return [p.arg]
+
+ @_('NUMBER')
+ def arg(self, p):
+ return float(p.NUMBER)
+
+ @_('IDENTIFIER')
+ def arg(self, p):
+ return p.IDENTIFIER
+
+ def error(self, p):
+ if p:
+ print("Syntax error at token", p.type)
+ # Just discard the token and tell the parser it's okay.
+ self.errok()
+ else:
+ print("Syntax error at EOF")
+
+
+class TimedWord:
+ def __init__(self, word_string=None, word=list()):
+ if word_string is not None:
+ lexer = TimedWordLexer()
+ parser = TimedWordParser()
+ self.word = parser.parse(lexer.tokenize(word_string))
+ else:
+ self.word = word
+
+ def __getitem__(self, item):
+ return self.word[item]
+
+ def __repr__(self):
+ ret = list()
+ for symbol in self.word:
+ ret.append('{}({})'.format(symbol[0], ', '.join(map(str, symbol[1:]))))
+ return 'TimedWord<"{}">'.format('; '.join(ret))
+
+
+class Workload:
+ def __init__(self, name, word):
+ self.name = name
+ self.word = word
+
+ def __getitem__(self, item):
+ return self.word[item]
+
+ def __repr__(self):
+ return 'Workload("{}", {})'.format(self.name, self.word)
+
+
+class TimedSequence:
+ def __init__(self, seq_string=None, seq=list()):
+ if seq_string is not None:
+ lexer = TimedSequenceLexer()
+ parser = TimedSequenceParser()
+ self.seq = parser.parse(lexer.tokenize(seq_string))
+ else:
+ self.seq = seq
+ def __getitem__(self, item):
+ return self.seq[item]
-if __name__ == '__main__':
- data = 'init(); sleep(12345); foo(_, 7);'
- lexer = TimedWordLexer()
- parser = TimedWordParser()
- for tok in lexer.tokenize(data):
- print('type={}, value={}'.format(tok.type, tok.value))
- print(parser.parse(lexer.tokenize(data)))
+ def __repr__(self):
+ ret = list()
+ for symbol in self.seq:
+ ret.append('{}'.format(symbol))
+ return 'TimedSequence(seq=[{}])'.format(', '.join(ret))
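
A short usage sketch of the new classes, bypassing the parsers and building the objects directly. The import path and all identifiers are assumptions for illustration; only the constructors and __repr__ formats come from the code above.

    from lex import TimedWord, Workload, TimedSequence

    setup = TimedWord(word=[('init',), ('configure', 19200)])
    body = TimedWord(word=[('send', 16), ('_', 12345)])  # '_' entry: plain sleep, see the automata.py hunk
    seq = TimedSequence(seq=[setup, Workload('tx_loop', body)])

    print(seq)
    # TimedSequence(seq=[TimedWord<"init(); configure(19200)">,
    #                    Workload("tx_loop", TimedWord<"send(16); _(12345)">)])
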