Commit e89c0a61 authored by Maxime Perrotin

Augment syntax with syntactic sugar for observers

parent 8c9e9341
@@ -124,6 +124,9 @@ The background pattern was downloaded from www.subtlepatterns.com
Changelog
=========
**3.5.0 (04/2021)**
- Support Input/Output expressions for model checkers
**3.4.6 (04/2021)**
- Introduce monitors to support model checking observers
...
This source diff could not be displayed because it is too large. You can view the blob instead.
@@ -1629,6 +1629,70 @@ def unary_expression(root, context):
    return expr, errors, warnings
def io_expression(root, context):
    ''' Expressions used in the context of observers (for model checking):
            input
            input x [from P] to F
            output
            output X from P
        Since this is syntactic sugar, we transform these expressions into the
        regular form, based on the known structure of events: the
        Observable_Event type that is generated by kazoo.
    '''
    inputString = get_input_string(root)
    event_kind = "{kind}_event"
    target_option = " and then event.{kind}_event.{target} = {function}"
    msg_name = " and then present(event.{kind}_event.event.{function}.msg_{direction}) = {msg}"
    string = "present(event) = "
    msg, src, dest = "", "", ""
    if root.type == lexer.INPUT_EXPRESSION:
        kind = "input"
        direction = "in"
    else:
        kind = "output"
        direction = "out"
    string += event_kind.format(kind=kind)
    for child in root.getChildren():
        if child.type == lexer.ID:
            msg = child.text
        elif child.type == lexer.FROM:
            src = child.getChild(0).text
            string += target_option.format(kind=kind,
                                           target="source",
                                           function=src)
        elif child.type == lexer.TO:
            dest = child.getChild(0).text
            string += target_option.format(kind=kind,
                                           target="dest",
                                           function=dest)
        else:
            raise NotImplementedError("In io_expression")
    if msg:
        string += msg_name.format(kind=kind,
                                  function=dest if kind == "input" else src,
                                  direction=direction,
                                  msg=msg)
    # Parse the expanded string with the regular expression parser
    parser = parser_init(string=string)
    new_root = parser.expression()
    tree = new_root.tree
    tree.token_stream = parser.getTokenStream()
    expr, errors, warnings = expression(tree, context)
    expr.inputString = inputString
    return expr, errors, warnings
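# For illustration only, a sketch of what the desugaring above produces,
# derived from the format strings in io_expression (the function names F and P
# and the message names are placeholders; the field names come from the
# kazoo-generated Observable_Event type):
#
#   'output X from P'  is rewritten to:
#       present(event) = output_event
#           and then event.output_event.source = P
#           and then present(event.output_event.event.P.msg_out) = X
#
#   'input x to F'     is rewritten to:
#       present(event) = input_event
#           and then event.input_event.dest = F
#           and then present(event.input_event.event.F.msg_in) = x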
def expression(root, context, pos='right'):
    ''' Expression analysis (e.g. 5+5*hello(world)!foo) '''
    logic = (lexer.OR, lexer.AND, lexer.XOR, lexer.IMPLIES)
@@ -1677,6 +1741,10 @@ def expression(root, context, pos='right'):
        # If the procedure is not defined with a return value, a TypeError
        # has been raised in compare_type, so no need to check it again here
        return prim, errs, warns
    elif root.type == lexer.INPUT_EXPRESSION:
        return io_expression(root, context)
    elif root.type == lexer.OUTPUT_EXPRESSION:
        return io_expression(root, context)
    else:
        raise NotImplementedError(sdl92Parser.tokenNamesMap[root.type] +
                                  ' - line ' + str(root.getLine()))
@@ -5811,7 +5879,7 @@ def parseSingleElement(elem:str='', string:str='', context=None):
        # so we have to discard exceptions sent by e.g. find_variable
        pass
    except NotImplementedError as err:
        syntax_errors.append('Syntax error in expression - Fix it.') syntax_errors.append('Expression syntax not supported.')
    except SyntaxError as err:
        syntax_errors.append(err.text)
    # Check that the whole string has been consumed (ANTLR may stop parsing
...
@@ -141,7 +141,7 @@ except ImportError:
__all__ = ['opengeode', 'SDL_Scene', 'SDL_View', 'parse']
__version__ = '3.4.6' __version__ = '3.5.0'
if hasattr(sys, 'frozen'):
    # Detect if we are running on Windows (py2exe-generated)
...
# $ANTLR 3.5.2 sdl92.g 2021-04-07 10:19:04 # $ANTLR 3.5.2 sdl92.g 2021-04-13 11:49:19
import sys
from antlr3 import *
@@ -10,13 +10,13 @@ HIDDEN = BaseRecognizer.HIDDEN
# token types
EOF=-1
T__226=226
T__227=227
T__228=228
T__229=229
T__230=230
T__231=231
T__232=232
T__233=233
T__234=234
A=4
ACTION=5
ACTIVE=6
@@ -118,133 +118,135 @@ INFORMAL_TEXT=101
INOUT=102
INPUT=103
INPUTLIST=104
INPUT_NONE=105 INPUT_EXPRESSION=105
INT=106 INPUT_NONE=106
J=107 INT=107
JOIN=108 J=108
K=109 JOIN=109
KEEP=110 K=110
L=111 KEEP=111
LABEL=112 L=112
LE=113 LABEL=113
LITERAL=114 LE=114
LT=115 LITERAL=115
L_BRACKET=116 LT=116
L_PAREN=117 L_BRACKET=117
M=118 L_PAREN=118
MANTISSA=119 M=119
MINUS_INFINITY=120 MANTISSA=120
MKSTRING=121 MINUS_INFINITY=121
MOD=122 MKSTRING=122
MONITOR=123 MOD=123
N=124 MONITOR=124
NEG=125 N=125
NEQ=126 NEG=126
NEWTYPE=127 NEQ=127
NEXTSTATE=128 NEWTYPE=128
NONE=129 NEXTSTATE=129
NOT=130 NONE=130
NUMBER_OF_INSTANCES=131 NOT=131
O=132 NUMBER_OF_INSTANCES=132
OCTSTR=133 O=133
OPEN_RANGE=134 OCTSTR=134
OR=135 OPEN_RANGE=135
OUT=136 OR=136
OUTPUT=137 OUT=137
OUTPUT_BODY=138 OUTPUT=138
P=139 OUTPUT_BODY=139
PARAM=140 OUTPUT_EXPRESSION=140
PARAMNAMES=141 P=141
PARAMS=142 PARAM=142
PAREN=143 PARAMNAMES=143
PFPAR=144 PARAMS=144
PLUS=145 PAREN=145
PLUS_INFINITY=146 PFPAR=146
POINT=147 PLUS=147
PRIMARY=148 PLUS_INFINITY=148
PRIORITY=149 POINT=149
PROCEDURE=150 PRIMARY=150
PROCEDURE_CALL=151 PRIORITY=151
PROCEDURE_NAME=152 PROCEDURE=152
PROCESS=153 PROCEDURE_CALL=153
PROVIDED=154 PROCEDURE_NAME=154
Q=155 PROCESS=155
QUESTION=156 PROVIDED=156
R=157 Q=157
RANGE=158 QUESTION=158
REFERENCED=159 R=159
REM=160 RANGE=160
RESET=161 REFERENCED=161
RETURN=162 REM=162
RETURNS=163 RESET=163
ROUTE=164 RETURN=164
R_BRACKET=165 RETURNS=165
R_PAREN=166 ROUTE=166
S=167 R_BRACKET=167
SAVE=168 R_PAREN=168
SELECTOR=169 S=169
SEMI=170 SAVE=170
SEQOF=171 SELECTOR=171
SEQUENCE=172 SEMI=172
SET=173 SEQOF=173
SIGNAL=174 SEQUENCE=174
SIGNALROUTE=175 SET=175
SIGNAL_LIST=176 SIGNAL=176
SORT=177 SIGNALROUTE=177
SPECIFIC=178 SIGNAL_LIST=178
START=179 SORT=179
STATE=180 SPECIFIC=180
STATELIST=181 START=181
STATE_AGGREGATION=182 STATE=182
STATE_PARTITION_CONNECTION=183 STATELIST=183
STIMULUS=184 STATE_AGGREGATION=184
STOP=185 STATE_PARTITION_CONNECTION=185
STOPIF=186 STIMULUS=186
STR=187 STOP=187
STRING=188 STOPIF=188
STRUCT=189 STR=189
SUBSTRUCTURE=190 STRING=190
SYNONYM=191 STRUCT=191
SYNONYM_LIST=192 SUBSTRUCTURE=192
SYNTYPE=193 SYNONYM=193
SYSTEM=194 SYNONYM_LIST=194
T=195 SYNTYPE=195
TASK=196 SYSTEM=196
TASK_BODY=197 T=197
TERMINATOR=198 TASK=198
TEXT=199 TASK_BODY=199
TEXTAREA=200 TERMINATOR=200
TEXTAREA_CONTENT=201 TEXT=201
THEN=202 TEXTAREA=202
THIS=203 TEXTAREA_CONTENT=203
TIMER=204 THEN=204
TO=205 THIS=205
TRANSITION=206 TIMER=206
TRUE=207 TO=207
TYPE=208 TRANSITION=208
TYPE_INSTANCE=209 TRUE=209
U=210 TYPE=210
USE=211 TYPE_INSTANCE=211
V=212 U=212
VALUE=213 USE=213
VARIABLE=214 V=214
VARIABLES=215 VALUE=215
VIA=216 VARIABLE=216
VIAPATH=217 VARIABLES=217
VIEW=218 VIA=218
W=219 VIAPATH=219
WITH=220 VIEW=220
WS=221 W=221
X=222 WITH=222
XOR=223 WS=223
Y=224 X=224
Z=225 XOR=225
Y=226
Z=227
# token names
tokenNamesMap = {
    0: "<invalid>", 1: "<EOR>", 2: "<DOWN>", 3: "<UP>",
-1: "EOF", 226: "T__226", 227: "T__227", 228: "T__228", 229: "T__229", -1: "EOF", 228: "T__228", 229: "T__229", 230: "T__230", 231: "T__231",
230: "T__230", 231: "T__231", 232: "T__232", 4: "A", 5: "ACTION", 6: "ACTIVE", 232: "T__232", 233: "T__233", 234: "T__234", 4: "A", 5: "ACTION", 6: "ACTIVE",
7: "AGGREGATION", 8: "ALL", 9: "ALPHA", 10: "ALTERNATIVE", 11: "AND", 7: "AGGREGATION", 8: "ALL", 9: "ALPHA", 10: "ALTERNATIVE", 11: "AND",
12: "ANSWER", 13: "ANY", 14: "APPEND", 15: "ARRAY", 16: "ASN1", 17: "ASNFILENAME", 12: "ANSWER", 13: "ANY", 14: "APPEND", 15: "ARRAY", 16: "ASN1", 17: "ASNFILENAME",
18: "ASSIGN", 19: "ASSIG_OP", 20: "ASTERISK", 21: "B", 22: "BASE", 23: "BITSTR", 18: "ASSIGN", 19: "ASSIG_OP", 20: "ASTERISK", 21: "B", 22: "BASE", 23: "BITSTR",
@@ -263,31 +265,32 @@ tokenNamesMap = {
84: "FOR", 85: "FPAR", 86: "FROM", 87: "G", 88: "GE", 89: "GEODE", 90: "GROUND", 84: "FOR", 85: "FPAR", 86: "FROM", 87: "G", 88: "GE", 89: "GEODE", 90: "GROUND",
91: "GT", 92: "H", 93: "HYPERLINK", 94: "I", 95: "ID", 96: "IF", 97: "IFTHENELSE", 91: "GT", 92: "H", 93: "HYPERLINK", 94: "I", 95: "ID", 96: "IF", 97: "IFTHENELSE",
98: "IMPLIES", 99: "IMPORT", 100: "IN", 101: "INFORMAL_TEXT", 102: "INOUT", 98: "IMPLIES", 99: "IMPORT", 100: "IN", 101: "INFORMAL_TEXT", 102: "INOUT",
103: "INPUT", 104: "INPUTLIST", 105: "INPUT_NONE", 106: "INT", 107: "J", 103: "INPUT", 104: "INPUTLIST", 105: "INPUT_EXPRESSION", 106: "INPUT_NONE",
108: "JOIN", 109: "K", 110: "KEEP", 111: "L", 112: "LABEL", 113: "LE", 107: "INT", 108: "J", 109: "JOIN", 110: "K", 111: "KEEP", 112: "L",
114: "LITERAL", 115: "LT", 116: "L_BRACKET", 117: "L_PAREN", 118: "M", 113: "LABEL", 114: "LE", 115: "LITERAL", 116: "LT", 117: "L_BRACKET",
119: "MANTISSA", 120: "MINUS_INFINITY", 121: "MKSTRING", 122: "MOD", 118: "L_PAREN", 119: "M", 120: "MANTISSA", 121: "MINUS_INFINITY", 122: "MKSTRING",
123: "MONITOR", 124: "N", 125: "NEG", 126: "NEQ", 127: "NEWTYPE", 128: "NEXTSTATE", 123: "MOD", 124: "MONITOR", 125: "N", 126: "NEG", 127: "NEQ", 128: "NEWTYPE",
129: "NONE", 130: "NOT", 131: "NUMBER_OF_INSTANCES", 132: "O", 133: "OCTSTR", 129: "NEXTSTATE", 130: "NONE", 131: "NOT", 132: "NUMBER_OF_INSTANCES",
134: "OPEN_RANGE", 135: "OR", 136: "OUT", 137: "OUTPUT", 138: "OUTPUT_BODY", 133: "O", 134: "OCTSTR", 135: "OPEN_RANGE", 136: "OR", 137: "OUT", 138: "OUTPUT",
139: "P", 140: "PARAM", 141: "PARAMNAMES", 142: "PARAMS", 143: "PAREN", 139: "OUTPUT_BODY", 140: "OUTPUT_EXPRESSION", 141: "P", 142: "PARAM",
144: "PFPAR", 145: "PLUS", 146: "PLUS_INFINITY", 147: "POINT", 148: "PRIMARY", 143: "PARAMNAMES", 144: "PARAMS", 145: "PAREN", 146: "PFPAR", 147: "PLUS",
149: "PRIORITY", 150: "PROCEDURE", 151: "PROCEDURE_CALL", 152: "PROCEDURE_NAME", 148: "PLUS_INFINITY", 149: "POINT", 150: "PRIMARY", 151: "PRIORITY",
153: "PROCESS", 154: "PROVIDED", 155: "Q", 156: "QUESTION", 157: "R", 152: "PROCEDURE", 153: "PROCEDURE_CALL", 154: "PROCEDURE_NAME", 155: "PROCESS",
158: "RANGE", 159: "REFERENCED", 160: "REM", 161: "RESET", 162: "RETURN", 156: "PROVIDED", 157: "Q", 158: "QUESTION", 159: "R", 160: "RANGE",
163: "RETURNS", 164: "ROUTE", 165: "R_BRACKET", 166: "R_PAREN", 167: "S", 161: "REFERENCED", 162: "REM", 163: "RESET", 164: "RETURN", 165: "RETURNS",
168: "SAVE", 169: "SELECTOR", 170: "SEMI", 171: "SEQOF", 172: "SEQUENCE", 166: "ROUTE", 167: "R_BRACKET", 168: "R_PAREN", 169: "S", 170: "SAVE",
173: "SET", 174: "SIGNAL", 175: "SIGNALROUTE", 176: "SIGNAL_LIST", 177: "SORT", 171: "SELECTOR", 172: "SEMI", 173: "SEQOF", 174: "SEQUENCE", 175: "SET",
178: "SPECIFIC", 179: "START", 180: "STATE", 181: "STATELIST", 182: "STATE_AGGREGATION", 176: "SIGNAL", 177: "SIGNALROUTE", 178: "SIGNAL_LIST", 179: "SORT",
183: "STATE_PARTITION_CONNECTION", 184: "STIMULUS", 185: "STOP", 186: "STOPIF", 180: "SPECIFIC", 181: "START", 182: "STATE", 183: "STATELIST", 184: "STATE_AGGREGATION",
187: "STR", 188: "STRING", 189: "STRUCT", 190: "SUBSTRUCTURE", 191: "SYNONYM", 185: "STATE_PARTITION_CONNECTION", 186: "STIMULUS", 187: "STOP", 188: "STOPIF",
192: "SYNONYM_LIST", 193: "SYNTYPE", 194: "SYSTEM", 195: "T", 196: "TASK", 189: "STR", 190: "STRING", 191: "STRUCT", 192: "SUBSTRUCTURE", 193: "SYNONYM",
197: "TASK_BODY", 198: "TERMINATOR", 199: "TEXT", 200: "TEXTAREA", 201: "TEXTAREA_CONTENT", 194: "SYNONYM_LIST", 195: "SYNTYPE", 196: "SYSTEM", 197: "T", 198: "TASK",
202: "THEN", 203: "THIS", 204: "TIMER", 205: "TO", 206: "TRANSITION", 199: "TASK_BODY", 200: "TERMINATOR", 201: "TEXT", 202: "TEXTAREA", 203: "TEXTAREA_CONTENT",
207: "TRUE", 208: "TYPE", 209: "TYPE_INSTANCE", 210: "U", 211: "USE", 204: "THEN", 205: "THIS", 206: "TIMER", 207: "TO", 208: "TRANSITION",
212: "V", 213: "VALUE", 214: "VARIABLE", 215: "VARIABLES", 216: "VIA", 209: "TRUE", 210: "TYPE", 211: "TYPE_INSTANCE", 212: "U", 213: "USE",
217: "VIAPATH", 218: "VIEW", 219: "W", 220: "WITH", 221: "WS", 222: "X", 214: "V", 215: "VALUE", 216: "VARIABLE", 217: "VARIABLES", 218: "VIA",
223: "XOR", 224: "Y", 225: "Z" 219: "VIAPATH", 220: "VIEW", 221: "W", 222: "WITH", 223: "WS", 224: "X",
225: "XOR", 226: "Y", 227: "Z"
}
Token.registerTokenNamesMap(tokenNamesMap)
@@ -330,10 +333,10 @@ class sdl92Lexer(Lexer):
# $ANTLR start "T__226" # $ANTLR start "T__228"
def mT__226(self, ): def mT__228(self, ):
try: try:
_type = T__226 _type = T__228
_channel = DEFAULT_CHANNEL _channel = DEFAULT_CHANNEL
# sdl92.g:7:8: ( '!' ) # sdl92.g:7:8: ( '!' )
...@@ -348,14 +351,14 @@ class sdl92Lexer(Lexer): ...@@ -348,14 +351,14 @@ class sdl92Lexer(Lexer):
finally: finally:
pass pass
# $ANTLR end "T__226" # $ANTLR end "T__228"
# $ANTLR start "T__227" # $ANTLR start "T__229"
def mT__227(self, ): def mT__229(self, ):
try: try:
_type = T__227 _type = T__229
_channel = DEFAULT_CHANNEL _channel = DEFAULT_CHANNEL
# sdl92.g:8:8: ( '(.' ) # sdl92.g:8:8: ( '(.' )
...@@ -371,14 +374,14 @@ class sdl92Lexer(Lexer): ...@@ -371,14 +374,14 @@ class sdl92Lexer(Lexer):
finally: finally:
pass pass
# $ANTLR end "T__227" # $ANTLR end "T__229"
# $ANTLR start "T__228" # $ANTLR start "T__230"
def mT__228(self, ): def mT__230(self, ):
try: try:
_type = T__228 _type = T__230
_channel = DEFAULT_CHANNEL _channel = DEFAULT_CHANNEL
# sdl92.g:9:8: ( '*/' ) # sdl92.g:9:8: ( '*/' )
...@@ -394,14 +397,14 @@ class sdl92Lexer(Lexer): ...@@ -394,14 +397,14 @@ class sdl92Lexer(Lexer):
finally: finally:
pass pass
# $ANTLR end "T__228" # $ANTLR end "T__230"
# $ANTLR start "T__229" # $ANTLR start "T__231"
def mT__229(self, ): def mT__231(self, ):
try: try:
_type = T__229 _type = T__231
_channel = DEFAULT_CHANNEL _channel = DEFAULT_CHANNEL
# sdl92.g:10:8: ( '->' ) # sdl92.g:10:8: ( '->' )
...@@ -417,14 +420,14 @@ class sdl92Lexer(Lexer): ...@@ -417,14 +420,14 @@ class sdl92Lexer(Lexer):
finally: finally:
pass pass
# $ANTLR end "T__229" # $ANTLR end "T__231"
# $ANTLR start "T__230" # $ANTLR start "T__232"
def mT__230(self, ): def mT__232(self, ):
try: try:
_type = T__230 _type = T__232
_channel = DEFAULT_CHANNEL _channel = DEFAULT_CHANNEL
# sdl92.g:11:8: ( '.)' ) # sdl92.g:11:8: ( '.)' )
...@@ -440,14 +443,14 @@ class sdl92Lexer(Lexer): ...@@ -440,14 +443,14 @@ class sdl92Lexer(Lexer):
finally: finally:
pass pass
# $ANTLR end "T__230" # $ANTLR end "T__232"
# $ANTLR start "T__231" # $ANTLR start "T__233"
def mT__231(self, ): def mT__233(self, ):
try: try:
_type = T__231 _type = T__233
_channel = DEFAULT_CHANNEL _channel = DEFAULT_CHANNEL