Commit cdce9b24 authored by Thanassis Tsiodras's avatar Thanassis Tsiodras
Browse files

Appease Google's Mobile-friendly status on some pages

parent a0f45ecd
......@@ -4,7 +4,6 @@
"""
asn2dataModel converts ASN.1 modules to a variety of target languages
"""
from .asn2dataModel import main
from . import msgPrinter
from . import msgPrinterASN1
__version__ = 1.0
......@@ -41,7 +41,7 @@ def OnStartup(unused_modelingLanguage, asnFiles, outputDir):
#print "Use ASN1SCC to generate the structures for '%s'" % asnFile
asn1SccPath = spawn.find_executable('asn1.exe')
if not asn1SccPath:
panic("ASN1SCC seems not installed on your system (asn1.exe not found in PATH).\n") # pragma: no cover
panic("ASN1SCC seems to be missing from your system (asn1.exe not found in PATH).\n") # pragma: no cover
os.system(
("mono " if sys.argv[0].endswith('.py') and sys.platform.startswith('linux') else "") +
"\"{}\" -wordSize 8 -typePrefix asn1Scc -Ada -uPER -o \"".format(asn1SccPath) +
......
......@@ -40,7 +40,7 @@ def OnStartup(unused_modelingLanguage, asnFiles, outputDir):
#print "Use ASN1SCC to generate the structures for '%s'" % asnFile
asn1SccPath = spawn.find_executable('asn1.exe')
if not asn1SccPath:
panic("ASN1SCC seems not installed on your system (asn1.exe not found in PATH).\n") # pragma: no cover
panic("ASN1SCC seems to be missing from your system (asn1.exe not found in PATH).\n") # pragma: no cover
os.system(
("mono " if sys.argv[0].endswith('.py') and sys.platform.startswith('linux') else "") +
"\"{}\" -wordSize 8 -typePrefix asn1Scc -c -uPER -o \"".format(asn1SccPath) +
......
......@@ -28,6 +28,8 @@ code generator A.'''
import os
import sys
import re
import distutils.spawn as spawn
import commonPy
from commonPy.utility import panic, inform
from commonPy.asnAST import AsnBool, AsnInt, AsnReal, AsnString, AsnEnumerated, AsnSequence, AsnSet, AsnChoice, AsnMetaMember, AsnSequenceOf, AsnSetOf
......@@ -57,13 +59,13 @@ def CleanNameAsSimulinkWants(name):
def OnStartup(unused_modelingLanguage, asnFiles, outputDir):
#print "Use ASN1SCC to generate the structures for '%s'" % asnFile
if None == os.getenv("ASN1SCC"):
if None == os.getenv("DMT"): # pragma: no cover
panic("DMT environment variable is not set, you must set it.") # pragma: no cover
os.putenv("ASN1SCC", os.getenv("DMT") + os.sep + "asn1scc/asn1.exe") # pragma: no cover
asn1SccPath = spawn.find_executable('asn1.exe')
if not asn1SccPath:
panic("ASN1SCC seems to be missing from your system (asn1.exe not found in PATH).\n") # pragma: no cover
os.system(
("mono " if sys.argv[0].endswith('.py') and sys.platform.startswith('linux') else "") +
"\"$ASN1SCC\" -wordSize 8 -typePrefix asn1Scc -Ada -uPER -o \"" + outputDir + "\" \"" + "\" \"".join(asnFiles) + "\"")
"\"{}\" -wordSize 8 -typePrefix asn1Scc -c -uPER -o \"".format(asn1SccPath) +
outputDir + "\" \"" + "\" \"".join(asnFiles) + "\"")
os.system("rm -f \"" + outputDir + "\"/*.adb")
global g_bHasStartupRunOnce
......@@ -88,11 +90,11 @@ def OnStartup(unused_modelingLanguage, asnFiles, outputDir):
def OnBasic(nodeTypename, node, leafTypeDict):
pass
pass # pragma: nocover
def OnSequence(nodeTypename, node, leafTypeDict):
pass
pass # pragma: nocover
def OnSet(nodeTypename, node, leafTypeDict):
......@@ -100,11 +102,11 @@ def OnSet(nodeTypename, node, leafTypeDict):
def OnEnumerated(nodeTypename, node, leafTypeDict):
pass
pass # pragma: nocover
def OnSequenceOf(nodeTypename, node, leafTypeDict):
pass
pass # pragma: nocover
def OnSetOf(nodeTypename, node, leafTypeDict):
......@@ -112,7 +114,7 @@ def OnSetOf(nodeTypename, node, leafTypeDict):
def OnChoice(nodeTypename, node, leafTypeDict):
pass
pass # pragma: nocover
def OnShutdown():
......
......@@ -22,6 +22,7 @@
# generated code.
#
import re
import commonPy
from commonPy.utility import panic, inform
from commonPy.asnAST import AsnBool, AsnInt, AsnReal, AsnString, AsnEnumerated, AsnSequence, AsnSet, AsnChoice, AsnMetaMember, AsnSequenceOf, AsnSetOf
......@@ -68,11 +69,11 @@ def OnStartup(unused_modelingLanguage, unused_asnFile, outputDir):
def OnBasic(nodeTypename, node, leafTypeDict):
pass
pass # pragma: no cover
def OnSequence(nodeTypename, node, leafTypeDict):
pass
pass # pragma: no cover
def OnSet(nodeTypename, node, leafTypeDict):
......@@ -80,11 +81,11 @@ def OnSet(nodeTypename, node, leafTypeDict):
def OnEnumerated(nodeTypename, node, leafTypeDict):
pass
pass # pragma: no cover
def OnSequenceOf(nodeTypename, node, leafTypeDict):
pass
pass # pragma: no cover
def OnSetOf(nodeTypename, node, leafTypeDict):
......@@ -92,7 +93,7 @@ def OnSetOf(nodeTypename, node, leafTypeDict):
def OnChoice(nodeTypename, node, leafTypeDict):
pass
pass # pragma: no cover
def OnShutdown():
......
......@@ -47,7 +47,7 @@ def OnSequence(nodeTypename, node, leafTypeDict):
def OnSet(nodeTypename, node, leafTypeDict):
pass
pass # pragma: no cover
def OnEnumerated(nodeTypename, node, leafTypeDict):
......@@ -59,7 +59,7 @@ def OnSequenceOf(nodeTypename, node, leafTypeDict):
def OnSetOf(nodeTypename, node, leafTypeDict):
pass
pass # pragma: no cover
def OnChoice(nodeTypename, node, leafTypeDict):
......
......@@ -103,7 +103,7 @@ def OnStartup(unused_modelingLanguage, asnFile, outputDir):
inform("Scade612_A_mapper: Creating file '%s'...", outputFilename)
global g_outputFile
g_outputFile = open(outputDir + outputFilename, 'w')
g_outputFile = open(outputDir + outputFilename, 'wb')
global g_mainOid
g_mainOid = "/" + RandomHex(4) + "/" + RandomHex(3) + "/"
......
### $ANTLR 2.7.7 (20120126): "aadl.g" -> "AadlLexer.py"$
### import antlr and other modules ..
import sys
import antlr
version = sys.version.split()[0]
# antlr 2.7.7 originally emitted pre-Python-2.3 compatibility shims here,
# assigning to the names False/True when running on ancient interpreters.
# Assigning to False/True is a SyntaxError on Python 3 (they are keywords),
# and this file already relies on Python-3-compatible "except ... as e"
# syntax, so the shims are removed: the booleans are builtins on every
# interpreter this lexer can actually run on.
### header action >>>
### header action <<<
### preamble action >>>
### preamble action <<<
### >>>The Literals<<<
# Keyword table for the AADL lexer: maps each (lower-cased) AADL keyword to
# its integer token-type number.  nextToken() passes every scanned token
# through testForLiteral(), which consults this dict to turn plain IDENT
# tokens into keyword tokens.
# NOTE(review): "not" maps to 99 (NOTT) rather than 83 (NOT) — presumably the
# grammar distinguishes two uses of the word; confirm against aadl.g before
# relying on either constant.
literals = {}
literals["type"] = 32
literals["inverse"] = 92
literals["constant"] = 70
literals["connections"] = 58
literals["public"] = 7
literals["list"] = 69
literals["initial"] = 87
literals["applies"] = 62
literals["end"] = 5
literals["aadlboolean"] = 39
literals["flows"] = 94
literals["memory"] = 20
literals["aadlstring"] = 40
literals["flow"] = 67
literals["system"] = 16
literals["implementation"] = 24
literals["to"] = 28
literals["and"] = 80
literals["not"] = 99
literals["package"] = 4
literals["inherit"] = 61
literals["aadlreal"] = 48
literals["source"] = 95
literals["reference"] = 57
literals["provides"] = 29
literals["server"] = 59
literals["sink"] = 96
literals["event"] = 66
literals["range"] = 54
literals["enumeration"] = 41
literals["calls"] = 85
literals["out"] = 91
literals["set"] = 37
literals["parameter"] = 68
literals["of"] = 55
literals["is"] = 38
literals["aadlinteger"] = 49
literals["or"] = 79
literals["access"] = 60
literals["none"] = 11
literals["features"] = 25
literals["data"] = 18
literals["all"] = 63
literals["thread"] = 12
literals["path"] = 97
literals["properties"] = 72
literals["units"] = 45
literals["bus"] = 21
literals["binding"] = 78
literals["extends"] = 13
literals["private"] = 8
literals["port"] = 65
literals["requires"] = 30
literals["refines"] = 31
literals["false"] = 82
literals["processor"] = 19
literals["device"] = 22
literals["property"] = 36
literals["annex"] = 34
literals["classifier"] = 56
literals["transitions"] = 100
literals["process"] = 15
literals["value"] = 76
literals["modes"] = 86
literals["in"] = 77
literals["delta"] = 71
literals["mode"] = 64
literals["true"] = 81
literals["group"] = 14
literals["refined"] = 27
literals["subprogram"] = 17
literals["subcomponents"] = 33
### import antlr.Token
from antlr import Token
### >>>The Known Token Types <<<
# Token-type constants, one per grammar symbol, generated by ANTLR from
# aadl.g (see the header on line 1 of this file).  The numeric values 4..111
# mirror the numbering used in the literals table above; the special types
# (SKIP, INVALID_TYPE, EOF, ...) are re-exported from the antlr runtime.
SKIP = antlr.SKIP
INVALID_TYPE = antlr.INVALID_TYPE
EOF_TYPE = antlr.EOF_TYPE
EOF = antlr.EOF
NULL_TREE_LOOKAHEAD = antlr.NULL_TREE_LOOKAHEAD
MIN_USER_TYPE = antlr.MIN_USER_TYPE
PACKAGE = 4
END = 5
SEMI = 6
PUBLIC = 7
PRIVATE = 8
IDENT = 9
DOUBLECOLON = 10
NONE = 11
THREAD = 12
EXTENDS = 13
GROUP = 14
PROCESS = 15
SYSTEM = 16
SUBPROGRAM = 17
DATA = 18
PROCESSOR = 19
MEMORY = 20
BUS = 21
DEVICE = 22
DOT = 23
IMPL = 24
FEATURES = 25
COLON = 26
REFINED = 27
TO = 28
PROVIDES = 29
REQUIRES = 30
REFINES = 31
TYPE = 32
SUBCOMPONENTS = 33
ANNEX = 34
ANNEX_TEXT = 35
PROPERTY = 36
SET = 37
IS = 38
BOOLEAN = 39
STRING = 40
ENUMERATION = 41
LPAREN = 42
COMMA = 43
RPAREN = 44
UNITS = 45
ASSIGN = 46
STAR = 47
REAL = 48
INTEGER = 49
DOTDOT = 50
PLUS = 51
MINUS = 52
NUMERIC_LIT = 53
RANGE = 54
OF = 55
CLASSIFIER = 56
REFERENCE = 57
CONNECTIONS = 58
SERVER = 59
ACCESS = 60
INHERIT = 61
APPLIES = 62
ALL = 63
MODE = 64
PORT = 65
EVENT = 66
FLOW = 67
PARAMETER = 68
LIST = 69
CONSTANT = 70
DELTA = 71
PROPERTIES = 72
LCURLY = 73
RCURLY = 74
ASSIGNPLUS = 75
VALUE = 76
IN = 77
BINDING = 78
OR = 79
AND = 80
TRUE = 81
FALSE = 82
NOT = 83
STRING_LITERAL = 84
CALLS = 85
MODES = 86
INITIAL = 87
LTRANS = 88
RTRANS = 89
ARROW = 90
OUT = 91
INVERSE = 92
DARROW = 93
FLOWS = 94
SOURCE = 95
SINK = 96
PATH = 97
AADLSPEC = 98
NOTT = 99
TRANSITIONS = 100
HASH = 101
DIGIT = 102
EXPONENT = 103
INT_EXPONENT = 104
EXTENDED_DIGIT = 105
BASED_INTEGER = 106
BASE = 107
ESC = 108
HEX_DIGIT = 109
WS = 110
SL_COMMENT = 111
class Lexer(antlr.CharScanner) :
### user action >>>
### user action <<<
def __init__(self, *argv, **kwargs) :
antlr.CharScanner.__init__(self, *argv, **kwargs)
self.caseSensitiveLiterals = False
self.setCaseSensitive(False)
self.literals = literals
    def nextToken(self):
        """Return the next token from the character stream.

        Generated ANTLR 2.7.7 dispatch loop: a single character of
        lookahead selects the matcher method for unambiguous tokens, and
        the trailing ``else`` chain disambiguates the multi-character
        operators ('->>', '..', '+=', '::', '-[', '->', '{*', '--', ...)
        with further lookahead, longest match first.  Tokens flagged SKIP
        (whitespace, comments) leave self._returnToken unset, which
        restarts the loop via antlr.TryAgain.  Char-stream IO problems are
        re-raised as antlr.TokenStream*Exception.
        """
        while True:
            try: ### try again ..
                while True:
                    _token = None
                    _ttype = INVALID_TYPE
                    self.resetText()
                    try: ## for char stream error handling
                        try: ##for lexical error handling
                            la1 = self.LA(1)
                            if False:
                                pass
                            elif la1 and la1 in '(':
                                pass
                                self.mLPAREN(True)
                                # NOTE: theRetToken is dead generated code in every
                                # branch below; the token is read from self._returnToken.
                                theRetToken = self._returnToken
                            elif la1 and la1 in ')':
                                pass
                                self.mRPAREN(True)
                                theRetToken = self._returnToken
                            elif la1 and la1 in '}':
                                pass
                                self.mRCURLY(True)
                                theRetToken = self._returnToken
                            elif la1 and la1 in '*':
                                pass
                                self.mSTAR(True)
                                theRetToken = self._returnToken
                            elif la1 and la1 in ';':
                                pass
                                self.mSEMI(True)
                                theRetToken = self._returnToken
                            elif la1 and la1 in ',':
                                pass
                                self.mCOMMA(True)
                                theRetToken = self._returnToken
                            elif la1 and la1 in '=':
                                pass
                                self.mASSIGN(True)
                                theRetToken = self._returnToken
                            elif la1 and la1 in ']':
                                pass
                                self.mRTRANS(True)
                                theRetToken = self._returnToken
                            elif la1 and la1 in '#':
                                pass
                                self.mHASH(True)
                                theRetToken = self._returnToken
                            elif la1 and la1 in 'abcdefghijklmnopqrstuvwxyz':
                                pass
                                self.mIDENT(True)
                                theRetToken = self._returnToken
                            elif la1 and la1 in '"':
                                pass
                                self.mSTRING_LITERAL(True)
                                theRetToken = self._returnToken
                            elif la1 and la1 in '0123456789':
                                pass
                                self.mNUMERIC_LIT(True)
                                theRetToken = self._returnToken
                            elif la1 and la1 in '\t\n\r ':
                                pass
                                self.mWS(True)
                                theRetToken = self._returnToken
                            else:
                                # Multi-character operators: tested longest match
                                # first, so '->>' wins over '->', '{*' over '{'.
                                if (self.LA(1)=='-') and (self.LA(2)=='>') and (self.LA(3)=='>'):
                                    pass
                                    self.mDARROW(True)
                                    theRetToken = self._returnToken
                                elif (self.LA(1)=='.') and (self.LA(2)=='.'):
                                    pass
                                    self.mDOTDOT(True)
                                    theRetToken = self._returnToken
                                elif (self.LA(1)=='+') and (self.LA(2)=='='):
                                    pass
                                    self.mASSIGNPLUS(True)
                                    theRetToken = self._returnToken
                                elif (self.LA(1)==':') and (self.LA(2)==':'):
                                    pass
                                    self.mDOUBLECOLON(True)
                                    theRetToken = self._returnToken
                                elif (self.LA(1)=='-') and (self.LA(2)=='['):
                                    pass
                                    self.mLTRANS(True)
                                    theRetToken = self._returnToken
                                elif (self.LA(1)=='-') and (self.LA(2)=='>') and (True):
                                    pass
                                    self.mARROW(True)
                                    theRetToken = self._returnToken
                                elif (self.LA(1)=='{') and (self.LA(2)=='*'):
                                    pass
                                    self.mANNEX_TEXT(True)
                                    theRetToken = self._returnToken
                                elif (self.LA(1)=='-') and (self.LA(2)=='-'):
                                    pass
                                    self.mSL_COMMENT(True)
                                    theRetToken = self._returnToken
                                elif (self.LA(1)=='{') and (True):
                                    pass
                                    self.mLCURLY(True)
                                    theRetToken = self._returnToken
                                elif (self.LA(1)==':') and (True):
                                    pass
                                    self.mCOLON(True)
                                    theRetToken = self._returnToken
                                elif (self.LA(1)=='+') and (True):
                                    pass
                                    self.mPLUS(True)
                                    theRetToken = self._returnToken
                                elif (self.LA(1)=='-') and (True):
                                    pass
                                    self.mMINUS(True)
                                    theRetToken = self._returnToken
                                elif (self.LA(1)=='.') and (True):
                                    pass
                                    self.mDOT(True)
                                    theRetToken = self._returnToken
                                else:
                                    self.default(self.LA(1))
                            if not self._returnToken:
                                raise antlr.TryAgain ### found SKIP token
                            ### option { testLiterals=true }
                            self.testForLiteral(self._returnToken)
                            ### return token to caller
                            return self._returnToken
                        ### handle lexical errors ....
                        except antlr.RecognitionException as e:
                            self.reportError(e)
                            self.consume()
                    ### handle char stream errors ...
                    except antlr.CharStreamException as cse:
                        if isinstance(cse, antlr.CharStreamIOException):
                            raise antlr.TokenStreamIOException(cse.io)
                        else:
                            raise antlr.TokenStreamException(str(cse))
            except antlr.TryAgain:
                pass
def mLPAREN(self, _createToken):
_ttype = 0
_token = None
_begin = self.text.length()
_ttype = LPAREN
_saveIndex = 0
try: ## for error handling
pass
self.match('(')
except antlr.RecognitionException as ex:
self.reportError(ex)
self.consume()
self.consumeUntil(_tokenSet_0)
self.set_return_token(_createToken, _token, _ttype, _begin)
def mRPAREN(self, _createToken):
_ttype = 0
_token = None
_begin = self.text.length()
_ttype = RPAREN
_saveIndex = 0
try: ## for error handling
pass
self.match(')')
except antlr.RecognitionException as ex:
self.reportError(ex)
self.consume()
self.consumeUntil(_tokenSet_0)
self.set_return_token(_createToken, _token, _ttype, _begin)
def mLCURLY(self, _createToken):
_ttype = 0
_token = None
_begin = self.text.length()
_ttype = LCURLY
_saveIndex = 0
try: ## for error handling
pass
self.match('{')
except antlr.RecognitionException as ex:
self.reportError(ex)
self.consume()
self.consumeUntil(_tokenSet_0)
self.set_return_token(_createToken, _token, _ttype, _begin)
def mRCURLY(self, _createToken):
_ttype = 0
_token = None
_begin = self.text.length()
_ttype = RCURLY
_saveIndex = 0
try: ## for error handling
pass
self.match('}')
except antlr.RecognitionException as ex:
self.reportError(ex)
self.consume()
self.consumeUntil(_tokenSet_0)