Commit a70ac54d authored by Thanassis Tsiodras's avatar Thanassis Tsiodras

MyPy annotations added all over the place (also, pylint and flake8 pass)

parent aad5c927
#!/usr/bin/env python
#!/usr/bin/env python3
# vim: set expandtab ts=8 sts=4 shiftwidth=4
#
# (C) Semantix Information Technologies.
......@@ -20,29 +20,33 @@
# Note that in both cases, there are no charges (royalties) for the
# generated code.
#
'''
This is one of the code generators that Semantix developed for
the European research project ASSERT. It is now enhanced in the
context of Data Modelling and Data Modelling Tuning projects.
It reads the ASN.1 specification of the exchanged messages, and
generates "printer" functions for their content.
'''
import os
import sys
import copy
from typing import Tuple
import commonPy.configMT
#from commonPy.asnAST import AsnBool,AsnMetaMember,AsnInt,AsnReal,AsnOctetString,AsnEnumerated,AsnSequence,AsnSet,AsnChoice,sourceSequenceLimit
from commonPy.asnAST import sourceSequenceLimit
from commonPy.asnAST import sourceSequenceLimit, AsnNode # NOQA pylint: disable=unused-import
from commonPy.asnParser import (
AST_Lookup, AST_TypesOfFile, AST_TypenamesOfFile, AST_Leaftypes,
Typename, Filename, ParseAsnFileList)
from commonPy.utility import inform, panic
import commonPy.cleanupNodes
from commonPy.recursiveMapper import RecursiveMapper
import commonPy.verify
__doc__ = '''\
This is one of the code generators that Semantix developed for
the European research project ASSERT. It is now enhanced in the
context of Data Modelling and Data Modelling Tuning projects.
It reads the ASN.1 specification of the exchanged messages, and
generates "printer" functions for their content.
'''
def usage():
'''Print usage instructions.'''
msg = 'Usage: %s <options> input1.asn1 [input2.asn1]...\nWhere options are:\n'
......@@ -89,7 +93,7 @@ class Printer(RecursiveMapper):
return ['printf("%%s%s %%d\\n", paramName, (int)%s);' % (prefix, srcCVariable)]
def MapSequence(self, srcCVariable, prefix, node, leafTypeDict, names):
lines = []
lines = [] # type: List[str]
for child in node._members:
lines.extend(
self.Map(
......@@ -111,12 +115,13 @@ class Printer(RecursiveMapper):
lines.append(
"%sif (%s.kind == %s) {" %
(self.maybeElse(childNo), srcCVariable, self.CleanName(child[2])))
lines.extend([' '+x for x in self.Map(
"%s.u.%s" % (srcCVariable, self.CleanName(child[0])),
prefix + "::" + self.CleanName(child[0]),
child[1],
leafTypeDict,
names)])
lines.extend([' '+x
for x in self.Map(
"%s.u.%s" % (srcCVariable, self.CleanName(child[0])),
prefix + "::" + self.CleanName(child[0]),
child[1],
leafTypeDict,
names)])
lines.append("}")
return lines
......@@ -127,12 +132,13 @@ class Printer(RecursiveMapper):
lines.append(" int i%s;" % uniqueId)
limit = sourceSequenceLimit(node, srcCVariable)
lines.append(" for(i%s=0; i%s<%s; i%s++) {" % (uniqueId, uniqueId, limit, uniqueId))
lines.extend([" " + x for x in self.Map(
"%s.arr[i%s]" % (srcCVariable, uniqueId),
prefix + "::Elem",
node._containedType,
leafTypeDict,
names)])
lines.extend([" "+x
for x in self.Map(
"%s.arr[i%s]" % (srcCVariable, uniqueId),
prefix + "::Elem",
node._containedType,
leafTypeDict,
names)])
lines.append(" }")
lines.append("}")
return lines
......@@ -167,24 +173,25 @@ def main():
if not os.path.isfile(f):
panic("'%s' is not a file!\n" % f) # pragma: no cover
uniqueASNfiles = {}
for grammar in sys.argv[1:]:
uniqueASNfiles[grammar]=1
commonPy.asnParser.ParseAsnFileList(list(uniqueASNfiles.keys()))
ParseAsnFileList(sys.argv[1:])
Triples = Tuple[AST_Lookup, List[AsnNode], AST_Leaftypes] # NOQA pylint: disable=unused-variable
uniqueASNfiles = {} # type: Dict[Filename, Triples]
for asnFile in uniqueASNfiles:
tmpNames = {}
tmpNames = {} # Dict[Typename, AsnNode]
for name in commonPy.asnParser.g_typesOfFile[asnFile]:
tmpNames[name] = commonPy.asnParser.g_names[name]
uniqueASNfiles[asnFile] = [
uniqueASNfiles[asnFile] = (
copy.copy(tmpNames), # map Typename to type definition class from asnAST
copy.copy(commonPy.asnParser.g_astOfFile[asnFile]), # list of nameless type definitions
copy.copy(commonPy.asnParser.g_leafTypeDict)] # map from Typename to leafType
copy.copy(commonPy.asnParser.g_leafTypeDict) # map from Typename to leafType
)
inform("Checking that all base nodes have mandatory ranges set in %s..." % asnFile)
for node in list(tmpNames.values()):
verify.VerifyRanges(node, commonPy.asnParser.g_names)
commonPy.verify.VerifyRanges(node, commonPy.asnParser.g_names)
# If some AST nodes must be skipped (for any reason), go learn about them
badTypes = commonPy.cleanupNodes.DiscoverBadTypes()
......@@ -226,15 +233,20 @@ def main():
inform("Processing %s...", nodeTypename)
# First, make sure we know what leaf type this node is
assert(nodeTypename in leafTypeDict)
assert nodeTypename in leafTypeDict
C_HeaderFile.write('void Print%s(const char *paramName, const asn1Scc%s *pData);\n' % (cleanNodeTypename, cleanNodeTypename))
C_SourceFile.write('void Print%s(const char *paramName, const asn1Scc%s *pData)\n{\n' % (cleanNodeTypename, cleanNodeTypename))
C_SourceFile.write('#ifdef __linux__\n')
C_SourceFile.write(' pthread_mutex_lock(&g_printing_mutex);\n')
C_SourceFile.write('#endif\n')
#C_SourceFile.write('\n'.join(printer.Map('(*pData)', '', node, leafTypeDict, commonPy.asnParser.g_names)))
lines = [" "+x for x in printer.Map('(*pData)', '', node, leafTypeDict, commonPy.asnParser.g_names)]
lines = [" "+x
for x in printer.Map(
'(*pData)',
'',
node,
leafTypeDict,
commonPy.asnParser.g_names)]
C_SourceFile.write("\n".join(lines))
C_SourceFile.write('\n#ifdef __linux__\n')
C_SourceFile.write(' pthread_mutex_unlock(&g_printing_mutex);\n')
......@@ -249,7 +261,7 @@ def main():
if __name__ == "__main__":
if "-pdb" in sys.argv:
sys.argv.remove("-pdb") # pragma: no cover
import pdb # pragma: no cover
import pdb # pylint: disable=wrong-import-position pragma: nocover
pdb.run('main()') # pragma: no cover
else:
main()
......@@ -24,9 +24,13 @@ import os
import sys
import copy
from typing import Tuple
import commonPy.configMT
#from commonPy.asnAST import AsnBool,AsnMetaMember,AsnInt,AsnReal,AsnOctetString,AsnEnumerated,AsnSequence,AsnSet,AsnChoice,sourceSequenceLimit
from commonPy.asnAST import sourceSequenceLimit
from commonPy.asnAST import sourceSequenceLimit, AsnNode # NOQA pylint: disable=unused-import
from commonPy.asnParser import (
AST_Lookup, AST_TypesOfFile, AST_TypenamesOfFile, AST_Leaftypes,
Typename, Filename, ParseAsnFileList)
from commonPy.utility import inform, panic
import commonPy.cleanupNodes
from commonPy.recursiveMapper import RecursiveMapper
......@@ -186,24 +190,25 @@ def main():
if not os.path.isfile(f):
panic("'%s' is not a file!\n" % f) # pragma: no cover
uniqueASNfiles = {}
for grammar in sys.argv[1:]:
uniqueASNfiles[grammar] = 1
commonPy.asnParser.ParseAsnFileList(list(uniqueASNfiles.keys()))
ParseAsnFileList(sys.argv[1:])
Triples = Tuple[AST_Lookup, List[AsnNode], AST_Leaftypes] # NOQA pylint: disable=unused-variable
uniqueASNfiles = {} # type: Dict[Filename, Triples]
for asnFile in uniqueASNfiles:
tmpNames = {}
tmpNames = {} # Dict[Typename, AsnNode]
for name in commonPy.asnParser.g_typesOfFile[asnFile]:
tmpNames[name] = commonPy.asnParser.g_names[name]
uniqueASNfiles[asnFile] = [
uniqueASNfiles[asnFile] = (
copy.copy(tmpNames), # map Typename to type definition class from asnAST
copy.copy(commonPy.asnParser.g_astOfFile[asnFile]), # list of nameless type definitions
copy.copy(commonPy.asnParser.g_leafTypeDict)] # map from Typename to leafType
copy.copy(commonPy.asnParser.g_leafTypeDict) # map from Typename to leafType
)
inform("Checking that all base nodes have mandatory ranges set in %s..." % asnFile)
for node in list(tmpNames.values()):
verify.VerifyRanges(node, commonPy.asnParser.g_names)
commonPy.verify.VerifyRanges(node, commonPy.asnParser.g_names)
# If some AST nodes must be skipped (for any reason), go learn about them
badTypes = commonPy.cleanupNodes.DiscoverBadTypes()
......
......@@ -88,7 +88,7 @@ def OnShutdown(unused_badTypes):
# text = re.sub(r'--.*', '', text)
outputFile = open(g_outputDir + "DataView.pr", 'w')
outputFile.write('Datamodel DEFINITIONS ::= BEGIN\n\n')
import commonPy.xmlASTtoAsnAST
commonPy.xmlASTtoAsnAST.PrintGrammarFromAST(outputFile)
import commonPy.asnParser
commonPy.asnParser.PrintGrammarFromAST(outputFile)
outputFile.write('END\n')
outputFile.close()
......@@ -18,17 +18,23 @@
# Note that in both cases, there are no charges (royalties) for the
# generated code.
#
__doc__ = '''Implementation of mapping ASN.1 constructs
to SCADE's modeling language, using .xscade files. It is used by the
backend of Semantix's code generator A.'''
'''
Implementation of mapping ASN.1 constructs to SCADE's modeling language,
using .xscade files.
'''
import re
import os
import random
from xml.dom.minidom import Document
from xml.dom.minidom import Document, Node
from typing import Set
from commonPy.utility import inform, panic
from commonPy.asnAST import AsnBasicNode, AsnString, AsnEnumerated, AsnMetaMember, AsnSet, AsnSetOf, AsnSequence, AsnSequenceOf, AsnChoice
from commonPy.asnAST import (
AsnBasicNode, AsnString, AsnEnumerated, AsnMetaMember, AsnSet,
AsnSetOf, AsnSequence, AsnSequenceOf, AsnChoice
)
import commonPy.asnParser
g_lookup = {
......@@ -41,7 +47,7 @@ g_lookup = {
g_outputFile = None
# The assigned OIDs
g_oid = {}
g_oid = {} # type: Dict[str, str]
# The main OID for this module
g_mainOid = ""
......@@ -50,7 +56,7 @@ g_mainOid = ""
g_currOid = 0x1f00
# The types declared so far
g_declaredTypes = {}
g_declaredTypes = set() # type: Set[str]
# The DOM elements
g_doc = None
......@@ -67,7 +73,7 @@ def CleanNameAsScadeWants(name):
def RandomHex(digits):
    '''Return a random lowercase hexadecimal string of exactly `digits` characters.

    Used to generate the random suffix of SCADE OIDs. Draws each character
    independently from '0123456789abcdef' via the `random` module (not
    cryptographically secure — fine for OID uniqueness, not for secrets).
    '''
    # Diff residue fixed: the scrape carried both the old `for i in ...` and
    # the new `for _ in ...` loop headers; only the post-change line is kept.
    result = ""
    for _ in range(0, digits):
        result += random.choice('0123456789abcdef')
    return result
......@@ -77,23 +83,23 @@ def FixupNestedStringsAndEnumerated():
leafTypeDict = commonPy.asnParser.g_leafTypeDict
for nodeTypename in list(names.keys()):
node = names[nodeTypename]
if isinstance(node, AsnSequence) or isinstance(node, AsnChoice) or isinstance(node, AsnSet):
if isinstance(node, (AsnSequence, AsnChoice, AsnSet)):
for child in node._members:
if isinstance(child[1], AsnString) or isinstance(child[1], AsnEnumerated):
newName = nodeTypename + "_" + child[0] # pragma: no cover
while newName in names: # pragma: no cover
newName += "_t" # pragma: no cover
names[newName] = child[1] # pragma: no cover
leafTypeDict[newName] = isinstance(child[1], AsnString) and 'OCTET STRING' or 'ENUMERATED' # pragma: no cover
child[1] = AsnMetaMember(asnFilename=child[1]._asnFilename, containedType=newName) # pragma: no cover
elif isinstance(node, AsnSequenceOf) or isinstance(node, AsnSetOf):
if isinstance(node._containedType, AsnString) or isinstance(node._containedType, AsnEnumerated):
newName = nodeTypename + "_contained" # pragma: no cover
while newName in names: # pragma: no cover
newName += "_t" # pragma: no cover
names[newName] = node._containedType # pragma: no cover
leafTypeDict[newName] = isinstance(node._containedType, AsnString) and 'OCTET STRING' or 'ENUMERATED' # pragma: no cover
node._containedType = newName # pragma: no cover
newName = nodeTypename + "_" + child[0] # pragma: no cover
while newName in names: # pragma: no cover
newName += "_t" # pragma: no cover
names[newName] = child[1] # pragma: no cover
leafTypeDict[newName] = isinstance(child[1], AsnString) and 'OCTET STRING' or 'ENUMERATED' # pragma: no cover
child[1] = AsnMetaMember(asnFilename=child[1]._asnFilename, containedType=newName) # pragma: no cover
elif isinstance(node, (AsnSequenceOf, AsnSetOf)):
if isinstance(node._containedType, (AsnString, AsnEnumerated)):
newName = nodeTypename + "_contained" # pragma: no cover
while newName in names: # pragma: no cover
newName += "_t" # pragma: no cover
names[newName] = node._containedType # pragma: no cover
leafTypeDict[newName] = isinstance(node._containedType, AsnString) and 'OCTET STRING' or 'ENUMERATED' # pragma: no cover
node._containedType = newName # pragma: no cover
def OnStartup(unused_modelingLanguage, asnFile, outputDir, unused_badTypes):
......@@ -125,10 +131,10 @@ def OnStartup(unused_modelingLanguage, asnFile, outputDir, unused_badTypes):
File.appendChild(g_Declarations)
def RenderElements(controlString):
def RenderElements(controlString: str):
if controlString.endswith(","):
controlString=controlString[:-1]
createdElements = {}
controlString = controlString[:-1]
createdElements = {} # type: Dict[str, Node]
parent = g_Declarations
for elem in controlString.split(","):
if '`' in elem:
......@@ -197,10 +203,10 @@ def CheckPrerequisites(nodeTypename):
# what type is this?
else: # pragma: no cover
panic("Unexpected type of element: %s" % leafTypeDict[nodeTypename]) # pragma: no cover
g_declaredTypes[nodeTypename] = True
g_declaredTypes.add(nodeTypename)
def HandleTypedef(nodeTypename):
def HandleTypedef(nodeTypename: str) -> bool:
if nodeTypename not in commonPy.asnParser.g_metatypes:
return False
controlString = 'Type$name=%s,definition,NamedType,type,TypeRef$name=%s' % \
......@@ -210,10 +216,10 @@ def HandleTypedef(nodeTypename):
def OnBasic(nodeTypename, node, unused_leafTypeDict):
assert(isinstance(node, AsnBasicNode))
assert isinstance(node, AsnBasicNode)
if nodeTypename in g_declaredTypes:
return
g_declaredTypes[nodeTypename] = 1
g_declaredTypes.add(nodeTypename)
if HandleTypedef(nodeTypename):
return
oid = GetOID(nodeTypename)
......@@ -229,7 +235,7 @@ def OnBasic(nodeTypename, node, unused_leafTypeDict):
# otherwise SCADE will not be able to create C code!
if node._range == []:
panic(("Scade612_A_mapper: string (in %s) must have a SIZE constraint inside ASN.1,\n" + # pragma: no cover
"or else SCADE can't generate C code!") % node.Location()) # pragma: no cover
"or else SCADE can't generate C code!") % node.Location()) # pragma: no cover
controlString += 'Table,type,NamedType,type,TypeRef$name=char,size`Table,ConstValue$value=%d,' % node._range[-1]
else:
# For the rest of the simple types, use the lookup table defined in g_lookup
......@@ -239,14 +245,14 @@ def OnBasic(nodeTypename, node, unused_leafTypeDict):
except KeyError: # pragma: no cover
panic("Scade612_A_mapper: Unsupported literal: %s (%s)\n" % (realLeafType, node.Location())) # pragma: no cover
controlString += 'pragmas`Type,ed:Type$oid=!ed/%(oid)s' % {"nodeTypename": nodeTypename, "oid": oid}
controlString += 'pragmas`Type,ed:Type$oid=!ed/%(oid)s' % {"oid": oid}
RenderElements(controlString)
def OnSequence(nodeTypename, node, unused_leafTypeDict, isChoice=False):
if nodeTypename in g_declaredTypes:
return
g_declaredTypes[nodeTypename] = 1
g_declaredTypes.add(nodeTypename)
if HandleTypedef(nodeTypename):
return
oid = GetOID(nodeTypename)
......@@ -279,7 +285,7 @@ def OnSet(nodeTypename, node, leafTypeDict):
def OnEnumerated(nodeTypename, node, unused_leafTypeDict):
if nodeTypename in g_declaredTypes:
return
g_declaredTypes[nodeTypename] = 1
g_declaredTypes.add(nodeTypename)
if HandleTypedef(nodeTypename):
return
oid = GetOID(nodeTypename)
......@@ -308,7 +314,7 @@ def OnEnumerated(nodeTypename, node, unused_leafTypeDict):
def OnSequenceOf(nodeTypename, node, unused_leafTypeDict):
if nodeTypename in g_declaredTypes:
return
g_declaredTypes[nodeTypename] = 1
g_declaredTypes.add(nodeTypename)
if HandleTypedef(nodeTypename):
return
if node._range == []:
......
......@@ -52,8 +52,8 @@ import tempfile
import re
import distutils.spawn as spawn
import xml.sax
from typing import Union, Dict # pylint: disable=W0611
import xml.sax # type: ignore
from typing import Union, List, Dict, Tuple, Any # pylint: disable=W0611
from . import configMT
from . import utility
......@@ -64,16 +64,23 @@ from .asnAST import (
AsnComplexNode, AsnBool, AsnOctetString, AsnAsciiString
)
g_asnFilename = ""
g_filename = ''
g_leafTypeDict = {}
g_names = {} # type: Dict[str, AsnNode]
g_metatypes = {}
g_typesOfFile = {} # type: Dict[str, List[str]]
g_astOfFile = {} # type: Dict[str, List[AsnNode]]
# MyPy type aliases
Typename = str
Filename = str
AST_Lookup = Dict[Typename, AsnNode]
AST_TypenamesOfFile = Dict[Filename, List[str]] # pylint: disable=invalid-sequence-index
AST_TypesOfFile = Dict[Filename, List[AsnNode]] # pylint: disable=invalid-sequence-index
AST_Leaftypes = Dict[Typename, str]
g_names = {} # type: AST_Lookup
g_typesOfFile = {} # type: AST_TypenamesOfFile
g_leafTypeDict = {} # type: AST_Leaftypes
g_astOfFile = {} # type: AST_TypesOfFile
g_checkedSoFarForKeywords = {} # type: Dict[str, int]
......@@ -223,7 +230,7 @@ def VerifyAndFixAST() -> Dict[str, str]:
unknownTypes = {} # type: Dict[str, int]
knownTypes = {} # type: Dict[str, str]
equivalents = {} # type: Dict[str, List[str]]
while True:
while True: # pylint: disable=too-many-nested-blocks
lastUnknownTypes = copy.copy(unknownTypes)
lastKnownTypes = copy.copy(knownTypes)
lastEquivalents = copy.copy(equivalents)
......@@ -319,17 +326,17 @@ def VerifyAndFixAST() -> Dict[str, str]:
Min = Max = None
node = g_names[nodeTypename]
if hasattr(node, "_Min") and Min is None:
Min = node._Min
Min = node._Min # type: ignore
if hasattr(node, "_Max") and Max is None:
Max = node._Max
Max = node._Max # type: ignore
originalNode = node
while isinstance(node, AsnMetaType):
g_metatypes[nodeTypename] = node._containedType
node = g_names[node._containedType]
if hasattr(node, "_Min") and Min is None:
Min = node._Min
Min = node._Min # type: ignore
if hasattr(node, "_Max") and Max is None:
Max = node._Max
Max = node._Max # type: ignore
# To cope with ReferenceTypes that redefine their
# constraints (for now, ASN1SCC provides only INTEGERs)
if isinstance(originalNode, AsnMetaType):
......@@ -337,12 +344,10 @@ def VerifyAndFixAST() -> Dict[str, str]:
target._asnFilename = originalNode._asnFilename
elif isinstance(node, AsnInt) and Min is not None and Max is not None:
target = copy.copy(node) # we need to keep the Min/Max
target._range = [Min, Max]
else:
target = node
g_names[nodeTypename] = target
if isinstance(node, AsnInt) and Min is not None and Max is not None:
target._range = [Min, Max]
for name, node in list(g_names.items()):
if not KnownType(node, g_names):
......@@ -367,7 +372,7 @@ def VerifyAndFixAST() -> Dict[str, str]:
# define a name and use it... (for SEQUENCEOFs/SETOFs, allow also 'str')
internalNo = 1
addedNewPseudoType = True
while addedNewPseudoType:
while addedNewPseudoType: # pylint: disable=too-many-nested-blocks
addedNewPseudoType = False
listOfTypenames = sorted(g_names.keys())
for nodeTypename in listOfTypenames:
......@@ -470,13 +475,14 @@ def ParseAsnFileList(listOfFilenames):
mono = "mono " if sys.argv[0].endswith('.py') and sys.platform.startswith('linux') else ""
spawnResult = os.system(mono + "\"" + asn1SccPath + "\" -customStg \"" + asn1SccDir + "/xml.stg:" + xmlAST + "\" -customStgAstVerion 4 \"" + "\" \"".join(listOfFilenames) + "\"")
if spawnResult != 0:
if 1 == spawnResult / 256:
errCode = spawnResult/256
if errCode == 1:
utility.panic("ASN1SCC reported syntax errors. Aborting...")
elif 2 == spawnResult / 256:
elif errCode == 2:
utility.panic("ASN1SCC reported semantic errors (or mono failed). Aborting...")
elif 3 == spawnResult / 256:
elif errCode == 3:
utility.panic("ASN1SCC reported internal error. Contact Semantix with this input. Aborting...")
elif 4 == spawnResult / 256:
elif errCode == 4:
utility.panic("ASN1SCC reported usage error. Aborting...")
else:
utility.panic("ASN1SCC generic error. Contact Semantix with this input. Aborting...")
......@@ -536,7 +542,7 @@ class Element:
def __init__(self, name, attrs):
    '''Store the SAX element's tag name, its attributes, and an (initially
    empty) list of child Elements, filled in later by the XML handler.'''
    self._name = name
    self._attrs = attrs
    # Diff residue fixed: both the bare and the type-annotated assignment
    # appeared; keeping only the post-change annotated line.
    self._children = []  # type: List[Element]
class InputFormatXMLHandler(xml.sax.ContentHandler):
......@@ -572,7 +578,7 @@ class InputFormatXMLHandler(xml.sax.ContentHandler):
def VisitAll(node, expectedType, Action):
results = []
results = [] # type: List[Any]
if node is not None:
if node._name == expectedType:
results = [Action(node)]
......@@ -664,7 +670,7 @@ def CreateEnumerated(newModule, lineNo, xmlEnumeratedNode):
# def CreateBitString(newModule, lineNo, xmlBitString):
# def CreateBitString(newModule, lineNo, xmlBitString):
def CreateBitString(_, __, ___):
    '''Unconditionally abort: BIT STRING is not supported by this toolchain.

    The three positional parameters (module, line number, XML node) are
    deliberately unused — the signature only matches the other Create*
    factory callbacks. Marked no-cover since it is a hard failure path.
    '''
    # Diff residue fixed: keep the post-change implicit string concatenation,
    # which also restores the missing space after "toolchain." in the message.
    utility.panic("BitString type is not supported by the toolchain. "  # pragma: no cover
                  "Please use SEQUENCE OF BOOLEAN")  # pragma: no cover
......@@ -691,14 +697,14 @@ def CreateNumericString(newModule, lineNo, xmlNumericStringNode):
def CreateReference(newModule, lineNo, xmlReferenceNode):
try:
mi = int(GetAttr(xmlReferenceNode, "Min"))
mi = int(GetAttr(xmlReferenceNode, "Min")) # type: Union[int, float]
except:
try:
mi = float(GetAttr(xmlReferenceNode, "Min"))
except:
mi = None
try:
ma = int(GetAttr(xmlReferenceNode, "Max"))
ma = int(GetAttr(xmlReferenceNode, "Max")) # type: Union[int, float]
except:
try:
ma = float(GetAttr(xmlReferenceNode, "Max"))
......@@ -827,9 +833,18 @@ def VisitTypeAssignment(newModule, xmlTypeAssignment):
GenericFactory(newModule, xmlType))
class Module(Pretty):
_id = None # type: str
_asnFilename = None # type: str
_exportedTypes = None # type: List[str]
_exportedVariables = None # type: List[str]
_importedModules = None # type: List[ Tuple[ str, List[str], List[str] ] ]
# (tuples of ModuleName, imported types, imported vars)
_typeAssignments = None # type: List[ Tuple[str, AsnNode] ]
pass
def VisitAsn1Module(xmlAsn1File, xmlModule, modules):
class Module(Pretty):
pass
newModule = Module()
newModule._id = GetAttr(xmlModule, "ID")
newModule._asnFilename = GetAttr(xmlAsn1File, "FileName")
......@@ -867,11 +882,11 @@ def VisitAsn1Module(xmlAsn1File, xmlModule, modules):
g_typesOfFile.setdefault(newModule._asnFilename, [])
g_typesOfFile[newModule._asnFilename].extend(
[x for x, y in newModule._typeAssignments])
[x for x, _ in newModule._typeAssignments])
g_astOfFile.setdefault(newModule._asnFilename, [])
g_astOfFile[newModule._asnFilename].extend(
[x for x, y in newModule._typeAssignments])
[y for _, y in newModule._typeAssignments])
modules.append(newModule)
......@@ -887,7 +902,7 @@ def ParseASN1SCC_AST(filename):
utility.panic("You must use an XML file that contains one ASN1AST node") # pragma: no cover
# Travel("", handler._roots[0])
modules = []
modules = [] # type: List[Module]
VisitAll(
handler._root._children[0],
"Asn1File",
......@@ -952,7 +967,7 @@ def PrintType(f, xmlType, indent, nameCleaner):
f.write('ENUMERATED {\n')
options = []
VisitAll(realType, "EnumValue", lambda x: options.append(x))
if len(options)>0:
if len(options) > 0:
f.write(indent + ' ' + nameCleaner(GetAttr(options[0], "StringValue")) + "(" + GetAttr(options[0], "IntValue") + ")")
for otherOptions in options[1:]:
f.write(',\n' + indent + ' ' + nameCleaner(GetAttr(otherOptions, "StringValue")) + "(" + GetAttr(otherOptions, "IntValue") + ")")
......
......@@ -5,6 +5,8 @@ from commonPy.asnAST import AsnBool, AsnInt, AsnReal, \
AsnEnumerated, AsnOctetString, AsnSequenceOf, AsnSet, \
AsnSetOf, AsnSequence, AsnChoice, AsnMetaMember
from typing import Any
# Level of verbosity
g_verboseLevel = 0
......@@ -14,7 +16,7 @@ red = ESC+"[31m"
green = ESC+"[32m"
white = ESC+"[0m"
yellow = ESC+"[33m"
colors=[red, green, white, yellow]
colors = [red, green, white, yellow]
# Lookup table for SMP2 types that map to AsnBasicNodes
......@@ -25,15 +27,15 @@ class MagicSmp2SimpleTypesDict(dict):
name = re.sub(r'/\d{4}/\d{2}/', '/', name)
return super(MagicSmp2SimpleTypesDict, self).__getitem__(name)
#---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
def __contains__(self, name):
    '''Membership test that ignores an embedded version-path segment.

    Strips any "/YYYY/MM/" component (four digits, slash, two digits) from
    the looked-up SMP2 type URI before delegating to the plain dict
    `__contains__`, so versioned and unversioned URIs hit the same key.
    Mirrors the normalization done in `__getitem__` above.
    '''
    name = re.sub(r'/\d{4}/\d{2}/', '/', name)
    return super(MagicSmp2SimpleTypesDict, self).__contains__(name)
#---------------------------------------------------------------------------
# ---------------------------------------------------------------------------
def has_key(self, name):
name = re.sub(r'/\d{4}/\d{2}/', '/', name)