Commit a70ac54d authored by Thanassis Tsiodras

MyPy annotations added all over the place (also, pylint and flake8 pass)

parent aad5c927
#!/usr/bin/env python #!/usr/bin/env python3
# vim: set expandtab ts=8 sts=4 shiftwidth=4 # vim: set expandtab ts=8 sts=4 shiftwidth=4
# #
# (C) Semantix Information Technologies. # (C) Semantix Information Technologies.
...@@ -20,29 +20,33 @@ ...@@ -20,29 +20,33 @@
# Note that in both cases, there are no charges (royalties) for the # Note that in both cases, there are no charges (royalties) for the
# generated code. # generated code.
# #
'''
This is one of the code generators that Semantix developed for
the European research project ASSERT. It is now enhanced in the
context of Data Modelling and Data Modelling Tuning projects.
It reads the ASN.1 specification of the exchanged messages, and
generates "printer" functions for their content.
'''
import os import os
import sys import sys
import copy import copy
from typing import Tuple
import commonPy.configMT import commonPy.configMT
#from commonPy.asnAST import AsnBool,AsnMetaMember,AsnInt,AsnReal,AsnOctetString,AsnEnumerated,AsnSequence,AsnSet,AsnChoice,sourceSequenceLimit from commonPy.asnAST import sourceSequenceLimit, AsnNode # NOQA pylint: disable=unused-import
from commonPy.asnAST import sourceSequenceLimit from commonPy.asnParser import (
AST_Lookup, AST_TypesOfFile, AST_TypenamesOfFile, AST_Leaftypes,
Typename, Filename, ParseAsnFileList)
from commonPy.utility import inform, panic from commonPy.utility import inform, panic
import commonPy.cleanupNodes import commonPy.cleanupNodes
from commonPy.recursiveMapper import RecursiveMapper from commonPy.recursiveMapper import RecursiveMapper
import commonPy.verify import commonPy.verify
__doc__ = '''\
This is one of the code generators that Semantix developed for
the European research project ASSERT. It is now enhanced in the
context of Data Modelling and Data Modelling Tuning projects.
It reads the ASN.1 specification of the exchanged messages, and
generates "printer" functions for their content.
'''
def usage(): def usage():
'''Print usage instructions.''' '''Print usage instructions.'''
msg = 'Usage: %s <options> input1.asn1 [input2.asn1]...\nWhere options are:\n' msg = 'Usage: %s <options> input1.asn1 [input2.asn1]...\nWhere options are:\n'
...@@ -89,7 +93,7 @@ class Printer(RecursiveMapper): ...@@ -89,7 +93,7 @@ class Printer(RecursiveMapper):
return ['printf("%%s%s %%d\\n", paramName, (int)%s);' % (prefix, srcCVariable)] return ['printf("%%s%s %%d\\n", paramName, (int)%s);' % (prefix, srcCVariable)]
def MapSequence(self, srcCVariable, prefix, node, leafTypeDict, names): def MapSequence(self, srcCVariable, prefix, node, leafTypeDict, names):
lines = [] lines = [] # type: List[str]
for child in node._members: for child in node._members:
lines.extend( lines.extend(
self.Map( self.Map(
...@@ -111,12 +115,13 @@ class Printer(RecursiveMapper): ...@@ -111,12 +115,13 @@ class Printer(RecursiveMapper):
lines.append( lines.append(
"%sif (%s.kind == %s) {" % "%sif (%s.kind == %s) {" %
(self.maybeElse(childNo), srcCVariable, self.CleanName(child[2]))) (self.maybeElse(childNo), srcCVariable, self.CleanName(child[2])))
lines.extend([' '+x for x in self.Map( lines.extend([' '+x
"%s.u.%s" % (srcCVariable, self.CleanName(child[0])), for x in self.Map(
prefix + "::" + self.CleanName(child[0]), "%s.u.%s" % (srcCVariable, self.CleanName(child[0])),
child[1], prefix + "::" + self.CleanName(child[0]),
leafTypeDict, child[1],
names)]) leafTypeDict,
names)])
lines.append("}") lines.append("}")
return lines return lines
...@@ -127,12 +132,13 @@ class Printer(RecursiveMapper): ...@@ -127,12 +132,13 @@ class Printer(RecursiveMapper):
lines.append(" int i%s;" % uniqueId) lines.append(" int i%s;" % uniqueId)
limit = sourceSequenceLimit(node, srcCVariable) limit = sourceSequenceLimit(node, srcCVariable)
lines.append(" for(i%s=0; i%s<%s; i%s++) {" % (uniqueId, uniqueId, limit, uniqueId)) lines.append(" for(i%s=0; i%s<%s; i%s++) {" % (uniqueId, uniqueId, limit, uniqueId))
lines.extend([" " + x for x in self.Map( lines.extend([" "+x
"%s.arr[i%s]" % (srcCVariable, uniqueId), for x in self.Map(
prefix + "::Elem", "%s.arr[i%s]" % (srcCVariable, uniqueId),
node._containedType, prefix + "::Elem",
leafTypeDict, node._containedType,
names)]) leafTypeDict,
names)])
lines.append(" }") lines.append(" }")
lines.append("}") lines.append("}")
return lines return lines
...@@ -167,24 +173,25 @@ def main(): ...@@ -167,24 +173,25 @@ def main():
if not os.path.isfile(f): if not os.path.isfile(f):
panic("'%s' is not a file!\n" % f) # pragma: no cover panic("'%s' is not a file!\n" % f) # pragma: no cover
uniqueASNfiles = {} ParseAsnFileList(sys.argv[1:])
for grammar in sys.argv[1:]:
uniqueASNfiles[grammar]=1 Triples = Tuple[AST_Lookup, List[AsnNode], AST_Leaftypes] # NOQA pylint: disable=unused-variable
commonPy.asnParser.ParseAsnFileList(list(uniqueASNfiles.keys())) uniqueASNfiles = {} # type: Dict[Filename, Triples]
for asnFile in uniqueASNfiles: for asnFile in uniqueASNfiles:
tmpNames = {} tmpNames = {} # Dict[Typename, AsnNode]
for name in commonPy.asnParser.g_typesOfFile[asnFile]: for name in commonPy.asnParser.g_typesOfFile[asnFile]:
tmpNames[name] = commonPy.asnParser.g_names[name] tmpNames[name] = commonPy.asnParser.g_names[name]
uniqueASNfiles[asnFile] = [ uniqueASNfiles[asnFile] = (
copy.copy(tmpNames), # map Typename to type definition class from asnAST copy.copy(tmpNames), # map Typename to type definition class from asnAST
copy.copy(commonPy.asnParser.g_astOfFile[asnFile]), # list of nameless type definitions copy.copy(commonPy.asnParser.g_astOfFile[asnFile]), # list of nameless type definitions
copy.copy(commonPy.asnParser.g_leafTypeDict)] # map from Typename to leafType copy.copy(commonPy.asnParser.g_leafTypeDict) # map from Typename to leafType
)
inform("Checking that all base nodes have mandatory ranges set in %s..." % asnFile) inform("Checking that all base nodes have mandatory ranges set in %s..." % asnFile)
for node in list(tmpNames.values()): for node in list(tmpNames.values()):
verify.VerifyRanges(node, commonPy.asnParser.g_names) commonPy.verify.VerifyRanges(node, commonPy.asnParser.g_names)
# If some AST nodes must be skipped (for any reason), go learn about them # If some AST nodes must be skipped (for any reason), go learn about them
badTypes = commonPy.cleanupNodes.DiscoverBadTypes() badTypes = commonPy.cleanupNodes.DiscoverBadTypes()
...@@ -226,15 +233,20 @@ def main(): ...@@ -226,15 +233,20 @@ def main():
inform("Processing %s...", nodeTypename) inform("Processing %s...", nodeTypename)
# First, make sure we know what leaf type this node is # First, make sure we know what leaf type this node is
assert(nodeTypename in leafTypeDict) assert nodeTypename in leafTypeDict
C_HeaderFile.write('void Print%s(const char *paramName, const asn1Scc%s *pData);\n' % (cleanNodeTypename, cleanNodeTypename)) C_HeaderFile.write('void Print%s(const char *paramName, const asn1Scc%s *pData);\n' % (cleanNodeTypename, cleanNodeTypename))
C_SourceFile.write('void Print%s(const char *paramName, const asn1Scc%s *pData)\n{\n' % (cleanNodeTypename, cleanNodeTypename)) C_SourceFile.write('void Print%s(const char *paramName, const asn1Scc%s *pData)\n{\n' % (cleanNodeTypename, cleanNodeTypename))
C_SourceFile.write('#ifdef __linux__\n') C_SourceFile.write('#ifdef __linux__\n')
C_SourceFile.write(' pthread_mutex_lock(&g_printing_mutex);\n') C_SourceFile.write(' pthread_mutex_lock(&g_printing_mutex);\n')
C_SourceFile.write('#endif\n') C_SourceFile.write('#endif\n')
#C_SourceFile.write('\n'.join(printer.Map('(*pData)', '', node, leafTypeDict, commonPy.asnParser.g_names))) lines = [" "+x
lines = [" "+x for x in printer.Map('(*pData)', '', node, leafTypeDict, commonPy.asnParser.g_names)] for x in printer.Map(
'(*pData)',
'',
node,
leafTypeDict,
commonPy.asnParser.g_names)]
C_SourceFile.write("\n".join(lines)) C_SourceFile.write("\n".join(lines))
C_SourceFile.write('\n#ifdef __linux__\n') C_SourceFile.write('\n#ifdef __linux__\n')
C_SourceFile.write(' pthread_mutex_unlock(&g_printing_mutex);\n') C_SourceFile.write(' pthread_mutex_unlock(&g_printing_mutex);\n')
...@@ -249,7 +261,7 @@ def main(): ...@@ -249,7 +261,7 @@ def main():
if __name__ == "__main__":
    # Optional "-pdb" flag: strip it from argv and run main() under the
    # Python debugger instead of calling it directly.
    if "-pdb" in sys.argv:
        sys.argv.remove("-pdb")  # pragma: no cover
        import pdb  # pragma: no cover pylint: disable=wrong-import-position
        pdb.run('main()')  # pragma: no cover
    else:
        main()
...@@ -24,9 +24,13 @@ import os ...@@ -24,9 +24,13 @@ import os
import sys import sys
import copy import copy
from typing import Tuple
import commonPy.configMT import commonPy.configMT
#from commonPy.asnAST import AsnBool,AsnMetaMember,AsnInt,AsnReal,AsnOctetString,AsnEnumerated,AsnSequence,AsnSet,AsnChoice,sourceSequenceLimit from commonPy.asnAST import sourceSequenceLimit, AsnNode # NOQA pylint: disable=unused-import
from commonPy.asnAST import sourceSequenceLimit from commonPy.asnParser import (
AST_Lookup, AST_TypesOfFile, AST_TypenamesOfFile, AST_Leaftypes,
Typename, Filename, ParseAsnFileList)
from commonPy.utility import inform, panic from commonPy.utility import inform, panic
import commonPy.cleanupNodes import commonPy.cleanupNodes
from commonPy.recursiveMapper import RecursiveMapper from commonPy.recursiveMapper import RecursiveMapper
...@@ -186,24 +190,25 @@ def main(): ...@@ -186,24 +190,25 @@ def main():
if not os.path.isfile(f): if not os.path.isfile(f):
panic("'%s' is not a file!\n" % f) # pragma: no cover panic("'%s' is not a file!\n" % f) # pragma: no cover
uniqueASNfiles = {} ParseAsnFileList(sys.argv[1:])
for grammar in sys.argv[1:]:
uniqueASNfiles[grammar] = 1 Triples = Tuple[AST_Lookup, List[AsnNode], AST_Leaftypes] # NOQA pylint: disable=unused-variable
commonPy.asnParser.ParseAsnFileList(list(uniqueASNfiles.keys())) uniqueASNfiles = {} # type: Dict[Filename, Triples]
for asnFile in uniqueASNfiles: for asnFile in uniqueASNfiles:
tmpNames = {} tmpNames = {} # Dict[Typename, AsnNode]
for name in commonPy.asnParser.g_typesOfFile[asnFile]: for name in commonPy.asnParser.g_typesOfFile[asnFile]:
tmpNames[name] = commonPy.asnParser.g_names[name] tmpNames[name] = commonPy.asnParser.g_names[name]
uniqueASNfiles[asnFile] = [ uniqueASNfiles[asnFile] = (
copy.copy(tmpNames), # map Typename to type definition class from asnAST copy.copy(tmpNames), # map Typename to type definition class from asnAST
copy.copy(commonPy.asnParser.g_astOfFile[asnFile]), # list of nameless type definitions copy.copy(commonPy.asnParser.g_astOfFile[asnFile]), # list of nameless type definitions
copy.copy(commonPy.asnParser.g_leafTypeDict)] # map from Typename to leafType copy.copy(commonPy.asnParser.g_leafTypeDict) # map from Typename to leafType
)
inform("Checking that all base nodes have mandatory ranges set in %s..." % asnFile) inform("Checking that all base nodes have mandatory ranges set in %s..." % asnFile)
for node in list(tmpNames.values()): for node in list(tmpNames.values()):
verify.VerifyRanges(node, commonPy.asnParser.g_names) commonPy.verify.VerifyRanges(node, commonPy.asnParser.g_names)
# If some AST nodes must be skipped (for any reason), go learn about them # If some AST nodes must be skipped (for any reason), go learn about them
badTypes = commonPy.cleanupNodes.DiscoverBadTypes() badTypes = commonPy.cleanupNodes.DiscoverBadTypes()
......
...@@ -88,7 +88,7 @@ def OnShutdown(unused_badTypes): ...@@ -88,7 +88,7 @@ def OnShutdown(unused_badTypes):
# text = re.sub(r'--.*', '', text) # text = re.sub(r'--.*', '', text)
outputFile = open(g_outputDir + "DataView.pr", 'w') outputFile = open(g_outputDir + "DataView.pr", 'w')
outputFile.write('Datamodel DEFINITIONS ::= BEGIN\n\n') outputFile.write('Datamodel DEFINITIONS ::= BEGIN\n\n')
import commonPy.xmlASTtoAsnAST import commonPy.asnParser
commonPy.xmlASTtoAsnAST.PrintGrammarFromAST(outputFile) commonPy.asnParser.PrintGrammarFromAST(outputFile)
outputFile.write('END\n') outputFile.write('END\n')
outputFile.close() outputFile.close()
...@@ -18,17 +18,23 @@ ...@@ -18,17 +18,23 @@
# Note that in both cases, there are no charges (royalties) for the # Note that in both cases, there are no charges (royalties) for the
# generated code. # generated code.
# #
__doc__ = '''Implementation of mapping ASN.1 constructs '''
to SCADE's modeling language, using .xscade files. It is used by the Implementation of mapping ASN.1 constructs to SCADE's modeling language,
backend of Semantix's code generator A.''' using .xscade files.
'''
import re import re
import os import os
import random import random
from xml.dom.minidom import Document from xml.dom.minidom import Document, Node
from typing import Set
from commonPy.utility import inform, panic from commonPy.utility import inform, panic
from commonPy.asnAST import AsnBasicNode, AsnString, AsnEnumerated, AsnMetaMember, AsnSet, AsnSetOf, AsnSequence, AsnSequenceOf, AsnChoice from commonPy.asnAST import (
AsnBasicNode, AsnString, AsnEnumerated, AsnMetaMember, AsnSet,
AsnSetOf, AsnSequence, AsnSequenceOf, AsnChoice
)
import commonPy.asnParser import commonPy.asnParser
g_lookup = { g_lookup = {
...@@ -41,7 +47,7 @@ g_lookup = { ...@@ -41,7 +47,7 @@ g_lookup = {
g_outputFile = None g_outputFile = None
# The assigned OIDs # The assigned OIDs
g_oid = {} g_oid = {} # type: Dict[str, str]
# The main OID for this module # The main OID for this module
g_mainOid = "" g_mainOid = ""
...@@ -50,7 +56,7 @@ g_mainOid = "" ...@@ -50,7 +56,7 @@ g_mainOid = ""
g_currOid = 0x1f00 g_currOid = 0x1f00
# The types declared so far # The types declared so far
g_declaredTypes = {} g_declaredTypes = set() # type: Set[str]
# The DOM elements # The DOM elements
g_doc = None g_doc = None
...@@ -67,7 +73,7 @@ def CleanNameAsScadeWants(name): ...@@ -67,7 +73,7 @@ def CleanNameAsScadeWants(name):
def RandomHex(digits):
    '''Return a string of `digits` random lowercase hexadecimal characters.'''
    # ''.join over a generator avoids the quadratic cost of repeated
    # string concatenation in a loop.
    return ''.join(random.choice('0123456789abcdef') for _ in range(digits))
...@@ -77,23 +83,23 @@ def FixupNestedStringsAndEnumerated(): ...@@ -77,23 +83,23 @@ def FixupNestedStringsAndEnumerated():
leafTypeDict = commonPy.asnParser.g_leafTypeDict leafTypeDict = commonPy.asnParser.g_leafTypeDict
for nodeTypename in list(names.keys()): for nodeTypename in list(names.keys()):
node = names[nodeTypename] node = names[nodeTypename]
if isinstance(node, AsnSequence) or isinstance(node, AsnChoice) or isinstance(node, AsnSet): if isinstance(node, (AsnSequence, AsnChoice, AsnSet)):
for child in node._members: for child in node._members:
if isinstance(child[1], AsnString) or isinstance(child[1], AsnEnumerated): if isinstance(child[1], AsnString) or isinstance(child[1], AsnEnumerated):
newName = nodeTypename + "_" + child[0] # pragma: no cover newName = nodeTypename + "_" + child[0] # pragma: no cover
while newName in names: # pragma: no cover while newName in names: # pragma: no cover
newName += "_t" # pragma: no cover newName += "_t" # pragma: no cover
names[newName] = child[1] # pragma: no cover names[newName] = child[1] # pragma: no cover
leafTypeDict[newName] = isinstance(child[1], AsnString) and 'OCTET STRING' or 'ENUMERATED' # pragma: no cover leafTypeDict[newName] = isinstance(child[1], AsnString) and 'OCTET STRING' or 'ENUMERATED' # pragma: no cover
child[1] = AsnMetaMember(asnFilename=child[1]._asnFilename, containedType=newName) # pragma: no cover child[1] = AsnMetaMember(asnFilename=child[1]._asnFilename, containedType=newName) # pragma: no cover
elif isinstance(node, AsnSequenceOf) or isinstance(node, AsnSetOf): elif isinstance(node, (AsnSequenceOf, AsnSetOf)):
if isinstance(node._containedType, AsnString) or isinstance(node._containedType, AsnEnumerated): if isinstance(node._containedType, (AsnString, AsnEnumerated)):
newName = nodeTypename + "_contained" # pragma: no cover newName = nodeTypename + "_contained" # pragma: no cover
while newName in names: # pragma: no cover while newName in names: # pragma: no cover
newName += "_t" # pragma: no cover newName += "_t" # pragma: no cover
names[newName] = node._containedType # pragma: no cover names[newName] = node._containedType # pragma: no cover
leafTypeDict[newName] = isinstance(node._containedType, AsnString) and 'OCTET STRING' or 'ENUMERATED' # pragma: no cover leafTypeDict[newName] = isinstance(node._containedType, AsnString) and 'OCTET STRING' or 'ENUMERATED' # pragma: no cover
node._containedType = newName # pragma: no cover node._containedType = newName # pragma: no cover
def OnStartup(unused_modelingLanguage, asnFile, outputDir, unused_badTypes): def OnStartup(unused_modelingLanguage, asnFile, outputDir, unused_badTypes):
...@@ -125,10 +131,10 @@ def OnStartup(unused_modelingLanguage, asnFile, outputDir, unused_badTypes): ...@@ -125,10 +131,10 @@ def OnStartup(unused_modelingLanguage, asnFile, outputDir, unused_badTypes):
File.appendChild(g_Declarations) File.appendChild(g_Declarations)
def RenderElements(controlString): def RenderElements(controlString: str):
if controlString.endswith(","): if controlString.endswith(","):
controlString=controlString[:-1] controlString = controlString[:-1]
createdElements = {} createdElements = {} # type: Dict[str, Node]
parent = g_Declarations parent = g_Declarations
for elem in controlString.split(","): for elem in controlString.split(","):
if '`' in elem: if '`' in elem:
...@@ -197,10 +203,10 @@ def CheckPrerequisites(nodeTypename): ...@@ -197,10 +203,10 @@ def CheckPrerequisites(nodeTypename):
# what type is this? # what type is this?
else: # pragma: no cover else: # pragma: no cover
panic("Unexpected type of element: %s" % leafTypeDict[nodeTypename]) # pragma: no cover panic("Unexpected type of element: %s" % leafTypeDict[nodeTypename]) # pragma: no cover
g_declaredTypes[nodeTypename] = True g_declaredTypes.add(nodeTypename)
def HandleTypedef(nodeTypename): def HandleTypedef(nodeTypename: str) -> bool:
if nodeTypename not in commonPy.asnParser.g_metatypes: if nodeTypename not in commonPy.asnParser.g_metatypes:
return False return False
controlString = 'Type$name=%s,definition,NamedType,type,TypeRef$name=%s' % \ controlString = 'Type$name=%s,definition,NamedType,type,TypeRef$name=%s' % \
...@@ -210,10 +216,10 @@ def HandleTypedef(nodeTypename): ...@@ -210,10 +216,10 @@ def HandleTypedef(nodeTypename):
def OnBasic(nodeTypename, node, unused_leafTypeDict): def OnBasic(nodeTypename, node, unused_leafTypeDict):
assert(isinstance(node, AsnBasicNode)) assert isinstance(node, AsnBasicNode)
if nodeTypename in g_declaredTypes: if nodeTypename in g_declaredTypes:
return return
g_declaredTypes[nodeTypename] = 1 g_declaredTypes.add(nodeTypename)
if HandleTypedef(nodeTypename): if HandleTypedef(nodeTypename):
return return
oid = GetOID(nodeTypename) oid = GetOID(nodeTypename)
...@@ -229,7 +235,7 @@ def OnBasic(nodeTypename, node, unused_leafTypeDict): ...@@ -229,7 +235,7 @@ def OnBasic(nodeTypename, node, unused_leafTypeDict):
# otherwise SCADE will not be able to create C code! # otherwise SCADE will not be able to create C code!
if node._range == []: if node._range == []:
panic(("Scade612_A_mapper: string (in %s) must have a SIZE constraint inside ASN.1,\n" + # pragma: no cover panic(("Scade612_A_mapper: string (in %s) must have a SIZE constraint inside ASN.1,\n" + # pragma: no cover
"or else SCADE can't generate C code!") % node.Location()) # pragma: no cover "or else SCADE can't generate C code!") % node.Location()) # pragma: no cover
controlString += 'Table,type,NamedType,type,TypeRef$name=char,size`Table,ConstValue$value=%d,' % node._range[-1] controlString += 'Table,type,NamedType,type,TypeRef$name=char,size`Table,ConstValue$value=%d,' % node._range[-1]
else: else:
# For the rest of the simple types, use the lookup table defined in g_lookup # For the rest of the simple types, use the lookup table defined in g_lookup
...@@ -239,14 +245,14 @@ def OnBasic(nodeTypename, node, unused_leafTypeDict): ...@@ -239,14 +245,14 @@ def OnBasic(nodeTypename, node, unused_leafTypeDict):
except KeyError: # pragma: no cover except KeyError: # pragma: no cover
panic("Scade612_A_mapper: Unsupported literal: %s (%s)\n" % (realLeafType, node.Location())) # pragma: no cover panic("Scade612_A_mapper: Unsupported literal: %s (%s)\n" % (realLeafType, node.Location())) # pragma: no cover
controlString += 'pragmas`Type,ed:Type$oid=!ed/%(oid)s' % {"nodeTypename": nodeTypename, "oid": oid} controlString += 'pragmas`Type,ed:Type$oid=!ed/%(oid)s' % {"oid": oid}
RenderElements(controlString) RenderElements(controlString)
def OnSequence(nodeTypename, node, unused_leafTypeDict, isChoice=False): def OnSequence(nodeTypename, node, unused_leafTypeDict, isChoice=False):
if nodeTypename in g_declaredTypes: if nodeTypename in g_declaredTypes:
return return
g_declaredTypes[nodeTypename] = 1 g_declaredTypes.add(nodeTypename)
if HandleTypedef(nodeTypename): if HandleTypedef(nodeTypename):
return return
oid = GetOID(nodeTypename) oid = GetOID(nodeTypename)
...@@ -279,7 +285,7 @@ def OnSet(nodeTypename, node, leafTypeDict): ...@@ -279,7 +285,7 @@ def OnSet(nodeTypename, node, leafTypeDict):
def OnEnumerated(nodeTypename, node, unused_leafTypeDict): def OnEnumerated(nodeTypename, node, unused_leafTypeDict):
if nodeTypename in g_declaredTypes: if nodeTypename in g_declaredTypes:
return return
g_declaredTypes[nodeTypename] = 1 g_declaredTypes.add(nodeTypename)
if HandleTypedef(nodeTypename): if HandleTypedef(nodeTypename):
return return
oid = GetOID(nodeTypename) oid = GetOID(nodeTypename)
...@@ -308,7 +314,7 @@ def OnEnumerated(nodeTypename, node, unused_leafTypeDict): ...@@ -308,7 +314,7 @@ def OnEnumerated(nodeTypename, node, unused_leafTypeDict):
def OnSequenceOf(nodeTypename, node, unused_leafTypeDict): def OnSequenceOf(nodeTypename, node, unused_leafTypeDict):
if nodeTypename in g_declaredTypes: if nodeTypename in g_declaredTypes:
return return
g_declaredTypes[nodeTypename] = 1 g_declaredTypes.add(nodeTypename)
if HandleTypedef(nodeTypename): if HandleTypedef(nodeTypename):
return return
if node._range == []: if node._range == []:
......
...@@ -52,8 +52,8 @@ import tempfile ...@@ -52,8 +52,8 @@ import tempfile
import re import re
import distutils.spawn as spawn import distutils.spawn as spawn
import xml.sax import xml.sax # type: ignore
from typing import Union, Dict # pylint: disable=W0611 from typing import Union, List, Dict, Tuple, Any # pylint: disable=W0611
from . import configMT from . import configMT
from . import utility from . import utility
...@@ -64,16 +64,23 @@ from .asnAST import ( ...@@ -64,16 +64,23 @@ from .asnAST import (
AsnComplexNode, AsnBool, AsnOctetString, AsnAsciiString AsnComplexNode, AsnBool, AsnOctetString, AsnAsciiString
) )
g_asnFilename = "" g_asnFilename = ""
g_filename = '' g_filename = ''
g_leafTypeDict = {}
g_names = {} # type: Dict[str, AsnNode]
g_metatypes = {} g_metatypes = {}
g_typesOfFile = {} # type: Dict[str, List[str]] # MyPy type aliases
g_astOfFile = {} # type: Dict[str, List[AsnNode]] Typename = str
Filename = str
AST_Lookup = Dict[Typename, AsnNode]
AST_TypenamesOfFile = Dict[Filename, List[str]] # pylint: disable=invalid-sequence-index
AST_TypesOfFile = Dict[Filename, List[AsnNode]] # pylint: disable=invalid-sequence-index
AST_Leaftypes = Dict[Typename, str]
g_names = {} # type: AST_Lookup
g_typesOfFile = {} # type: AST_TypenamesOfFile
g_leafTypeDict = {} # type: AST_Leaftypes
g_astOfFile = {} # type: AST_TypesOfFile
g_checkedSoFarForKeywords = {} # type: Dict[str, int] g_checkedSoFarForKeywords = {} # type: Dict[str, int]
...@@ -223,7 +230,7 @@ def VerifyAndFixAST() -> Dict[str, str]: ...@@ -223,7 +230,7 @@ def VerifyAndFixAST() -> Dict[str, str]:
unknownTypes = {} # type: Dict[str, int] unknownTypes = {} # type: Dict[str, int]
knownTypes = {} # type: Dict[str, str] knownTypes = {} # type: Dict[str, str]
equivalents = {} # type: Dict[str, List[str]] equivalents = {} # type: Dict[str, List[str]]
while True: while True: # pylint: disable=too-many-nested-blocks
lastUnknownTypes = copy.copy(unknownTypes) lastUnknownTypes = copy.copy(unknownTypes)
lastKnownTypes = copy.copy(knownTypes) lastKnownTypes = copy.copy(knownTypes)
lastEquivalents = copy.copy(equivalents) lastEquivalents = copy.copy(equivalents)
...@@ -319,17 +326,17 @@ def VerifyAndFixAST() -> Dict[str, str]: ...@@ -319,17 +326,17 @@ def VerifyAndFixAST() -> Dict[str, str]:
Min = Max = None Min = Max = None
node = g_names[nodeTypename] node = g_names[nodeTypename]
if hasattr(node, "_Min") and Min is None: if hasattr(node, "_Min") and Min is None:
Min = node._Min Min = node._Min # type: ignore
if hasattr(node, "_Max") and Max is None: if hasattr(node, "_Max") and Max is None:
Max = node._Max Max = node._Max # type: ignore
originalNode = node originalNode = node
while isinstance(node, AsnMetaType): while isinstance(node, AsnMetaType):
g_metatypes[nodeTypename] = node._containedType g_metatypes[nodeTypename] = node._containedType
node = g_names[node._containedType] node = g_names[node._containedType]
if hasattr(node, "_Min") and Min is None: if hasattr(node, "_Min") and Min is None:
Min = node._Min