Commit f64c0cd7 authored by Thanassis Tsiodras

'make flake8 pylint' - all DMT source passes. Now for 'make mypy'...

parent 5ed832e9
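
The diff below applies the same handful of mechanical flake8/pylint fixes across the DMT sources: unused callback arguments gain an unused_ prefix, chains of isinstance() calls collapse into a single tuple check, assert drops its redundant parentheses, shell commands are built once before their exit status is tested, and imports that trip a specific checker get a targeted line-level disable. A minimal, self-contained sketch of those patterns follows; the helper names (declare, compile_extractor) are illustrative, not taken from the DMT code:

    """Illustrative only: mirrors the kinds of lint fixes applied in this commit."""
    import os
    import sys


    def OnBasic(unused_nodeTypename, unused_node, unused_leafTypeDict):
        # the unused_ prefix silences pylint's unused-argument warning
        pass  # pragma: no cover


    def declare(node, nodeTypename, leafTypeDict):
        # one isinstance() call with a tuple instead of a chain of "or"s
        if isinstance(node, (int, float, str)):
            print("basic type:", nodeTypename)
        # assert is a statement, so no parentheses around its expression
        assert nodeTypename in leafTypeDict


    def compile_extractor(extractor_filename):
        # build the command once, then check the exit status explicitly
        cmd = "gcc -o %s -I. %s.c" % (extractor_filename, extractor_filename)
        if os.system(cmd) != 0:
            print("Failed to compile", extractor_filename)
            sys.exit(1)
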
@@ -23,7 +23,7 @@ import sys
 import re
 import copy
 import traceback
-import DV_Types
+import DV_Types  # pylint: disable=import-error
 from ctypes import (
     cdll, c_void_p, c_ubyte, c_double, c_uint,
     c_longlong, c_bool, c_int, c_long
...
@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 """
 asn2dataModel converts ASN.1 modules to a variety of target languages
 """
 from . import msgPrinter
 from . import msgPrinterASN1
...
@@ -49,31 +49,31 @@ def OnStartup(unused_modelingLanguage, asnFiles, outputDir, unused_badTypes):
     os.system("rm -f \"" + outputDir + "\"/*.adb")
-def OnBasic(nodeTypename, node, leafTypeDict):
+def OnBasic(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnSequence(nodeTypename, node, leafTypeDict):
+def OnSequence(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnSet(nodeTypename, node, leafTypeDict):
+def OnSet(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnEnumerated(nodeTypename, node, leafTypeDict):
+def OnEnumerated(unused_nodeTypename, unused_node, unused_leafTypeDict):
    pass  # pragma: no cover
-def OnSequenceOf(nodeTypename, node, leafTypeDict):
+def OnSequenceOf(unused_nodeTypename, unused_node, _unused_leafTypeDict):
     pass  # pragma: no cover
-def OnSetOf(nodeTypename, node, leafTypeDict):
+def OnSetOf(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnChoice(nodeTypename, node, leafTypeDict):
+def OnChoice(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
...
@@ -53,31 +53,31 @@ def OnStartup(unused_modelingLanguage, asnFiles, outputDir, unused_badTypes):
         os.system("rm -f \"" + outputDir + os.sep + os.path.basename(os.path.splitext(tmp)[0]) + ".c\"")
-def OnBasic(nodeTypename, node, leafTypeDict):
+def OnBasic(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnSequence(nodeTypename, node, leafTypeDict):
+def OnSequence(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnSet(nodeTypename, node, leafTypeDict):
+def OnSet(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnEnumerated(nodeTypename, node, leafTypeDict):
+def OnEnumerated(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnSequenceOf(nodeTypename, node, leafTypeDict):
+def OnSequenceOf(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnSetOf(nodeTypename, node, leafTypeDict):
+def OnSetOf(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnChoice(nodeTypename, node, leafTypeDict):
+def OnChoice(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
...
@@ -43,8 +43,8 @@ void main()
 %(enums_dump)s
 }""" % {"enums_dump": enums_dump, "base": sys.argv[1]})
 f.close()
-if 0 != os.system(
-        "gcc -o %s -I. %s.c" % (extractor_filename, extractor_filename)):
+cmd = "gcc -o %s -I. %s.c" % (extractor_filename, extractor_filename)
+if os.system(cmd) != 0:
     print("Failed to extract CHOICE enum values...")
     sys.exit(1)
 os.system(extractor_filename)
...
 #!/usr/bin/env python3
-# vim: set expandtab ts=8 sts=4 shiftwidth=4
 #
 # (C) Semantix Information Technologies.
 #
@@ -34,12 +33,12 @@ import os
 import sys
 import copy
-from typing import Tuple
+from typing import Tuple, List
 import commonPy.configMT
 from commonPy.asnAST import sourceSequenceLimit, AsnNode  # NOQA pylint: disable=unused-import
-from commonPy.asnParser import (  # NOQA
-    AST_Lookup, AST_TypesOfFile, AST_TypenamesOfFile, AST_Leaftypes,
+from commonPy.asnParser import (  # NOQA pylint: disable=unused-import
+    AST_Lookup, AST_Leaftypes,
     Typename, Filename, ParseAsnFileList)
 from commonPy.utility import inform, panic
 import commonPy.cleanupNodes
@@ -175,7 +174,7 @@ def main():
     ParseAsnFileList(sys.argv[1:])
-    Triples = Tuple[AST_Lookup, List[AsnNode], AST_Leaftypes]  # NOQA pylint: disable=unused-variable
+    Triples = Tuple[AST_Lookup, List[AsnNode], AST_Leaftypes]  # NOQA pylint: disable=unused-variable,invalid-sequence-index
     uniqueASNfiles = {}  # type: Dict[Filename, Triples]
     for asnFile in uniqueASNfiles:
@@ -261,7 +260,9 @@ def main():
 if __name__ == "__main__":
     if "-pdb" in sys.argv:
         sys.argv.remove("-pdb")  # pragma: no cover
-        import pdb  # pylint: disable=wrong-import-position pragma: nocover
+        import pdb  # pragma: nocover pylint: disable=wrong-import-position,wrong-import-order
         pdb.run('main()')  # pragma: no cover
     else:
         main()
+# vim: set expandtab ts=8 sts=4 shiftwidth=4
 #!/usr/bin/env python
-# vim: set expandtab ts=8 sts=4 shiftwidth=4
+'''
+This is one of the code generators that Semantix developed for
+the European research project ASSERT. It is now enhanced in the
+context of Data Modelling and Data Modelling Tuning projects.
+It reads the ASN.1 specification of the exchanged messages, and
+generates printer-functions for their content.
+'''
 # (C) Semantix Information Technologies.
 #
@@ -24,12 +31,12 @@ import os
 import sys
 import copy
-from typing import Tuple
+from typing import Tuple, List
 import commonPy.configMT
 from commonPy.asnAST import sourceSequenceLimit, AsnNode  # NOQA pylint: disable=unused-import
-from commonPy.asnParser import (  # NOQA
-    AST_Lookup, AST_TypesOfFile, AST_TypenamesOfFile, AST_Leaftypes,
+from commonPy.asnParser import (  # NOQA pylint: disable=unused-import
+    AST_Lookup, AST_Leaftypes,
     Typename, Filename, ParseAsnFileList)
 from commonPy.utility import inform, panic
 import commonPy.cleanupNodes
@@ -37,15 +44,6 @@ from commonPy.recursiveMapper import RecursiveMapper
 import commonPy.verify
-__doc__ = '''\
-This is one of the code generators that Semantix developed for
-the European research project ASSERT. It is now enhanced in the
-context of Data Modelling and Data Modelling Tuning projects.
-It reads the ASN.1 specification of the exchanged messages, and
-generates "printer" functions for their content.
-'''
 def usage():
     '''Print usage instructions.'''
@@ -62,7 +60,7 @@ class Printer(RecursiveMapper):
         self.uniqueID += 1 if self.uniqueID != 385 else 2
         return self.uniqueID
-    def MapInteger(self, srcCVariable, empty, _, __, ___):
+    def MapInteger(self, srcCVariable, unused, _, __, ___):
         lines = []
         lines.append('#if WORD_SIZE==8')
         lines.append('printf("%%lld", %s);' % srcCVariable)
@@ -71,13 +69,13 @@ class Printer(RecursiveMapper):
         lines.append('#endif')
         return lines
-    def MapReal(self, srcCVariable, empty, _, __, ___):
+    def MapReal(self, srcCVariable, unused, _, __, ___):
         return ['printf("%%f", %s);' % srcCVariable]
-    def MapBoolean(self, srcCVariable, empty, _, __, ___):
+    def MapBoolean(self, srcCVariable, unused, _, __, ___):
         return ['printf("%%s", (int)%s?"TRUE":"FALSE");' % srcCVariable]
-    def MapOctetString(self, srcCVariable, empty, node, __, ___):
+    def MapOctetString(self, srcCVariable, unused, node, __, ___):
         lines = []
         lines.append("{")
         lines.append(" int i;")
@@ -89,7 +87,7 @@ class Printer(RecursiveMapper):
         lines.append("}\n")
         return lines
-    def MapEnumerated(self, srcCVariable, empty, node, __, ___):
+    def MapEnumerated(self, srcCVariable, unused, node, __, ___):
         lines = []
         lines.append("switch(%s) {" % srcCVariable)
         for d in node._members:
@@ -130,12 +128,14 @@ class Printer(RecursiveMapper):
                 "%sif (%s.kind == %s) {" %
                 (self.maybeElse(childNo), srcCVariable, self.CleanName(child[2])))
             lines.append(" printf(\"%s:\");" % child[0])  # Choices need the field name printed
-            lines.extend([' '+x for x in self.Map(
-                "%s.u.%s" % (srcCVariable, self.CleanName(child[0])),
-                prefix + "::" + self.CleanName(child[0]),
-                child[1],
-                leafTypeDict,
-                names)])
+            lines.extend(
+                [' '+x
+                 for x in self.Map(
+                     "%s.u.%s" % (srcCVariable, self.CleanName(child[0])),
+                     prefix + "::" + self.CleanName(child[0]),
+                     child[1],
+                     leafTypeDict,
+                     names)])
             lines.append("}")
         return lines
@@ -149,12 +149,14 @@ class Printer(RecursiveMapper):
         lines.append(" for(i%s=0; i%s<%s; i%s++) {" % (uniqueId, uniqueId, limit, uniqueId))
         lines.append(" if (i%s) " % uniqueId)
         lines.append(" printf(\",\");")
-        lines.extend([" " + x for x in self.Map(
-            "%s.arr[i%s]" % (srcCVariable, uniqueId),
-            prefix + "::Elem",
-            node._containedType,
-            leafTypeDict,
-            names)])
+        lines.extend(
+            [" " + x
+             for x in self.Map(
+                 "%s.arr[i%s]" % (srcCVariable, uniqueId),
+                 prefix + "::Elem",
+                 node._containedType,
+                 leafTypeDict,
+                 names)])
         lines.append(" }")
         lines.append(" printf(\"}\");")
         lines.append("}")
@@ -192,7 +194,7 @@ def main():
     ParseAsnFileList(sys.argv[1:])
-    Triples = Tuple[AST_Lookup, List[AsnNode], AST_Leaftypes]  # NOQA pylint: disable=unused-variable
+    Triples = Tuple[AST_Lookup, List[AsnNode], AST_Leaftypes]  # NOQA pylint: disable=unused-variable,invalid-sequence-index
     uniqueASNfiles = {}  # type: Dict[Filename, Triples]
     for asnFile in uniqueASNfiles:
@@ -250,7 +252,7 @@ def main():
         inform("Processing %s...", nodeTypename)
         # First, make sure we know what leaf type this node is
-        assert(nodeTypename in leafTypeDict)
+        assert nodeTypename in leafTypeDict
         C_HeaderFile.write('void PrintASN1%s(const char *paramName, const asn1Scc%s *pData);\n' % (cleanNodeTypename, cleanNodeTypename))
         C_SourceFile.write('void PrintASN1%s(const char *paramName, const asn1Scc%s *pData)\n{\n' % (cleanNodeTypename, cleanNodeTypename))
@@ -275,7 +277,9 @@ def main():
 if __name__ == "__main__":
     if "-pdb" in sys.argv:
         sys.argv.remove("-pdb")  # pragma: no cover
-        import pdb  # pragma: no cover
+        import pdb  # pragma: no cover pylint: disable=wrong-import-position,wrong-import-order
         pdb.run('main()')  # pragma: no cover
     else:
         main()
+# vim: set expandtab ts=8 sts=4 shiftwidth=4
@@ -38,31 +38,31 @@ def OnStartup(unused_modelingLanguage, asnFile, outputDir, unused_badTypes):
     g_outputDir = outputDir
-def OnBasic(nodeTypename, node, leafTypeDict):
+def OnBasic(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass
-def OnSequence(nodeTypename, node, leafTypeDict):
+def OnSequence(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass
-def OnSet(nodeTypename, node, leafTypeDict):
+def OnSet(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: nocover
-def OnEnumerated(nodeTypename, node, leafTypeDict):
+def OnEnumerated(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass
-def OnSequenceOf(nodeTypename, node, leafTypeDict):
+def OnSequenceOf(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass
-def OnSetOf(nodeTypename, node, leafTypeDict):
+def OnSetOf(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: nocover
-def OnChoice(nodeTypename, node, leafTypeDict):
+def OnChoice(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass
 # obsolete, now the grammar is re-created from the AST (PrintGrammarFromAST)
...
@@ -457,11 +457,8 @@ def DumpTypeDumper(codeIndent, outputIndent, lines, variableName, node, names):
 def CreateDeclarationForType(nodeTypename: str, names: AST_Lookup, leafTypeDict: AST_Leaftypes):
     node = names[nodeTypename]
     name = CleanNameAsPythonWants(nodeTypename)
-    if isinstance(node, AsnBasicNode) or isinstance(node, AsnEnumerated) or \
-            isinstance(node, AsnSequence) or isinstance(node, AsnSet) or \
-            isinstance(node, AsnChoice) or isinstance(node, AsnSequenceOf) or \
-            isinstance(node, AsnSetOf):
+    if isinstance(node, (AsnBasicNode, AsnEnumerated, AsnSequence, AsnSet,
+                         AsnChoice, AsnSequenceOf, AsnSetOf)):
         g_outputFile.write("class " + name + "(COMMON):\n")
         if isinstance(node, AsnEnumerated):
             g_outputFile.write(" # Allowed enumerants:\n")
...
@@ -89,31 +89,31 @@ def OnStartup(unused_modelingLanguage, asnFiles, outputDir, unused_badTypes):
     CreateDeclarationsForAllTypes(commonPy.asnParser.g_names, commonPy.asnParser.g_leafTypeDict)
-def OnBasic(nodeTypename, node, leafTypeDict):
+def OnBasic(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: nocover
-def OnSequence(nodeTypename, node, leafTypeDict):
+def OnSequence(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: nocover
-def OnSet(nodeTypename, node, leafTypeDict):
+def OnSet(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: nocover
-def OnEnumerated(nodeTypename, node, leafTypeDict):
+def OnEnumerated(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: nocover
-def OnSequenceOf(nodeTypename, node, leafTypeDict):
+def OnSequenceOf(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: nocover
-def OnSetOf(nodeTypename, node, leafTypeDict):
+def OnSetOf(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: nocover
-def OnChoice(nodeTypename, node, leafTypeDict):
+def OnChoice(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: nocover
...
@@ -68,31 +68,31 @@ def OnStartup(unused_modelingLanguage, unused_asnFile, outputDir, unused_badType
     CreateDeclarationsForAllTypes(commonPy.asnParser.g_names, commonPy.asnParser.g_leafTypeDict)
-def OnBasic(nodeTypename, node, leafTypeDict):
+def OnBasic(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnSequence(nodeTypename, node, leafTypeDict):
+def OnSequence(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnSet(nodeTypename, node, leafTypeDict):
+def OnSet(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: nocover
-def OnEnumerated(nodeTypename, node, leafTypeDict):
+def OnEnumerated(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnSequenceOf(nodeTypename, node, leafTypeDict):
+def OnSequenceOf(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: no cover
-def OnSetOf(nodeTypename, node, leafTypeDict):
+def OnSetOf(unused_nodeTypename, unused_node, unused_leafTypeDict):
     pass  # pragma: nocover