Commit 0a85aa5b authored by Thanassis Tsiodras

[mypy] Reached asn2dataModel and aadl2glueC 100% pass (finally type-safe)

parent 86a84412
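
The hunks below touch five files. Almost every change is the same move: adding Python 2/3 compatible, comment-style type annotations (`# type: ...`) that mypy reads and the interpreter ignores, plus a few small refactorings (a set instead of a dict, tuples instead of lists, renamed loop variables) that make those annotations checkable. As a reference for the notation, here is a minimal, self-contained sketch; the function and names are made up for illustration and do not come from the repository:

```python
from typing import Dict


def count_words(text):
    # type: (str) -> Dict[str, int]
    # The comment above is the Python 2/3 compatible form of a signature
    # annotation: mypy checks it, the interpreter never sees it.
    counts = {}  # type: Dict[str, int]
    for word in text.split():
        counts[word] = counts.get(word, 0) + 1
    return counts


print(count_words("ada c c sdl c"))
```

Running mypy over such a file checks the annotated signatures and assignments without changing runtime behaviour.
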
@@ -43,7 +43,6 @@ output parameters, which have Ada callable interfaces.
 from . import c_B_mapper
 isAsynchronous = True
-adaBackend = None
 cBackend = None
@@ -312,10 +311,7 @@ def OnStartup(unused_modelingLanguage, asnFile, outputDir, maybeFVname, useOSS):
     global cBackend
     # 2009-02-10: Since we now use ASN1SCC structures as dumpables (even for Ada)
     # we no longer need these Ada-specific Dumpable structures.
-    # global adaBackend
-    # adaBackend = Ada_GlueGenerator()
     cBackend = c_B_mapper.C_GlueGenerator()
-    # adaBackend.OnStartup(modelingLanguage, asnFile, outputDir, maybeFVname, useOSS)
     cBackend.OnStartup("C", asnFile, outputDir, maybeFVname, useOSS)

@@ -46,7 +46,6 @@ output parameters, which have Ada callable interfaces.
 from . import c_B_mapper
 isAsynchronous = True
-adaBackend = None
 cBackend = None
@@ -60,10 +59,7 @@ def OnStartup(unused_modelingLanguage, asnFile, outputDir, maybeFVname, useOSS):
     global cBackend
     # 2009-02-10: Since we now use ASN1SCC structures as dumpables (even for Ada)
    # we no longer need these Ada-specific Dumpable structures.
-    # global adaBackend
-    # adaBackend = Ada_GlueGenerator()
     cBackend = c_B_mapper.C_GlueGenerator()
-    # adaBackend.OnStartup(modelingLanguage, asnFile, outputDir, maybeFVname, useOSS)
     cBackend.OnStartup("C", asnFile, outputDir, maybeFVname, useOSS)
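
The two files above only lose the long commented-out Ada backend; what remains is the module-level `cBackend` handle that `OnStartup` fills in. For a global that starts as `None` and is assigned an object later, the usual way to keep mypy satisfied is an `Optional` annotation. A sketch of that pattern, with a stand-in class, since the real annotation used in these files is not shown in the diff:

```python
from typing import Optional


class GlueGenerator:  # stand-in for c_B_mapper.C_GlueGenerator, for illustration only
    def OnStartup(self, lang, asnFile, outputDir, maybeFVname, useOSS):
        # type: (str, str, str, str, bool) -> None
        print("Starting %s glue generation for %s" % (lang, asnFile))


# Optional[...] lets the global hold None at import time and an instance later.
cBackend = None  # type: Optional[GlueGenerator]


def OnStartup(unused_modelingLanguage, asnFile, outputDir, maybeFVname, useOSS):
    # type: (str, str, str, str, bool) -> None
    global cBackend
    cBackend = GlueGenerator()
    cBackend.OnStartup("C", asnFile, outputDir, maybeFVname, useOSS)


OnStartup("SDL", "dataview.asn", "/tmp/out", "FV1", False)
```
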
@@ -68,7 +68,7 @@ class FromRTDSToASN1SCC(RecursiveMapper):
         return ["%s = (%s==TRUE)?0xff:0;\n" % (destVar, srcSDLVariable)]
     def MapOctetString(self, srcSDLVariable, destVar, node, __, ___):
-        lines = []
+        lines = [] # type: List[str]
         lines.append("{\n")
         lines.append(" int i;\n")
         lines.append(" for(i=0; i<%s.__length; i++) {\n" % srcSDLVariable)
@@ -85,7 +85,7 @@ class FromRTDSToASN1SCC(RecursiveMapper):
         return ["%s = %s;\n" % (destVar, srcSDLVariable)]
     def MapSequence(self, srcSDLVariable, destVar, node, leafTypeDict, names):
-        lines = []
+        lines = [] # type: List[str]
         for child in node._members:
             lines.extend(
                 self.Map(
@@ -100,7 +100,7 @@ class FromRTDSToASN1SCC(RecursiveMapper):
         return self.MapSequence(srcSDLVariable, destVar, node, leafTypeDict, names) # pragma: nocover
     def MapChoice(self, srcSDLVariable, destVar, node, leafTypeDict, names):
-        lines = []
+        lines = [] # type: List[str]
         childNo = 0
         for child in node._members:
             childNo += 1
@@ -118,7 +118,7 @@ class FromRTDSToASN1SCC(RecursiveMapper):
         return lines
     def MapSequenceOf(self, srcSDLVariable, destVar, node, leafTypeDict, names):
-        lines = []
+        lines = [] # type: List[str]
         lines.append("{\n")
         uniqueId = self.UniqueID()
         lines.append(" int i%s;\n" % uniqueId)
@@ -165,7 +165,7 @@ class FromRTDSToOSS(RecursiveMapper):
         return ["%s = (%s==SDL_TRUE)?0xff:0;\n" % (destVar, srcSDLVariable)]
     def MapOctetString(self, srcSDLVariable, destVar, _, __, ___):
-        lines = []
+        lines = [] # type: List[str]
         lines.append("{\n")
         lines.append(" int i;\n")
         lines.append(" for(i=0; i<%s.length; i++) {\n" % srcSDLVariable)
@@ -190,7 +190,7 @@ class FromRTDSToOSS(RecursiveMapper):
         return ["%s = %s;\n" % (destVar, srcSDLVariable)]
     def MapSequence(self, srcSDLVariable, destVar, node, leafTypeDict, names):
-        lines = []
+        lines = [] # type: List[str]
         for child in node._members:
             lines.extend(
                 self.Map(
@@ -205,7 +205,7 @@ class FromRTDSToOSS(RecursiveMapper):
         return self.MapSequence(srcSDLVariable, destVar, node, leafTypeDict, names) # pragma: nocover
     def MapChoice(self, srcSDLVariable, destVar, node, leafTypeDict, names):
-        lines = []
+        lines = [] # type: List[str]
         childNo = 0
         for child in node._members:
             childNo += 1
@@ -223,7 +223,7 @@ class FromRTDSToOSS(RecursiveMapper):
         return lines
     def MapSequenceOf(self, srcSDLVariable, destVar, node, leafTypeDict, names):
-        lines = []
+        lines = [] # type: List[str]
         lines.append("{\n")
         uniqueId = self.UniqueID()
         lines.append(" int i%s;\n" % uniqueId)
@@ -271,7 +271,7 @@ class FromASN1SCCtoRTDS(RecursiveMapper):
     def MapOctetString(self, srcVar, dstSDLVariable, node, _, __):
         # for i in xrange(0, node._range[-1]):
         # lines.append("%s[%d] = %s->buf[%d];\n" % (dstSDLVariable, i, srcVar, i))
-        lines = []
+        lines = [] # type: List[str]
         limit = sourceSequenceLimit(node, srcVar)
         lines.append("{\n")
         lines.append(" int i;\n")
@@ -290,7 +290,7 @@ class FromASN1SCCtoRTDS(RecursiveMapper):
         return ["%s = %s;\n" % (dstSDLVariable, srcVar)]
     def MapSequence(self, srcVar, dstSDLVariable, node, leafTypeDict, names):
-        lines = []
+        lines = [] # type: List[str]
         for child in node._members:
             lines.extend(
                 self.Map(
@@ -305,7 +305,7 @@ class FromASN1SCCtoRTDS(RecursiveMapper):
         return self.MapSequence(srcVar, dstSDLVariable, node, leafTypeDict, names) # pragma: nocover
     def MapChoice(self, srcVar, dstSDLVariable, node, leafTypeDict, names):
-        lines = []
+        lines = [] # type: List[str]
         childNo = 0
         for child in node._members:
             childNo += 1
@@ -324,7 +324,7 @@ class FromASN1SCCtoRTDS(RecursiveMapper):
         return lines
     def MapSequenceOf(self, srcVar, dstSDLVariable, node, leafTypeDict, names):
-        lines = []
+        lines = [] # type: List[str]
         lines.append("{\n")
         uniqueId = self.UniqueID()
         limit = sourceSequenceLimit(node, srcVar)
@@ -371,7 +371,7 @@ class FromOSStoRTDS(RecursiveMapper):
         return ["%s = (%s)?SDL_TRUE:SDL_FALSE;\n" % (dstSDLVariable, srcVar)]
     def MapOctetString(self, srcVar, dstSDLVariable, node, _, __):
-        lines = []
+        lines = [] # type: List[str]
         # for i in xrange(0, node._range[-1]):
         # lines.append("%s[%d] = %s->buf[%d];\n" % (dstSDLVariable, i, srcVar, i))
         lines.append("{\n")
@@ -408,7 +408,7 @@ class FromOSStoRTDS(RecursiveMapper):
         return ["%s = %s;\n" % (dstSDLVariable, srcVar)]
     def MapSequence(self, srcVar, dstSDLVariable, node, leafTypeDict, names):
-        lines = []
+        lines = [] # type: List[str]
         for child in node._members:
             lines.extend(
                 self.Map(
@@ -423,7 +423,7 @@ class FromOSStoRTDS(RecursiveMapper):
         return self.MapSequence(srcVar, dstSDLVariable, node, leafTypeDict, names) # pragma: nocover
     def MapChoice(self, srcVar, dstSDLVariable, node, leafTypeDict, names):
-        lines = []
+        lines = [] # type: List[str]
         childNo = 0
         for child in node._members:
             childNo += 1
@@ -442,7 +442,7 @@ class FromOSStoRTDS(RecursiveMapper):
         return lines
     def MapSequenceOf(self, srcVar, dstSDLVariable, node, leafTypeDict, names):
-        lines = []
+        lines = [] # type: List[str]
         lines.append("{\n")
         uniqueId = self.UniqueID()
         lines.append(" int i%s;\n" % uniqueId)
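
All sixteen hunks above make the same one-line change: annotating an empty list. mypy cannot tell what a bare `[]` will eventually hold, so in type-checked functions it asks for an annotation; the `# type: List[str]` comment supplies the element type without altering behaviour. A minimal illustration (not repository code):

```python
from typing import List


def block_comment(name):
    # type: (str) -> str
    # Without the annotation below, mypy cannot infer the element type of the
    # empty list and reports that it needs a type annotation.
    lines = []  # type: List[str]
    lines.append("{\n")
    lines.append("    /* %s */\n" % name)
    lines.append("}\n")
    return "".join(lines)


print(block_comment("OCTET STRING mapper"))
```
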
@@ -84,10 +84,15 @@ import copy
 import distutils.spawn as spawn
 from importlib import import_module
+from typing import Tuple, Any # NOQA pylint: disable=unused-import
 from . import commonPy
 from .commonPy.utility import panic, inform
 from .commonPy import verify
+from .commonPy.asnParser import Filename, Typename, AST_Lookup, AST_TypesOfFile, AST_Leaftypes # NOQA pylint: disable=unused-import
+from .commonPy.asnAST import AsnNode # NOQA pylint: disable=unused-import
+from .commonPy.aadlAST import ApLevelContainer # NOQA pylint: disable=unused-import
 from . import B_mappers # NOQA pylint: disable=unused-import
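
The names imported here (`Tuple`, `Any`, `Filename`, `AST_Lookup`, `AsnNode`, `ApLevelContainer`, ...) are referenced only inside `# type:` comments, so flake8 and pylint would otherwise flag them as unused; the `# NOQA pylint: disable=unused-import` markers silence both linters while mypy still resolves the names. A sketch of this typing-only import pattern, with made-up data:

```python
# These names are used only in "# type:" comments, hence the linter markers.
from typing import Dict, List, Tuple  # NOQA pylint: disable=unused-import


def group_by_language(pairs):
    # type: (List[Tuple[str, str]]) -> Dict[str, List[str]]
    grouped = {}  # type: Dict[str, List[str]]
    for language, asn_filename in pairs:
        grouped.setdefault(language, []).append(asn_filename)
    return grouped


print(group_by_language([("C", "a.asn"), ("Ada", "b.asn"), ("C", "c.asn")]))
```
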
@@ -207,58 +212,41 @@ def main():
     ParseAADLfilesAndResolveSignals()
-    uniqueDataFiles = {}
+    uniqueDataFiles = {} # type: Dict[Filename, Dict[str, List[ApLevelContainer]]]
     for sp in list(commonPy.aadlAST.g_apLevelContainers.values()):
         for param in sp._params:
             uniqueDataFiles.setdefault(param._signal._asnFilename, {})
             uniqueDataFiles[param._signal._asnFilename].setdefault(sp._language, [])
             uniqueDataFiles[param._signal._asnFilename][sp._language].append(sp)
-    uniqueASNfiles = {}
+    uniqueASNfiles = {} # type: Dict[Filename, Tuple[AST_Lookup, List[AsnNode], AST_Leaftypes]]
     if len(list(uniqueDataFiles.keys())) != 0:
         commonPy.asnParser.ParseAsnFileList(list(uniqueDataFiles.keys()))
     for asnFile in uniqueDataFiles:
-        tmpNames = {}
+        tmpNames = {} # type: AST_Lookup
         for name in commonPy.asnParser.g_typesOfFile[asnFile]:
             tmpNames[name] = commonPy.asnParser.g_names[name]
-        uniqueASNfiles[asnFile] = [
+        uniqueASNfiles[asnFile] = (
             copy.copy(tmpNames), # map Typename to type definition class from asnAST
             copy.copy(commonPy.asnParser.g_astOfFile[asnFile]), # list of nameless type definitions
-            copy.copy(commonPy.asnParser.g_leafTypeDict)] # map from Typename to leafType
+            copy.copy(commonPy.asnParser.g_leafTypeDict)) # map from Typename to leafType
         inform("Checking that all base nodes have mandatory ranges set in %s..." % asnFile)
         for node in list(tmpNames.values()):
             verify.VerifyRanges(node, commonPy.asnParser.g_names)
-    # # For each ASN.1 grammar file referenced in the system level description
-    # for asnFile in uniqueDataFiles.iterkeys():
-    # names = uniqueASNfiles[asnFile][0]
-    # leafTypeDict = uniqueASNfiles[asnFile][2]
-    #
-    # modelingLanguages = uniqueDataFiles[asnFile]
-    #
-    # # For each modeling language used by subprograms whose messages reference the grammar
-    # for modelingLanguage, subProgramArray in modelingLanguages.iteritems():
-    # if modelingLanguage == None:
-    # continue
-    #
-    # for sp in subProgramArray:
-    loadedBackends = {}
+    loadedBackends = set() # type: Set[str]
     SystemsAndImplementations = commonPy.aadlAST.g_subProgramImplementations[:]
     SystemsAndImplementations.extend(commonPy.aadlAST.g_threadImplementations[:])
     SystemsAndImplementations.extend(commonPy.aadlAST.g_processImplementations[:])
     # obsolete, was used for OSS library init
     # CreateInitializationFiles(useOSS, SystemsAndImplementations, uniqueDataFiles.iterkeys())
     # Update ASN.1 nodes to carry size info (only for Signal params)
     for si in SystemsAndImplementations:
-        sp, sp_impl, modelingLanguage = si[0], si[1], si[2]
-        sp = commonPy.aadlAST.g_apLevelContainers[sp]
+        spName, sp_impl, modelingLanguage = si[0], si[1], si[2]
+        sp = commonPy.aadlAST.g_apLevelContainers[spName]
         for param in sp._params:
             asnFile = param._signal._asnFilename
             names = uniqueASNfiles[asnFile][0]
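
Two of the changes above turn the per-ASN.1-file entry from a three-element list into a tuple holding the type lookup, the list of nameless type definitions, and the leaf-type map. The likely motivation is that `Tuple[AST_Lookup, List[AsnNode], AST_Leaftypes]` can give each position its own type, which a homogeneous `List[...]` cannot express. A sketch with simplified stand-in aliases:

```python
from typing import Dict, List, Tuple

# Simplified stand-ins for the real aliases defined in commonPy.asnParser.
AST_Lookup = Dict[str, object]
AST_Leaftypes = Dict[str, str]


def make_entry(names, nameless, leaf_types):
    # type: (AST_Lookup, List[object], AST_Leaftypes) -> Tuple[AST_Lookup, List[object], AST_Leaftypes]
    # A tuple lets mypy track a distinct type for each of the three positions.
    return (dict(names), list(nameless), dict(leaf_types))


entry = make_entry({"T-Int": object()}, [], {"T-Int": "INTEGER"})
names, nameless, leaf_types = entry  # each unpacked name gets its precise type
print(leaf_types["T-Int"])
```
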
@@ -269,7 +257,7 @@ def main():
             node = names[nodeTypename]
             if node._leafType == "AsciiString":
                 panic("You cannot use IA5String as a parameter - use OCTET STRING instead\n(%s)" % node.Location()) # pragma: no cover
-            node._asnSize = param._signal._asnSize
+            # (typo?) node._asnSize = param._signal._asnSize
     # If some AST nodes must be skipped (for any reason), go learn about them
     badTypes = commonPy.cleanupNodes.DiscoverBadTypes()
@@ -277,11 +265,11 @@ def main():
     if {"ada", "qgenada"} & {y[2].lower() for y in SystemsAndImplementations}:
         SpecialCodes(SystemsAndImplementations, uniqueDataFiles, uniqueASNfiles, useOSS)
-    asynchronousBackends = []
+    asynchronousBackends = [] # type: List[Any] # No idea how to say list of module
     for si in SystemsAndImplementations:
-        sp, sp_impl, modelingLanguage, maybeFVname = si[0], si[1], si[2], si[3]
-        sp = commonPy.aadlAST.g_apLevelContainers[sp]
+        spName, sp_impl, modelingLanguage, maybeFVname = si[0], si[1], si[2], si[3]
+        sp = commonPy.aadlAST.g_apLevelContainers[spName]
         inform("Creating glue for parameters of %s.%s...", sp._id, sp_impl)
         if modelingLanguage is None:
             continue # pragma: no cover
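
Regarding the `# No idea how to say list of module` remark above: the standard library does provide a type for module objects, `types.ModuleType`, which mypy accepts in annotations. This is only a suggestion, not what the repository does:

```python
# A "list of modules" can be annotated with types.ModuleType.
import json
import math
from types import ModuleType
from typing import List

asynchronousBackends = []  # type: List[ModuleType]
asynchronousBackends.append(math)
asynchronousBackends.append(json)

for backend in asynchronousBackends:
    print(backend.__name__)
```
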
@@ -303,7 +291,7 @@ def main():
         try:
             backend = import_module(backendFilename[:-3], 'dmt.B_mappers') # pragma: no cover
             if backendFilename[:-3] not in loadedBackends:
-                loadedBackends[backendFilename[:-3]] = 1
+                loadedBackends.add(backendFilename[:-3])
                 if commonPy.configMT.verbose:
                     backend.Version()
         except ImportError as err: # pragma: no cover
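
`loadedBackends` used to be a dict whose values were always `1`, i.e. it was only ever consulted for membership; the new `set()` with `.add()` states that directly and types cleanly as `Set[str]`. A short sketch of the resulting idiom:

```python
from typing import Set

loadedBackends = set()  # type: Set[str]


def load_backend(name):
    # type: (str) -> None
    if name not in loadedBackends:   # same membership test as with the old dict
        loadedBackends.add(name)     # instead of loadedBackends[name] = 1
        print("initialising backend %s" % name)


load_backend("c_B_mapper")
load_backend("c_B_mapper")  # second call finds the name already present
```
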
@@ -408,8 +396,8 @@ def main():
     for si in [x for x in SystemsAndImplementations if x[2] is not None and x[2].lower() in ["gui_ri", "gui_pi", "vhdl"]]:
         # We do, start the work
-        sp, sp_impl, lang, maybeFVname = si[0], si[1], si[2], si[3]
-        sp = commonPy.aadlAST.g_apLevelContainers[sp]
+        spName, sp_impl, lang, maybeFVname = si[0], si[1], si[2], si[3]
+        sp = commonPy.aadlAST.g_apLevelContainers[spName]
         if len(sp._params) == 0:
             if lang.lower() == "gui_ri": # pragma: no cover
                 if "gui_polling" not in sp._id: # pragma: no cover
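
The `sp` to `spName` renames in this file all address the same issue: `si[0]` is a string key that was immediately rebound to the container object looked up with it, so one variable carried two unrelated types, which mypy's one-type-per-variable inference rejects. Giving the key its own name keeps both assignments well typed. A stand-alone illustration with a minimal stand-in class:

```python
from typing import Dict


class ApLevelContainer:  # minimal stand-in for the real AADL container class
    def __init__(self, name):
        # type: (str) -> None
        self._id = name


g_apLevelContainers = {"Demo": ApLevelContainer("Demo")}  # type: Dict[str, ApLevelContainer]

for spName in g_apLevelContainers:      # spName stays a str ...
    sp = g_apLevelContainers[spName]    # ... and sp is always an ApLevelContainer
    print(spName, sp._id)
```
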
@@ -39,8 +39,12 @@ import sys
 import copy
 from importlib import import_module
+from typing import Tuple, Any # NOQA pylint: disable=unused-import
 from .commonPy import configMT, asnParser, cleanupNodes, verify
 from .commonPy.utility import inform, panic
+from .commonPy.asnParser import Filename, Typename, AST_Lookup, AST_TypesOfFile, AST_Leaftypes # NOQA pylint: disable=unused-import
+from .commonPy.asnAST import AsnNode # NOQA pylint: disable=unused-import
 from . import A_mappers # NOQA pylint:disable=unused-import
@@ -111,20 +115,18 @@ def main():
         if not os.path.isfile(f):
             panic("'%s' is not a file!\n" % f) # pragma: no cover
-    uniqueASNfiles = {}
-    for grammar in sys.argv[1:]:
-        uniqueASNfiles[grammar] = 1
-    asnParser.ParseAsnFileList(list(uniqueASNfiles.keys()))
+    uniqueASNfiles = {} # type: Dict[Filename, Tuple[AST_Lookup, List[AsnNode], AST_Leaftypes]]
+    asnParser.ParseAsnFileList(list(set(sys.argv[1:])))
     for asnFile in uniqueASNfiles:
-        tmpNames = {}
+        tmpNames = {} # type: AST_Lookup
         for name in asnParser.g_typesOfFile[asnFile]:
             tmpNames[name] = asnParser.g_names[name]
-        uniqueASNfiles[asnFile] = [
+        uniqueASNfiles[asnFile] = (
             copy.copy(tmpNames), # map Typename to type definition class from asnAST
             copy.copy(asnParser.g_astOfFile[asnFile]), # list of nameless type definitions
-            copy.copy(asnParser.g_leafTypeDict)] # map from Typename to leafType
+            copy.copy(asnParser.g_leafTypeDict)) # map from Typename to leafType
         inform("Checking that all base nodes have mandatory ranges set in %s..." % asnFile)
         for node in list(tmpNames.values()):
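
The removed block built a dict keyed by grammar filename purely to drop duplicates from `sys.argv[1:]`; the replacement does the same in one line with `set()` before handing the list to `asnParser.ParseAsnFileList`. (A set does not preserve command-line order, which presumably does not matter to the parser.) A sketch of the equivalence:

```python
args = ["a.asn", "b.asn", "a.asn"]   # stand-in for sys.argv[1:]

# old shape: a dict used only for its keys
uniqueASNfiles = {}
for grammar in args:
    uniqueASNfiles[grammar] = 1
print(sorted(uniqueASNfiles.keys()))   # ['a.asn', 'b.asn']

# new shape: a set expresses the deduplication directly
print(sorted(set(args)))               # ['a.asn', 'b.asn']
```
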
@@ -133,7 +135,7 @@ def main():
     if configMT.debugParser:
         sys.exit(0) # pragma: no cover
-    loadedBackends = {}
+    loadedBackends = {} # type: Dict[Filename, Any]
     # If some AST nodes must be skipped (for any reason), go learn about them
     badTypes = cleanupNodes.DiscoverBadTypes()