Commit 33891182 authored by Maxime Perrotin

Merge branch 'master' of https://gitrepos.estec.esa.int/taste/dmt into MORA

parents 1cb51c5b f516eaf4
@@ -13,4 +13,4 @@ jobs:
       paths:
         - ~/docker
     - run:
-        command: docker run -it -v $(pwd):/root/tests dmt bash -c 'cd /root/tests ; LANG=C LC_ALL=C PATH=$PATH:/asn1scc make'
+        command: docker run -it -v $(pwd):/root/tests dmt bash -c 'cd /root/tests ; pip3 uninstall dmt ; pip3 install --upgrade . ; LANG=C LC_ALL=C PATH=$PATH:/asn1scc make'
@@ -5,7 +5,7 @@ RUN cat /etc/apt/apt.conf.d/99badproxies
 RUN apt-get update
 RUN apt-get install -y libxslt-dev libxml2-dev python-psycopg2 mono-runtime libmono-system-data4.0-cil libmono-system-web4.0-cil libfsharp-core4.3-cil
 RUN apt-get install -y wget python3-pip
-RUN wget -O - -q https://github.com/ttsiodras/asn1scc/releases/download/4.1e/asn1scc-bin-4.1e.tar.bz2 | tar jxvf -
+RUN wget -O - -q https://github.com/ttsiodras/asn1scc/releases/download/4.2.0.1.f/asn1scc-bin-4.2.0.1f.tar.bz2 | tar jxvf -
 RUN apt-get install -y python-pip
 RUN wget -O - -q https://github.com/ttsiodras/DataModellingTools/files/335591/antlr-2.7.7.tar.gz | tar zxvf - ; cd antlr-2.7.7/lib/python ; pip2 install .
 RUN pip2 install SQLAlchemy psycopg2
...
@@ -222,7 +222,7 @@ def setSharedLib(dll=None):
 '''.format(fvName=FVname, tcName=CleanSP))
     g_PyDataModel.write('\ntc["{tcName}"] = '.format(
         tcName=CleanSP.lower()))
     buttons = ([["sendButton", "Send TC"], ["loadButton", "Load TC"],
                 ["saveButton", "Save TC"]])
     classType = "asn1Editor"
...
@@ -80,6 +80,7 @@ but with an extra call to OnFinal at the end.
 import os
 import sys
+import hashlib
 from distutils import spawn
 from typing import cast, Optional, Dict, List, Tuple, Set, Any  # NOQA pylint: disable=unused-import
@@ -145,17 +146,48 @@ def ParseAADLfilesAndResolveSignals() -> None:
     '''Invokes the ANTLR generated AADL parser, and resolves
 all references to AAADL Data types into the param._signal member
 of each SUBPROGRAM param.'''
-    import tempfile
-    f = tempfile.NamedTemporaryFile(delete=False)
-    astFile = f.name
-    f.close()
-    os.unlink(astFile)
-    parserUtility = os.path.join(os.path.abspath(os.path.dirname(__file__)), "parse_aadl.py")
-    cmd = "python2 " + parserUtility + " -o " + astFile + ' ' + ' '.join(sys.argv[1:])
-    if os.system(cmd) != 0:
-        if os.path.exists(astFile):
-            os.unlink(astFile)
-        panic("AADL parsing failed. Aborting...")
+    projectCache = os.getenv("PROJECT_CACHE")
+    if projectCache is not None:
+        if not os.path.isdir(projectCache):
+            panic("The configured cache folder:\n\n\t" + projectCache +
+                  "\n\n...is not there!\n")
+    cachedModelExists = False
+    aadlASTcache = None
+    if projectCache is not None:
+        filehash = hashlib.md5()
+        for each in sorted(sys.argv[1:]):
+            filehash.update(open(each).read().encode('utf-8'))
+        newHash = filehash.hexdigest()
+        # set the name of the Pickle files containing the dumped AST
+        aadlASTcache = projectCache + os.sep + newHash + "_aadl_ast.pickle"
+        if not os.path.exists(aadlASTcache):
+            print("[DMT] No cached AADL model found for",
+                  ",".join(sys.argv[1:]))
+        else:
+            cachedModelExists = True
+            print("[DMT] Reusing cached AADL model for",
+                  ",".join(sys.argv[1:]))
+    import pickle
+    if cachedModelExists:
+        astInfo = pickle.load(open(aadlASTcache, 'rb'), fix_imports=False)
+    else:
+        import tempfile
+        f = tempfile.NamedTemporaryFile(delete=False)
+        astFile = f.name
+        f.close()
+        os.unlink(astFile)
+        parserUtility = os.path.join(
+            os.path.abspath(os.path.dirname(__file__)), "parse_aadl.py")
+        cmd = "python2 " + parserUtility + " -o " + astFile + ' ' + \
+            ' '.join(sys.argv[1:])
+        if os.system(cmd) != 0:
+            if os.path.exists(astFile):
+                os.unlink(astFile)
+            panic("AADL parsing failed. Aborting...")
+        astInfo = pickle.load(open(astFile, 'rb'), fix_imports=False)
+        if aadlASTcache:
+            pickle.dump(astInfo, open(aadlASTcache, 'wb'), fix_imports=False)

     def FixMetaClasses(sp: ApLevelContainer) -> None:
         def patchMe(o: Any) -> None:
@@ -179,8 +211,6 @@ of each SUBPROGRAM param.'''
         for cn in sp._connections:
             patchMe(cn)
     try:
-        import pickle
-        astInfo = pickle.load(open(astFile, 'rb'), fix_imports=False)
         for k in ['g_processImplementations', 'g_apLevelContainers',
                   'g_signals', 'g_systems', 'g_subProgramImplementations',
                   'g_threadImplementations']:
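The two hunks above replace the unconditional re-parse of the AADL inputs with a cache: when the PROJECT_CACHE environment variable points to an existing folder, the pickled AST is stored under a name derived from the MD5 of all input files, and reused on later runs as long as the inputs are byte-identical. A minimal sketch of the same content-hash caching pattern (load_or_parse, parse_fn and cache_dir are illustrative names, not part of this commit):

    import hashlib
    import os
    import pickle

    def load_or_parse(input_files, parse_fn, cache_dir):
        # Hash the inputs in sorted order, so the digest does not
        # depend on the order the files appear on the command line.
        digest = hashlib.md5()
        for path in sorted(input_files):
            with open(path, 'rb') as f:
                digest.update(f.read())
        cache_file = os.path.join(cache_dir, digest.hexdigest() + ".pickle")
        if os.path.exists(cache_file):
            with open(cache_file, 'rb') as f:
                return pickle.load(f)      # cache hit: skip the slow parse
        ast = parse_fn(input_files)        # cache miss: parse...
        with open(cache_file, 'wb') as f:
            pickle.dump(ast, f)            # ...and store for the next run
        return ast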
@@ -452,6 +482,24 @@ def main() -> None:
         import pdb  # pragma: no cover pylint: disable=wrong-import-position,wrong-import-order
         pdb.set_trace()  # pragma: no cover
+    if "-profile" in sys.argv:
+        sys.argv.remove("-profile")
+        import cProfile
+        import pstats
+        import io
+        pr = cProfile.Profile()
+        pr.enable()
+        import atexit
+
+        def dumpSpeedData() -> None:
+            pr.disable()
+            s = io.StringIO()
+            sortby = 'cumulative'
+            ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
+            ps.print_stats()
+            print(s.getvalue())
+        atexit.register(dumpSpeedData)
     if "-v" in sys.argv:
         import pkg_resources  # pragma: no cover
         version = pkg_resources.require("dmt")[0].version  # pragma: no cover
...
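The new -profile flag wires Python's cProfile into the tool for its whole lifetime: profiling starts as soon as the flag is seen, and an atexit hook prints the statistics (sorted by cumulative time) when the process terminates, so no explicit shutdown call is needed on any exit path. A minimal standalone version of the same pattern:

    import atexit
    import cProfile
    import io
    import pstats

    pr = cProfile.Profile()
    pr.enable()                       # start collecting call statistics

    def dump_speed_data() -> None:
        pr.disable()
        s = io.StringIO()
        # sort by cumulative time, as the commit does
        pstats.Stats(pr, stream=s).sort_stats('cumulative').print_stats()
        print(s.getvalue())

    atexit.register(dump_speed_data)  # fires on normal interpreter exit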
@@ -121,7 +121,7 @@ def calculateForNativeAndASN1SCC(absASN1SCCpath, autosrc, names, inputFiles):
     configMT.outputDir = autosrc + os.sep
     # dumpable.CreateDumpableCtypes(uniqueASNfiles)
-    for asnTypename in list(names.keys()):
+    for asnTypename in sorted(list(names.keys())):
         node = names[asnTypename]
         if node._isArtificial:
             continue
@@ -135,7 +135,7 @@ def calculateForNativeAndASN1SCC(absASN1SCCpath, autosrc, names, inputFiles):
     # Code generation - asn1c part
     # Create a dictionary to lookup the asn-types from their corresponding c-type
     namesDict = {}
-    for asnTypename in list(names.keys()):
+    for asnTypename in sorted(list(names.keys())):
         node = names[asnTypename]
         if node._isArtificial:
             continue
@@ -407,7 +407,7 @@ properties
     Data_Model::Data_Representation => Character;
 end Stream_Element_Buffer;
 ''')
-    for asnTypename in list(asnParser.g_names.keys()):
+    for asnTypename in sorted(list(asnParser.g_names.keys())):
         node = asnParser.g_names[asnTypename]
         if node._isArtificial:
             continue
@@ -502,7 +502,7 @@ end Stream_Element_Buffer;
     o.write('END Taste_DataView;\n\n')
     o.write('SYSTEM IMPLEMENTATION Taste_DataView.others\n')
     o.write('SUBCOMPONENTS\n')
-    for asnTypename in list(asnParser.g_names.keys()):
+    for asnTypename in sorted(list(asnParser.g_names.keys())):
         node = asnParser.g_names[asnTypename]
         if node._isArtificial:
             continue
@@ -511,11 +511,11 @@ end Stream_Element_Buffer;
     o.write('END Taste_DataView.others;\n')

     listOfAsn1Files = {}
-    for asnTypename in list(asnParser.g_names.keys()):
+    for asnTypename in sorted(list(asnParser.g_names.keys())):
         listOfAsn1Files[asnParser.g_names[asnTypename]._asnFilename] = 1
     if bAADLv2:
-        for asnFilename in list(listOfAsn1Files.keys()):
+        for asnFilename in sorted(list(listOfAsn1Files.keys())):
             base = os.path.splitext(os.path.basename(asnFilename))[0]
             possibleACN = ASNtoACN(asnFilename)
             if os.path.exists(possibleACN):
...
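The repeated change of list(...keys()) to sorted(list(...keys())) across these hunks makes the generated output deterministic: a dict's iteration order follows insertion order (and was arbitrary before Python 3.7), so two runs that populate the dictionary differently would emit types in a different order, while sorting the keys yields the same sequence every time. A small illustration, with hypothetical type names:

    names_a = {'T-POS': 1, 'T-INT': 2}   # same entries, inserted in
    names_b = {'T-INT': 2, 'T-POS': 1}   # a different order

    print(list(names_a.keys()))    # ['T-POS', 'T-INT']
    print(list(names_b.keys()))    # ['T-INT', 'T-POS']  -- output differs
    print(sorted(names_a.keys()))  # ['T-INT', 'T-POS']
    print(sorted(names_b.keys()))  # ['T-INT', 'T-POS']  -- stable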
@@ -408,12 +408,12 @@ def ParseAsnFileList(listOfFilenames: List[str]) -> None:  # pylint: disable=inv
             xmlAST2 = projectCache + os.sep + newHash + "_ast_v1.xml"
             if not os.path.exists(xmlAST) or not os.path.exists(xmlAST2):
                 someFilesHaveChanged = True
-                print("[DMT] ASN.1 model changed, re-processing...")
+                print("[DMT] No cached model found for", ",".join(listOfFilenames))
     else:
         # no projectCache set, so xmlAST and xmlAST2 are set to None
         someFilesHaveChanged = True
     if not someFilesHaveChanged:
-        print("[DMT] No change in ASN.1 model.")
+        print("[DMT] Reusing cached ASN.1 AST for ", ",".join(listOfFilenames))
     if not xmlAST:
         (dummy, xmlAST) = tempfile.mkstemp()
...
@@ -152,6 +152,11 @@ class Printer(RecursiveMapper):
 def main():
+    if "-pdb" in sys.argv:
+        sys.argv.remove("-pdb")  # pragma: no cover
+        import pdb  # pragma: no cover pylint: disable=wrong-import-position,wrong-import-order
+        pdb.set_trace()  # pragma: no cover
     if sys.argv.count("-o") != 0:
         idx = sys.argv.index("-o")
         try:
...
@@ -11,7 +11,7 @@ from setuptools import setup, find_packages
 setup(
     name='dmt',
-    version="2.1.31",
+    version="2.1.33",
     packages=find_packages(),
     author='Thanassis Tsiodras',
     author_email='Thanassis.Tsiodras@esa.int',
...