Commit 33891182 authored by Maxime Perrotin's avatar Maxime Perrotin

Merge branch 'master' of https://gitrepos.estec.esa.int/taste/dmt into MORA

parents 1cb51c5b f516eaf4
......@@ -13,4 +13,4 @@ jobs:
paths:
- ~/docker
- run:
command: docker run -it -v $(pwd):/root/tests dmt bash -c 'cd /root/tests ; LANG=C LC_ALL=C PATH=$PATH:/asn1scc make'
command: docker run -it -v $(pwd):/root/tests dmt bash -c 'cd /root/tests ; pip3 uninstall dmt ; pip3 install --upgrade . ; LANG=C LC_ALL=C PATH=$PATH:/asn1scc make'
......@@ -5,7 +5,7 @@ RUN cat /etc/apt/apt.conf.d/99badproxies
RUN apt-get update
RUN apt-get install -y libxslt-dev libxml2-dev python-psycopg2 mono-runtime libmono-system-data4.0-cil libmono-system-web4.0-cil libfsharp-core4.3-cil
RUN apt-get install -y wget python3-pip
RUN wget -O - -q https://github.com/ttsiodras/asn1scc/releases/download/4.1e/asn1scc-bin-4.1e.tar.bz2 | tar jxvf -
RUN wget -O - -q https://github.com/ttsiodras/asn1scc/releases/download/4.2.0.1.f/asn1scc-bin-4.2.0.1f.tar.bz2 | tar jxvf -
RUN apt-get install -y python-pip
RUN wget -O - -q https://github.com/ttsiodras/DataModellingTools/files/335591/antlr-2.7.7.tar.gz | tar zxvf - ; cd antlr-2.7.7/lib/python ; pip2 install .
RUN pip2 install SQLAlchemy psycopg2
......
......@@ -222,7 +222,7 @@ def setSharedLib(dll=None):
'''.format(fvName=FVname, tcName=CleanSP))
g_PyDataModel.write('\ntc["{tcName}"] = '.format(
tcName=CleanSP.lower()))
tcName=CleanSP.lower()))
buttons = ([["sendButton", "Send TC"], ["loadButton", "Load TC"],
["saveButton", "Save TC"]])
classType = "asn1Editor"
......
......@@ -80,6 +80,7 @@ but with an extra call to OnFinal at the end.
import os
import sys
import hashlib
from distutils import spawn
from typing import cast, Optional, Dict, List, Tuple, Set, Any # NOQA pylint: disable=unused-import
......@@ -145,17 +146,48 @@ def ParseAADLfilesAndResolveSignals() -> None:
'''Invokes the ANTLR generated AADL parser, and resolves
all references to AAADL Data types into the param._signal member
of each SUBPROGRAM param.'''
import tempfile
f = tempfile.NamedTemporaryFile(delete=False)
astFile = f.name
f.close()
os.unlink(astFile)
parserUtility = os.path.join(os.path.abspath(os.path.dirname(__file__)), "parse_aadl.py")
cmd = "python2 " + parserUtility + " -o " + astFile + ' ' + ' '.join(sys.argv[1:])
if os.system(cmd) != 0:
if os.path.exists(astFile):
os.unlink(astFile)
panic("AADL parsing failed. Aborting...")
projectCache = os.getenv("PROJECT_CACHE")
if projectCache is not None:
if not os.path.isdir(projectCache):
panic("The configured cache folder:\n\n\t" + projectCache +
"\n\n...is not there!\n")
cachedModelExists = False
aadlASTcache = None
if projectCache is not None:
filehash = hashlib.md5()
for each in sorted(sys.argv[1:]):
filehash.update(open(each).read().encode('utf-8'))
newHash = filehash.hexdigest()
# set the name of the Pickle files containing the dumped AST
aadlASTcache = projectCache + os.sep + newHash + "_aadl_ast.pickle"
if not os.path.exists(aadlASTcache):
print("[DMT] No cached AADL model found for",
",".join(sys.argv[1:]))
else:
cachedModelExists = True
print("[DMT] Reusing cached AADL model for",
",".join(sys.argv[1:]))
import pickle
if cachedModelExists:
astInfo = pickle.load(open(aadlASTcache, 'rb'), fix_imports=False)
else:
import tempfile
f = tempfile.NamedTemporaryFile(delete=False)
astFile = f.name
f.close()
os.unlink(astFile)
parserUtility = os.path.join(
os.path.abspath(os.path.dirname(__file__)), "parse_aadl.py")
cmd = "python2 " + parserUtility + " -o " + astFile + ' ' + \
' '.join(sys.argv[1:])
if os.system(cmd) != 0:
if os.path.exists(astFile):
os.unlink(astFile)
panic("AADL parsing failed. Aborting...")
astInfo = pickle.load(open(astFile, 'rb'), fix_imports=False)
if aadlASTcache:
pickle.dump(astInfo, open(aadlASTcache, 'wb'), fix_imports=False)
def FixMetaClasses(sp: ApLevelContainer) -> None:
def patchMe(o: Any) -> None:
......@@ -179,8 +211,6 @@ of each SUBPROGRAM param.'''
for cn in sp._connections:
patchMe(cn)
try:
import pickle
astInfo = pickle.load(open(astFile, 'rb'), fix_imports=False)
for k in ['g_processImplementations', 'g_apLevelContainers',
'g_signals', 'g_systems', 'g_subProgramImplementations',
'g_threadImplementations']:
......@@ -452,6 +482,24 @@ def main() -> None:
import pdb # pragma: no cover pylint: disable=wrong-import-position,wrong-import-order
pdb.set_trace() # pragma: no cover
if "-profile" in sys.argv:
sys.argv.remove("-profile")
import cProfile
import pstats
import io
pr = cProfile.Profile()
pr.enable()
import atexit
def dumpSpeedData() -> None:
pr.disable()
s = io.StringIO()
sortby = 'cumulative'
ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
ps.print_stats()
print(s.getvalue())
atexit.register(dumpSpeedData)
if "-v" in sys.argv:
import pkg_resources # pragma: no cover
version = pkg_resources.require("dmt")[0].version # pragma: no cover
......
......@@ -121,7 +121,7 @@ def calculateForNativeAndASN1SCC(absASN1SCCpath, autosrc, names, inputFiles):
configMT.outputDir = autosrc + os.sep
# dumpable.CreateDumpableCtypes(uniqueASNfiles)
for asnTypename in list(names.keys()):
for asnTypename in sorted(list(names.keys())):
node = names[asnTypename]
if node._isArtificial:
continue
......@@ -135,7 +135,7 @@ def calculateForNativeAndASN1SCC(absASN1SCCpath, autosrc, names, inputFiles):
# Code generation - asn1c part
# Create a dictionary to lookup the asn-types from their corresponding c-type
namesDict = {}
for asnTypename in list(names.keys()):
for asnTypename in sorted(list(names.keys())):
node = names[asnTypename]
if node._isArtificial:
continue
......@@ -407,7 +407,7 @@ properties
Data_Model::Data_Representation => Character;
end Stream_Element_Buffer;
''')
for asnTypename in list(asnParser.g_names.keys()):
for asnTypename in sorted(list(asnParser.g_names.keys())):
node = asnParser.g_names[asnTypename]
if node._isArtificial:
continue
......@@ -502,7 +502,7 @@ end Stream_Element_Buffer;
o.write('END Taste_DataView;\n\n')
o.write('SYSTEM IMPLEMENTATION Taste_DataView.others\n')
o.write('SUBCOMPONENTS\n')
for asnTypename in list(asnParser.g_names.keys()):
for asnTypename in sorted(list(asnParser.g_names.keys())):
node = asnParser.g_names[asnTypename]
if node._isArtificial:
continue
......@@ -511,11 +511,11 @@ end Stream_Element_Buffer;
o.write('END Taste_DataView.others;\n')
listOfAsn1Files = {}
for asnTypename in list(asnParser.g_names.keys()):
for asnTypename in sorted(list(asnParser.g_names.keys())):
listOfAsn1Files[asnParser.g_names[asnTypename]._asnFilename] = 1
if bAADLv2:
for asnFilename in list(listOfAsn1Files.keys()):
for asnFilename in sorted(list(listOfAsn1Files.keys())):
base = os.path.splitext(os.path.basename(asnFilename))[0]
possibleACN = ASNtoACN(asnFilename)
if os.path.exists(possibleACN):
......
......@@ -408,12 +408,12 @@ def ParseAsnFileList(listOfFilenames: List[str]) -> None: # pylint: disable=inv
xmlAST2 = projectCache + os.sep + newHash + "_ast_v1.xml"
if not os.path.exists(xmlAST) or not os.path.exists(xmlAST2):
someFilesHaveChanged = True
print("[DMT] ASN.1 model changed, re-processing...")
print("[DMT] No cached model found for", ",".join(listOfFilenames))
else:
# no projectCache set, so xmlAST and xmlAST2 are set to None
someFilesHaveChanged = True
if not someFilesHaveChanged:
print("[DMT] No change in ASN.1 model.")
print("[DMT] Reusing cached ASN.1 AST for ", ",".join(listOfFilenames))
if not xmlAST:
(dummy, xmlAST) = tempfile.mkstemp()
......
......@@ -152,6 +152,11 @@ class Printer(RecursiveMapper):
def main():
if "-pdb" in sys.argv:
sys.argv.remove("-pdb") # pragma: no cover
import pdb # pragma: no cover pylint: disable=wrong-import-position,wrong-import-order
pdb.set_trace() # pragma: no cover
if sys.argv.count("-o") != 0:
idx = sys.argv.index("-o")
try:
......
......@@ -11,7 +11,7 @@ from setuptools import setup, find_packages
setup(
name='dmt',
version="2.1.31",
version="2.1.33",
packages=find_packages(),
author='Thanassis Tsiodras',
author_email='Thanassis.Tsiodras@esa.int',
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment