Commit 42bf1a19 authored by Thanassis Tsiodras

flake8 pass completed. Next-up: pylint

parent 91120d54
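The hunks below are mechanical flake8 fixes: a space after the '#' of block comments, whitespace around comparison and assignment operators, reflowed line continuations, and '# NOQA' markers on imports kept only for re-export. As orientation, here is a minimal, hypothetical Python snippet (not taken from this repository) showing those patterns in their corrected form; the rule codes in the comments are the usual flake8/pycodestyle labels for these checks.

```python
# Hypothetical illustration of the style rules this pass enforces.
from typing import Dict  # NOQA  (import kept for re-export; NOQA silences the unused-import warning)


def map_range(lo, hi):
    # E265: block comments start with "# ", not "#comment".
    if lo >= 0 and hi <= 255:  # E225: spaces around comparison operators.
        return "uint8"
    return "int32"


sizes = {"demo": map_range(0, 200)}  # E231: a space after every comma.
print(sizes)
```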
@@ -38,7 +38,7 @@ def Version():
def OnStartup(unused_modelingLanguage, asnFiles, outputDir, unused_badTypes):
#print "Use ASN1SCC to generate the structures for '%s'" % asnFile
# print "Use ASN1SCC to generate the structures for '%s'" % asnFile
asn1SccPath = spawn.find_executable('asn1.exe')
if not asn1SccPath:
panic("ASN1SCC seems to be missing from your system (asn1.exe not found in PATH).\n") # pragma: no cover
@@ -37,7 +37,7 @@ def Version():
# the second param is not asnFile, it is asnFiles
def OnStartup(unused_modelingLanguage, asnFiles, outputDir, unused_badTypes):
#print "Use ASN1SCC to generate the structures for '%s'" % asnFile
# print "Use ASN1SCC to generate the structures for '%s'" % asnFile
asn1SccPath = spawn.find_executable('asn1.exe')
if not asn1SccPath:
panic("ASN1SCC seems to be missing from your system (asn1.exe not found in PATH).\n") # pragma: no cover
@@ -23,7 +23,7 @@ for line in open(sys.argv[1] + '.h', 'r'):
enums.append(line.strip().replace(",", "").split("="))
enums_dump = "\n ".join(
'printf("%s = %%d\\n", %s);' % (e,e)
'printf("%s = %%d\\n", %s);' % (e, e)
for e in choices
)
@@ -32,7 +32,7 @@ enums_dump += "\n ".join(
for name, val in enums
)
uniq = os.getpid()
extractor_filename ="/tmp/enums_%d" % uniq
extractor_filename = "/tmp/enums_%d" % uniq
f = open(extractor_filename + ".c", 'w')
f.write("""
#include <stdio.h>
@@ -41,7 +41,7 @@ f.write("""
void main()
{
%(enums_dump)s
}""" % {"enums_dump":enums_dump, "base":sys.argv[1]})
}""" % {"enums_dump": enums_dump, "base": sys.argv[1]})
f.close()
if 0 != os.system(
"gcc -o %s -I. %s.c" % (extractor_filename, extractor_filename)):
@@ -38,7 +38,7 @@ from typing import Tuple
import commonPy.configMT
from commonPy.asnAST import sourceSequenceLimit, AsnNode # NOQA pylint: disable=unused-import
from commonPy.asnParser import (
from commonPy.asnParser import ( # NOQA
AST_Lookup, AST_TypesOfFile, AST_TypenamesOfFile, AST_Leaftypes,
Typename, Filename, ParseAsnFileList)
from commonPy.utility import inform, panic
@@ -28,7 +28,7 @@ from typing import Tuple
import commonPy.configMT
from commonPy.asnAST import sourceSequenceLimit, AsnNode # NOQA pylint: disable=unused-import
from commonPy.asnParser import (
from commonPy.asnParser import ( # NOQA
AST_Lookup, AST_TypesOfFile, AST_TypenamesOfFile, AST_Leaftypes,
Typename, Filename, ParseAsnFileList)
from commonPy.utility import inform, panic
@@ -259,7 +259,7 @@ def main():
C_SourceFile.write('#endif\n')
C_SourceFile.write(' //printf("%%s %s ::= ", paramName);\n' % nodeTypename)
C_SourceFile.write(' printf("%s ", paramName);\n')
#C_SourceFile.write('\n'.join(printer.Map('(*pData)', '', node, leafTypeDict, commonPy.asnParser.g_names)))
# C_SourceFile.write('\n'.join(printer.Map('(*pData)', '', node, leafTypeDict, commonPy.asnParser.g_names)))
lines = [" "+x for x in printer.Map('(*pData)', '', node, leafTypeDict, commonPy.asnParser.g_names)]
C_SourceFile.write("\n".join(lines))
C_SourceFile.write('\n#ifdef __linux__\n')
@@ -21,7 +21,7 @@
# This is an evolving test implementation of mapping ASN.1 constructs
# to SDL. It is used by the backend of Semantix's code generator A.
#import re
# import re
g_outputDir = ""
g_asnFile = ""
@@ -67,25 +67,25 @@ def OnChoice(nodeTypename, node, leafTypeDict):
# obsolete, now the grammar is re-created from the AST (PrintGrammarFromAST)
#
#def ClearUp(text):
# outputText = ""
# lParen = 0
# for c in text:
# if c == '(':
# lParen += 1
# if c == ')':
# lParen -= 1
# if 0 == lParen:
# outputText += c.replace('-', '_')
# else:
# outputText += c
# return outputText
# def ClearUp(text):
# outputText = ""
# lParen = 0
# for c in text:
# if c == '(':
# lParen += 1
# if c == ')':
# lParen -= 1
# if 0 == lParen:
# outputText += c.replace('-', '_')
# else:
# outputText += c
# return outputText
def OnShutdown(unused_badTypes):
# text = open(g_asnFile, 'r').read()
# text = re.sub(r'^.*BEGIN', 'Datamodel DEFINITIONS ::= BEGIN', text)
# text = re.sub(r'--.*', '', text)
# text = open(g_asnFile, 'r').read()
# text = re.sub(r'^.*BEGIN', 'Datamodel DEFINITIONS ::= BEGIN', text)
# text = re.sub(r'--.*', '', text)
outputFile = open(g_outputDir + "DataView.pr", 'w')
outputFile.write('Datamodel DEFINITIONS ::= BEGIN\n\n')
import commonPy.asnParser
@@ -41,8 +41,8 @@ g_bHasStartupRunOnce = False
def Version():
print("Code generator: " + \
"$Id: python_A_mapper.py 2400 2012-09-04 10:40:19Z ttsiodras $") # pragma: no cover
print("Code generator: " +
"$Id: python_A_mapper.py 2400 2012-09-04 10:40:19Z ttsiodras $") # pragma: no cover
def CleanNameAsPythonWants(name: str) -> str:
@@ -140,7 +140,7 @@ def OnStartup(unused_modelingLanguage: str, asnFile: str, outputDir: str, badTyp
# mono_exe = "mono " if sys.argv[0].endswith('.py') and sys.platform.startswith('linux') else ""
mono_exe = ""
Makefile.write(
'''\
'''\
ASN1SCC:=asn1.exe
ASN2DATAMODEL:=asn2dataModel
GRAMMAR := %(origGrammarBase)s
@@ -58,7 +58,7 @@ def CleanNameAsSimulinkWants(name):
# the second param is not asnFile, it is asnFiles
def OnStartup(unused_modelingLanguage, asnFiles, outputDir, unused_badTypes):
#print "Use ASN1SCC to generate the structures for '%s'" % asnFile
# print "Use ASN1SCC to generate the structures for '%s'" % asnFile
asn1SccPath = spawn.find_executable('asn1.exe')
if not asn1SccPath:
panic("ASN1SCC seems to be missing from your system (asn1.exe not found in PATH).\n") # pragma: no cover
@@ -122,15 +122,15 @@ def OnShutdown(unused_badTypes):
def MapInteger(node):
if node._range[0]>=0 and node._range[1]<=255:
if node._range[0] >= 0 and node._range[1] <= 255:
return "uint8"
elif node._range[0]>=-128 and node._range[1]<=127:
elif node._range[0] >= -128 and node._range[1] <= 127:
return "int8"
elif node._range[0]>=0 and node._range[1]<=65535:
elif node._range[0] >= 0 and node._range[1] <= 65535:
return "uint16"
elif node._range[0]>=-32768 and node._range[1]<=32767:
elif node._range[0] >= -32768 and node._range[1] <= 32767:
return "int16"
elif node._range[0]>=0:
elif node._range[0] >= 0:
return "uint32"
else:
return "int32"
@@ -172,7 +172,7 @@ def DeclareSimpleCollection(node, name, internal):
g_outputFile.write("%s_member_data.dimensions=%d;\n\n" % (name, node._range[-1]))
bNeedLength = False
if len(node._range)>1 and node._range[0]!=node._range[1]:
if len(node._range) > 1 and node._range[0] != node._range[1]:
bNeedLength = True
if bNeedLength:
@@ -194,7 +194,7 @@ def DeclareSimpleCollection(node, name, internal):
def CreateDeclarationForType(nodeTypename, names, leafTypeDict):
if nodeTypename in g_definedTypes:
return
g_definedTypes[nodeTypename]=1
g_definedTypes[nodeTypename] = 1
results = []
ScanChildren(nodeTypename, names[nodeTypename], names, results, isRoot=True, createInnerNodesInNames=True)
inform("Prerequisites of %s", nodeTypename)
@@ -271,7 +271,7 @@ def CreateDeclarationForType(nodeTypename, names, leafTypeDict):
# so we will use the containedType_t for reference.
elif isinstance(childNode, (AsnSequence, AsnSequenceOf, AsnSet, AsnSetOf, AsnChoice)):
#mappedType = CleanNameAsSimulinkWants(child[1]._containedType + "_t") XYZ
# mappedType = CleanNameAsSimulinkWants(child[1]._containedType + "_t") XYZ
mappedType = CleanNameAsSimulinkWants(child[1]._containedType)
else: # pragma: no cover
panic("QGenAda_A_mapper: Unexpected category of child (%s)" % str(child[1])) # pragma: no cover
@@ -281,11 +281,11 @@ def CreateDeclarationForType(nodeTypename, names, leafTypeDict):
g_outputFile.write("%s = Simulink.Bus;\n" % CleanNameAsSimulinkWants(nodeTypename))
g_outputFile.write("%s.Elements = " % CleanNameAsSimulinkWants(nodeTypename))
if elemNo>1:
if elemNo > 1:
g_outputFile.write('[')
for i in range(0, elemNo):
g_outputFile.write("%s_elem%02d " % (CleanNameAsSimulinkWants(nodeTypename), i+1))
if elemNo>1:
if elemNo > 1:
g_outputFile.write(']')
g_outputFile.write(";\n\n")
elif isinstance(node, AsnSequenceOf) or isinstance(node, AsnSetOf):
@@ -101,15 +101,15 @@ def OnShutdown(unused_badTypes):
def MapInteger(node):
if node._range[0]>=0 and node._range[1]<=255:
if node._range[0] >= 0 and node._range[1] <= 255:
return "uint8"
elif node._range[0]>=-128 and node._range[1]<=127:
elif node._range[0] >= -128 and node._range[1] <= 127:
return "int8"
elif node._range[0]>=0 and node._range[1]<=65535:
elif node._range[0] >= 0 and node._range[1] <= 65535:
return "uint16"
elif node._range[0]>=-32768 and node._range[1]<=32767:
elif node._range[0] >= -32768 and node._range[1] <= 32767:
return "int16"
elif node._range[0]>=0:
elif node._range[0] >= 0:
return "uint32"
else:
return "int32"
@@ -151,7 +151,7 @@ def DeclareSimpleCollection(node, name, internal):
g_outputFile.write("%s_member_data.dimensions=%d;\n\n" % (name, node._range[-1]))
bNeedLength = False
if len(node._range)>1 and node._range[0]!=node._range[1]:
if len(node._range) > 1 and node._range[0] != node._range[1]:
bNeedLength = True
if bNeedLength:
@@ -173,7 +173,7 @@ def DeclareSimpleCollection(node, name, internal):
def CreateDeclarationForType(nodeTypename, names, leafTypeDict):
if nodeTypename in g_definedTypes:
return
g_definedTypes[nodeTypename]=1
g_definedTypes[nodeTypename] = 1
results = []
ScanChildren(nodeTypename, names[nodeTypename], names, results, isRoot=True, createInnerNodesInNames=True)
inform("Prerequisites of %s", nodeTypename)
@@ -250,7 +250,7 @@ def CreateDeclarationForType(nodeTypename, names, leafTypeDict):
# so we will use the containedType_t for reference.
elif isinstance(childNode, (AsnSequence, AsnSequenceOf, AsnSet, AsnSetOf, AsnChoice)):
#mappedType = CleanNameAsSimulinkWants(child[1]._containedType + "_t") XYZ
# mappedType = CleanNameAsSimulinkWants(child[1]._containedType + "_t") XYZ
mappedType = CleanNameAsSimulinkWants(child[1]._containedType)
else: # pragma: no cover
panic("QGenC_A_mapper: Unexpected category of child (%s)" % str(child[1])) # pragma: no cover
@@ -260,11 +260,11 @@ def CreateDeclarationForType(nodeTypename, names, leafTypeDict):
g_outputFile.write("%s = Simulink.Bus;\n" % CleanNameAsSimulinkWants(nodeTypename))
g_outputFile.write("%s.Elements = " % CleanNameAsSimulinkWants(nodeTypename))
if elemNo>1:
if elemNo > 1:
g_outputFile.write('[')
for i in range(0, elemNo):
g_outputFile.write("%s_elem%02d " % (CleanNameAsSimulinkWants(nodeTypename), i+1))
if elemNo>1:
if elemNo > 1:
g_outputFile.write(']')
g_outputFile.write(";\n\n")
elif isinstance(node, AsnSequenceOf) or isinstance(node, AsnSetOf):
@@ -18,7 +18,7 @@
# Note that in both cases, there are no charges (royalties) for the
# generated code.
#
__doc__ = '''Implementation of mapping ASN.1 constructs
'''Implementation of mapping ASN.1 constructs
to RTDS. It is used by the backend of Semantix's code generator A.'''
import re
@@ -68,31 +68,30 @@ def OnChoice(nodeTypename, node, leafTypeDict):
# obsolete, now the grammar is re-created from the AST (PrintGrammarFromAST)
#
#def ClearUp(text):
# outputText = ""
# lParen = 0
# for c in text:
# if c == '(':
# lParen += 1
# if c == ')':
# lParen -= 1
# if 0 == lParen:
# outputText += c.replace('-', '_')
# else:
# outputText += c
# return outputText
# def ClearUp(text):
# outputText = ""
# lParen = 0
# for c in text:
# if c == '(':
# lParen += 1
# if c == ')':
# lParen -= 1
# if 0 == lParen:
# outputText += c.replace('-', '_')
# else:
# outputText += c
# return outputText
def OnShutdown(unused_badTypes):
# text = open(g_asnFile, 'r').read()
# text = re.sub(r'^.*BEGIN', 'Datamodel DEFINITIONS ::= BEGIN', text)
# text = re.sub(r'--.*', '', text)
# outputFile = open(g_outputDir + "DataView.pr", 'w')
# outputFile.write('Datamodel DEFINITIONS ::= BEGIN\n\n')
# import commonPy.xmlASTtoAsnAST
# commonPy.xmlASTtoAsnAST.PrintGrammarFromAST(outputFile)
# outputFile.write('END\n')
# outputFile.close()
# text = open(g_asnFile, 'r').read()
# text = re.sub(r'^.*BEGIN', 'Datamodel DEFINITIONS ::= BEGIN', text)
# text = re.sub(r'--.*', '', text)
# outputFile = open(g_outputDir + "DataView.pr", 'w')
# outputFile.write('Datamodel DEFINITIONS ::= BEGIN\n\n')
# import commonPy.xmlASTtoAsnAST
# commonPy.xmlASTtoAsnAST.PrintGrammarFromAST(outputFile)
# outputFile.write('END\n')
# outputFile.close()
outputFile = open(g_outputDir + "RTDSdataView.asn", 'w')
outputFile.write(re.sub(r'^.*BEGIN', 'RTDSdataView DEFINITIONS ::= BEGIN', open(g_asnFile, 'r').read()))
@@ -26,9 +26,9 @@ using .xscade files.
import re
import os
import random
from xml.dom.minidom import Document, Node
from typing import Set
from xml.dom.minidom import Document, Node # NOQA
from typing import Set # NOQA
from commonPy.utility import inform, panic
from commonPy.asnAST import (
@@ -97,15 +97,15 @@ def OnShutdown(unused_badTypes):
def MapInteger(node):
if node._range[0]>=0 and node._range[1]<=255:
if node._range[0] >= 0 and node._range[1] <= 255:
return "uint8"
elif node._range[0]>=-128 and node._range[1]<=127:
elif node._range[0] >= -128 and node._range[1] <= 127:
return "int8"
elif node._range[0]>=0 and node._range[1]<=65535:
elif node._range[0] >= 0 and node._range[1] <= 65535:
return "uint16"
elif node._range[0]>=-32768 and node._range[1]<=32767:
elif node._range[0] >= -32768 and node._range[1] <= 32767:
return "int16"
elif node._range[0]>=0:
elif node._range[0] >= 0:
return "uint32"
else:
return "int32"
@@ -147,7 +147,7 @@ def DeclareSimpleCollection(node, name, internal):
g_outputFile.write("%s_member_data.dimensions=%d;\n\n" % (name, node._range[-1]))
bNeedLength = False
if len(node._range)>1 and node._range[0]!=node._range[1]:
if len(node._range) > 1 and node._range[0] != node._range[1]:
bNeedLength = True
if bNeedLength:
@@ -169,7 +169,7 @@ def DeclareSimpleCollection(node, name, internal):
def CreateDeclarationForType(nodeTypename, names, leafTypeDict):
if nodeTypename in g_definedTypes:
return
g_definedTypes[nodeTypename]=1
g_definedTypes[nodeTypename] = 1
results = []
ScanChildren(nodeTypename, names[nodeTypename], names, results, isRoot=True, createInnerNodesInNames=True)
inform("Prerequisites of %s", nodeTypename)
@@ -248,7 +248,7 @@ def CreateDeclarationForType(nodeTypename, names, leafTypeDict):
# so we will use the containedType_t for reference.
elif isinstance(childNode, (AsnSequence, AsnSequenceOf, AsnSet, AsnSetOf, AsnChoice)):
#mappedType = CleanNameAsSimulinkWants(child[1]._containedType + "_t") XYZ
# mappedType = CleanNameAsSimulinkWants(child[1]._containedType + "_t") XYZ
mappedType = CleanNameAsSimulinkWants(child[1]._containedType)
else: # pragma: no cover
panic("Simulink_A_mapper: Unexpected category of child (%s)" % str(child[1])) # pragma: no cover
@@ -258,11 +258,11 @@ def CreateDeclarationForType(nodeTypename, names, leafTypeDict):
g_outputFile.write("%s = Simulink.Bus;\n" % CleanNameAsSimulinkWants(nodeTypename))
g_outputFile.write("%s.Elements = " % CleanNameAsSimulinkWants(nodeTypename))
if elemNo>1:
if elemNo > 1:
g_outputFile.write('[')
for i in range(0, elemNo):
g_outputFile.write("%s_elem%02d " % (CleanNameAsSimulinkWants(nodeTypename), i+1))
if elemNo>1:
if elemNo > 1:
g_outputFile.write(']')
g_outputFile.write(";\n\n")
elif isinstance(node, AsnSequenceOf) or isinstance(node, AsnSetOf):
@@ -220,7 +220,7 @@ def CreateEnumerated(nodeTypename, node, unused_leafTypeDict):
uido = getUID(nodeTypename + "_option_" + opt[0])
g_catalogueXML.write(' <Literal Name="%s" Value="%s" Id="ID_%s" />\n' %
(opt[0], opt[1], uido))
#g_catalogueXML.write(' <NativeType xlink:href="http://www.esa.int/2005/10/Smp#%s" />\n' % nativeSMP2type)
# g_catalogueXML.write(' <NativeType xlink:href="http://www.esa.int/2005/10/Smp#%s" />\n' % nativeSMP2type)
g_catalogueXML.write(' </Type>\n')
@@ -203,7 +203,7 @@ def CreateSequence(nodeTypename, node, unused_leafTypeDict, isChoice=False):
cleanTypename = CleanName(nodeTypename)
g_sqlOutput.write(
'\nCREATE TABLE {cleanTypename} (\n id int NOT NULL,\n'.format(
cleanTypename=cleanTypename))
cleanTypename=cleanTypename))
if isChoice:
g_sqlOutput.write(' indexOfActualFieldUsed int NOT NULL,\n')
nullable = "" if isChoice else " NOT NULL"
@@ -224,8 +224,8 @@ def CreateSequence(nodeTypename, node, unused_leafTypeDict, isChoice=False):
',\n CONSTRAINT {cleanFieldname}_fk '
'FOREIGN KEY ({cleanFieldname}_id)\n'
'\tREFERENCES {cleanTypename}(id)'.format(
cleanFieldname=cleanFieldname,
cleanTypename=containedTypename))
cleanFieldname=cleanFieldname,
cleanTypename=containedTypename))
g_sqlOutput.write(');\n\n')
@@ -201,10 +201,9 @@ class {cleanTypename}_SQL(Base):
__table_args__ = (UniqueConstraint('iid'),)
iid = Column(Integer, primary_key=True)
data = Column({baseSqlType}{constraint})
'''.format(
cleanTypename=cleanTypename,
baseSqlType=baseSqlType,
constraint=constraint))
'''.format(cleanTypename=cleanTypename,
baseSqlType=baseSqlType,
constraint=constraint))
getter = "Get"
setter = "Set"
@@ -316,15 +315,14 @@ class {cleanTypename}_SQL(Base):
'\n fk_%s_iid = Column(Integer, ' % cleanFieldname)
g_sqlalchemyOutput.write(
"ForeignKey('{containedTypename}.iid'), nullable={nl})".format(
nl=nullable,
containedTypename=containedTypename))
nl=nullable,
containedTypename=containedTypename))
g_sqlalchemyOutput.write(
'''
{relation} = relationship(
"{containedTypename}_SQL",
foreign_keys=[fk_{relation}_iid])'''.format(
relation=cleanFieldname,
containedTypename=containedTypename))
foreign_keys=[fk_{relation}_iid])'''.format(relation=cleanFieldname,
containedTypename=containedTypename))
g_sqlalchemyOutput.write('\n\n def __init__(self, pyObj):\n')
g_sqlalchemyOutput.write(' state = pyObj.GetState()\n')
if isChoice:
@@ -598,8 +596,8 @@ import DV
if t in g_dependencyGraph and \
nodeTypename in g_dependencyGraph[t]:
del g_dependencyGraph[t][nodeTypename]
#g_sqlalchemyOutput.write('if __name__ == "__main__":\n')
#g_sqlalchemyOutput.write(' Base.metadata.create_all(engine)\n')
# g_sqlalchemyOutput.write('if __name__ == "__main__":\n')
# g_sqlalchemyOutput.write(' Base.metadata.create_all(engine)\n')
g_sqlalchemyOutput.close()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
PY_SRC:=$(wildcard asn2dataModel.py aadl2glueC.py smp2asn.py *mappers/[a-zA-Z]*py commonPy/[a-zA-Z]*py)
all: flake8 pylint mypy
flake8:
	flake8 ${PY_SRC} || exit 1
pylint:
	pylint ${PY_SRC} || exit 1
mypy:
	mypy --check-untyped-defs ${PY_SRC} || exit 1
.PHONY: flake8 pylint mypy
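The new Makefile chains the three checkers behind the 'all' target. A minimal sketch of driving the same sequence from Python instead of make, assuming the tools are installed; the explicit file names and glob patterns mirror the PY_SRC wildcard above, and the glob expansion is an approximation:

```python
import glob
import subprocess
import sys

# Approximation of the Makefile's PY_SRC wildcard.
py_src = (["asn2dataModel.py", "aadl2glueC.py", "smp2asn.py"] +
          glob.glob("*mappers/[a-zA-Z]*py") +
          glob.glob("commonPy/[a-zA-Z]*py"))

for cmd in (["flake8"], ["pylint"], ["mypy", "--check-untyped-defs"]):
    # Mirror the Makefile's "|| exit 1": stop at the first checker that fails.
    if subprocess.call(cmd + py_src) != 0:
        sys.exit(1)
```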
@@ -46,8 +46,7 @@ from commonPy.utility import inform, panic
import commonPy.cleanupNodes
import commonPy.verify as verify
from functools import reduce
import A_mappers
import A_mappers # NOQA
def usage(argsToTools):
'''Print usage instructions.'''
@@ -37,7 +37,6 @@
#
# Charge for Runtimes None None
#
#g_nodes = []
g_signals = {}
g_apLevelContainers = {}
@@ -83,8 +82,8 @@ class AadlEventPort:
def __repr__(self):
result = "AadlEventPort("+self._direction+","
if self._sp:
result+=self._sp
result+=")"
result += self._sp
result += ")"
return result
@@ -75,7 +75,7 @@
# | AsnMetaMember |
# +-----------------------------+
from typing import Union, Dict
from typing import Union, Dict # NOQA
from . import utility
@@ -115,17 +115,15 @@ class AsnComplexNode(AsnNode):
def CommonIdenticalRangePerSMP2(range1, range2):
'''Helper for SMP2 comparisons of types with ranges.'''
def collapseSpan(r):
if len(r)==2 and r[0]==r[1]:
if len(r) == 2 and r[0] == r[1]:
return [r[0]]
return r
mySpan = collapseSpan(range1)
otherSpan = collapseSpan(range2)
return (
(mySpan==[] and otherSpan==[])
or
(len(mySpan)==1 and len(otherSpan)==1 and mySpan[0]==otherSpan[0])
or
(len(mySpan)==2 and len(otherSpan)==2 and mySpan[-1]==otherSpan[-1]))
(mySpan == [] and otherSpan == []) or
(len(mySpan) == 1 and len(otherSpan) == 1 and mySpan[0] == otherSpan[0]) or
(len(mySpan) == 2 and len(otherSpan) == 2 and mySpan[-1] == otherSpan[-1]))
class AsnBool(AsnBasicNode):
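The reflow of CommonIdenticalRangePerSMP2 above only regroups the boolean expression; its behaviour is unchanged. A standalone sketch with a few worked cases, which also makes the less obvious rule visible, namely that two-element spans are compared only on their upper bounds (the function name and sample ranges are illustrative):

```python
def identical_range_per_smp2(range1, range2):
    def collapse(r):
        # A degenerate span such as [5, 5] collapses to the single value [5].
        return [r[0]] if len(r) == 2 and r[0] == r[1] else r
    a, b = collapse(range1), collapse(range2)
    return ((a == [] and b == []) or
            (len(a) == 1 and len(b) == 1 and a[0] == b[0]) or
            (len(a) == 2 and len(b) == 2 and a[-1] == b[-1]))


assert identical_range_per_smp2([5, 5], [5])          # collapses, then the single values match
assert identical_range_per_smp2([0, 255], [10, 255])  # two-element spans: only upper bounds are compared
assert not identical_range_per_smp2([0, 255], [0, 127])
```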
@@ -288,7 +286,7 @@ Members:
def AsASN1(self, _={}):
ret = 'OCTET STRING'
if self._range:
if len(self._range)>1 and self._range[0] != self._range[1]:
if len(self._range) > 1 and self._range[0] != self._range[1]:
ret += ' (SIZE (' + str(self._range[0]) + ' .. ' + str(self._range[1]) + '))'
else:
ret += ' (SIZE (' + str(self._range[0]) + '))'
@@ -303,13 +301,12 @@ class AsnOctetString(AsnString):
self._name = "OCTET STRING" # default in case of SEQUENCE_OF OCTET STRING
self._leafType = "OCTET STRING"
#class AsnBitString(AsnString):
# '''This class stores the semantic content of an ASN.1 BIT STRING.'''
# def __init__(self, **args):
# apply(AsnString.__init__, (self,), args)
# self._name = "BIT STRING" # default in case of SEQUENCE_OF BIT STRING
# self._leafType = "BIT STRING"
# class AsnBitString(AsnString):
# '''This class stores the semantic content of an ASN.1 BIT STRING.'''
# def __init__(self, **args):
# apply(AsnString.__init__, (self,), args)
# self._name = "BIT STRING" # default in case of SEQUENCE_OF BIT STRING
# self._leafType = "BIT STRING"
class AsnUTF8String(AsnString):
@@ -392,8 +389,9 @@ Members:
for elem in self._members:
if elem[0] in existing:
utility.panic(
"member '%s' appears more than once in ENUMERATED %s" % (elem[0], # pragma: no cover
((self._lineno is not None) and ("defined in line %s" % self._lineno) or ("")))) # pragma: no cover
"member '%s' appears more than once in ENUMERATED %s" % ( # pragma: no cover
elem[0],
((self._lineno is not None) and ("defined in line %s" % self._lineno) or ("")))) # pragma: no cover
else:
existing[elem[0]] = 1
@@ -409,7 +407,7 @@ Members:
return isinstance(other, AsnEnumerated) and sorted(self._members) == sorted(other._members)
def AsASN1(self, _={}):
ret=[]
ret = []
for m in self._members:
ret.append(m[0] + '(' + m[1] + ')')
return 'ENUMERATED {' + ", ".join(ret) + "}"
@@ -432,7 +430,7 @@ def CommonIdenticalCheck(me, other, mynames, othernames):
def CommonAsASN1(kind, node, typeDict):
ret=[]
ret = []
for m in node._members:
child = m[1]
if isinstance(child, AsnMetaMember):
@@ -468,10 +466,11 @@ Members:
for elem in self._members:
if elem[0] in existing:
utility.panic(
"member '%s' appears more than once in %s" % (elem[0], # pragma: no cover
((self._lineno is not None) and ("defined in line %s" % self._lineno) or ("")))) # pragma: no cover
"member '%s' appears more than once in %s" % ( # pragma: no cover