OpenApi.py: process multiple files at once

Signed-off-by: Gabor Szalai <gabor.szalai@ericsson.com>
diff --git a/tools/OpenApi.py b/tools/OpenApi.py
index 4e53717..47deb81 100755
--- a/tools/OpenApi.py
+++ b/tools/OpenApi.py
@@ -4,6 +4,24 @@
 import pprint
 import re
 
+# Data structures used during conversion:
+# modules - map of module_name -> module_data
+# module_data - map holding the detailed data of one module:
+#     * import: list of the imported modules
+#     * functions: list of the types for which encoder/decoder functions are generated
+#     * type_tree: list of the type definitions
+#     * extra_lists: list of (module, type) pairs for the extra 'record of' list types and their encoder functions
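+#
+# Illustrative example of the structure after one yaml file has been
+# processed (module and type names below are hypothetical):
+#   modules = {
+#     "SomeApi": {
+#       'import':      ["JSON_Generic"],
+#       'functions':   ["SomeType"],
+#       'type_tree':   [ ...type definitions built by type_builder... ],
+#       'extra_lists': [ ("OtherApi", "SomeItem") ]   # (refmodule, refstr) pairs
+#     }
+#   }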
+
+# Global variables; module_data, type_tree and module_name always refer to the
+# module currently being processed:
+# modules
+# module_data
+# type_tree
+# module_name
+# ident_level
+# ident_c
+# saved_stdout
+
+
 # Reserved word list. Used to sanitaze the type and field names.
 ttcn_keywords=[ # Reserved words
                "action","activate","address","alive","all","alt","altstep","and","and4b","any","anytype",
@@ -128,28 +146,29 @@
 # If the generation of encoder function or array definiton is missed these functions
 # should be updated to find the missing references.
 #
-def process_path(data,module_data):
+def process_path(data):
   for m in data:
     #print("process_path ", m, " " , data[m])
     if m == "parameters":
       for p in data[m]:
-        process_used_schem_name(p,module_data)
+        process_used_schem_name(p)
     else:
       if "parameters"  in data[m]:
         for p in data[m]["parameters"]:
-          process_used_schem_name(p,module_data)
+          process_used_schem_name(p)
       
       if "requestBody" in data[m]:
-        process_used_schem_name(data[m]["requestBody"],module_data)
+        process_used_schem_name(data[m]["requestBody"])
       if "responses" in data[m]:
         for r in data[m]["responses"]:
-          process_used_schem_name(data[m]["responses"][r],module_data)
+          process_used_schem_name(data[m]["responses"][r])
       if "callbacks" in data[m]:
         for c in data[m]["callbacks"]:
           for p in data[m]["callbacks"][c]:
-            process_path(data[m]["callbacks"][c][p],module_data)
+            process_path(data[m]["callbacks"][c][p])
 
-def process_schema(schema,module_data):
+def process_schema(schema):
+  global module_data
   if "$ref" in schema:
     refmodule,refstr=get_module(schema["$ref"])
     if refmodule!= '':
@@ -161,29 +180,31 @@
     if schema["type"] == "array" :
       if "$ref" in schema["items"] :
         refmodule,refstr=get_module(schema["items"]["$ref"])  # This direct code generation should be moved out from here.
-        print("// " + refmodule + " type record of " + refstr + " " +refstr + "_list")
-        print(f'// external function f_enc_{refstr}_list(in {refstr}_list pdu) return octetstring ')
-        print('// with { extension "prototype(convert) encode(JSON)" }')
-        print("")
-        print(f'// external function f_dec_{refstr}_list(in octetstring stream, out {refstr}_list pdu) return integer ')
-        print('// with { extension "prototype(backtrack) decode(JSON)" }')
-        print("")
+        if not (refmodule,refstr) in module_data['extra_lists']:
+          module_data['extra_lists'].append((refmodule,refstr))
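+          # The corresponding "record of" list and its codec functions are
+          # printed later (as TTCN-3 comments) by generate_one_module()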
+#        print("// " + refmodule + " type record of " + refstr + " " +refstr + "_list")
+#        print(f'// external function f_enc_{refstr}_list(in {refstr}_list pdu) return octetstring ')
+#        print('// with { extension "prototype(convert) encode(JSON)" }')
+#        print("")
+#        print(f'// external function f_dec_{refstr}_list(in octetstring stream, out {refstr}_list pdu) return integer ')
+#        print('// with { extension "prototype(backtrack) decode(JSON)" }')
+#        print("")
     if schema["type"] == "object" :
       if "properties" in schema:
         if "jsonData" in schema["properties"]:
-          process_schema(schema["properties"]["jsonData"],module_data)
+          process_schema(schema["properties"]["jsonData"])
 
-def process_used_schem_name(data,module_data):
+def process_used_schem_name(data):
   #print("process_used_schem_name", data)
   if "content" in data:
     for ct in data["content"]:
       #print("ct ", ct)
       if "schema" in data["content"][ct]:
         #print("schema ", data["content"][ct]["schema"])
-        process_schema(data["content"][ct]["schema"],module_data)
+        process_schema(data["content"][ct]["schema"])
       
   if "schema" in data:
-    process_schema(data["schema"],module_data)   
+    process_schema(data["schema"])   
 
 # Processes one schema definition and build the data structure needed for code generation.
 # The processed data is appended to the type_tree list.
@@ -468,112 +489,167 @@
 
 Constructor.add_constructor(u'tag:yaml.org,2002:bool', add_bool)
 
+
+# Processes the content of one yaml file and fills module_data.
+# No code is generated here.
+# doc - the loaded yaml document
+# The global variables (module_name, module_data, type_tree) must be set
+# before calling.
+def process_one_module(doc):
+  print("Processing " + module_name)
+  if "components" in doc:
+    if "schemas" in doc["components"]:
+      schemas=doc["components"]["schemas"]
+      for name in schemas:
+        # The type name suffix 'Rm' is used for a nullable type alias 
+        # Generate the nullable structure but use the reference to the original type
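+        # e.g. a schema named "UintRm" (hypothetical name) is emitted as a union
+        # of JSON_generic_val.JSON_null_val and the already generated type "Uint"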
+        if (name[-2:] == "Rm" ) and (name[:-2] in schemas ):
+          type_tree.append({'fields': [{'name': 'null_val',
+                'nullable': False,
+                'type': 'JSON_generic_val.JSON_null_val'},
+               {'name': 'val',
+                'nullable': False,
+                'type': clean_name(name[:-2],True),
+                'variant': []}],
+                'name': clean_name(name,True),
+                'nullable': True,
+                'type': 'union',
+                'variant': ['JSON: as value']})
+          addImport("JSON_Generic")
+        elif name == "NullValue": # Special type, used for nullable enums
+          type_tree.append({'type': 'JSON_generic_val.JSON_null_val','name':'NullValue','nullable': True})
+        else:
+          # Normal type schema processing.
+          data=schemas[name]
+          type_builder(clean_name(name,True),data,type_tree)
+    if "responses" in doc["components"]:
+      #print(doc["components"]["responses"])
+      for r in doc["components"]["responses"]:
+        #print(r)
+        process_used_schem_name(doc["components"]["responses"][r])
+
+  if 'paths' in doc:
+    for p in doc["paths"]:
+      #print(p)
+      process_path(doc["paths"][p])
+  print("Processing completed")
+
+# Generates the TTCN-3 code of one module into <module_name>.ttcn
+# The global variables (module_name, module_data, type_tree) must be set
+# before calling.
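+#
+# Rough sketch of the generated file, assuming one hypothetical type "SomeType",
+# an import of JSON_Generic and no extra lists:
+#   module SomeApi {
+#
+#     import from JSON_Generic all
+#
+#     external function f_enc_SomeType(in SomeType pdu) return octetstring
+#     with { extension "prototype(convert) encode(JSON)" }
+#
+#     external function f_dec_SomeType(in octetstring stream, out SomeType pdu) return integer
+#     with { extension "prototype(backtrack) decode(JSON)" }
+#
+#     type ... SomeType ...
+#   } with {
+#     encode "JSON"
+#   }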
+def generate_one_module():
+  global ident_level
+  print("Generating code for " + module_name)
+  fout = open(module_name + ".ttcn",'wt')
+  sys.stdout = fout
+
+  print("module " + module_name + " {")
+  print("")
+
+  for i in module_data['extra_lists']:
+    refmodule, refstr = i
+    print("// " + refmodule + " type record of " + refstr + " " +refstr + "_list")
+    print(f'// external function f_enc_{refstr}_list(in {refstr}_list pdu) return octetstring ')
+    print('// with { extension "prototype(convert) encode(JSON)" }')
+    print("")
+    print(f'// external function f_dec_{refstr}_list(in octetstring stream, out {refstr}_list pdu) return integer ')
+    print('// with { extension "prototype(backtrack) decode(JSON)" }')
+    print("")
+
+  for i in module_data['import']:
+    print(f'  import from {i} all')
+  print("")
+
+  for fs in module_data['functions']:
+    f=clean_name(fs,True,".")
+    if "." in f:
+      pre="// "
+    else:
+      pre=""
+    print(pre+f'external function f_enc_{f}(in {f} pdu) return octetstring ')
+    print(pre+'with { extension "prototype(convert) encode(JSON)" }')
+    print("")
+    print(pre+f'external function f_dec_{f}(in octetstring stream, out {f} pdu) return integer ')
+    print(pre+'with { extension "prototype(backtrack) decode(JSON)" }')
+    print("")
+
+  print("")
+  #pprint.pprint(type_tree)
+  for t in type_tree:
+    print(ident_c*ident_level, "type ",end="",sep="")
+  #  ident_level+=1
+    print_type(t,"", top=True)
+    variants=[]
+    gather_variants(t,"",variants)
+    if variants != []:
+      print(" with {")
+      ident_level+=1
+      for v in variants:
+        print(ident_c*ident_level, "variant ",end="",sep="")
+        if v["spec"] != "":
+          print("(",v["spec"],") ",end="",sep="")
+        print('"',v["var"],'"',sep="")
+      ident_level-=1
+      print(ident_c*ident_level, "}",sep="")
+    else:
+      print("")
+  # ident_level-=1
+    print("")
+    
+  print("")
+
+  print("")
+  print('')
+  print('} with {')
+  print('  encode "JSON"')
+  print('}')
   
-f=open(sys.argv[1])
+  
+  sys.stdout = saved_stdout
+  fout.close()
+  print("Code generation completed")
 
-module_name =  os.path.splitext(os.path.basename(sys.argv[1]))[0]
 
-sys.stdout = open(module_name + ".ttcn",'wt')
+# Main code starts here:
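+#
+# Every yaml file given on the command line is converted into one TTCN-3
+# module named after the file, e.g. (file names below are only illustrative):
+#   python3 OpenApi.py SomeApi.yaml OtherApi.yaml  ->  SomeApi.ttcn, OtherApi.ttcn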
 
-print("module " + module_name + " {")
-print("")
+# global variables
 
-doc=yaml.load(f)
-
-schemas=[]
-
-type_tree=[]
-
-module_data={'import' : [], "functions" : []}
-buff=""
-
+saved_stdout = sys.stdout
+module_data = {}
+type_tree= []
+modules = {}
 ident_level=1
 ident_c="  "
+module_name = ""
 
-if "components" in doc:
-  if "schemas" in doc["components"]:
-    schemas=doc["components"]["schemas"]
-    for name in schemas:
-      # The type name suffix 'Rm' is used for a nullable type alias 
-      # Generate the nullable structure but use the reference to the original type
-      if (name[-2:] == "Rm" ) and (name[:-2] in schemas ):
-        type_tree.append({'fields': [{'name': 'null_val',
-              'nullable': False,
-              'type': 'JSON_generic_val.JSON_null_val'},
-             {'name': 'val',
-              'nullable': False,
-              'type': clean_name(name[:-2],True),
-              'variant': []}],
-              'name': clean_name(name,True),
-              'nullable': True,
-              'type': 'union',
-              'variant': ['JSON: as value']})
-        addImport("JSON_Generic")
-      elif name == "NullValue": # Special type, used for nullable enums
-        type_tree.append({'type': 'JSON_generic_val.JSON_null_val','name':'NullValue','nullable': True})
-      else:
-        # Normal type schema processing.
-        data=schemas[name]
-        type_builder(clean_name(name,True),data,type_tree)
-  if "responses" in doc["components"]:
-    #print(doc["components"]["responses"])
-    for r in doc["components"]["responses"]:
-      #print(r)
-      process_used_schem_name(doc["components"]["responses"][r],module_data)
 
-if 'paths' in doc:
-  for p in doc["paths"]:
-    #print(p)
-    process_path(doc["paths"][p],module_data)
-    
 
-for i in module_data['import']:
-  print(f'  import from {i} all')
-print("")
+for filename in sys.argv[1:]:
 
-for fs in module_data['functions']:
-  f=clean_name(fs,True,".")
-  if "." in f:
-    pre="// "
-  else:
-    pre=""
-  print(pre+f'external function f_enc_{f}(in {f} pdu) return octetstring ')
-  print(pre+'with { extension "prototype(convert) encode(JSON)" }')
-  print("")
-  print(pre+f'external function f_dec_{f}(in octetstring stream, out {f} pdu) return integer ')
-  print(pre+'with { extension "prototype(backtrack) decode(JSON)" }')
-  print("")
+  try:
+    f=open(filename)
+  except OSError as e:
+    print("Can't open the file: ", filename)
+    pprint.pprint(e)
+    quit()
 
-print("")
-#pprint.pprint(type_tree)
-for t in type_tree:
-  print(ident_c*ident_level, "type ",end="",sep="")
-#  ident_level+=1
-  print_type(t,"", top=True)
-  variants=[]
-  gather_variants(t,"",variants)
-  if variants != []:
-    print(" with {")
-    ident_level+=1
-    for v in variants:
-      print(ident_c*ident_level, "variant ",end="",sep="")
-      if v["spec"] != "":
-        print("(",v["spec"],") ",end="",sep="")
-      print('"',v["var"],'"',sep="")
-    ident_level-=1
-    print(ident_c*ident_level, "}",sep="")
-  else:
-    print("")
-# ident_level-=1
-  print("")
+  print("File opened: " + filename)
+  module_name =  os.path.splitext(os.path.basename(filename))[0]
+
+  ydoc=yaml.load(f)
+
+  # A module may already exist in 'modules' if it was created while other
+  # files were processed; keep its already collected functions and types.
+  if module_name not in modules:
+    modules[module_name]={'import' : [], "functions" : [], "type_tree": [], "extra_lists":[]}
+
+  module_data=modules[module_name]
+  type_tree=module_data["type_tree"]
+
+  process_one_module(ydoc)
+
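+# Code is generated in a second pass so that anything added to a module while
+# the remaining files were processed is also included in its generated code.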
+for module_name in modules:
+  module_data=modules[module_name]
+  type_tree=module_data["type_tree"]
   
-#print(buff)
-print("")
+  generate_one_module()
 
-print("")
-print('')
-print('} with {')
-print('  encode "JSON"')
-print('}')
-
-#pprint.pprint(type_tree)
+#pprint.pprint(module_data)