[haiku-commits] haiku: hrev52793 - in src/apps/haikudepot: server/dumpexportpkg build/scripts server/dumpexportrepository .

  • From: Andrew Lindesay <apl@xxxxxxxxxxxxxx>
  • To: haiku-commits@xxxxxxxxxxxxx
  • Date: Thu, 24 Jan 2019 12:18:18 -0500 (EST)

hrev52793 adds 1 changeset to branch 'master'
old head: 3288c099965039872a3b253104be5dc3287f7bc6
new head: 81dab2139ece4278ca3b22670628dbb9f50f5ce0
overview: 
https://git.haiku-os.org/haiku/log/?qt=range&q=81dab2139ece+%5E3288c0999650

----------------------------------------------------------------------------

81dab2139ece: HaikuDepot : Generate Server Model + Parser
  
  The server uses JSON schema to generate some data-transfer-object (DTO)
  models for communication with other systems.  This same schema can be
  used to generate the C++ .cpp and .h files for use in HaikuDepot.  So
  far these have been generated by hand and then manually added to the
  Haiku repo.  Now the schema files can be copied over and from those, the
  sources are generated.
  
  Change-Id: Ia288cb7a50843e5e2bc403a6ce55508a04218c04
  Reviewed-on: https://review.haiku-os.org/c/858
  Reviewed-by: Adrien Destugues <pulkomandy@xxxxxxxxx>

                                    [ Andrew Lindesay <apl@xxxxxxxxxxxxxx> ]

----------------------------------------------------------------------------

Revision:    hrev52793
Commit:      81dab2139ece4278ca3b22670628dbb9f50f5ce0
URL:         https://git.haiku-os.org/haiku/commit/?id=81dab2139ece
Author:      Andrew Lindesay <apl@xxxxxxxxxxxxxx>
Date:        Thu Jan  3 23:46:33 2019 UTC

----------------------------------------------------------------------------

24 files changed, 2190 insertions(+), 4116 deletions(-)
src/apps/haikudepot/Jamfile                      |  144 +-
src/apps/haikudepot/build/jam/HdsSchemaGenRules  |   93 ++
.../build/scripts/hdsjsonschemacommon.py         |   95 ++
.../build/scripts/jsonschema2cppmodel.py         |  396 +++++
.../build/scripts/jsonschema2cppparser.py        | 1320 +++++++++++++++++
.../server/ServerPkgDataUpdateProcess.cpp        |    5 +-
.../server/dumpexportpkg/DumpExportPkg.cpp       |  345 -----
.../server/dumpexportpkg/DumpExportPkg.h         |   79 -
.../dumpexportpkg/DumpExportPkgCategory.cpp      |   52 -
.../server/dumpexportpkg/DumpExportPkgCategory.h |   28 -
.../dumpexportpkg/DumpExportPkgJsonListener.cpp  | 1347 ------------------
.../dumpexportpkg/DumpExportPkgJsonListener.h    |   89 --
.../dumpexportpkg/DumpExportPkgScreenshot.cpp    |  204 ---
.../dumpexportpkg/DumpExportPkgScreenshot.h      |   56 -
.../dumpexportpkg/DumpExportPkgVersion.cpp       |  380 -----
.../server/dumpexportpkg/DumpExportPkgVersion.h  |   84 --
.../DumpExportRepository.cpp                     |  209 ---
.../dumpexportrepository/DumpExportRepository.h  |   54 -
.../DumpExportRepositoryJsonListener.cpp         |  917 ------------
.../DumpExportRepositoryJsonListener.h           |   89 --
.../DumpExportRepositorySource.cpp               |  124 --
.../DumpExportRepositorySource.h                 |   40 -
.../haikudepot/server/schema/dumpexportpkg.json  |   96 ++
.../server/schema/dumpexportrepository.json      |   60 +

----------------------------------------------------------------------------

diff --git a/src/apps/haikudepot/Jamfile b/src/apps/haikudepot/Jamfile
index dc76250103..e81dff6784 100644
--- a/src/apps/haikudepot/Jamfile
+++ b/src/apps/haikudepot/Jamfile
@@ -1,7 +1,44 @@
 SubDir HAIKU_TOP src apps haikudepot ;
 
+include [ FDirName $(HAIKU_TOP) src apps haikudepot build jam
+       HdsSchemaGenRules ] ;
+
 UsePrivateHeaders interface kernel net package shared storage support ;
 
+local generatedTargetDirectory = $(TARGET_COMMON_DEBUG_LOCATE_TARGET) ;
+
+# During the build process, some sources are generated into directories.  These
+# are the directories.
+
+local dumpExportRepositoryBulkListerTargetDirectory =
+       [ FDirName $(TARGET_COMMON_DEBUG_LOCATE_TARGET)
+               dumpexportrepositorybulklistener ] ;
+local dumpExportPkgBulkListenerTargetDirectory =
+       [ FDirName $(TARGET_COMMON_DEBUG_LOCATE_TARGET)
+               dumpexportpkgbulklistener ] ;
+local dumpExportPkgModelTargetDirectory =
+       [ FDirName $(TARGET_COMMON_DEBUG_LOCATE_TARGET)
+               dumpexportpkgmodel ] ;
+local dumpExportRepositoryModelTargetDirectory =
+       [ FDirName $(TARGET_COMMON_DEBUG_LOCATE_TARGET)
+               dumpexportrepositorymodel ] ;
+
+# During the build process, some sources are generated into a directory.  In
+# order to maintain a timestamp on that generation process, a dummy file is
+# used to signify the target of the generation.  The leafname of this dummy 
file
+# is defined here.
+
+local dummyFile = "dummy.dat" ;
+
+local dumpExportRepositoryBulkListerTargetFile =
+       [ FDirName $(dumpExportRepositoryBulkListerTargetDirectory) 
$(dummyFile) ] ;
+local dumpExportPkgBulkListenerTargetFile =
+       [ FDirName $(dumpExportPkgBulkListenerTargetDirectory) $(dummyFile) ] ;
+local dumpExportPkgModelTargetFile =
+       [ FDirName $(dumpExportPkgModelTargetDirectory) $(dummyFile) ] ;
+local dumpExportRepositoryModelTargetFile =
+       [ FDirName $(dumpExportRepositoryModelTargetDirectory) $(dummyFile) ] ;
+
 # source directories
 local sourceDirs =
        edits_generic
@@ -10,8 +47,6 @@ local sourceDirs =
        ui
        ui_generic
        server
-       server/dumpexportrepository
-       server/dumpexportpkg
        tar
        util
 ;
@@ -22,6 +57,10 @@ for sourceDir in $(sourceDirs) {
 }
 
 SEARCH_SOURCE += [ FDirName $(HAIKU_TOP) src servers package ] ;
+SEARCH_SOURCE += $(dumpExportRepositoryBulkListerTargetDirectory) ;
+SEARCH_SOURCE += $(dumpExportPkgBulkListenerTargetDirectory) ;
+SEARCH_SOURCE += $(dumpExportPkgModelTargetDirectory) ;
+SEARCH_SOURCE += $(dumpExportRepositoryModelTargetDirectory) ;
 
 local textDocumentSources =
        # edits_generic
@@ -51,7 +90,7 @@ local textDocumentSources =
        UndoableEditListener.cpp
 ;
 
-Application HaikuDepot :
+local applicationSources =
        App.cpp
        BarberPole.cpp
        BitmapButton.cpp
@@ -86,16 +125,6 @@ Application HaikuDepot :
        UserLoginWindow.cpp
        WorkStatusView.cpp
 
-       # network + server - model
-       DumpExportPkg.cpp
-       DumpExportPkgCategory.cpp
-       DumpExportPkgJsonListener.cpp
-       DumpExportPkgScreenshot.cpp
-       DumpExportPkgVersion.cpp
-       DumpExportRepository.cpp
-       DumpExportRepositorySource.cpp
-       DumpExportRepositoryJsonListener.cpp
-
        # network + server / local processes
        AbstractProcess.cpp
        AbstractServerProcess.cpp
@@ -128,10 +157,48 @@ Application HaikuDepot :
        # package_daemon
        ProblemWindow.cpp
        ResultWindow.cpp
+;
 
-       # text view stuff
-       $(textDocumentSources)
+local generatedPkgModelSourceFiles =
+       DumpExportPkg.cpp
+       DumpExportPkgCategory.cpp
+       DumpExportPkgScreenshot.cpp
+       DumpExportPkgVersion.cpp
+;
+
+local generatedRepositoryModelSourceFiles =
+       DumpExportRepository.cpp
+       DumpExportRepositorySource.cpp
+       DumpExportRepositorySourceMirror.cpp
+;
+
+local generatedPkgParserSourceFiles =
+       DumpExportPkgJsonListener.cpp
+;
+
+local generatedRepositoryParserSourceFiles =
+       DumpExportRepositoryJsonListener.cpp
+;
 
+local generatedSourceFiles =
+       # network + server - model
+       DumpExportPkg.cpp
+       DumpExportPkgCategory.cpp
+       DumpExportPkgScreenshot.cpp
+       DumpExportPkgVersion.cpp
+       DumpExportRepository.cpp
+       DumpExportRepositorySource.cpp
+       DumpExportRepositorySourceMirror.cpp
+
+       # network + server - parser
+       DumpExportPkgJsonListener.cpp
+       DumpExportRepositoryJsonListener.cpp
+;
+
+Application HaikuDepot
+       : $(applicationSources) $(textDocumentSources)
+               $(generatedPkgModelSourceFiles) 
$(generatedRepositoryModelSourceFiles)
+               $(generatedPkgParserSourceFiles) 
$(generatedRepositoryParserSourceFiles)
        : be package bnetapi translation libcolumnlistview.a shared
                [ TargetLibstdc++ ] [ TargetLibsupc++ ] localestub
        : HaikuDepot.rdef
@@ -161,3 +228,50 @@ Application TextDocumentTest :
 
        : be translation shared [ TargetLibsupc++ ]
 ;
+
+# The following use of rules configures the generation of .cpp and .h files 
from
+# JSON schemas that are defined in the HaikuDepotServer system.  See the
+# included Jam rules and actions for the definitions.
+
+HdsSchemaGenModel $(dumpExportPkgModelTargetFile)
+       : dumpexportpkg.json : jsonschema2cppmodel.py ;
+
+HdsSchemaGenModel $(dumpExportRepositoryModelTargetFile)
+       : dumpexportrepository.json : jsonschema2cppmodel.py ;
+
+HdsSchemaGenBulkParser $(dumpExportRepositoryBulkListerTargetFile)
+       : dumpexportrepository.json : jsonschema2cppparser.py ;
+
+HdsSchemaGenBulkParser $(dumpExportPkgBulkListenerTargetFile)
+       : dumpexportpkg.json : jsonschema2cppparser.py ;
+
+HdsSchemaGenAppSrcDependsOnGeneration
+       [ FGristFiles $(generatedPkgParserSourceFiles) ]
+       [ FGristFiles $(generatedPkgParserSourceFiles:S=.h) ]
+       : $(dumpExportPkgBulkListenerTargetFile) ;
+
+HdsSchemaGenAppSrcDependsOnGeneration
+       [ FGristFiles $(generatedRepositoryParserSourceFiles) ]
+       [ FGristFiles $(generatedRepositoryParserSourceFiles:S=.h) ]
+       : $(dumpExportRepositoryBulkListerTargetFile) ;
+
+HdsSchemaGenAppSrcDependsOnGeneration
+       [ FGristFiles $(generatedRepositoryModelSourceFiles) ]
+       [ FGristFiles $(generatedRepositoryModelSourceFiles:S=.h) ]
+       : $(dumpExportRepositoryModelTargetFile) ;
+
+HdsSchemaGenAppSrcDependsOnGeneration
+       [ FGristFiles $(generatedPkgModelSourceFiles) ]
+       [ FGristFiles $(generatedPkgModelSourceFiles:S=.h) ]
+       : $(dumpExportPkgModelTargetFile) ;
+
+# This will ensure that if any of the generated files' header files change, 
then
+# the application should be re-built.
+
+Depends [ FGristFiles $(applicationSources:S=.o) ]
+       :
+       [ FGristFiles $(generatedPkgParserSourceFiles:S=.h) ]
+       [ FGristFiles $(generatedRepositoryParserSourceFiles:S=.h) ]
+       [ FGristFiles $(generatedRepositoryModelSourceFiles:S=.h) ]
+       [ FGristFiles $(generatedPkgModelSourceFiles:S=.h) ]
+;
\ No newline at end of file
diff --git a/src/apps/haikudepot/build/jam/HdsSchemaGenRules 
b/src/apps/haikudepot/build/jam/HdsSchemaGenRules
new file mode 100644
index 0000000000..7e40f4deeb
--- /dev/null
+++ b/src/apps/haikudepot/build/jam/HdsSchemaGenRules
@@ -0,0 +1,93 @@
+# =====================================
+# Copyright 2019, Andrew Lindesay
+# Distributed under the terms of the MIT License.
+# =====================================
+
+# HaikuDepotServer has a number of data-transfer-objects (DTO) that are defined
+# by JSON schemas.  The server uses these schemas to produce the objects at
+# compile time.  Likewise, the schema files also generate C++ side DTO model
+# objects in the form of .cpp and .h files as well.  This way the
+# HaikuDepotServer server and HaikuDepot desktop application are able to
+# communicate more 'safely'.  The schema files still need to be copied from
+# the server source to the Haiku source, but the generation process will ensure
+# that the data-structures are consistent.
+#
+# The C++ side classes are generated with python scripts that are included in
+# the Haiku source.  These rules and actions take care of making sure that the
+# python scripts are run when necessary to generate the C++ side classes.  Note
+# that there are two sorts of classes generated here; the model DTO objects and
+# also the supporting classes that parse the DTO objects.  The parsing classes
+# are intended to be used with Haiku JSON parsing systems.
+
+# pragma mark - Generic
+
+actions HdsSchemaGenTouch
+{
+       touch $(1)
+}
+
+# pragma mark - Model Class Generation
+
+# 1 : the dummy file in the class generation directory (target)
+# 2 : the JSON schema file
+# 3 : the Python script to use
+
+rule HdsSchemaGenModel
+{
+       SEARCH on $(2) = [ FDirName $(SUBDIR) server schema ] ;
+       SEARCH on $(3) = [ FDirName $(SUBDIR) build scripts ] ;
+
+       Clean $(1:D) ;
+       Depends $(1) : $(2) $(3) ;
+
+       MkDir $(1:D) ;
+       HdsSchemaGenModel1 $(1) : $(2) $(3) $(1:D) ;
+       HdsSchemaGenTouch $(1) ;
+}
+
+actions HdsSchemaGenModel1
+{
+       python $(2[2]) -i $(2[1]) --outputdirectory $(2[3])
+}
+
+# pragma mark - Bulk Parsing Class Generation
+
+# 1 : the dummy file in the class generation directory (target)
+# 2 : the JSON schema file
+# 3 : the Python script to use
+
+rule HdsSchemaGenBulkParser
+{
+       SEARCH on $(2) = [ FDirName $(SUBDIR) server schema ] ;
+       SEARCH on $(3) = [ FDirName $(SUBDIR) build scripts ] ;
+
+       Clean $(1:D) ;
+       Depends $(1) : $(2) $(3) ;
+
+       MkDir $(1:D) ;
+       HdsSchemaGenBulkParser1 $(1) : $(2) $(3) $(1:D) ;
+       HdsSchemaGenTouch $(1) ;
+}
+
+actions HdsSchemaGenBulkParser1
+{
+       python $(2[2]) -i $(2[1]) --outputdirectory $(2[3]) 
--supportbulkcontainer
+}
+
+# pragma mark - Registering Generated Classes
+
+# Because a number of .cpp and .h files will be generated from a single python
+# script's run, it is necessary to introduce a dependency between the known
+# output files and the target for a given python script run.
+
+# 1 : generated files (.h and .cpp)
+# 2 : target that will generate the generated files
+
+rule HdsSchemaGenAppSrcDependsOnGeneration {
+       local generatedSource ;
+       local applicationSource ;
+
+       MakeLocate $(1) : $(2:D) ;
+       Depends $(1) : $(2) ;
+       Clean $(1) ;
+}
\ No newline at end of file
diff --git a/src/apps/haikudepot/build/scripts/hdsjsonschemacommon.py 
b/src/apps/haikudepot/build/scripts/hdsjsonschemacommon.py
new file mode 100644
index 0000000000..0435801384
--- /dev/null
+++ b/src/apps/haikudepot/build/scripts/hdsjsonschemacommon.py
@@ -0,0 +1,95 @@
+
+# =====================================
+# Copyright 2017-2019, Andrew Lindesay
+# Distributed under the terms of the MIT License.
+# =====================================
+
+# common material related to generation of schema-generated artifacts.
+
+import datetime
+
+
+# The possible JSON types
+JSON_TYPE_STRING = "string"
+JSON_TYPE_OBJECT = "object"
+JSON_TYPE_ARRAY = "array"
+JSON_TYPE_BOOLEAN = "boolean"
+JSON_TYPE_INTEGER = "integer"
+JSON_TYPE_NUMBER = "number"
+
+
+# The possible C++ types
+CPP_TYPE_STRING = "BString"
+CPP_TYPE_ARRAY = "List"
+CPP_TYPE_BOOLEAN = "bool"
+CPP_TYPE_INTEGER = "int64"
+CPP_TYPE_NUMBER = "double"
+
+
+def uniondicts(d1, d2):
+    d = dict(d1)
+    d.update(d2)
+    return d
+
+
+def javatypetocppname(javaname):
+    return javaname[javaname.rindex('.')+1:]
+
+
+def propnametocppname(propname):
+    return propname[0:1].upper() + propname[1:]
+
+
+def propnametocppmembername(propname):
+    return 'f' + propnametocppname(propname)
+
+
+def propmetatojsoneventtypename(propmetadata):
+    type = propmetadata['type']
+
+
+
+def propmetadatatocpptypename(propmetadata):
+    type = propmetadata['type']
+
+    if type == JSON_TYPE_STRING:
+        return CPP_TYPE_STRING
+    if type == JSON_TYPE_BOOLEAN:
+        return CPP_TYPE_BOOLEAN
+    if type == JSON_TYPE_INTEGER:
+        return CPP_TYPE_INTEGER
+    if type == JSON_TYPE_NUMBER:
+        return CPP_TYPE_NUMBER
+    if type == JSON_TYPE_OBJECT:
+        javatype = propmetadata['javaType']
+
+        if not javatype or 0 == len(javatype):
+            raise Exception('missing "javaType" field')
+
+        return javatypetocppname(javatype)
+
+    if type == JSON_TYPE_ARRAY:
+        itemsmetadata = propmetadata['items']
+        itemsjavatype = itemsmetadata['javaType']
+
+        if not itemsjavatype or 0 == len(itemsjavatype):
+            raise Exception('missing "javaType" field')
+
+        return "%s <%s*, true>" % (CPP_TYPE_ARRAY, 
javatypetocppname(itemsjavatype))
+
+    raise Exception('unknown json-schema type [' + type + ']')
+
+
+def propmetadatatypeisscalar(propmetadata):
+    type = propmetadata['type']
+    return type == JSON_TYPE_BOOLEAN or type == JSON_TYPE_INTEGER or type == 
JSON_TYPE_NUMBER
+
+
+def writetopcomment(f, inputfilename, variant):
+    f.write((
+                '/*\n'
+                ' * Generated %s Object\n'
+                ' * source json-schema : %s\n'
+                ' * generated at : %s\n'
+                ' */\n'
+            ) % (variant, inputfilename, datetime.datetime.now().isoformat()))
diff --git a/src/apps/haikudepot/build/scripts/jsonschema2cppmodel.py 
b/src/apps/haikudepot/build/scripts/jsonschema2cppmodel.py
new file mode 100644
index 0000000000..ce4b453a43
--- /dev/null
+++ b/src/apps/haikudepot/build/scripts/jsonschema2cppmodel.py
@@ -0,0 +1,396 @@
+#!/usr/bin/python
+
+# =====================================
+# Copyright 2017-2019, Andrew Lindesay
+# Distributed under the terms of the MIT License.
+# =====================================
+
+# This simple tool will read a JSON schema and will then generate
+# some model objects that can be used to hold the data-structure
+# in the C++ environment.
+
+import json
+import argparse
+import os
+import hdsjsonschemacommon as jscom
+import string
+
+
+def hasanylistproperties(schema):
+    for propname, propmetadata in schema['properties'].items():
+        if propmetadata['type'] == 'array':
+            return True
+    return False
+
+
+def writelistaccessors(outputfile, cppclassname, cppname, cppmembername, 
cppcontainertype):
+
+    dict = {
+        'cppclassname' : cppclassname,
+        'cppname': cppname,
+        'cppmembername': cppmembername,
+        'cppcontainertype': cppcontainertype
+    }
+
+    outputfile.write(
+        string.Template("""
+void
+${cppclassname}::AddTo${cppname}(${cppcontainertype}* value)
+{
+    if (${cppmembername} == NULL)
+        ${cppmembername} = new List<${cppcontainertype}*, true>();
+    ${cppmembername}->Add(value);
+}
+
+
+void
+${cppclassname}::Set${cppname}(List<${cppcontainertype}*, true>* value)
+{
+   ${cppmembername} = value; 
+}
+
+
+int32
+${cppclassname}::Count${cppname}()
+{
+    if (${cppmembername} == NULL)
+        return 0;
+    return ${cppmembername}->CountItems();
+}
+
+
+${cppcontainertype}*
+${cppclassname}::${cppname}ItemAt(int32 index)
+{
+    return ${cppmembername}->ItemAt(index);
+}
+
+
+bool
+${cppclassname}::${cppname}IsNull()
+{
+    return ${cppmembername} == NULL;
+}
+""").substitute(dict))
+
+
+def writelistaccessorsheader(outputfile, cppname, cppcontainertype):
+    dict = {
+        'cppname': cppname,
+        'cppcontainertype': cppcontainertype
+    }
+
+    outputfile.write(
+        string.Template("""    void AddTo${cppname}(${cppcontainertype}* 
value);
+    void Set${cppname}(List<${cppcontainertype}*, true>* value);
+    int32 Count${cppname}();
+    ${cppcontainertype}* ${cppname}ItemAt(int32 index);
+    bool ${cppname}IsNull();
+""").substitute(dict))
+
+
+def writetakeownershipaccessors(outputfile, cppclassname, cppname, 
cppmembername, cpptype):
+
+    dict = {
+        'cppclassname': cppclassname,
+        'cppname': cppname,
+        'cppmembername': cppmembername,
+        'cpptype': cpptype
+    }
+
+    outputfile.write(
+        string.Template("""
+${cpptype}*
+${cppclassname}::${cppname}()
+{
+    return ${cppmembername};
+}
+
+
+void
+${cppclassname}::Set${cppname}(${cpptype}* value)
+{
+    ${cppmembername} = value;
+}
+
+
+void
+${cppclassname}::Set${cppname}Null()
+{
+    if (!${cppname}IsNull()) {
+        delete ${cppmembername};
+        ${cppmembername} = NULL;
+    }
+}
+
+
+bool
+${cppclassname}::${cppname}IsNull()
+{
+    return ${cppmembername} == NULL;
+}
+""").substitute(dict))
+
+
+def writetakeownershipaccessorsheader(outputfile, cppname, cpptype):
+    outputfile.write('    %s* %s();\n' % (cpptype, cppname))
+    outputfile.write('    void Set%s(%s* value);\n' % (cppname, cpptype))
+    outputfile.write('    void Set%sNull();\n' % cppname)
+    outputfile.write('    bool %sIsNull();\n' % cppname)
+
+
+def writescalaraccessors(outputfile, cppclassname, cppname, cppmembername, 
cpptype):
+
+    dict = {
+        'cppclassname': cppclassname,
+        'cppname': cppname,
+        'cppmembername': cppmembername,
+        'cpptype': cpptype
+    }
+
+    outputfile.write(
+        string.Template("""
+${cpptype}
+${cppclassname}::${cppname}()
+{
+    return *${cppmembername};
+}
+
+
+void
+${cppclassname}::Set${cppname}(${cpptype} value)
+{
+    if (${cppname}IsNull())
+        ${cppmembername} = new ${cpptype}[1];
+    ${cppmembername}[0] = value;
+}
+
+
+void
+${cppclassname}::Set${cppname}Null()
+{
+    if (!${cppname}IsNull()) {
+        delete ${cppmembername};
+        ${cppmembername} = NULL;
+    }
+}
+
+
+bool
+${cppclassname}::${cppname}IsNull()
+{
+    return ${cppmembername} == NULL;
+}
+""").substitute(dict))
+
+
+def writescalaraccessorsheader(outputfile, cppname, cpptype):
+    outputfile.write(
+        string.Template("""
+    ${cpptype} ${cppname}();
+    void Set${cppname}(${cpptype} value);
+    void Set${cppname}Null();
+    bool ${cppname}IsNull();
+""").substitute({'cppname': cppname, 'cpptype': cpptype}))
+
+
+def writeaccessors(outputfile, cppclassname, propname, propmetadata):
+    type = propmetadata['type']
+
+    if type == 'array':
+        writelistaccessors(outputfile,
+                           cppclassname,
+                           jscom.propnametocppname(propname),
+                           jscom.propnametocppmembername(propname),
+                           
jscom.javatypetocppname(propmetadata['items']['javaType']))
+    elif jscom.propmetadatatypeisscalar(propmetadata):
+        writescalaraccessors(outputfile,
+                             cppclassname,
+                             jscom.propnametocppname(propname),
+                             jscom.propnametocppmembername(propname),
+                             jscom.propmetadatatocpptypename(propmetadata))
+    else:
+        writetakeownershipaccessors(outputfile,
+                                    cppclassname,
+                                    jscom.propnametocppname(propname),
+                                    jscom.propnametocppmembername(propname),
+                                    
jscom.propmetadatatocpptypename(propmetadata))
+
+
+def writeaccessorsheader(outputfile, propname, propmetadata):
+    type = propmetadata['type']
+
+    if type == 'array':
+        writelistaccessorsheader(outputfile,
+                                 jscom.propnametocppname(propname),
+                                 
jscom.javatypetocppname(propmetadata['items']['javaType']))
+    elif jscom.propmetadatatypeisscalar(propmetadata):
+        writescalaraccessorsheader(outputfile,
+                                   jscom.propnametocppname(propname),
+                                   
jscom.propmetadatatocpptypename(propmetadata))
+    else:
+        writetakeownershipaccessorsheader(outputfile,
+                                          jscom.propnametocppname(propname),
+                                          
jscom.propmetadatatocpptypename(propmetadata))
+
+
+def writedestructorlogicforlist(outputfile, propname, propmetadata):
+    dict = {
+        'cppmembername': jscom.propnametocppmembername(propname),
+        'cpptype': jscom.javatypetocppname(propmetadata['items']['javaType'])
+    }
+
+    outputfile.write(
+        string.Template("""        int32 count = 
${cppmembername}->CountItems(); 
+        for (i = 0; i < count; i++)
+            delete ${cppmembername}->ItemAt(i);
+""").substitute(dict))
+
+
+def writedestructor(outputfile, cppname, schema):
+    outputfile.write('\n\n%s::~%s()\n{\n' % (cppname, cppname))
+
+    if hasanylistproperties(schema):
+        outputfile.write('    int32 i;\n\n')
+
+    for propname, propmetadata in schema['properties'].items():
+        propmembername = jscom.propnametocppmembername(propname)
+
+        outputfile.write('    if (%s != NULL) {\n' % propmembername)
+
+        if propmetadata['type'] == 'array':
+            writedestructorlogicforlist(outputfile, propname, propmetadata)
+
+        outputfile.write((
+            '        delete %s;\n'
+        ) % propmembername)
+
+        outputfile.write('    }\n\n')
+
+    outputfile.write('}\n')
+
+
+def writeconstructor(outputfile, cppname, schema):
+    outputfile.write('\n\n%s::%s()\n{\n' % (cppname, cppname))
+
+    for propname, propmetadata in schema['properties'].items():
+        outputfile.write('    %s = NULL;\n' % 
jscom.propnametocppmembername(propname))
+
+    outputfile.write('}\n')
+
+
+def writeheaderincludes(outputfile, properties):
+    for propname, propmetadata in properties.items():
+        jsontype = propmetadata['type']
+        javatype = None
+
+        if jsontype == 'object':
+            javatype = propmetadata['javaType']
+
+        if jsontype == 'array':
+            javatype = propmetadata['items']['javaType']
+
+        if javatype is not None:
+            outputfile.write('#include "%s.h"\n' % 
jscom.javatypetocppname(javatype))
+
+
+def schematocppmodels(inputfile, schema, outputdirectory):
+    if schema['type'] != 'object':
+        raise Exception('expecting object')
+
+    javatype = schema['javaType']
+
+    if not javatype or 0 == len(javatype):
+        raise Exception('missing "javaType" field')
+
+    cppclassname = jscom.javatypetocppname(javatype)
+    cpphfilename = os.path.join(outputdirectory, cppclassname + '.h')
+    cppifilename = os.path.join(outputdirectory, cppclassname + '.cpp')
+
+    with open(cpphfilename, 'w') as cpphfile:
+
+        jscom.writetopcomment(cpphfile, os.path.split(inputfile)[1], 'Model')
+        guarddefname = 'GEN_JSON_SCHEMA_MODEL__%s_H' % (cppclassname.upper())
+
+        cpphfile.write(string.Template("""
+#ifndef ${guarddefname}
+#define ${guarddefname}
+
+#include "List.h"
+#include "String.h"
+
+""").substitute({'guarddefname': guarddefname}))
+
+        writeheaderincludes(cpphfile, schema['properties'])
+
+        cpphfile.write(string.Template("""
+class ${cppclassname} {
+public:
+    ${cppclassname}();
+    virtual ~${cppclassname}();
+
+
+""").substitute({'cppclassname': cppclassname}))
+
+        for propname, propmetadata in schema['properties'].items():
+            writeaccessorsheader(cpphfile, propname, propmetadata)
+            cpphfile.write('\n')
+
+        # Now add the instance variables for the object as well.
+
+        cpphfile.write('private:\n')
+
+        for propname, propmetadata in schema['properties'].items():
+            cpphfile.write('    %s* %s;\n' % (
+                jscom.propmetadatatocpptypename(propmetadata),
+                jscom.propnametocppmembername(propname)))
+
+        cpphfile.write((
+            '};\n\n'
+            '#endif // %s'
+        ) % guarddefname)
+
+    with open(cppifilename, 'w') as cppifile:
+
+        jscom.writetopcomment(cppifile, os.path.split(inputfile)[1], 'Model')
+
+        cppifile.write('#include "%s.h"\n' % cppclassname)
+
+        writeconstructor(cppifile, cppclassname, schema)
+        writedestructor(cppifile, cppclassname, schema)
+
+        for propname, propmetadata in schema['properties'].items():
+            writeaccessors(cppifile, cppclassname, propname, propmetadata)
+            cppifile.write('\n')
+
+    # Now write out any subordinate structures.
+
+    for propname, propmetadata in schema['properties'].items():
+        jsontype = propmetadata['type']
+
+        if jsontype == 'array':
+            schematocppmodels(inputfile, propmetadata['items'], 
outputdirectory)
+
+        if jsontype == 'object':
+            schematocppmodels(inputfile, propmetadata, outputdirectory)
+
+
+def main():
+    parser = argparse.ArgumentParser(description='Convert JSON schema to Haiku 
C++ Models')
+    parser.add_argument('-i', '--inputfile', required=True, help='The input 
filename containing the JSON schema')
+    parser.add_argument('--outputdirectory', help='The output directory where 
the C++ files should be written')
+
+    args = parser.parse_args()
+
+    outputdirectory = args.outputdirectory
+
+    if not outputdirectory:
+        outputdirectory = '.'
+
+    with open(args.inputfile) as inputfile:
+        schema = json.load(inputfile)
+        schematocppmodels(args.inputfile, schema, outputdirectory)
+
+if __name__ == "__main__":
+    main()
+
diff --git a/src/apps/haikudepot/build/scripts/jsonschema2cppparser.py 
b/src/apps/haikudepot/build/scripts/jsonschema2cppparser.py
new file mode 100644
index 0000000000..ca8b8a6f9b
--- /dev/null
+++ b/src/apps/haikudepot/build/scripts/jsonschema2cppparser.py
@@ -0,0 +1,1320 @@
+#!/usr/bin/python
+
+# =====================================
+# Copyright 2017-2019, Andrew Lindesay
+# Distributed under the terms of the MIT License.
+# =====================================
+
+# This simple tool will read a JSON schema and will then generate a
+# listener for use the 'BJson' class in the Haiku system.  This
+# allows data conforming to the schema to be able to be parsed.
+
+import string
+import json
+import argparse
+import os
+import hdsjsonschemacommon as jscom
+
+
+# This naming is related to a sub-type in the schema; maybe not the top-level.
+
class CppParserSubTypeNaming:
    """Naming for a sub-type object in the schema; maybe not the top-level.

    Derives the C++ model class name from the schema's "javaType" field and
    from it produces the names of the stacked-listener classes used while
    parsing instances of that model.
    """

    def __init__(self, schema, naming):
        # .get() is used so that an absent "javaType" key raises the intended
        # descriptive error below rather than a bare KeyError.
        javatype = schema.get('javaType')

        if not javatype:
            raise Exception('missing "javaType" field')

        self._cppmodelclassname = jscom.javatypetocppname(javatype)
        self._naming = naming

    def cppmodelclassname(self):
        return self._cppmodelclassname

    def cppstackedlistenerclassname(self):
        return self._cppmodelclassname + '_' + self._naming.generatejsonlistenername('Stacked')

    def cppstackedlistlistenerclassname(self):
        return self._cppmodelclassname + '_List_' + self._naming.generatejsonlistenername('Stacked')

    def todict(self):
        # keys are the substitution names used by the string.Template blocks
        # throughout this script.
        return {
            'subtype_cppmodelclassname': self.cppmodelclassname(),
            'subtype_cppstackedlistenerclassname': self.cppstackedlistenerclassname(),
            'subtype_cppstackedlistlistenerclassname': self.cppstackedlistlistenerclassname()
        }
+
+
# This naming relates to the whole schema.  Its point of reference is the top level.
+
class CppParserNaming:
    """Naming that relates to the whole schema; its point of reference is the
    top level object.
    """

    def __init__(self, schemaroot):
        self._schemaroot = schemaroot

    def cpprootmodelclassname(self):
        if self._schemaroot['type'] != 'object':
            raise Exception('expecting object')

        # .get() is used so that an absent "javaType" key raises the intended
        # descriptive error below rather than a bare KeyError.
        javatype = self._schemaroot.get('javaType')

        if not javatype:
            raise Exception('missing "javaType" field')

        return jscom.javatypetocppname(javatype)

    def generatejsonlistenername(self, prefix):
        return prefix + self.cpprootmodelclassname() + 'JsonListener'

    def cppsupermainlistenerclassname(self):
        return self.generatejsonlistenername('AbstractMain')

    def cppsinglemainlistenerclassname(self):
        return self.generatejsonlistenername("Single")

    def cppbulkcontainermainlistenerclassname(self):
        return self.generatejsonlistenername('BulkContainer')

    def cppsuperstackedlistenerclassname(self):
        return self.generatejsonlistenername('AbstractStacked')

    def cppbulkcontainerstackedlistenerclassname(self):
        return self.generatejsonlistenername('BulkContainerStacked')

    def cppbulkcontaineritemliststackedlistenerclassname(self):
        return self.generatejsonlistenername('BulkContainerItemsStacked')

    def cppitemlistenerstackedlistenerclassname(self):
        # this is a sub-class of the root object json listener that can call
        # out to the item listener as the root objects are parsed.
        return self.generatejsonlistenername('ItemEmittingStacked')

    def cppgeneralobjectstackedlistenerclassname(self):
        return self.generatejsonlistenername('GeneralObjectStacked')

    def cppgeneralarraystackedlistenerclassname(self):
        return self.generatejsonlistenername('GeneralArrayStacked')

    def cppitemlistenerclassname(self):
        return self.cpprootmodelclassname() + 'Listener'

    def todict(self):
        # keys are the substitution names used by the string.Template blocks
        # throughout this script.
        return {
            'cpprootmodelclassname': self.cpprootmodelclassname(),
            'cppsupermainlistenerclassname': self.cppsupermainlistenerclassname(),
            'cppsinglemainlistenerclassname': self.cppsinglemainlistenerclassname(),
            'cppbulkcontainermainlistenerclassname': self.cppbulkcontainermainlistenerclassname(),
            'cppbulkcontainerstackedlistenerclassname': self.cppbulkcontainerstackedlistenerclassname(),
            'cppbulkcontaineritemliststackedlistenerclassname': self.cppbulkcontaineritemliststackedlistenerclassname(),
            'cppsuperstackedlistenerclassname': self.cppsuperstackedlistenerclassname(),
            'cppitemlistenerstackedlistenerclassname': self.cppitemlistenerstackedlistenerclassname(),
            'cppgeneralobjectstackedlistenerclassname': self.cppgeneralobjectstackedlistenerclassname(),
            'cppgeneralarraystackedlistenerclassname': self.cppgeneralarraystackedlistenerclassname(),
            'cppitemlistenerclassname': self.cppitemlistenerclassname()
        }
+
+
class CppParserImplementationState:
    """Carries the state of the generation run: the output file, the naming
    helper and the sets of C++ class names whose interface / implementation
    have already been emitted (so shared sub-types are only written once).
    """

    def __init__(self, outputfile, naming):
        # these lists were previously class-level attributes; as mutable
        # class attributes they would have been shared between all instances,
        # so they are created per-instance here.
        self._interfacehandledcppnames = []
        self._implementationhandledcppnames = []
        self._outputfile = outputfile
        self._naming = naming

    def isinterfacehandledcppname(self, name):
        return name in self._interfacehandledcppnames

    def addinterfacehandledcppname(self, name):
        self._interfacehandledcppnames.append(name)

    def isimplementationhandledcppname(self, name):
        return name in self._implementationhandledcppnames

    def addimplementationhandledcppname(self, name):
        self._implementationhandledcppnames.append(name)

    def naming(self):
        return self._naming

    def outputfile(self):
        return self._outputfile
+
+
def writerootstackedlistenerinterface(istate):
    # Emits the C++ declaration of the abstract base class for all stacked
    # listeners; concrete listeners subclass it, one per parse-tree level.
    istate.outputfile().write(
        string.Template("""
/*! This class is the top level of the stacked listeners.  The stack structure
    is maintained in a linked list and sub-classes implement specific behaviors
    depending where in the parse tree the stacked listener is working at.
*/
class ${cppsuperstackedlistenerclassname} : public BJsonEventListener {
public:
    ${cppsuperstackedlistenerclassname}(
        ${cppsupermainlistenerclassname}* mainListener,
        ${cppsuperstackedlistenerclassname}* parent);
    ~${cppsuperstackedlistenerclassname}();

    void HandleError(status_t status, int32 line, const char* message);
    void Complete();

    status_t ErrorStatus();

    ${cppsuperstackedlistenerclassname}* Parent();

    virtual bool WillPop();

protected:
    ${cppsupermainlistenerclassname}* fMainListener;

    bool Pop();
    void Push(${cppsuperstackedlistenerclassname}* stackedListener);


private:
    ${cppsuperstackedlistenerclassname}* fParent;
};
""").substitute(istate.naming().todict()))
+
+
def writerootstackedlistenerimplementation(istate):
    # Emits the C++ implementation of the abstract stacked listener base
    # class; most methods delegate to the main listener, and Push()/Pop()
    # maintain the linked-list stack via SetStackedListener().
    istate.outputfile().write(
        string.Template("""
${cppsuperstackedlistenerclassname}::${cppsuperstackedlistenerclassname} (
    ${cppsupermainlistenerclassname}* mainListener,
    ${cppsuperstackedlistenerclassname}* parent)
{
    fMainListener = mainListener;
    fParent = parent;
}

${cppsuperstackedlistenerclassname}::~${cppsuperstackedlistenerclassname}()
{
}

void
${cppsuperstackedlistenerclassname}::HandleError(status_t status, int32 line, const char* message)
{
    fMainListener->HandleError(status, line, message);
}

void
${cppsuperstackedlistenerclassname}::Complete()
{
   fMainListener->Complete();
}

status_t
${cppsuperstackedlistenerclassname}::ErrorStatus()
{
    return fMainListener->ErrorStatus();
}

${cppsuperstackedlistenerclassname}*
${cppsuperstackedlistenerclassname}::Parent()
{
    return fParent;
}

void
${cppsuperstackedlistenerclassname}::Push(${cppsuperstackedlistenerclassname}* stackedListener)
{
    fMainListener->SetStackedListener(stackedListener);
}

bool
${cppsuperstackedlistenerclassname}::WillPop()
{
    return true;
}

bool
${cppsuperstackedlistenerclassname}::Pop()
{
    bool result = WillPop();
    fMainListener->SetStackedListener(fParent);
    return result;
}
""").substitute(istate.naming().todict()))
+
+
def writeageneralstackedlistenerinterface(istate, alistenerclassname):
    # Emits the declaration for one of the "general" (parse-and-discard)
    # stacked listener classes named by 'alistenerclassname'.
    template = string.Template("""
class ${alistenerclassname} : public ${cppsuperstackedlistenerclassname} {
public:
    ${alistenerclassname}(
        ${cppsupermainlistenerclassname}* mainListener,
        ${cppsuperstackedlistenerclassname}* parent);
    ~${alistenerclassname}();


    bool Handle(const BJsonEvent& event);
};
""")
    substitutions = jscom.uniondicts(
        istate.naming().todict(),
        {'alistenerclassname': alistenerclassname})
    istate.outputfile().write(template.substitute(substitutions))
+
+
def writegeneralstackedlistenerinterface(istate):
    # Emits the declarations of both general-purpose (parse-and-discard)
    # stacked listeners; the array variant first, then the object variant.
    naming = istate.naming()
    for listenerclassname in (
            naming.cppgeneralarraystackedlistenerclassname(),
            naming.cppgeneralobjectstackedlistenerclassname()):
        writeageneralstackedlistenerinterface(istate, listenerclassname)
+
+
def writegeneralnoopstackedlistenerconstructordestructor(istate, aclassname):
    # Emits a pass-through constructor and an empty destructor for the
    # stacked listener class named by 'aclassname'.
    substitutions = jscom.uniondicts(
        istate.naming().todict(),
        {'aclassname': aclassname})
    template = string.Template("""
${aclassname}::${aclassname}(
    ${cppsupermainlistenerclassname}* mainListener,
    ${cppsuperstackedlistenerclassname}* parent)
    :
    ${cppsuperstackedlistenerclassname}(mainListener, parent)
{
}

${aclassname}::~${aclassname}()
{
}
""")
    istate.outputfile().write(template.substitute(substitutions))
+
+
def writegeneralstackedlistenerimplementation(istate):
    # Emits the implementations of the two general-purpose stacked listeners
    # that consume-and-discard arbitrary json objects / arrays which are not
    # part of the expected schema.
    outfile = istate.outputfile()
    generalobjectclassname = istate.naming().cppgeneralobjectstackedlistenerclassname()
    generalarrayclassname = istate.naming().cppgeneralarraystackedlistenerclassname()
    substitutedict = {
        'generalobjectclassname': generalobjectclassname,
        'generalarrayclassname': generalarrayclassname
    }

# general object consumer that will parse-and-discard any json objects.

    writegeneralnoopstackedlistenerconstructordestructor(istate, generalobjectclassname)

    istate.outputfile().write(
        string.Template("""
bool
${generalobjectclassname}::Handle(const BJsonEvent& event)
{
    switch (event.EventType()) {

        case B_JSON_OBJECT_NAME:
        case B_JSON_NUMBER:
        case B_JSON_STRING:
        case B_JSON_TRUE:
        case B_JSON_FALSE:
        case B_JSON_NULL:
            // ignore
            break;


        case B_JSON_OBJECT_START:
            Push(new ${generalobjectclassname}(fMainListener, this));
            break;


        case B_JSON_ARRAY_START:
            Push(new ${generalarrayclassname}(fMainListener, this));
            break;


        case B_JSON_ARRAY_END:
            HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE, "illegal state - unexpected end of array");
            break;


        case B_JSON_OBJECT_END:
        {
            bool status = Pop() && (ErrorStatus() == B_OK);
            delete this;
            return status;
        }


    }


    return ErrorStatus() == B_OK;
}
""").substitute(substitutedict))

    # general array consumer that will parse-and-discard any json arrays.

    writegeneralnoopstackedlistenerconstructordestructor(istate, generalarrayclassname)

    outfile.write(
        string.Template("""
bool
${generalarrayclassname}::Handle(const BJsonEvent& event)
{
    switch (event.EventType()) {

        case B_JSON_OBJECT_NAME:
        case B_JSON_NUMBER:
        case B_JSON_STRING:
        case B_JSON_TRUE:
        case B_JSON_FALSE:
        case B_JSON_NULL:
            // ignore
            break;


        case B_JSON_OBJECT_START:
            Push(new ${generalobjectclassname}(fMainListener, this));
            break;


        case B_JSON_ARRAY_START:
            Push(new ${generalarrayclassname}(fMainListener, this));
            break;


        case B_JSON_OBJECT_END:
            HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE, "illegal state - unexpected end of object");
            break;


        case B_JSON_ARRAY_END:
        {
            bool status = Pop() && (ErrorStatus() == B_OK);
            delete this;
            return status;
        }


    }


    return ErrorStatus() == B_OK;
}
""").substitute(substitutedict))
+
+
def writestackedlistenerinterface(istate, subschema):
    # Emits the declarations of the stacked listener (and its list variant)
    # for one object sub-schema, then recurses into any object / array typed
    # properties.  Each C++ model class is declared at most once; the istate
    # tracks which names have already been handled.
    naming = istate.naming()
    subtypenaming = CppParserSubTypeNaming(subschema, naming)

    if not istate.isinterfacehandledcppname(subtypenaming.cppmodelclassname()):
        istate.addinterfacehandledcppname(subtypenaming.cppmodelclassname())

        # NOTE(review): the "%s" in the generated "// list of %s pointers"
        # comment below looks like a leftover format placeholder that is
        # never substituted -- confirm whether it should name the model type.
        istate.outputfile().write(
            string.Template("""
class ${subtype_cppstackedlistenerclassname} : public ${cppsuperstackedlistenerclassname} {
public:
    ${subtype_cppstackedlistenerclassname}(
        ${cppsupermainlistenerclassname}* mainListener,
        ${cppsuperstackedlistenerclassname}* parent);
    ~${subtype_cppstackedlistenerclassname}();


    bool Handle(const BJsonEvent& event);


    ${subtype_cppmodelclassname}* Target();


protected:
    ${subtype_cppmodelclassname}* fTarget;
    BString fNextItemName;
};

class ${subtype_cppstackedlistlistenerclassname} : public ${cppsuperstackedlistenerclassname} {
public:
    ${subtype_cppstackedlistlistenerclassname}(
        ${cppsupermainlistenerclassname}* mainListener,
        ${cppsuperstackedlistenerclassname}* parent);
    ~${subtype_cppstackedlistlistenerclassname}();


    bool Handle(const BJsonEvent& event);


    List<${subtype_cppmodelclassname}*, true>* Target(); // list of %s pointers


private:
    List<${subtype_cppmodelclassname}*, true>* fTarget;
};
""").substitute(jscom.uniondicts(naming.todict(), subtypenaming.todict())))

        for propname, propmetadata in subschema['properties'].items():
            if propmetadata['type'] == 'array':
                writestackedlistenerinterface(istate, propmetadata['items'])
            elif propmetadata['type'] == 'object':
                writestackedlistenerinterface(istate, propmetadata)
+
+
def writebulkcontainerstackedlistenerinterface(istate, schema):
    # Emits the declarations of the three listener classes used for "bulk
    # container" parsing: the item-emitting listener, the container listener
    # and the listener for the container's "items" array.
    naming = istate.naming()
    subtypenaming = CppParserSubTypeNaming(schema, naming)
    outfile = istate.outputfile()

# This is a sub-class of the main model object listener.  It will ping out to an item listener
# when parsing is complete.

    outfile.write(
        string.Template("""
class ${cppitemlistenerstackedlistenerclassname} : public ${subtype_cppstackedlistenerclassname} {
public:
    ${cppitemlistenerstackedlistenerclassname}(
        ${cppsupermainlistenerclassname}* mainListener,
        ${cppsuperstackedlistenerclassname}* parent,
        ${cppitemlistenerclassname}* itemListener);
    ~${cppitemlistenerstackedlistenerclassname}();


    bool WillPop();


private:
    ${cppitemlistenerclassname}* fItemListener;
};


class ${cppbulkcontainerstackedlistenerclassname} : public ${cppsuperstackedlistenerclassname} {
public:
    ${cppbulkcontainerstackedlistenerclassname}(
        ${cppsupermainlistenerclassname}* mainListener,
        ${cppsuperstackedlistenerclassname}* parent,
        ${cppitemlistenerclassname}* itemListener);
    ~${cppbulkcontainerstackedlistenerclassname}();


    bool Handle(const BJsonEvent& event);


private:
    BString fNextItemName;
    ${cppitemlistenerclassname}* fItemListener;
};


class ${cppbulkcontaineritemliststackedlistenerclassname} : public ${cppsuperstackedlistenerclassname} {
public:
    ${cppbulkcontaineritemliststackedlistenerclassname}(
        ${cppsupermainlistenerclassname}* mainListener,
        ${cppsuperstackedlistenerclassname}* parent,
        ${cppitemlistenerclassname}* itemListener);
    ~${cppbulkcontaineritemliststackedlistenerclassname}();


    bool Handle(const BJsonEvent& event);
    bool WillPop();


private:
    ${cppitemlistenerclassname}* fItemListener;
};
""").substitute(jscom.uniondicts(naming.todict(), subtypenaming.todict())))
+
+
def writestackedlistenerfieldimplementation(
        istate,
        propname,
        cppeventdataexpression):
    # Emits a guarded setter call: when the pending json property name
    # matches 'propname', the supplied C++ expression is assigned onto the
    # target model object.
    substitutions = {
        'propname': propname,
        'cpppropname': jscom.propnametocppname(propname),
        'cppeventdataexpression': cppeventdataexpression
    }
    template = string.Template("""
            if (fNextItemName == "${propname}")
                fTarget->Set${cpppropname}(${cppeventdataexpression});
        """)
    istate.outputfile().write(template.substitute(substitutions))
+
+
def writenullstackedlistenerfieldimplementation(
        istate,
        propname):
    # Emits a guarded call that marks the matching model property as null
    # when a json null event arrives for 'propname'.
    substitutions = {
        'propname': propname,
        'cpppropname': jscom.propnametocppname(propname)
    }
    template = string.Template("""
            if (fNextItemName == "${propname}")
                fTarget->Set${cpppropname}Null();
        """)
    istate.outputfile().write(template.substitute(substitutions))
+
+
def writestackedlistenerfieldsimplementation(
        istate,
        schema,
        selectedcpptypename,
        jsoneventtypename,
        cppeventdataexpression):
    # Emits one switch case for 'jsoneventtypename' covering every property
    # of the schema whose C++ type matches 'selectedcpptypename'; each match
    # assigns the supplied event-data expression onto the model.
    outfile = istate.outputfile()
    outfile.write('        case %s:\n' % jsoneventtypename)

    matchingpropnames = [
        propname
        for propname, propmetadata in schema['properties'].items()
        if jscom.propmetadatatocpptypename(propmetadata) == selectedcpptypename]

    for propname in matchingpropnames:
        writestackedlistenerfieldimplementation(
            istate, propname, cppeventdataexpression)

    outfile.write('            fNextItemName.SetTo("");\n')
    outfile.write('            break;\n')
+
+
def writestackedlistenertypedobjectimplementation(istate, schema):
    # Emits the implementation of the stacked listener for one object
    # sub-schema: constructor / destructor / Target(), then a Handle()
    # switch with one case per json event type, populated from the schema's
    # properties.  Unknown objects / arrays fall through to the general
    # (discarding) listeners.
    outfile = istate.outputfile()
    naming = istate.naming();
    subtypenaming = CppParserSubTypeNaming(schema, naming)

    outfile.write(
        string.Template("""
${subtype_cppstackedlistenerclassname}::${subtype_cppstackedlistenerclassname}(
    ${cppsupermainlistenerclassname}* mainListener,
    ${cppsuperstackedlistenerclassname}* parent)
    :
    ${cppsuperstackedlistenerclassname}(mainListener, parent)
{
    fTarget = new ${subtype_cppmodelclassname}();
}


${subtype_cppstackedlistenerclassname}::~${subtype_cppstackedlistenerclassname}()
{
}


${subtype_cppmodelclassname}*
${subtype_cppstackedlistenerclassname}::Target()
{
    return fTarget;
}


bool
${subtype_cppstackedlistenerclassname}::Handle(const BJsonEvent& event)
{
    switch (event.EventType()) {


        case B_JSON_ARRAY_END:
            HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE, "illegal state - unexpected start of array");
            break;


        case B_JSON_OBJECT_NAME:
            fNextItemName = event.Content();
            break;


        case B_JSON_OBJECT_END:
        {
            bool status = Pop() && (ErrorStatus() == B_OK);
            delete this;
            return status;
        }

""").substitute(jscom.uniondicts(
            naming.todict(),
            subtypenaming.todict())))

    # now extract the fields from the schema that need to be fed in.

    writestackedlistenerfieldsimplementation(
        istate, schema,
        jscom.CPP_TYPE_STRING, 'B_JSON_STRING', 'new BString(event.Content())')

    writestackedlistenerfieldsimplementation(
        istate, schema,
        jscom.CPP_TYPE_BOOLEAN, 'B_JSON_TRUE', 'true')

    writestackedlistenerfieldsimplementation(
        istate, schema,
        jscom.CPP_TYPE_BOOLEAN, 'B_JSON_FALSE', 'false')

    outfile.write('        case B_JSON_NULL:\n')
    outfile.write('        {\n')

    for propname, propmetadata in schema['properties'].items():
        # TODO; deal with array case somehow.
        if 'array' != propmetadata['type']:
            writenullstackedlistenerfieldimplementation(istate, propname)
    outfile.write('            fNextItemName.SetTo("");\n')
    outfile.write('            break;\n')
    outfile.write('        }\n')

    # number type is a bit complex because it can either be a double or it can be an
    # integral value.

    outfile.write('        case B_JSON_NUMBER:\n')
    outfile.write('        {\n')

    for propname, propmetadata in schema['properties'].items():
        propcpptypename = jscom.propmetadatatocpptypename(propmetadata)
        if propcpptypename == jscom.CPP_TYPE_INTEGER:
            writestackedlistenerfieldimplementation(istate, propname, 'event.ContentInteger()')
        elif propcpptypename == jscom.CPP_TYPE_NUMBER:
            writestackedlistenerfieldimplementation(istate, propname, 'event.ContentDouble()')
    outfile.write('            fNextItemName.SetTo("");\n')
    outfile.write('            break;\n')
    outfile.write('        }\n')

    # object type; could be a sub-type or otherwise just drop into a placebo consumer to keep the parse
    # structure working.  This would most likely be additional sub-objects that are additional to the
    # expected schema.

    outfile.write('        case B_JSON_OBJECT_START:\n')
    outfile.write('        {\n')

    objectifclausekeyword = 'if'

    for propname, propmetadata in schema['properties'].items():
        if propmetadata['type'] == jscom.JSON_TYPE_OBJECT:
            subtypenaming = CppParserSubTypeNaming(propmetadata, naming)

            outfile.write('            %s (fNextItemName == "%s") {\n' % (objectifclausekeyword, propname))
            outfile.write('                %s* nextListener = new %s(fMainListener, this);\n' % (
                subtypenaming.cppstackedlistenerclassname(),
                subtypenaming.cppstackedlistenerclassname()))
            # NOTE(review): the setter name here is derived from the
            # sub-type's model class name, whereas the array case below
            # derives it from the property name -- confirm this asymmetry is
            # intended.
            outfile.write('                fTarget->Set%s(nextListener->Target());\n' % (
                subtypenaming.cppmodelclassname()))
            outfile.write('                Push(nextListener);\n')
            outfile.write('            }\n')

            objectifclausekeyword = 'else if'

    # fallback: unknown objects are consumed and discarded.
    outfile.write('            %s (1 == 1) {\n' % objectifclausekeyword)
    outfile.write('                %s* nextListener = new %s(fMainListener, this);\n' % (
        naming.cppgeneralobjectstackedlistenerclassname(),
        naming.cppgeneralobjectstackedlistenerclassname()))
    outfile.write('                Push(nextListener);\n')
    outfile.write('            }\n')
    outfile.write('            fNextItemName.SetTo("");\n')
    outfile.write('            break;\n')
    outfile.write('        }\n')

    # array type; could be an array of objects or otherwise just drop into a placebo consumer to keep
    # the parse structure working.

    outfile.write('        case B_JSON_ARRAY_START:\n')
    outfile.write('        {\n')

    objectifclausekeyword = 'if'

    for propname, propmetadata in schema['properties'].items():
        if propmetadata['type'] == jscom.JSON_TYPE_ARRAY:
            subtypenaming = CppParserSubTypeNaming(propmetadata['items'], naming)

            outfile.write('            %s (fNextItemName == "%s") {\n' % (objectifclausekeyword, propname))
            outfile.write('                %s* nextListener = new %s(fMainListener, this);\n' % (
                subtypenaming.cppstackedlistlistenerclassname(),
                subtypenaming.cppstackedlistlistenerclassname()))
            outfile.write('                fTarget->Set%s(nextListener->Target());\n' % (
                jscom.propnametocppname(propname)))
            outfile.write('                Push(nextListener);\n')
            outfile.write('            }\n')

            objectifclausekeyword = 'else if'

    # NOTE(review): unlike the object fallback above, the pointer here is
    # declared as the abstract super-listener type while the constructed type
    # is the general array listener -- confirm this mismatch is intended.
    outfile.write('            %s (1 == 1) {\n' % objectifclausekeyword)
    outfile.write('                %s* nextListener = new %s(fMainListener, this);\n' % (
        naming.cppsuperstackedlistenerclassname(),
        naming.cppgeneralarraystackedlistenerclassname()))
    outfile.write('                Push(nextListener);\n')
    outfile.write('            }\n')
    outfile.write('            fNextItemName.SetTo("");\n')
    outfile.write('            break;\n')
    outfile.write('        }\n')

    outfile.write("""
    }


    return ErrorStatus() == B_OK;
}
""")
+
+
def writestackedlistenertypedobjectlistimplementation(istate, schema):
    # Emits the implementation of the list-variant stacked listener for one
    # object sub-schema; it accumulates parsed objects into a List<> and
    # pushes an object listener for each array element.
    naming = istate.naming()
    subtypenaming = CppParserSubTypeNaming(schema, naming)
    outfile = istate.outputfile()

    outfile.write(
        string.Template("""
${subtype_cppstackedlistlistenerclassname}::${subtype_cppstackedlistlistenerclassname}(
    ${cppsupermainlistenerclassname}* mainListener,
    ${cppsuperstackedlistenerclassname}* parent)
    :
    ${cppsuperstackedlistenerclassname}(mainListener, parent)
{
    fTarget = new List<${subtype_cppmodelclassname}*, true>();
}


${subtype_cppstackedlistlistenerclassname}::~${subtype_cppstackedlistlistenerclassname}()
{
}


List<${subtype_cppmodelclassname}*, true>*
${subtype_cppstackedlistlistenerclassname}::Target()
{
    return fTarget;
}


bool
${subtype_cppstackedlistlistenerclassname}::Handle(const BJsonEvent& event)
{
    switch (event.EventType()) {


        case B_JSON_ARRAY_END:
        {
            bool status = Pop() && (ErrorStatus() == B_OK);
            delete this;
            return status;
        }


        case B_JSON_OBJECT_START:
        {
            ${subtype_cppstackedlistenerclassname}* nextListener =
                new ${subtype_cppstackedlistenerclassname}(fMainListener, this);
            fTarget->Add(nextListener->Target());
            Push(nextListener);
            break;
        }


        default:
            HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE,
                "illegal state - unexpected json event parsing an array of ${subtype_cppmodelclassname}");
            break;
    }


    return ErrorStatus() == B_OK;
}
""").substitute(jscom.uniondicts(naming.todict(), subtypenaming.todict())))
+
+
def writebulkcontainerstackedlistenerimplementation(istate, schema):
    # Emits the implementations of the bulk-container listener classes: the
    # item-emitting listener (hands each parsed object to the item listener
    # in WillPop and frees it), the container listener (routes the "items"
    # array) and the items-array listener.
    naming = istate.naming()
    subtypenaming = CppParserSubTypeNaming(schema, naming)
    outfile = istate.outputfile()

    outfile.write(
        string.Template("""
${cppitemlistenerstackedlistenerclassname}::${cppitemlistenerstackedlistenerclassname}(
    ${cppsupermainlistenerclassname}* mainListener, ${cppsuperstackedlistenerclassname}* parent,
    ${cppitemlistenerclassname}* itemListener)
:
${subtype_cppstackedlistenerclassname}(mainListener, parent)
{
    fItemListener = itemListener;
}


${cppitemlistenerstackedlistenerclassname}::~${cppitemlistenerstackedlistenerclassname}()
{
}


bool
${cppitemlistenerstackedlistenerclassname}::WillPop()
{
    bool result = fItemListener->Handle(fTarget);
    delete fTarget;
    fTarget = NULL;
    return result;
}


${cppbulkcontainerstackedlistenerclassname}::${cppbulkcontainerstackedlistenerclassname}(
    ${cppsupermainlistenerclassname}* mainListener, ${cppsuperstackedlistenerclassname}* parent,
    ${cppitemlistenerclassname}* itemListener)
:
${cppsuperstackedlistenerclassname}(mainListener, parent)
{
    fItemListener = itemListener;
}


${cppbulkcontainerstackedlistenerclassname}::~${cppbulkcontainerstackedlistenerclassname}()
{
}


bool
${cppbulkcontainerstackedlistenerclassname}::Handle(const BJsonEvent& event)
{
    switch (event.EventType()) {


        case B_JSON_ARRAY_END:
            HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE, "illegal state - unexpected start of array");
            break;


        case B_JSON_OBJECT_NAME:
            fNextItemName = event.Content();
            break;


        case B_JSON_OBJECT_START:
            Push(new ${cppgeneralobjectstackedlistenerclassname}(fMainListener, this));
            break;


        case B_JSON_ARRAY_START:
            if (fNextItemName == "items")
                Push(new ${cppbulkcontaineritemliststackedlistenerclassname}(fMainListener, this, fItemListener));
            else
                Push(new ${cppgeneralarraystackedlistenerclassname}(fMainListener, this));
            break;


        case B_JSON_OBJECT_END:
        {
            bool status = Pop() && (ErrorStatus() == B_OK);
            delete this;
            return status;
        }


        default:
                // ignore
            break;
    }


    return ErrorStatus() == B_OK;
}


${cppbulkcontaineritemliststackedlistenerclassname}::${cppbulkcontaineritemliststackedlistenerclassname}(
    ${cppsupermainlistenerclassname}* mainListener, ${cppsuperstackedlistenerclassname}* parent,
    ${cppitemlistenerclassname}* itemListener)
:
${cppsuperstackedlistenerclassname}(mainListener, parent)
{
    fItemListener = itemListener;
}


${cppbulkcontaineritemliststackedlistenerclassname}::~${cppbulkcontaineritemliststackedlistenerclassname}()
{
}


bool
${cppbulkcontaineritemliststackedlistenerclassname}::Handle(const BJsonEvent& event)
{
    switch (event.EventType()) {


        case B_JSON_OBJECT_START:
            Push(new ${cppitemlistenerstackedlistenerclassname}(fMainListener, this, fItemListener));
            break;


        case B_JSON_ARRAY_END:
        {
            bool status = Pop() && (ErrorStatus() == B_OK);
            delete this;
            return status;
        }


        default:
            HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE, "illegal state - unexpected json event");
            break;
    }


    return ErrorStatus() == B_OK;
}


bool
${cppbulkcontaineritemliststackedlistenerclassname}::WillPop()
{
    fItemListener->Complete();
    return true;
}


""").substitute(jscom.uniondicts(naming.todict(), subtypenaming.todict())))
+
+
def writestackedlistenerimplementation(istate, schema):
    # Emits the stacked listener implementation (and its list variant) for
    # this object sub-schema and recurses into subordinate object / array
    # properties.  Each C++ model class name is only processed once.
    subtypenaming = CppParserSubTypeNaming(schema, istate.naming())
    modelclassname = subtypenaming.cppmodelclassname()

    if istate.isimplementationhandledcppname(modelclassname):
        return

    istate.addimplementationhandledcppname(modelclassname)

    writestackedlistenertypedobjectimplementation(istate, schema)
    writestackedlistenertypedobjectlistimplementation(istate, schema)  # TODO; only if necessary.

    # now create the parser types for any subordinate objects descending.

    for propmetadata in schema['properties'].values():
        if propmetadata['type'] == 'array':
            writestackedlistenerimplementation(istate, propmetadata['items'])
        elif propmetadata['type'] == 'object':
            writestackedlistenerimplementation(istate, propmetadata)
+
+
def writemainlistenerimplementation(istate, schema, supportbulkcontainer):
    # Emits the .cpp implementations of the "main" (top level) JSON event
    # listeners: the abstract superclass that owns the error state and the
    # stacked-listener pointer, the listener for parsing a single top level
    # object and, when supportbulkcontainer is set, the listener for the
    # "bulk container" envelope format that relays each parsed item to a
    # client supplied item listener.
    # NOTE(review): the template bodies below are emitted verbatim into the
    # generated C++ sources; their exact whitespace is intentional.
    outfile = istate.outputfile()
    naming = istate.naming()
    subtypenaming = CppParserSubTypeNaming(schema, istate.naming())

# super (abstract) listener

    outfile.write(
        string.Template("""
${cppsupermainlistenerclassname}::${cppsupermainlistenerclassname}()
{
    fStackedListener = NULL;
    fErrorStatus = B_OK;
}


${cppsupermainlistenerclassname}::~${cppsupermainlistenerclassname}()
{
}


void
${cppsupermainlistenerclassname}::HandleError(status_t status, int32 line, const char* message)
{
    if (message != NULL) {
        fprintf(stderr, "an error has arisen processing json for '${cpprootmodelclassname}'; %s\\n", message);
    } else {
        fprintf(stderr, "an error has arisen processing json for '${cpprootmodelclassname}'\\n");
    }
    fErrorStatus = status;
}


void
${cppsupermainlistenerclassname}::Complete()
{
}


status_t
${cppsupermainlistenerclassname}::ErrorStatus()
{
    return fErrorStatus;
}

void
${cppsupermainlistenerclassname}::SetStackedListener(
    ${cppsuperstackedlistenerclassname}* stackedListener)
{
    fStackedListener = stackedListener;
}

""").substitute(naming.todict()))

# single parser

    outfile.write(
        string.Template("""
${cppsinglemainlistenerclassname}::${cppsinglemainlistenerclassname}()
:
${cppsupermainlistenerclassname}()
{
    fTarget = NULL;
}


${cppsinglemainlistenerclassname}::~${cppsinglemainlistenerclassname}()
{
}


bool
${cppsinglemainlistenerclassname}::Handle(const BJsonEvent& event)
{
    if (fErrorStatus != B_OK)
       return false;


    if (fStackedListener != NULL)
        return fStackedListener->Handle(event);


    switch (event.EventType()) {


        case B_JSON_OBJECT_START:
        {
            ${subtype_cppstackedlistenerclassname}* nextListener = new ${subtype_cppstackedlistenerclassname}(
                this, NULL);
            fTarget = nextListener->Target();
            SetStackedListener(nextListener);
            break;
        }


        default:
            HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE,
                "illegal state - unexpected json event parsing top level for ${cpprootmodelclassname}");
            break;
    }


    return ErrorStatus() == B_OK;
}


${cpprootmodelclassname}*
${cppsinglemainlistenerclassname}::Target()
{
    return fTarget;
}

""").substitute(jscom.uniondicts(naming.todict(), subtypenaming.todict())))

    if supportbulkcontainer:

        # create a main listener that can work through the list of top level
        # model objects and ping the listener

        outfile.write(
            string.Template("""
${cppbulkcontainermainlistenerclassname}::${cppbulkcontainermainlistenerclassname}(
    ${cppitemlistenerclassname}* itemListener) : ${cppsupermainlistenerclassname}()
{
    fItemListener = itemListener;
}


${cppbulkcontainermainlistenerclassname}::~${cppbulkcontainermainlistenerclassname}()
{
}


bool
${cppbulkcontainermainlistenerclassname}::Handle(const BJsonEvent& event)
{
    if (fErrorStatus != B_OK)
       return false;


    if (fStackedListener != NULL)
        return fStackedListener->Handle(event);


    switch (event.EventType()) {


        case B_JSON_OBJECT_START:
        {
            ${cppbulkcontainerstackedlistenerclassname}* nextListener =
                new ${cppbulkcontainerstackedlistenerclassname}(
                    this, NULL, fItemListener);
            SetStackedListener(nextListener);
            return true;
            break;
        }


        default:
            HandleError(B_NOT_ALLOWED, JSON_EVENT_LISTENER_ANY_LINE,
                "illegal state - unexpected json event parsing top level for ${cppbulkcontainermainlistenerclassname}");
            break;
    }


    return ErrorStatus() == B_OK;
}

""").substitute(jscom.uniondicts(naming.todict(), subtypenaming.todict())))
+
+
def schematocppparser(inputfile, schema, outputdirectory, supportbulkcontainer):
    # Generates the C++ header and implementation files for a JSON parser
    # (BJsonEventListener subclasses) for the top level model type described
    # by `schema`.  The header declares the public "main" listeners; the
    # implementation file additionally contains the private "stacked"
    # listeners that track nesting while parsing.
    # `inputfile` is only used (by leaf name) for the generated top comment.
    naming = CppParserNaming(schema)
    cppheaderleafname = naming.cpprootmodelclassname() + 'JsonListener.h'
    cppheaderfilename = os.path.join(outputdirectory, cppheaderleafname)
    cppimplementationfilename = os.path.join(outputdirectory, naming.cpprootmodelclassname() + 'JsonListener.cpp')

    with open(cppheaderfilename, 'w') as cpphfile:
        jscom.writetopcomment(cpphfile, os.path.split(inputfile)[1], 'Listener')
        # include-guard macro is derived from the single-listener class name.
        guarddefname = 'GEN_JSON_SCHEMA_PARSER__%s_H' % (naming.cppsinglemainlistenerclassname().upper())

        cpphfile.write(
            string.Template("""
#ifndef ${guarddefname}
#define ${guarddefname}
""").substitute({'guarddefname': guarddefname}))

        cpphfile.write(
            string.Template("""
#include <JsonEventListener.h>

#include "${cpprootmodelclassname}.h"

class ${cppsuperstackedlistenerclassname};

class ${cppsupermainlistenerclassname} : public BJsonEventListener {
friend class ${cppsuperstackedlistenerclassname};
public:
    ${cppsupermainlistenerclassname}();
    virtual ~${cppsupermainlistenerclassname}();


    void HandleError(status_t status, int32 line, const char* message);
    void Complete();
    status_t ErrorStatus();


protected:
    void SetStackedListener(
        ${cppsuperstackedlistenerclassname}* listener);
    status_t fErrorStatus;
    ${cppsuperstackedlistenerclassname}* fStackedListener;
};


/*! Use this listener when you want to parse some JSON data that contains
    just a single instance of ${cpprootmodelclassname}.
*/
class ${cppsinglemainlistenerclassname}
    : public ${cppsupermainlistenerclassname} {
friend class ${cppsuperstackedlistenerclassname};
public:
    ${cppsinglemainlistenerclassname}();
    virtual ~${cppsinglemainlistenerclassname}();


    bool Handle(const BJsonEvent& event);
    ${cpprootmodelclassname}* Target();


private:
    ${cpprootmodelclassname}* fTarget;
};

""").substitute(naming.todict()))

# class interface for concrete class of single listener


        # If bulk enveloping is selected then also output a listener and an
        # interface which can deal with call-backs.

        if supportbulkcontainer:
            cpphfile.write(
                string.Template("""
/*! Concrete sub-classes of this class are able to respond to each
    ${cpprootmodelclassname}* instance as
    it is parsed from the bulk container.  When the stream is
    finished, the Complete() method is invoked.


    Note that the item object will be deleted after the Handle method
    is invoked.  The Handle method need not take responsibility
    for deleting the item itself.
*/
class ${cppitemlistenerclassname} {
public:
    virtual bool Handle(${cpprootmodelclassname}* item) = 0;
    virtual void Complete() = 0;
};
""").substitute(naming.todict()))

            cpphfile.write(
                string.Template("""


/*! Use this listener, together with an instance of a concrete
    subclass of ${cppitemlistenerclassname}
    in order to parse the JSON data in a specific "bulk
    container" format.  Each time that an instance of
    ${cpprootmodelclassname}
    is parsed, the instance item listener will be invoked.
*/
class ${cppbulkcontainermainlistenerclassname}
    : public ${cppsupermainlistenerclassname} {
friend class ${cppsuperstackedlistenerclassname};
public:
    ${cppbulkcontainermainlistenerclassname}(
        ${cppitemlistenerclassname}* itemListener);
    ~${cppbulkcontainermainlistenerclassname}();


    bool Handle(const BJsonEvent& event);


private:
    ${cppitemlistenerclassname}* fItemListener;
};
""").substitute(naming.todict()))

        cpphfile.write('\n#endif // %s' % guarddefname)

    with open(cppimplementationfilename, 'w') as cppifile:
        istate = CppParserImplementationState(cppifile, naming)
        jscom.writetopcomment(cppifile, os.path.split(inputfile)[1], 'Listener')
        cppifile.write('#include "%s"\n' % cppheaderleafname)
        cppifile.write('#include "List.h"\n\n')
        cppifile.write('#include <stdio.h>\n\n')

        # interfaces first so the implementations can reference one another.
        cppifile.write('// #pragma mark - private interfaces for the stacked listeners\n\n')

        writerootstackedlistenerinterface(istate)
        writegeneralstackedlistenerinterface(istate)
        writestackedlistenerinterface(istate, schema)

        if supportbulkcontainer:
            writebulkcontainerstackedlistenerinterface(istate, schema)

        cppifile.write('// #pragma mark - implementations for the stacked listeners\n\n')

        writerootstackedlistenerimplementation(istate)
        writegeneralstackedlistenerimplementation(istate)
        writestackedlistenerimplementation(istate, schema)

        if supportbulkcontainer:
            writebulkcontainerstackedlistenerimplementation(istate, schema)

        cppifile.write('// #pragma mark - implementations for the main listeners\n\n')

        writemainlistenerimplementation(istate, schema, supportbulkcontainer)
+
+
def main():
    """Command-line entry point: reads a JSON schema file and generates the
    corresponding C++ JSON listener / parser sources into the output
    directory (defaulting to the current working directory)."""
    parser = argparse.ArgumentParser(
        description='Convert JSON schema to Haiku C++ Parsers')
    parser.add_argument('-i', '--inputfile', required=True,
                        help='The input filename containing the JSON schema')
    parser.add_argument('--outputdirectory',
                        help='The output directory where the C++ files should be written')
    parser.add_argument('--supportbulkcontainer',
                        help='Produce a parser that deals with a bulk envelope of items',
                        action='store_true')

    args = parser.parse_args()

    outputdirectory = args.outputdirectory or '.'

    with open(args.inputfile) as inputfile:
        schema = json.load(inputfile)
        # action='store_true' already guarantees a bool (default False), so
        # no "or False" coercion is needed.
        schematocppparser(args.inputfile, schema, outputdirectory,
                          args.supportbulkcontainer)


if __name__ == "__main__":
    main()
\ No newline at end of file
diff --git a/src/apps/haikudepot/server/ServerPkgDataUpdateProcess.cpp 
b/src/apps/haikudepot/server/ServerPkgDataUpdateProcess.cpp
index b87f7450f9..596a2a2a5c 100644
--- a/src/apps/haikudepot/server/ServerPkgDataUpdateProcess.cpp
+++ b/src/apps/haikudepot/server/ServerPkgDataUpdateProcess.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017-2018, Andrew Lindesay <apl@xxxxxxxxxxxxxx>.
+ * Copyright 2017-2019, Andrew Lindesay <apl@xxxxxxxxxxxxxx>.
  * All rights reserved. Distributed under the terms of the MIT License.
  */
 
@@ -162,9 +162,6 @@ PackageFillingPkgListener::ConsumePackage(const 
PackageInfoRef& package,
        if (!pkg->ProminenceOrderingIsNull())
                package->SetProminence(pkg->ProminenceOrdering());
 
-       if (!pkg->PkgChangelogContentIsNull())
-               package->SetChangelog(*(pkg->PkgChangelogContent()));
-
        int32 countPkgScreenshots = pkg->CountPkgScreenshots();
 
        for (i = 0; i < countPkgScreenshots; i++) {
diff --git a/src/apps/haikudepot/server/dumpexportpkg/DumpExportPkg.cpp 
b/src/apps/haikudepot/server/dumpexportpkg/DumpExportPkg.cpp
deleted file mode 100644
index 68108afb1d..0000000000
--- a/src/apps/haikudepot/server/dumpexportpkg/DumpExportPkg.cpp
+++ /dev/null
@@ -1,345 +0,0 @@
-/*
- * Generated Model Object
- * source json-schema : dumpexport.json
- * generated at : 2017-12-07T23:22:17.116794
- */
-#include "DumpExportPkg.h"
-
-
-DumpExportPkg::DumpExportPkg()
-{
-    fPkgChangelogContent = NULL;
-    fName = NULL;
-    fPkgVersions = NULL;
-    fDerivedRating = NULL;
-    fPkgScreenshots = NULL;
-    fProminenceOrdering = NULL;
-    fPkgCategories = NULL;
-    fModifyTimestamp = NULL;
-}
-
-
-DumpExportPkg::~DumpExportPkg()
-{
-    int32 i;
-
-    if (fPkgChangelogContent != NULL) {
-        delete fPkgChangelogContent;
-    }
-
-    if (fName != NULL) {
-        delete fName;
-    }
-
-    if (fPkgVersions != NULL) {
-        int32 count = fPkgVersions->CountItems(); 
-        for (i = 0; i < count; i++)
-            delete fPkgVersions->ItemAt(i);
-        delete fPkgVersions;
-    }
-
-    if (fDerivedRating != NULL) {
-        delete fDerivedRating;
-    }
-
-    if (fPkgScreenshots != NULL) {
-        int32 count = fPkgScreenshots->CountItems(); 
-        for (i = 0; i < count; i++)
-            delete fPkgScreenshots->ItemAt(i);
-        delete fPkgScreenshots;
-    }
-
-    if (fProminenceOrdering != NULL) {
-        delete fProminenceOrdering;
-    }
-
-    if (fPkgCategories != NULL) {
-        int32 count = fPkgCategories->CountItems(); 
-        for (i = 0; i < count; i++)
-            delete fPkgCategories->ItemAt(i);
-        delete fPkgCategories;
-    }
-
-    if (fModifyTimestamp != NULL) {
-        delete fModifyTimestamp;
-    }
-
-}
-
-BString*
-DumpExportPkg::PkgChangelogContent()
-{
-    return fPkgChangelogContent;
-}
-
-
-void
-DumpExportPkg::SetPkgChangelogContent(BString* value)
-{
-    fPkgChangelogContent = value;
-}
-
-
-void
-DumpExportPkg::SetPkgChangelogContentNull()
-{
-    if (!PkgChangelogContentIsNull()) {
-        delete fPkgChangelogContent;
-        fPkgChangelogContent = NULL;
-    }
-}
-
-
-bool
-DumpExportPkg::PkgChangelogContentIsNull()
-{
-    return fPkgChangelogContent == NULL;
-}
-
-
-BString*
-DumpExportPkg::Name()
-{
-    return fName;
-}
-
-
-void
-DumpExportPkg::SetName(BString* value)
-{
-    fName = value;
-}
-
-
-void
-DumpExportPkg::SetNameNull()
-{
-    if (!NameIsNull()) {
-        delete fName;
-        fName = NULL;
-    }
-}
-
-
-bool
-DumpExportPkg::NameIsNull()
-{
-    return fName == NULL;
-}
-
-
-void
-DumpExportPkg::AddToPkgVersions(DumpExportPkgVersion* value)
-{
-    if (fPkgVersions == NULL)
-        fPkgVersions = new List<DumpExportPkgVersion*, true>();
-    fPkgVersions->Add(value);
-}
-
-
-void
-DumpExportPkg::SetPkgVersions(List<DumpExportPkgVersion*, true>* value)
-{
-   fPkgVersions = value; 
-}
-
-
-int32
-DumpExportPkg::CountPkgVersions()
-{
-    if (fPkgVersions == NULL)
-        return 0;
-    return fPkgVersions->CountItems();
-}
-
-
-DumpExportPkgVersion*
-DumpExportPkg::PkgVersionsItemAt(int32 index)
-{
-    return fPkgVersions->ItemAt(index);
-}
-
-
-bool
-DumpExportPkg::PkgVersionsIsNull()
-{
-    return fPkgVersions == NULL;
-}
-
-
-double
-DumpExportPkg::DerivedRating()
-{
-    return *fDerivedRating;
-}
-
-
-void
-DumpExportPkg::SetDerivedRating(double value)
-{
-    if (DerivedRatingIsNull())
-        fDerivedRating = new double[1];
-    fDerivedRating[0] = value;
-}
-
-
-void
-DumpExportPkg::SetDerivedRatingNull()
-{
-    if (!DerivedRatingIsNull()) {
-        delete fDerivedRating;
-        fDerivedRating = NULL;
-    }
-}
-
-
-bool
-DumpExportPkg::DerivedRatingIsNull()
-{
-    return fDerivedRating == NULL;
-}
-
-
-void
-DumpExportPkg::AddToPkgScreenshots(DumpExportPkgScreenshot* value)
-{
-    if (fPkgScreenshots == NULL)
-        fPkgScreenshots = new List<DumpExportPkgScreenshot*, true>();
-    fPkgScreenshots->Add(value);
-}
-
-
-void
-DumpExportPkg::SetPkgScreenshots(List<DumpExportPkgScreenshot*, true>* value)
-{
-   fPkgScreenshots = value; 
-}
-
-
-int32
-DumpExportPkg::CountPkgScreenshots()
-{
-    if (fPkgScreenshots == NULL)
-        return 0;
-    return fPkgScreenshots->CountItems();
-}
-
-
-DumpExportPkgScreenshot*
-DumpExportPkg::PkgScreenshotsItemAt(int32 index)
-{
-    return fPkgScreenshots->ItemAt(index);
-}
-
-
-bool
-DumpExportPkg::PkgScreenshotsIsNull()
-{
-    return fPkgScreenshots == NULL;
-}
-
-
-int64
-DumpExportPkg::ProminenceOrdering()
-{
-    return *fProminenceOrdering;
-}
-
-
-void
-DumpExportPkg::SetProminenceOrdering(int64 value)
-{
-    if (ProminenceOrderingIsNull())
-        fProminenceOrdering = new int64[1];
-    fProminenceOrdering[0] = value;
-}
-
-
-void
-DumpExportPkg::SetProminenceOrderingNull()
-{
-    if (!ProminenceOrderingIsNull()) {
-        delete fProminenceOrdering;
-        fProminenceOrdering = NULL;
-    }
-}
-
-
-bool
-DumpExportPkg::ProminenceOrderingIsNull()
-{
-    return fProminenceOrdering == NULL;
-}
-
-
-void
-DumpExportPkg::AddToPkgCategories(DumpExportPkgCategory* value)
-{
-    if (fPkgCategories == NULL)
-        fPkgCategories = new List<DumpExportPkgCategory*, true>();
-    fPkgCategories->Add(value);
-}
-
-
-void
-DumpExportPkg::SetPkgCategories(List<DumpExportPkgCategory*, true>* value)
-{
-   fPkgCategories = value; 
-}
-
-
-int32
-DumpExportPkg::CountPkgCategories()
-{
-    if (fPkgCategories == NULL)
-        return 0;
-    return fPkgCategories->CountItems();
-}
-
-
-DumpExportPkgCategory*
-DumpExportPkg::PkgCategoriesItemAt(int32 index)
-{
-    return fPkgCategories->ItemAt(index);
-}
-
-
-bool
-DumpExportPkg::PkgCategoriesIsNull()
-{
-    return fPkgCategories == NULL;
-}
-
-
-int64
-DumpExportPkg::ModifyTimestamp()
-{
-    return *fModifyTimestamp;
-}
-
-
-void
-DumpExportPkg::SetModifyTimestamp(int64 value)
-{
-    if (ModifyTimestampIsNull())
-        fModifyTimestamp = new int64[1];
-    fModifyTimestamp[0] = value;
-}
-
-
-void
-DumpExportPkg::SetModifyTimestampNull()
-{
-    if (!ModifyTimestampIsNull()) {
-        delete fModifyTimestamp;
-        fModifyTimestamp = NULL;
-    }
-}
-
-
-bool
-DumpExportPkg::ModifyTimestampIsNull()
-{
-    return fModifyTimestamp == NULL;
-}
-

[ *** diff truncated: 4020 lines dropped *** ]



Other related posts:

  • » [haiku-commits] haiku: hrev52793 - in src/apps/haikudepot: server/dumpexportpkg build/scripts server/dumpexportrepository . - Andrew Lindesay