Skip to content

Commit

Permalink
Merge pull request #34 from bigladder/add-core-to-cpp
Browse files Browse the repository at this point in the history
Add core to cpp
  • Loading branch information
nealkruis authored Nov 14, 2023
2 parents 6e251ce + 49b28c2 commit c33c422
Show file tree
Hide file tree
Showing 8 changed files with 145 additions and 63 deletions.
40 changes: 20 additions & 20 deletions dodo.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,26 @@ def task_generate_markdown():
'clean': True
}

def task_generate_cpp_code():
    '''Generate CPP headers and source for example schema.'''
    for example in examples:
        name = os.path.basename(example.root_directory)
        # Regenerate whenever any schema, meta-schema, or the generator changes.
        dependencies = [schema.path for schema in example.cpp_schemas]
        dependencies += [schema.meta_schema_path for schema in example.schemas]
        dependencies += [
            CORE_SCHEMA_PATH,
            BASE_META_SCHEMA_PATH,
            os.path.join(SOURCE_PATH, "header_entries.py"),
        ]
        outputs = [schema.cpp_header_path for schema in example.cpp_schemas]
        outputs += [schema.cpp_source_path for schema in example.cpp_schemas]
        yield {
            'name': name,
            # Only generate code from schemas that have validated.
            'task_dep': [f"validate_schemas:{name}"],
            'file_dep': dependencies,
            'targets': outputs,
            'actions': [
                (example.generate_cpp_headers, [])
            ],
            'clean': True
        }

def task_generate_web_docs():
'''Generate markdown documentation from templates'''
for example in examples:
Expand All @@ -121,23 +141,3 @@ def task_test():
return {
'actions': ['pytest -v test']
}

def task_generate_cpp_code():
    '''Generate CPP headers and source for example schema.'''
    # NOTE(review): this is the deleted side of the diff; the added version
    # iterates example.cpp_schemas instead of example.schemas.
    for example in examples:
        name = os.path.basename(example.root_directory)
        yield {
            'name': name,
            # Only generate code from schemas that have validated.
            'task_dep': [f"validate_schemas:{name}"],
            # Regenerate whenever any schema, meta-schema, or the generator changes.
            'file_dep': [schema.path for schema in example.schemas] +
                        [schema.meta_schema_path for schema in example.schemas] +
                        [CORE_SCHEMA_PATH,
                         BASE_META_SCHEMA_PATH,
                         os.path.join(SOURCE_PATH, "header_entries.py")],
            'targets': [schema.cpp_header_path for schema in example.schemas] +
                       [schema.cpp_source_path for schema in example.schemas],
            'actions': [
                (example.generate_cpp_headers, [])
            ],
            'clean': True
        }
16 changes: 16 additions & 0 deletions lattice/cpp/generate_support_headers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
from jinja2 import Template
import os
import sys
from lattice.file_io import dump
from lattice.util import snake_style
from pathlib import Path

# file_loader = FileSystemLoader(os.path.join(os.path.dirname(__file__), 'generation_templates'))
# env = Environment(loader=file_loader)

def generate_support_headers(namespace_name: str, output_directory: Path) -> None:
    """Render every Jinja template in the sibling "templates" directory into
    *output_directory*, substituting *namespace_name* for the template's
    ``namespace`` variable.

    The rendered file name is the template stem with underscores replaced by
    hyphens (e.g. ``enum_info.h`` -> ``enum-info.h``).
    """
    for template in Path(__file__).with_name("templates").iterdir():
        # Guard against stray subdirectories/non-files; read_text() would raise.
        if not template.is_file():
            continue
        support_header = Template(template.read_text())
        # template.stem drops only the trailing ".j2", keeping the ".h" part.
        generated_file_name = "-".join(snake_style(template.stem).split("_"))
        dump(support_header.render(namespace=namespace_name), Path(output_directory) / generated_file_name)

16 changes: 16 additions & 0 deletions lattice/cpp/templates/enum-info.h.j2
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
#ifndef ENUM_INFO_H_
#define ENUM_INFO_H_

#include <string_view>

namespace {{namespace}} {

// Static metadata describing one enumerant of a generated enumeration:
// its C++ identifier, a human-readable label, and a description string.
// string_view members are expected to reference string literals, so no
// ownership or lifetime management is needed.
struct enum_info
{
  std::string_view enumerant_name;
  std::string_view display_text;
  std::string_view description;
};
}

#endif
33 changes: 33 additions & 0 deletions lattice/cpp/templates/load-object.h.j2
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
#ifndef LOAD_OBJECT_H_
#define LOAD_OBJECT_H_

#include <nlohmann/json.hpp>
#include <courierr/courierr.h>

namespace {{namespace}} {

/// Extract j[subnode] into `object`, recording success in `object_is_set`.
/// A missing key is tolerated: `object` is left untouched and
/// `object_is_set` becomes false; when `required` is true the miss is logged.
template<class T>
void json_get(const nlohmann::json& j, // const ref: avoids copying the whole document per field
              Courierr::Courierr& logger,
              const char *subnode,
              T& object,
              bool& object_is_set,
              bool required = false)
{
  try
  {
    object = j.at(subnode).get<T>();
    object_is_set = true;
  }
  catch (nlohmann::json::out_of_range & ex)
  {
    object_is_set = false;
    if (required)
    {
      // NOTE(review): a missing *required* node is only warned about, not
      // raised as an error — confirm this leniency is intentional.
      logger.warning(ex.what());
    }
  }
}
}

#endif
2 changes: 1 addition & 1 deletion lattice/cpp_entries.py
Original file line number Diff line number Diff line change
Expand Up @@ -394,4 +394,4 @@ def _get_items_to_serialize(self, header_tree):
# .............................................................................................
def _add_included_headers(self, main_header):
self._preamble.clear()
self._preamble.append(f'#include <{snake_style(main_header)}.h>\n#include <loadobject_205.h>\n')
self._preamble.append(f'#include <{snake_style(main_header)}.h>\n#include <load-object.h>\n')
82 changes: 45 additions & 37 deletions lattice/header_entries.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
import os
import re
from .file_io import load
from .util import snake_style
from .file_io import load, get_base_stem
from .util import snake_style, hyphen_separated_lowercase_style
from typing import Optional
import pathlib

def remove_prefix(text, prefix):
return text[len(prefix):] if text.startswith(prefix) else text
Expand Down Expand Up @@ -460,7 +462,7 @@ def __init__(self):
self._doxynotes = '/// @note This class has been auto-generated. Local changes will not be saved!\n'
self._epilogue = list()
self._data_group_types = ['Data Group']

self._forward_declaration_dir: Optional[pathlib.Path] = None

# .............................................................................................
def __str__(self):
Expand Down Expand Up @@ -500,15 +502,16 @@ def modified_insertion_sort(obj_list):
return swapped

# .............................................................................................
def translate(self, input_file_path, container_class_name, schema_base_class_name=None):
def translate(self, input_file_path, top_namespace: str, forward_declarations_path: pathlib.Path):
'''X'''
self._source_dir = os.path.dirname(os.path.abspath(input_file_path))
self._forward_declaration_dir = forward_declarations_path
self._schema_name = os.path.splitext(os.path.splitext(os.path.basename(input_file_path))[0])[0]
self._references.clear()
self._fundamental_data_types.clear()
self._preamble.clear()
self._epilogue.clear()

self._contents = load(input_file_path)

# Load meta info first (assuming that base level tag == Schema means object type == Meta)
Expand All @@ -517,7 +520,7 @@ def translate(self, input_file_path, container_class_name, schema_base_class_nam
self._add_included_headers(self._contents['Schema'].get('References'))

# Create "root" node(s)
self._top_namespace = HeaderEntry(f'{container_class_name}')
self._top_namespace = HeaderEntry(top_namespace)
self._namespace = HeaderEntry(f'{snake_style(self._schema_name)}_ns', parent=self._top_namespace)

# First, assemble typedefs
Expand Down Expand Up @@ -597,7 +600,7 @@ def translate(self, input_file_path, container_class_name, schema_base_class_nam
'Name')
HeaderTranslator.modified_insertion_sort(self._namespace.child_entries)
# PerformanceMapBase object needs sibling grid/lookup vars to be created, so parse last
self._add_performance_overloads()
#self._add_performance_overloads()

# Final passes through dictionary in order to add elements related to serialization
for base_level_tag in (
Expand All @@ -611,6 +614,36 @@ def translate(self, input_file_path, container_class_name, schema_base_class_nam
# objects might be included and used from elsewhere.
ObjectSerializationDeclaration(base_level_tag, self._namespace)

# .............................................................................................
def _load_meta_info(self, schema_section):
    '''Store the global/common types and the types defined by any named references.'''
    self._root_data_group = schema_section.get('Root Data Group')
    # The current schema and the shared "core" schema are always reference sources.
    refs = {f'{self._schema_name}' : os.path.join(self._source_dir, f'{self._schema_name}.schema.yaml'),
            'core' : os.path.join(os.path.dirname(__file__),'core.schema.yaml')}
    if 'References' in schema_section:
        for ref in schema_section['References']:
            refs.update({f'{ref}' : os.path.join(self._source_dir, ref + '.schema.yaml')})
    # When translating core itself, also scan the sibling schema directory so
    # core headers can reference types that other schemas declare.
    if (self._schema_name == "core"
        and self._forward_declaration_dir
        and self._forward_declaration_dir.is_dir()):
        for file in self._forward_declaration_dir.iterdir():
            ref = get_base_stem(file)
            refs.update({ref : file})

    for ref_file in refs:
        ext_dict = load(refs[ref_file])
        # Data Group Templates behave as additional "data group"-like object types.
        self._data_group_types.extend([name for name in ext_dict if ext_dict[name]['Object Type'] == 'Data Group Template'])
        self._references[ref_file] = [name for name in ext_dict if ext_dict[name]['Object Type'] in self._data_group_types + ['Enumeration']]

    # Map schema scalar types onto their C++ equivalents.
    cpp_types = {'integer' : 'int',
                 'string' : 'std::string',
                 'number' : 'double',
                 'boolean': 'bool'}
    # NOTE(review): ext_dict below is whichever reference the loop visited last;
    # the extraction lost indentation, so confirm whether these passes were
    # meant to run inside the loop for every reference file.
    for base_item in [name for name in ext_dict if ext_dict[name]['Object Type'] == 'Data Type']:
        self._fundamental_data_types[base_item] = cpp_types.get(ext_dict[base_item]['JSON Schema Type'])
    for base_item in [name for name in ext_dict if ext_dict[name]['Object Type'] == 'String Type']:
        self._fundamental_data_types[base_item] = 'std::string'

# .............................................................................................
def _add_include_guard(self, header_name):
s1 = f'#ifndef {header_name.upper()}_H_'
Expand All @@ -624,55 +657,30 @@ def _add_included_headers(self, ref_list):
if ref_list:
includes = ''
for r in ref_list:
includes += f'#include <{snake_style(r)}.h>'
includes += f'#include <{hyphen_separated_lowercase_style(r)}.h>'
includes += '\n'
self._preamble.append(includes)
self._preamble.append('#include <string>\n#include <vector>\n#include <nlohmann/json.hpp>\n#include <typeinfo_205.h>\n')
self._preamble.append('#include <string>\n#include <vector>\n#include <nlohmann/json.hpp>\n#include <enum-info.h>\n')

# .............................................................................................
def _add_member_headers(self, data_element):
if 'unique_ptr' in data_element.type:
m = re.search(r'\<(.*)\>', data_element.type)
if m:
include = f'#include <{snake_style(m.group(1))}.h>\n'
include = f'#include <{hyphen_separated_lowercase_style(m.group(1))}.h>\n'
if include not in self._preamble:
self._preamble.append(include)
if data_element.superclass:
include = f'#include <{snake_style(data_element.superclass)}.h>\n'
include = f'#include <{hyphen_separated_lowercase_style(data_element.superclass)}.h>\n'
if include not in self._preamble:
self._preamble.append(include)

# .............................................................................................
def _load_meta_info(self, schema_section):
    '''Store the global/common types and the types defined by any named references.'''
    # NOTE(review): deleted side of the diff; the added version also scans a
    # forward-declaration directory when translating the core schema.
    self._root_data_group = schema_section.get('Root Data Group')
    # The current schema and the shared "core" schema are always reference sources.
    refs = {f'{self._schema_name}' : os.path.join(self._source_dir, f'{self._schema_name}.schema.yaml'),
            'core' : os.path.join(os.path.dirname(__file__),'core.schema.yaml')}
    if 'References' in schema_section:
        for ref in schema_section['References']:
            refs.update({f'{ref}' : os.path.join(self._source_dir, ref + '.schema.yaml')})
    # refs.insert(0,self._schema_name) # prepend the current file to references list so that
    # # objects are found locally first
    for ref_file in refs:
        ext_dict = load(refs[ref_file])
        # Data Group Templates behave as additional "data group"-like object types.
        self._data_group_types.extend([name for name in ext_dict if ext_dict[name]['Object Type'] == 'Data Group Template'])
        self._references[ref_file] = [name for name in ext_dict if ext_dict[name]['Object Type'] in self._data_group_types + ['Enumeration']]
    # Map schema scalar types onto their C++ equivalents.
    cpp_types = {'integer' : 'int',
                 'string' : 'std::string',
                 'number' : 'double',
                 'boolean': 'bool'}
    # NOTE(review): ext_dict below is whichever reference the loop visited last;
    # the extraction lost indentation, so confirm whether these passes were
    # meant to run inside the loop for every reference file.
    for base_item in [name for name in ext_dict if ext_dict[name]['Object Type'] == 'Data Type']:
        self._fundamental_data_types[base_item] = cpp_types.get(ext_dict[base_item]['JSON Schema Type'])
    for base_item in [name for name in ext_dict if ext_dict[name]['Object Type'] == 'String Type']:
        self._fundamental_data_types[base_item] = 'std::string'
    #print(self._fundamental_data_types)

# .............................................................................................
def _add_function_overrides(self, parent_node, base_class_name):
'''Get base class virtual functions to be overridden.'''
base_class = os.path.join(os.path.dirname(__file__),
'src',
f'{snake_style(base_class_name)}.h')
f'{hyphen_separated_lowercase_style(base_class_name)}.h')
try:
with open(base_class) as b:
for line in b:
Expand Down
16 changes: 11 additions & 5 deletions lattice/lattice.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
from .docs import HugoWeb, DocumentFile
from .header_entries import HeaderTranslator
from .cpp_entries import CPPTranslator
from lattice.cpp.generate_support_headers import generate_support_headers

class SchemaFile: # pylint:disable=R0902
"""Parse the components of a schema file."""
Expand Down Expand Up @@ -171,6 +172,8 @@ def __init__(
self.generate_json_schemas()
self.validate_example_files()

self.collect_cpp_schemas()

self.setup_cpp_source_files()

def collect_schemas(self):
Expand All @@ -187,7 +190,6 @@ def collect_schemas(self):
# Collect list of schema files
self.schemas: List[SchemaFile] = []
for file_name in sorted(list(self.schema_directory_path.iterdir())):
#file_path = self.schema_directory_path / file_name
if fnmatch(file_name, "*.schema.yaml") or fnmatch(file_name, "*.schema.yml"):
self.schemas.append(SchemaFile(file_name))

Expand Down Expand Up @@ -281,7 +283,6 @@ def collect_example_files(self):
self.examples = []
if self.example_directory_path is not None:
for file_name in sorted(list(self.example_directory_path.iterdir())):
#file_path = self.example_directory_path / file_name
if file_name.is_file():
self.examples.append(file_name.absolute())

Expand Down Expand Up @@ -334,20 +335,25 @@ def generate_web_documentation(self):
else:
warnings.warn('Template directory "doc" does not exist under {self.root_directory}')

def collect_cpp_schemas(self):
    """Collect source schemas into list of SchemaFiles"""
    # C++ generation covers every project schema plus the packaged core schema.
    core_schema = SchemaFile(Path(__file__).with_name("core.schema.yaml"))
    self.cpp_schemas = [*self.schemas, core_schema]

def setup_cpp_source_files(self):
"""Create directories for generated CPP source"""
self.cpp_output_dir = Path(self.build_directory) / "cpp"
make_dir(self.cpp_output_dir)
for schema in self.schemas:
for schema in self.cpp_schemas:
schema.cpp_header_path = self.cpp_output_dir / f"{schema.file_base_name.lower()}.h"
schema.cpp_source_path = self.cpp_output_dir / f"{schema.file_base_name.lower()}.cpp"

def generate_cpp_headers(self):
"""Generate CPP header and source files"""
h = HeaderTranslator()
c = CPPTranslator()
for schema in self.schemas:
h.translate(schema.path, self.root_directory.name)
for schema in self.cpp_schemas:
h.translate(schema.path, self.root_directory.name, self.schema_directory_path)
dump(str(h), schema.cpp_header_path)
c.translate(self.root_directory.name, h)
dump(str(c), schema.cpp_source_path)
generate_support_headers(self.root_directory.name, schema.cpp_header_path.parent)
3 changes: 3 additions & 0 deletions lattice/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,3 +4,6 @@ def snake_style(s):
#return ''.join(['_'+c.lower() if c.isupper() else c for c in s]).lstrip('_')
a = re.compile('((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))')
return a.sub(r'_\1', s).lower()

def hyphen_separated_lowercase_style(s):
    """Return *s* in lowercase hyphen-separated form (e.g. 'LoadObject' -> 'load-object')."""
    # snake_style yields underscore-separated lowercase; swap separators.
    return snake_style(s).replace("_", "-")

0 comments on commit c33c422

Please sign in to comment.