Add more tests related to root category id file generation
bitbrain committed Jul 14, 2024
1 parent a14bd88 commit beb2ae9
Showing 2 changed files with 114 additions and 31 deletions.
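For context, each generated ID file is a small GDScript script that exposes entity IDs as constants. A minimal sketch of the expected output, inferred from generate_entity_id_file and _write_to_file in the diff below (the class name, constant name, and ID are illustrative; the constant name assumes the tokenizer upper-cases the entity name "Item"):

# res://pandora/root_category.gd
# Do not modify! Auto-generated file.
class_name RootCategory

const ITEM = "id_item"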
54 changes: 31 additions & 23 deletions addons/pandora/util/entity_id_file_generator.gd
@@ -1,25 +1,31 @@
const Tokenizer = preload("tokenizer.gd")


static func regenerate_id_files(root_categories: Array[PandoraCategory]) -> void:
    var class_to_entity_map = generate_class_to_entity_map(root_categories)
    for entity_class in class_to_entity_map:
        var file_content = generate_entity_id_file(entity_class, class_to_entity_map[entity_class])
        if not file_content.is_empty():
            _write_to_file(entity_class, file_content)


static func generate_class_to_entity_map(root_categories: Array[PandoraCategory]) -> Dictionary:
    var class_to_entity_map = {}
    for category in root_categories:
        _process_category_for_id_files(category, class_to_entity_map)
    return class_to_entity_map

    # Remove empty entries from the map
    var keys_to_remove = []
    for key in class_to_entity_map.keys():
        if class_to_entity_map[key].size() == 0:
            keys_to_remove.append(key)
    for key in keys_to_remove:
        class_to_entity_map.erase(key)

    return class_to_entity_map

static func generate_entity_id_file(entity_class_name: String, entities: Array[PandoraEntity]) -> Array[String]:
    if entities.is_empty():
        return []
    var lines:Array[String] = ["# Do not modify! Auto-generated file.", "class_name " + entity_class_name + "\n"]
    var lines:Array[String] = ["# Do not modify! Auto-generated file.", "class_name " + entity_class_name + "\n\n"]
    var name_usages = {}
    for entity in entities:
        var entity_name = entity.get_entity_name()
@@ -31,31 +37,33 @@ static func generate_entity_id_file(entity_class_name: String, entities: Array[PandoraEntity]) -> Array[String]:
lines.append("const " + Tokenizer.tokenize(entity_name) + ' = "' + entity.get_entity_id() + '"')
return lines


static func _process_category_for_id_files(category: PandoraCategory, class_to_entity_map: Dictionary) -> void:
    var classname = category.get_id_generation_class()
    var has_entities = false
    if not class_to_entity_map.has(classname):
        var empty:Array[PandoraEntity] = []
        class_to_entity_map[classname] = empty

    if category.is_generate_ids():
        for child in category._children:
            if not child is PandoraCategory:
                if not _entity_exists_in_map(class_to_entity_map[classname], child):
                    class_to_entity_map[classname].append(child)
            else:
                _process_category_for_id_files(child as PandoraCategory, class_to_entity_map)

    for child in category._children:
        if child is PandoraCategory:
            _process_category_for_id_files(child as PandoraCategory, class_to_entity_map)
        else:
            has_entities = true

    if has_entities or (class_to_entity_map.has(classname) and not class_to_entity_map[classname].is_empty()):
        if not class_to_entity_map.has(classname):
            var empty_array:Array[PandoraEntity] = []
            class_to_entity_map[classname] = empty_array
        if category.is_generate_ids():
            for child in category._children:
                if not child is PandoraCategory:
                    class_to_entity_map[classname].append(child)
        for child in category._children:
            if child is PandoraCategory and class_to_entity_map.has(child.get_id_generation_class()):
                for sub_entity in class_to_entity_map[child.get_id_generation_class()]:
                    if not class_to_entity_map[classname].has(sub_entity):
                        class_to_entity_map[classname].append(sub_entity)
            var child_classname = child.get_id_generation_class()
            if class_to_entity_map.has(child_classname):
                for sub_entity in class_to_entity_map[child_classname]:
                    if not _entity_exists_in_map(class_to_entity_map[classname], sub_entity):
                        class_to_entity_map[classname].append(sub_entity)

static func _entity_exists_in_map(entity_list: Array[PandoraEntity], entity: PandoraEntity) -> bool:
    for e in entity_list:
        if e.get_entity_id() == entity.get_entity_id():
            return true
    return false

static func _write_to_file(entity_class_name: String, lines: Array[String]) -> void:
    var file_path = "res://pandora/" + entity_class_name.to_snake_case() + ".gd"
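To illustrate what the new tests below lock in: for a root category containing a sub category that holds a single entity, generate_class_to_entity_map is expected to list that entity under both class names, while classes that end up with no entities are pruned from the map. A minimal usage sketch, reusing the mock helpers from the test file below (create_mock_entity / create_mock_category are test helpers, not part of this file):

var entity = create_mock_entity("Item", "id_item")
var sub_category = create_mock_category("SubCategory", "id_subcategory", [entity], true)
var root_category = create_mock_category("RootCategory", "id_rootcategory", [sub_category], true)

var result = EntityIdFileGenerator.generate_class_to_entity_map([root_category])
# result["RootCategory"] holds the entity with id "id_item"
# result["SubCategory"] holds the same entity with id "id_item"

Duplicates are filtered through _entity_exists_in_map, which compares get_entity_id() values rather than object equality, so entries representing the same entity are caught even when they are not the same Object instance.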
91 changes: 83 additions & 8 deletions test/util/entity_id_file_generator_test.gd
@@ -59,22 +59,97 @@ func test_generate_class_to_entity_map_excludes_categories() -> void:
    assert_that(result).is_empty()


func test_generate_class_to_entity_map_avoids_duplicates() -> void:
func test_generate_class_to_entity_map_avoids_duplicates_within_category() -> void:
    var entity = create_mock_entity("Item", "id_item")
    var sub_category = create_mock_category("SubCategory", "id_subcategory", [entity], true)
    var root_category = create_mock_category("RootCategory", "id_rootcategory", [sub_category], true)

    var result = EntityIdFileGenerator.generate_class_to_entity_map([root_category])

    # Collect all entities within "RootCategory" and "SubCategory"
    var root_category_entities = result["RootCategory"]
    var sub_category_entities = result["SubCategory"]

    # Ensure "RootCategory" has 1 entity and it is the correct one
    assert_that(root_category_entities.size()).is_equal(1)
    assert_that(root_category_entities[0]._id).is_equal("id_item")

    # Ensure "SubCategory" has 1 entity and it is the correct one
    assert_that(sub_category_entities.size()).is_equal(1)
    assert_that(sub_category_entities[0]._id).is_equal("id_item")

    # Use a dictionary to track unique IDs within each category
    var root_ids_dict = {}
    for entity1 in root_category_entities:
        root_ids_dict[entity1._id] = true

    var sub_ids_dict = {}
    for entity2 in sub_category_entities:
        sub_ids_dict[entity2._id] = true

    # The size of ids_dict should be 1 for each category if all entries are unique within that category
    assert_that(root_ids_dict.size()).is_equal(1)
    assert_that(sub_ids_dict.size()).is_equal(1)


func test_root_category_generates_file_with_child_entity() -> void:
    var entity = create_mock_entity("Item", "id_item")
    var sub_category = create_mock_category("SubCategory", "id_subcategory", [entity], true)
    var root_category = create_mock_category("RootCategory", "id_rootcategory", [sub_category], true)

    var result = EntityIdFileGenerator.generate_class_to_entity_map([root_category])

    # Verify that the root category produces a file with the entity from the child category
    assert_that(result.has("RootCategory")).is_true()
    assert_that(result["RootCategory"].size()).is_equal(1)
    assert_that(result["RootCategory"][0]._id).is_equal("id_item")


func test_child_category_generates_file_with_entity() -> void:
    var entity = create_mock_entity("Item", "id_item")
    var sub_category = create_mock_category("SubCategory", "id_subcategory", [entity], true)
    var root_category = create_mock_category("RootCategory", "id_rootcategory", [sub_category], true)

    var result = EntityIdFileGenerator.generate_class_to_entity_map([root_category])

    # Verify that the child category generates a file with the entity
    assert_that(result.has("SubCategory")).is_true()
    assert_that(result["SubCategory"].size()).is_equal(1)
    assert_that(result["SubCategory"][0]._id).is_equal("id_item")


func test_no_duplicate_entities_within_each_key() -> void:
    var entity = create_mock_entity("Item", "id_item")
    var sub_category = create_mock_category("SubCategory", "id_subcategory", [entity], true)
    var root_category = create_mock_category("RootCategory", "id_rootcategory", [sub_category], true)

    var result = EntityIdFileGenerator.generate_class_to_entity_map([root_category])

    # Verify that there are no duplicate entities for each key
    for key in result.keys():
        var ids_dict = {}
        for e in result[key]:
            ids_dict[e._id] = true
        assert_that(ids_dict.size()).is_equal(result[key].size())


func test_entity_exists_in_both_parent_and_child_keys() -> void:
    var entity = create_mock_entity("Item", "id_item")
    var sub_category = create_mock_category("SubCategory", "id_subcategory", [entity], true)
    var root_category = create_mock_category("RootCategory", "id_rootcategory", [sub_category], true)

    var result = EntityIdFileGenerator.generate_class_to_entity_map([root_category])

    # Verify that entities can exist in both parent and child keys
    var all_entities = []
    for entity_list in result.values():
        all_entities += entity_list

    assert_that(all_entities.size()).is_equal(1)
    assert_that(all_entities[0]._id).is_equal("id_item")

    var ids_dict = {}
    for entity1 in all_entities:
        ids_dict[entity1._id] = true
    # Verify that there are no duplicate entities across all keys
    var overall_ids_dict = {}
    for e in all_entities:
        overall_ids_dict[e._id] = overall_ids_dict[e._id] + 1 if overall_ids_dict.has(e._id) else 1

    assert_that(ids_dict.size()).is_equal(1)
    # Ensure no entity appears more than once across all keys
    for entity_id in overall_ids_dict.keys():
        assert_that(overall_ids_dict[entity_id]).is_equal(2) # Each entity should appear twice: once in parent and once in child
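
The pruning step added to generate_class_to_entity_map also implies that a category yielding no entities leaves no key behind. A minimal sketch of such a check, reusing the mock helpers from this test file (a hypothetical test, not part of this commit):

func test_category_without_entities_is_pruned() -> void:
    var empty_category = create_mock_category("EmptyCategory", "id_empty", [], true)

    var result = EntityIdFileGenerator.generate_class_to_entity_map([empty_category])

    # The "EmptyCategory" key is created while the category is processed,
    # then erased again because its entity list stays empty.
    assert_that(result.has("EmptyCategory")).is_false()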
