
Commit

tests: improve coverage and add rules
gusye1234 committed Sep 6, 2024
1 parent e2dd89b commit f2ae785
Showing 4 changed files with 282 additions and 6 deletions.
7 changes: 7 additions & 0 deletions .coveragerc
@@ -0,0 +1,7 @@
[report]
exclude_lines =
# Have to re-enable the standard pragma
pragma: no cover

# Don't complain if tests don't hit defensive assertion code:
raise NotImplementedError
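
For reference, this is how coverage.py applies these rules: any source line matching one of the exclude_lines regexes is dropped from the report, and excluding a line that opens a block (such as a def) excludes the whole block, which is why the standard pragma has to be re-enabled here. A minimal sketch with hypothetical function names, for illustration only:

def load_backend(name: str):
    if name == "json":
        import json
        return json
    # Defensive fallback: dropped from the report by the
    # "raise NotImplementedError" rule above.
    raise NotImplementedError

def debug_dump(obj):  # pragma: no cover
    # The def line matches the re-enabled pragma, so coverage.py
    # excludes this entire function body from the report.
    print(repr(obj))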
14 changes: 11 additions & 3 deletions nano_graphrag/_op.py
@@ -395,6 +395,7 @@ async def _pack_single_community_describe(
community: SingleCommunitySchema,
max_token_size: int = 12000,
already_reports: dict[str, CommunitySchema] = {},
global_config: dict = {},
) -> str:
nodes_in_order = sorted(community["nodes"])
edges_in_order = sorted(community["edges"], key=lambda x: x[0] + x[1])
@@ -442,9 +443,15 @@ async def _pack_single_community_describe(

# If the context exceeds the limit and there are sub-communities:
report_describe = ""
if truncated and len(community["sub_communities"]) and len(already_reports):
logger.info(
f"Community {community['title']} exceeds the limit, using its sub-communities"
need_to_use_sub_communities = (
truncated and len(community["sub_communities"]) and len(already_reports)
)
force_to_use_sub_communities = global_config["addon_params"].get(
"force_to_use_sub_communities", False
)
if need_to_use_sub_communities or force_to_use_sub_communities:
logger.debug(
f"Community {community['title']} exceeds the limit or you set force_to_use_sub_communities to True, using its sub-communities"
)
report_describe, report_size, contain_nodes, contain_edges = (
_pack_single_community_by_sub_communities(
@@ -540,6 +547,7 @@ async def _form_single_community_report(
community,
max_token_size=global_config["best_model_max_token_size"],
already_reports=already_reports,
global_config=global_config,
)
prompt = community_report_prompt.format(input_text=describe)
response = await use_llm_func(prompt, **llm_extra_kwargs)
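The new global_config argument threaded into _pack_single_community_describe exposes addon_params, so a caller can force the use of sub-community reports even when a community description fits the token budget. A minimal usage sketch, assuming the GraphRAG entry point forwards an addon_params dict into global_config (as the global_config["addon_params"] lookup above implies); the working directory is a hypothetical placeholder:

from nano_graphrag import GraphRAG

# Always describe communities through their sub-community reports,
# regardless of whether the packed context exceeds max_token_size.
rag = GraphRAG(
    working_dir="./nano_graphrag_cache",  # hypothetical path
    addon_params={"force_to_use_sub_communities": True},
)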
