Skip to content

Commit

Permalink
upgrade ruff to 0.3.5 (#142)
Browse files Browse the repository at this point in the history
  • Loading branch information
darthtrevino authored Apr 12, 2024
1 parent 656f37b commit 879cd5d
Show file tree
Hide file tree
Showing 5 changed files with 291 additions and 293 deletions.
2 changes: 1 addition & 1 deletion graphrag/index/verbs/graph/clustering/cluster_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ def apply_clustering(
graphml: str, communities: Communities, level=0, seed=0xF001
) -> nx.Graph:
"""Apply clustering to a graphml string."""
random = Random(seed)
    random = Random(seed)  # noqa: S311
graph = nx.parse_graphml(graphml)
for community_level, community_id, nodes in communities:
if level == community_level:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ def _load_nodes_edges_for_claim_chain(
seed: int = 0xD3ADF00D,
) -> tuple[list[dict], list[dict]]:
nodes = []
random = Random(seed)
    random = Random(seed)  # noqa: S311

if use_lcc:
graph = stable_largest_connected_component(graph)
Expand Down
4 changes: 2 additions & 2 deletions graphrag/query/structured_search/global_search/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,8 +115,8 @@ async def asearch(
if self.callbacks:
for callback in self.callbacks:
callback.on_map_response_end(map_responses)
map_llm_calls = sum([response.llm_calls for response in map_responses])
map_prompt_tokens = sum([response.prompt_tokens for response in map_responses])
map_llm_calls = sum(response.llm_calls for response in map_responses)
map_prompt_tokens = sum(response.prompt_tokens for response in map_responses)

        # Step 2: Combine the intermediate answers from step 1 to generate the final answer
reduce_response = await self._reduce_response(
Expand Down
Loading

0 comments on commit 879cd5d

Please sign in to comment.