Merge pull request #18 from fga-eps-mds/fix/fix_github_parser
fix github implement
liversonp authored Jul 12, 2023
2 parents dff7e7c + adf869d commit 9828794
Showing 4 changed files with 596 additions and 478 deletions.
79 changes: 19 additions & 60 deletions genericparser/plugins/dinamic/github.py
@@ -15,7 +15,7 @@ def _make_request(self, url, token=None):
"X-GitHub-Api-Version": "2022-11-28",
}
if token:
headers["Authorization"] = (f"Bearer {token}",)
headers["Authorization"] = f"Bearer {token}"
try:
response = requests.get(url, headers=headers)
except Exception as e:
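The removed line wrapped the Authorization value in parentheses with a trailing comma, which makes it a one-element tuple; requests expects header values to be str or bytes and rejects anything else. A minimal sketch of the corrected header construction, with placeholder token and URL:

import requests

# Illustrative placeholders; the real token and repository URL come from the parser's input.
token = "<personal-access-token>"
url = "https://api.github.com/repos/<owner>/<repo>"

headers = {"X-GitHub-Api-Version": "2022-11-28"}
if token:
    # Header values must be plain strings; the removed line built a one-element
    # tuple, (f"Bearer {token}",), which requests rejects as an invalid header value.
    headers["Authorization"] = f"Bearer {token}"

response = requests.get(url, headers=headers)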
@@ -53,27 +53,16 @@ def _get_statistics_weekly_commit_activity(self, base_url, token=None):
"commits_on_friday",
"commits_on_saturday",
]
url = f"{base_url}/stats/code_frequency"
url = f"{base_url}/stats/punch_card"
response = self._make_request(url, token)
for commit_count in response or []:
values[commit_count[0]] += commit_count[2]
values[commit_count[0]] += commit_count[-1]

return {"metrics": metrics, "values": values}

def _get_pull_metrics_by_threshold(self, base_url, token=None):
values = []
metrics = [
"issue_url",
"commits_url",
"state",
"open_issues",
"closed_issues",
"created_at",
"updated_at",
"closed_at",
"merged_at",
]
url = f"{base_url}/pulls"
url = f"{base_url}/pulls?state=all"
response = self._make_request(url, token)
pull_requests = response if isinstance(response, list) else []
total_issues = len(pull_requests)
@@ -88,58 +77,41 @@ def _get_pull_metrics_by_threshold(self, base_url, token=None):
)

return {
"metrics": metrics + ["total_issues", "resolved_issues", "resolved_ratio"],
"metrics": ["total_issues", "resolved_issues", "resolved_ratio"],
"values": values,
}
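Listing /pulls without parameters returns only open pull requests, so ?state=all is needed to count closed and merged ones as well; the hunk also trims the reported metrics down to the three aggregates. The counting code between the request and the return sits outside this hunk, so the following is only a sketch of how such a ratio could be computed from the same response, not the project's actual logic:

# Hypothetical pull request payload, trimmed to the fields used here.
pull_requests = [
    {"state": "closed", "merged_at": "2023-07-01T12:00:00Z"},
    {"state": "closed", "merged_at": None},
    {"state": "open", "merged_at": None},
]

total_issues = len(pull_requests)
# One plausible definition of "resolved": closed pull requests.
resolved_issues = sum(1 for pr in pull_requests if pr.get("state") == "closed")
resolved_ratio = resolved_issues / total_issues if total_issues else 0

metrics = ["total_issues", "resolved_issues", "resolved_ratio"]
values = [total_issues, resolved_issues, resolved_ratio]
print(dict(zip(metrics, values)))
# -> {'total_issues': 3, 'resolved_issues': 2, 'resolved_ratio': 0.6666666666666666}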

# Get pull metrics

def _get_pull_metrics(self, base_url, token=None):
values = []
metrics = [
"issue_url",
"commits_url",
"state",
"open_issues",
"closed_issues",
"created_at",
"updated_at",
"closed_at",
"merged_at",
]
url = f"{base_url}/pulls"
response = self._make_request(url, token)
pull_requests = response if isinstance(response, list) else []

for pull_request in pull_requests:
values.extend([pull_request.get(metric, None) for metric in metrics])
return {"metrics": metrics, "values": values}

# Get statistics metrics
def _get_statistics_metrics(self, base_url, token=None):
return {
**self._get_statistics_weekly_code_frequency(base_url),
}

# get all Pull metrics
def _get_all_pull_metrics(self, base_url, token=None):
values = []
metrics = [
metrics_to_get = [
"issue_url",
"commits_url",
"state",
"open_issues",
"closed_issues",
"number",
"draft",
"created_at",
"updated_at",
"closed_at",
"merged_at",
]
metrics = []
url = f"{base_url}/pulls?state=all" # Fetch all pull requests (open and closed)
response = self._make_request(url, token)
pull_requests = response if isinstance(response, list) else []

for pull_request in pull_requests:
metric_values = [pull_request.get(metric, None) for metric in metrics]
metric_values = [
{"metric": metric, "value": pull_request.get(metric, None)}
for metric in metrics_to_get
]
metrics.append(f"pull_request_{pull_request.get('number')}")
values.append(metric_values)

return {"metrics": metrics, "values": values}
@@ -192,23 +164,6 @@ def extract(self, input_file):
metrics.extend(return_of_comunity_metrics["metrics"])
values.extend(return_of_comunity_metrics["values"])

# Get statistics metrics
return_of_statistics_metrics = self._get_statistics_metrics(
url, token_from_github
)
metrics.extend(return_of_statistics_metrics["metrics"])
values.extend(return_of_statistics_metrics["values"])

return_of_pull_metrics = self._get_pull_metrics(url, token_from_github)
metrics.extend(return_of_pull_metrics["metrics"])
values.extend(return_of_pull_metrics["values"])

return_of_get_all_pull_metrics = self._get_all_pull_metrics(
url, token_from_github
)
metrics.extend(return_of_get_all_pull_metrics["metrics"])
values.extend(return_of_get_all_pull_metrics["values"])

return_of_get_statistics_weekly_commit_activity = (
self._get_statistics_weekly_commit_activity(url, token_from_github)
)
@@ -221,6 +176,10 @@ def extract(self, input_file):
metrics.extend(return_of_get_pull_metrics_by_threshold["metrics"])
values.extend(return_of_get_pull_metrics_by_threshold["values"])

return_of_get_pull_metrics = self._get_all_pull_metrics(url, token_from_github)
metrics.extend(return_of_get_pull_metrics["metrics"])
values.extend(return_of_get_pull_metrics["values"])

return {"metrics": metrics, "values": values, "file_paths": keys}


2 changes: 1 addition & 1 deletion pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "msgram-parser"
version = "1.0.0"
version = "1.1.0"
description = "The msgram Parser is a PyPI library that serves as a repository for parsing metrics, meaning that it provides a collection of tools, functions, and resources specifically designed to handle and extract information from metric data."
readme = "README.md"
authors = [
