get_uploaded_json.py
import os
import json
import re
import argparse
from collections import defaultdict


def parse_args():
    parser = argparse.ArgumentParser(description="Merge JSON files with total average scores.")
    parser.add_argument("--input_path", type=str, default="results/all",
                        help="The path to the folder containing the JSON files.")
    parser.add_argument("--output_path", type=str, default="results",
                        help="The folder for the merged results.")
    return parser.parse_args()


def merge_scores(input_path, output_path, patterns, score_keys, result_keys, output_filename):
    # Collect per-prefix score lists: results[prefix][result_key] -> [score, ...]
    results = defaultdict(lambda: defaultdict(list))

    for filename in os.listdir(input_path):
        for pattern, score_key, result_key in zip(patterns, score_keys, result_keys):
            prefix_pattern = re.compile(pattern)
            match = prefix_pattern.match(filename)
            if match:
                prefix = match.group(1)
                with open(os.path.join(input_path, filename), 'r') as file:
                    data = json.load(file)
                results[prefix][result_key].append(data[score_key])

    # Average the collected scores per prefix; UMT-FVD and UMTScore are set to -1
    # as placeholders since they are not computed here.
    final_results = {}
    for prefix, score_dict in results.items():
        final_results[prefix] = {
            result_key: sum(scores) / len(scores)
            for result_key, scores in score_dict.items()
        }
        final_results[prefix]["UMT-FVD"] = -1
        final_results[prefix]["UMTScore"] = -1

    if not os.path.exists(output_path):
        os.makedirs(output_path)

    output_json = os.path.join(output_path, output_filename)
    with open(output_json, 'w') as output_file:
        json.dump(final_results, output_file, indent=4)

    print(f"Results have been merged and saved to {output_json}")


def main():
    args = parse_args()
    input_path = args.input_path
    output_path = args.output_path

    patterns = [
        r'^(.*)_.*_CHScore\.json$',
        r'^(.*)_.*_GPT4o-MTScore\.json$',
        r'^(.*)_.*_MTScore\.json$'
    ]
    score_keys = [
        'total_average_score',
        'Average Score',
        'average_metamorphic_score'
    ]
    result_keys = [
        'Average_CHScore',
        'Average_GPT4o-MTScore',
        'Average_MTScore'
    ]

    merge_scores(input_path, output_path, patterns, score_keys, result_keys, 'ChronoMagic-Bench-Input.json')


if __name__ == "__main__":
    main()
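
For reference, a minimal sketch of how the prefix patterns behave, assuming result files follow the "{model}_{subset}_<Metric>.json" naming that the regexes above imply (the filenames below are hypothetical examples, not taken from the repository):

import re

pattern = re.compile(r'^(.*)_.*_CHScore\.json$')
# The greedy capture group keeps everything up to the second-to-last underscore,
# so multi-part model names such as "my_model" survive intact.
for name in ["modelA_subset1_CHScore.json", "my_model_subset1_CHScore.json"]:
    match = pattern.match(name)
    if match:
        print(name, "->", match.group(1))
# modelA_subset1_CHScore.json -> modelA
# my_model_subset1_CHScore.json -> my_model

With the default --output_path, the merged averages are written to results/ChronoMagic-Bench-Input.json, keyed by these prefixes, with UMT-FVD and UMTScore left at -1 as placeholders.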