+{# -*- mode: Python -*- -#}
+{# SPDX-License-Identifier: LGPL-2.1-or-later -#}
+
+{%- extends 'base/python.jinja2' %}
+
+{%- block python_imports %}
+{{ super() }}
+import gzip
+import json
+import shutil
+import subprocess
+{%- endblock %}
+
+{%- block python_local_imports %}
+{{ super() }}
+import kernelci.api.helper
+{%- endblock %}
+
+{%- block python_globals %}
+{{ super() }}
+{% endblock %}
+
+{% block python_job -%}
+class Job(BaseJob):
+    def _upload_artifacts(self, local_artifacts):
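+        """Upload the given local files to storage and return their URLs.
+
+        `local_artifacts` maps artifact names to local file paths; the
+        returned dict maps the same names to the uploaded file URLs.
+        """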
+        artifacts = {}
+        storage = self._get_storage()
+        if storage and self._node:
+            root_path = '-'.join([JOB_NAME, self._node['id']])
+            print(f"Uploading artifacts to {root_path}")
+            for name, file_path in local_artifacts.items():
+                if os.path.exists(file_path):
+                    file_url = storage.upload_single(
+                        (file_path, os.path.basename(file_path)), root_path
+                    )
+                    print(file_url)
+                    artifacts[name] = file_url
+        return artifacts
+
+    def _extract_coverage(self, summary_file, node=None):
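+        """Turn a gcovr JSON summary file into KernelCI result data.
+
+        Reads the summary and returns a results dict for `node` (defaulting
+        to the current job node) with one child node per coverage metric
+        found in the summary.
+        """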
+        if node is None:
+            node = self._node
+
+        child_nodes = []
+
+        with open(summary_file, encoding='utf-8') as summary_json:
+            summary = json.load(summary_json)
+            node_data = node['data']
+
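+            # The gcovr --json-summary output reports overall percentages
+            # such as 'function_percent' and 'line_percent'; each metric
+            # found becomes a child node carrying the value as a measurement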
+            func_data = node_data.copy()
+            func_percent = summary.get('function_percent')
+            if func_percent is not None:
+                func_data['misc'] = {}
+                func_data['misc']['measurement'] = func_percent
+                child_nodes += [
+                    {
+                        'node': {
+                            'kind': 'test',
+                            'name': 'coverage.functions',
+                            'result': 'pass',
+                            'state': 'done',
+                            'data': func_data,
+                        },
+                        'child_nodes': [],
+                    },
+                ]
+
+            line_data = node_data.copy()
+            line_percent = summary.get('line_percent')
+            if line_percent is not None:
+                line_data['misc'] = {}
+                line_data['misc']['measurement'] = line_percent
+                child_nodes += [
+                    {
+                        'node': {
+                            'kind': 'test',
+                            'name': 'coverage.lines',
+                            'result': 'pass',
+                            'state': 'done',
+                            'data': line_data,
+                        },
+                        'child_nodes': [],
+                    },
+                ]
+
+        return {
+            'node': {
+                'result': 'pass' if node['id'] == self._node['id'] else node['result'],
+                'artifacts': {},
+            },
+            'child_nodes': child_nodes,
+        }
+
+    def _run(self, src_path):
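+        """Generate a combined coverage report for all sibling test nodes.
+
+        Downloads the coverage source tarball and each sibling node's raw
+        coverage data, runs gcovr on each of them, submits per-node coverage
+        measurements, then merges all tracefiles into HTML, LCOV and JSON
+        summary reports which are uploaded as artifacts.
+        """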
+        api_helper = kernelci.api.helper.APIHelper(self._api)
+        child_nodes = self._api.node.findfast({'parent': self._parent['id']})
+
+        # Ensure our working dir exists and create the log file there
+        os.makedirs(self._workspace, exist_ok=True)
+        log_path = os.path.join(self._workspace, "log.txt")
+        log_file = open(log_path, mode='w', encoding='utf-8')
+
+        log_file.write("Getting coverage source...\n")
+        tarball_url = self._get_artifact_url(self._parent, 'coverage_source_tar_xz')
+        self._get_source(tarball_url)
+        # The src_path returned by _get_source() is not usable in our case, but
+        # the tarball's top-level directory is known to be 'linux', so use that
+        src_path = os.path.join(self._workspace, 'linux')
+        log_file.write(f"Coverage source downloaded from {tarball_url}\n")
+
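+        # Common gcovr arguments: --root points at the unpacked kernel source
+        # tree so that file paths in the reports are resolved against it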
+        base_cmd = ['gcovr', '--root', src_path]
+        tracefiles = []
+
+        # Download and process coverage data for all child nodes
+        for cnode in child_nodes:
+            if cnode['id'] == self._node['id']:
+                log_file.write(f"Skipping self ({cnode['id']})\n")
+                continue
+
+            coverage_dir = os.path.join(self._workspace, f"coverage-{cnode['id']}")
+            json_summary = coverage_dir + '.summary.json'
+            try:
+                data_url = self._get_artifact_url(cnode, 'coverage_data')
+                tracefile = coverage_dir + '.json'
+                self._get_source(data_url, path=coverage_dir)
+                log_file.write(f"Downloaded coverage data from {data_url}\n")
+            except Exception:
+                log_file.write(f"WARNING: Unable to download coverage data for {cnode['id']}\n")
+                continue
+
+            # We now have raw coverage data available, process it
+            log_file.write(f"--- Processing coverage data for {cnode['id']} ---\n")
+            cmd = subprocess.run(base_cmd + [
+                '--gcov-ignore-parse-errors',
+                '--object-directory', coverage_dir,
+                '--json', tracefile,
+                '--json-summary', json_summary,
+            ], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
+            log_file.write(cmd.stdout)
+
+            try:
+                cmd.check_returncode()
+            except subprocess.CalledProcessError:
+                log_file.write(f"WARNING: Unable to process coverage data for {cnode['id']}\n")
+                continue
+
+            tracefiles += [tracefile]
+            results = self._extract_coverage(json_summary, node=cnode)
+            # We only want to create child nodes reporting coverage percentages,
+            # not actually update the test node
+            if len(results['child_nodes']) > 0:
+                api_helper.submit_results(results, cnode)
+
+        # Coverage data has been processed for all child nodes, we can now
+        # merge the tracefiles
+        args = base_cmd.copy()
+        for trace in tracefiles:
+            args += ['--add-tracefile', trace]
+
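+        # Merged report outputs: a JSON summary for extracting measurements,
+        # plus an HTML report and an LCOV tracefile to publish as artifacts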
+        output_base = os.path.join(self._workspace, f"coverage-{self._parent['id']}")
+        json_summary = output_base + '.summary.json'
+        html_report = output_base + '.html'
+        lcov_tracefile = output_base + '.info'
+        args += [
+            '--json-summary', json_summary,
+            '--html', html_report,
+            '--lcov', lcov_tracefile,
+        ]
+
+        log_file.write("--- Merging tracefiles ---\n")
+        cmd = subprocess.run(args,
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.STDOUT,
+                             text=True)
+        log_file.write(cmd.stdout)
+
+        # Ensure job completed successfully or report failure
+        try:
+            cmd.check_returncode()
+        except subprocess.CalledProcessError:
+            log_file.write("ERROR: Unable to generate coverage report\n")
+            log_file.close()
+
+            artifacts = self._upload_artifacts({'log': log_path})
+            return {
+                'node': {
+                    'result': 'fail',
+                    'artifacts': artifacts,
+                },
+                'child_nodes': [],
+            }
+
+        log_file.write("--- Compressing artifacts ---\n")
+        compressed_lcov = lcov_tracefile + '.gz'
+        with open(lcov_tracefile, 'rb') as f_in:
+            with gzip.open(compressed_lcov, 'wb') as f_out:
+                shutil.copyfileobj(f_in, f_out)
+
+        # Finish writing the job log and upload it along with other artifacts
+        log_file.write("--- Job successful ---\n")
+        log_file.close()
+
+        artifacts = {
+            'coverage_report': html_report,
+            'tracefile': compressed_lcov,
+            'log': log_path,
+        }
+        artifacts = self._upload_artifacts(artifacts)
+
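+        # Attach the uploaded artifact URLs to the result so they end up on
+        # the job node along with the extracted coverage measurements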
+        result = self._extract_coverage(json_summary)
+        result['node']['artifacts'] = artifacts
+        return result
+
+    def _submit(self, result):
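+        """Submit the final result for this job's node, marking it as done."""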
+        # Ensure top-level name is kept the same
+        result = result.copy()
+        result['node']['name'] = self._node['name']
+        result['node']['state'] = 'done'
+        api_helper = kernelci.api.helper.APIHelper(self._api)
+        api_helper.submit_results(result, self._node)
+
+{% endblock %}