Skip to content

Commit

Permalink
Add tests and fixes for the Collection API (#14)
Browse files Browse the repository at this point in the history
* fix: fetch_job must return a counterfactual to failure

* fix: remove ghost job db check

they were not being inserted anyway

* fix: database insertion logic

previously, we would rely on `lastrowid` to provide the pk of the inserted row,
but since we're using `INSERT OR IGNORE`, we ran the risk of reusing an old
`lastrowid` entry. Now, we will only use it if we've confirmed a row
was inserted due to the statement.

* enable foreign keys by default in database

* fix: reorder calling of _query within prometheus client to better accommodate test mocking

* add tests: spec variant parsing

* add tests: prometheus client

* add tests: collection API

* add tests: db client

* add config and dependencies for tests

* test_collect -> test_collection

* fix: flake8 compliance

* update github actions deps

* update github actions instructions for unit tests

* python3.11 requirement for unit tests

* fix: avoid extraneous select statement in insert_node

* reinstate python matrix for unit tests

* fix: handle jobs that haven't started or finished

* fix: handle jobs with no runners

* test data definitions no longer imported line-by-line

* logging -> logger

* use top-level requirements.txt for unit test dependency listing

* fix: shrink mock of usage data 🤪
  • Loading branch information
cmelone authored Mar 4, 2024
1 parent 7afc33d commit 31a0aa1
Show file tree
Hide file tree
Showing 22 changed files with 348 additions and 34 deletions.
5 changes: 3 additions & 2 deletions .github/workflows/requirements/unit-tests.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
pytest==7.4.3
pytest-asyncio==0.23.2
pytest==8.0.1
pytest-aiohttp==1.0.5
pytest-mock==3.12.0
11 changes: 8 additions & 3 deletions .github/workflows/unit-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,17 +9,22 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.8', '3.11']
python-version: ['3.11']
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
cache-dependency-path: '.github/workflows/requirements/unit-tests.txt'
cache-dependency-path: |
'requirements.txt'
'.github/workflows/requirements/unit-tests.txt'
- name: Install Python dependencies
run: |
pip install -r requirements.txt
pip install -r .github/workflows/requirements/unit-tests.txt
- name: Run Unit Tests with Pytest
run: |
pytest
python -m pytest gantry
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,5 @@ __pycache__
spack.lock
.spack-env
db/*.db
.coverage
htmlcov
4 changes: 3 additions & 1 deletion db/schema.sql
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
PRAGMA foreign_keys = ON;

CREATE TABLE nodes (
id INTEGER PRIMARY KEY,
uuid TEXT NOT NULL UNIQUE,
Expand Down Expand Up @@ -27,7 +29,7 @@ CREATE TABLE jobs (
stack TEXT NOT NULL,
build_jobs INTEGER NOT NULL,
cpu_request REAL NOT NULL,
cpu_limit REAL, -- this can be null becasue it's currently not set
cpu_limit REAL, -- this can be null because it's currently not set
cpu_mean REAL NOT NULL,
cpu_median REAL NOT NULL,
cpu_max REAL NOT NULL,
Expand Down
8 changes: 4 additions & 4 deletions gantry/clients/prometheus/prometheus.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ async def query_single(self, query: str | dict, time: int) -> list:

query = util.process_query(query)
url = f"{self.base_url}/query?query={query}&time={time}"
return await self._query(url)
return self.prettify_res(await self._query(url))

async def query_range(self, query: str | dict, start: int, end: int) -> list:
"""Query Prometheus for a range of values
Expand All @@ -64,15 +64,15 @@ async def query_range(self, query: str | dict, start: int, end: int) -> list:
f"end={end}&"
f"step={step}s"
)
return await self._query(url)
return self.prettify_res(await self._query(url))

async def _query(self, url: str) -> list:
"""Query Prometheus with a query string"""
async with aiohttp.ClientSession(raise_for_status=True) as session:
# submit cookie with request
async with session.get(url, cookies=self.cookies) as resp:
try:
return self.prettify_res(await resp.json())
return await resp.json()
except aiohttp.ContentTypeError:
logger.error(
"""Prometheus query failed with unexpected response.
Expand All @@ -81,7 +81,7 @@ async def _query(self, url: str) -> list:
return {}

def prettify_res(self, response: dict) -> list:
"""Process Prometheus response into an arrray of dicts with {label: value}"""
"""Process Prometheus response into a list of dicts with {label: value}"""
result_type = response.get("data", {}).get("resultType")
values_dict = {
"matrix": "values",
Expand Down
12 changes: 0 additions & 12 deletions gantry/db/get.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,15 +26,3 @@ async def job_exists(db: aiosqlite.Connection, gl_id: int) -> bool:
return True

return False


async def ghost_exists(db: aiosqlite.Connection, gl_id: int) -> bool:
"""return if the ghost job exists in the database"""

async with db.execute(
"select id from ghost_jobs where gitlab_id = ?", (gl_id,)
) as cursor:
if await cursor.fetchone():
return True

return False
25 changes: 19 additions & 6 deletions gantry/db/insert.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,11 @@
import logging

import aiosqlite

from gantry.db.get import get_node

logger = logging.getLogger(__name__)


def insert_dict(table: str, input: dict, ignore=False) -> tuple[str, tuple]:
"""
Expand Down Expand Up @@ -35,15 +39,20 @@ async def insert_node(db: aiosqlite.Connection, node: dict) -> int:
"nodes",
node,
# deal with races
# this also ignores the not-null constraint
# so we need to make sure the node is valid before inserting
ignore=True,
)
) as cursor:
pk = cursor.lastrowid
# this check ensures that something was inserted
# and not relying on lastrowid, which could be anything
if cursor.rowcount > 0:
return cursor.lastrowid

if pk == 0:
# the ignore part of the query was triggered, some other call
# must have inserted the node before this one
pk = await get_node(db, node["uuid"])
pk = await get_node(db, node["uuid"])

if pk is None:
logger.error(f"node not inserted: {node}. data is likely missing")

return pk

Expand All @@ -60,4 +69,8 @@ async def insert_job(db: aiosqlite.Connection, job: dict) -> int:
ignore=True,
)
) as cursor:
return cursor.lastrowid
if cursor.rowcount > 0:
return cursor.lastrowid

logger.error(f"job not inserted: {job}. data is likely missing")
return None
7 changes: 5 additions & 2 deletions gantry/models/job.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,11 @@ def __init__(
self.status = status
self.name = name
self.gl_id = gl_id
self.start = datetime.fromisoformat(start).timestamp()
self.end = datetime.fromisoformat(end).timestamp()
# handle jobs that haven't started or finished
if start:
self.start = datetime.fromisoformat(start).timestamp()
if end:
self.end = datetime.fromisoformat(end).timestamp()
self.ref = ref

@property
Expand Down
8 changes: 5 additions & 3 deletions gantry/routes/collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,17 +45,19 @@ async def fetch_job(
if (
job.status != "success"
or not job.valid_build_name # is not a build job
# some jobs don't have runners..?
or payload["runner"] is None
# uo runners are not in Prometheus
or payload["runner"]["description"].startswith("uo")
or await db.job_exists(db_conn, job.gl_id) # job already in the database
or await db.ghost_exists(db_conn, job.gl_id) # ghost already in db
):
return

# check if the job is a ghost
job_log = await gitlab.job_log(job.gl_id)
is_ghost = "No need to rebuild" in job_log
if is_ghost:
logger.warning(f"job {job.gl_id} is a ghost, skipping")
return

try:
Expand All @@ -70,7 +72,7 @@ async def fetch_job(
logger.error(f"{e} job={job.gl_id}")
return

await db.insert_job(
job_id = await db.insert_job(
db_conn,
{
"node": node_id,
Expand All @@ -89,7 +91,7 @@ async def fetch_job(
# we don't accidentally commit a node without a job
await db_conn.commit()

return
return job_id


async def fetch_node(
Expand Down
16 changes: 16 additions & 0 deletions gantry/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# fixtures shared among all tests

import aiosqlite
import pytest


@pytest.fixture
async def db_conn():
    """
    Yield an in-memory sqlite connection with the project schema applied.

    Using ":memory:" ensures every test starts from a clean database.
    """
    db = await aiosqlite.connect(":memory:")
    try:
        with open("db/schema.sql") as f:
            await db.executescript(f.read())
        yield db
    finally:
        # close the connection even if schema setup or the test body raised,
        # so a failing test cannot leak the handle
        await db.close()
Empty file added gantry/tests/defs/__init__.py
Empty file.
37 changes: 37 additions & 0 deletions gantry/tests/defs/collection.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
# flake8: noqa
# fmt: off

# Test fixtures for the collection API tests.
# Constants fall into two groups: hand-written invalid inputs, and payloads
# captured from real systems (Prometheus / the sqlite db) — the captured ones
# must stay byte-for-byte identical to what the tests compare against.

INVALID_JOB_NAME = "invalid job name"
# uo runners are not supported
INVALID_RUNNER = {"description": "uo-blabla1821"}
INVALID_JOB_STATUS = "failure"
GHOST_JOB_LOG = "No need to rebuild"
VALID_JOB_LOG = "some log"

# minimal webhook-style payload accepted by the collection route
VALID_JOB = {
    "build_status": "success",
    "build_name": "[email protected] /jcchwaj %[email protected] arch=linux-ubuntu20.04-x86_64_v3 E4S",
    "build_id": 9892514, # not used in testing unless it already exists in the db
    "build_started_at": "2024-01-24T17:24:06.000Z",
    "build_finished_at": "2024-01-24T17:47:00.000Z",
    "ref": "pr42264_bugfix/mathomp4/hdf5-appleclang15",
    "runner": {"description": "aws"},
}

# used to compare successful insertions
# run SELECT * FROM table_name WHERE id = 1; from python sqlite api and grab fetchone() result
INSERTED_JOB = (1, 'runner-hwwb-i3u-project-2-concurrent-1-s10tq41z', 1, 1706117046, 1706118420, 9892514, 'success', 'pr42264_bugfix/mathomp4/hdf5-appleclang15', 'gmsh', '4.8.4', '{"alglib": true, "cairo": false, "cgns": true, "compression": true, "eigen": false, "external": false, "fltk": true, "gmp": true, "hdf5": false, "ipo": false, "med": true, "metis": true, "mmg": true, "mpi": true, "netgen": true, "oce": true, "opencascade": false, "openmp": false, "petsc": false, "privateapi": false, "shared": true, "slepc": false, "tetgen": true, "voropp": true, "build_system": "cmake", "build_type": "Release", "generator": "make"}', 'gcc', '11.4.0', 'linux-ubuntu20.04-x86_64_v3', 'e4s', 16, 0.75, None, 1.899768349523097, 0.2971597591741076, 4.128116379389054, 0.2483743618267752, 1.7602635378120381, 2000000000.0, 48000000000.0, 143698407.6190476, 2785280.0, 594620416.0, 2785280.0, 252073065.82263485)
INSERTED_NODE = (1, 'ec253b04-b1dc-f08b-acac-e23df83b3602', 'ip-192-168-86-107.ec2.internal', 24.0, 196608000000.0, 'amd64', 'linux', 'i3en.6xlarge')

# these were obtained by executing the respective queries to Prometheus and capturing the JSON output
# or the raw output of PrometheusClient._query
VALID_ANNOTATIONS = {'status': 'success', 'data': {'resultType': 'vector', 'result': [{'metric': {'__name__': 'kube_pod_annotations', 'annotation_gitlab_ci_job_id': '9892514', 'annotation_metrics_spack_ci_stack_name': 'e4s', 'annotation_metrics_spack_job_spec_arch': 'linux-ubuntu20.04-x86_64_v3', 'annotation_metrics_spack_job_spec_compiler_name': 'gcc', 'annotation_metrics_spack_job_spec_compiler_version': '11.4.0', 'annotation_metrics_spack_job_spec_pkg_name': 'gmsh', 'annotation_metrics_spack_job_spec_pkg_version': '4.8.4', 'annotation_metrics_spack_job_spec_variants': '+alglib~cairo+cgns+compression~eigen~external+fltk+gmp~hdf5~ipo+med+metis+mmg+mpi+netgen+oce~opencascade~openmp~petsc~privateapi+shared~slepc+tetgen+voropp build_system=cmake build_type=Release generator=make', 'container': 'kube-state-metrics', 'endpoint': 'http', 'instance': '192.168.164.84:8080', 'job': 'kube-state-metrics', 'namespace': 'pipeline', 'pod': 'runner-hwwb-i3u-project-2-concurrent-1-s10tq41z', 'service': 'kube-prometheus-stack-kube-state-metrics', 'uid': 'd7aa13e0-998c-4f21-b1d6-62781f4980b0'}, 'value': [1706117733, '1']}]}}
VALID_RESOURCE_REQUESTS = {'status': 'success', 'data': {'resultType': 'vector', 'result': [{'metric': {'__name__': 'kube_pod_container_resource_requests', 'container': 'build', 'endpoint': 'http', 'instance': '192.168.164.84:8080', 'job': 'kube-state-metrics', 'namespace': 'pipeline', 'node': 'ip-192-168-86-107.ec2.internal', 'pod': 'runner-hwwb-i3u-project-2-concurrent-1-s10tq41z', 'resource': 'cpu', 'service': 'kube-prometheus-stack-kube-state-metrics', 'uid': 'd7aa13e0-998c-4f21-b1d6-62781f4980b0', 'unit': 'core'}, 'value': [1706117733, '0.75']}, {'metric': {'__name__': 'kube_pod_container_resource_requests', 'container': 'build', 'endpoint': 'http', 'instance': '192.168.164.84:8080', 'job': 'kube-state-metrics', 'namespace': 'pipeline', 'node': 'ip-192-168-86-107.ec2.internal', 'pod': 'runner-hwwb-i3u-project-2-concurrent-1-s10tq41z', 'resource': 'memory', 'service': 'kube-prometheus-stack-kube-state-metrics', 'uid': 'd7aa13e0-998c-4f21-b1d6-62781f4980b0', 'unit': 'byte'}, 'value': [1706117733, '2000000000']}]}}
VALID_RESOURCE_LIMITS = {'status': 'success', 'data': {'resultType': 'vector', 'result': [{'metric': {'__name__': 'kube_pod_container_resource_limits', 'container': 'build', 'endpoint': 'http', 'instance': '192.168.164.84:8080', 'job': 'kube-state-metrics', 'namespace': 'pipeline', 'node': 'ip-192-168-86-107.ec2.internal', 'pod': 'runner-hwwb-i3u-project-2-concurrent-1-s10tq41z', 'resource': 'memory', 'service': 'kube-prometheus-stack-kube-state-metrics', 'uid': 'd7aa13e0-998c-4f21-b1d6-62781f4980b0', 'unit': 'byte'}, 'value': [1706117733, '48000000000']}]}}
VALID_MEMORY_USAGE = {'status': 'success', 'data': {'resultType': 'matrix', 'result': [{'metric': {'__name__': 'container_memory_working_set_bytes', 'container': 'build', 'endpoint': 'https-metrics', 'id': '/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7aa13e0_998c_4f21_b1d6_62781f4980b0.slice/cri-containerd-48a5e9e7d46655e73ba119fa16b65fa94ceed23c55157db8269b0b12f18f55d1.scope', 'image': 'ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01', 'instance': '192.168.86.107:10250', 'job': 'kubelet', 'metrics_path': '/metrics/cadvisor', 'name': '48a5e9e7d46655e73ba119fa16b65fa94ceed23c55157db8269b0b12f18f55d1', 'namespace': 'pipeline', 'node': 'ip-192-168-86-107.ec2.internal', 'pod': 'runner-hwwb-i3u-project-2-concurrent-1-s10tq41z', 'service': 'kube-prometheus-stack-kubelet'}, 'values': [[1706117115, '2785280'], [1706117116, '2785280'], [1706117117, '2785280'], [1706117118, '2785280'], [1706117119, '2785280'], [1706117120, '2785280'], [1706117121, '2785280'], [1706117122, '2785280'], [1706117123, '2785280'], [1706117124, '2785280'], [1706117125, '2785280'], [1706117126, '2785280'], [1706117127, '2785280'], [1706117128, '2785280'], [1706117129, '2785280'], [1706117130, '2785280'], [1706118416, '594620416'], [1706118417, '594620416'], [1706118418, '594620416'], [1706118419, '594620416'], [1706118420, '594620416']]}]}}
VALID_CPU_USAGE = {'status': 'success', 'data': {'resultType': 'matrix', 'result': [{'metric': {'container': 'build', 'cpu': 'total', 'endpoint': 'https-metrics', 'id': '/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7aa13e0_998c_4f21_b1d6_62781f4980b0.slice/cri-containerd-48a5e9e7d46655e73ba119fa16b65fa94ceed23c55157db8269b0b12f18f55d1.scope', 'image': 'ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01', 'instance': '192.168.86.107:10250', 'job': 'kubelet', 'metrics_path': '/metrics/cadvisor', 'name': '48a5e9e7d46655e73ba119fa16b65fa94ceed23c55157db8269b0b12f18f55d1', 'namespace': 'pipeline', 'node': 'ip-192-168-86-107.ec2.internal', 'pod': 'runner-hwwb-i3u-project-2-concurrent-1-s10tq41z', 'service': 'kube-prometheus-stack-kubelet'}, 'values': [[1706117145, '0.2483743618267752'], [1706117146, '0.25650526138466395'], [1706117147, '0.26463616094255266'], [1706117148, '0.2727670605004414'], [1706117149, '0.28089796005833007'], [1706117150, '0.2890288596162188'], [1706117151, '0.2971597591741076'], [1706117357, '3.7319005481816236'], [1706117358, '3.7319005481816236'], [1706117359, '3.7319005481816236'], [1706117360, '3.7319005481816245'], [1706117361, '3.7319005481816245'], [1706118420, '4.128116379389054']]}]}}
VALID_NODE_INFO = {'status': 'success', 'data': {'resultType': 'vector', 'result': [{'metric': {'__name__': 'kube_node_info', 'container': 'kube-state-metrics', 'container_runtime_version': 'containerd://1.7.2', 'endpoint': 'http', 'instance': '192.168.164.84:8080', 'internal_ip': '192.168.86.107', 'job': 'kube-state-metrics', 'kernel_version': '5.10.205-195.804.amzn2.x86_64', 'kubelet_version': 'v1.27.9-eks-5e0fdde', 'kubeproxy_version': 'v1.27.9-eks-5e0fdde', 'namespace': 'monitoring', 'node': 'ip-192-168-86-107.ec2.internal', 'os_image': 'Amazon Linux 2', 'pod': 'kube-prometheus-stack-kube-state-metrics-dbd66d8c7-6ftw8', 'provider_id': 'aws:///us-east-1c/i-0fe9d9c99fdb3631d', 'service': 'kube-prometheus-stack-kube-state-metrics', 'system_uuid': 'ec253b04-b1dc-f08b-acac-e23df83b3602'}, 'value': [1706117733, '1']}]}}
VALID_NODE_LABELS = {'status': 'success', 'data': {'resultType': 'vector', 'result': [{'metric': {'__name__': 'kube_node_labels', 'container': 'kube-state-metrics', 'endpoint': 'http', 'instance': '192.168.164.84:8080', 'job': 'kube-state-metrics', 'label_beta_kubernetes_io_arch': 'amd64', 'label_beta_kubernetes_io_instance_type': 'i3en.6xlarge', 'label_beta_kubernetes_io_os': 'linux', 'label_failure_domain_beta_kubernetes_io_region': 'us-east-1', 'label_failure_domain_beta_kubernetes_io_zone': 'us-east-1c', 'label_k8s_io_cloud_provider_aws': 'ceb9f9cc8e47252a6f7fe7d6bded2655', 'label_karpenter_k8s_aws_instance_category': 'i', 'label_karpenter_k8s_aws_instance_cpu': '24', 'label_karpenter_k8s_aws_instance_encryption_in_transit_supported': 'true', 'label_karpenter_k8s_aws_instance_family': 'i3en', 'label_karpenter_k8s_aws_instance_generation': '3', 'label_karpenter_k8s_aws_instance_hypervisor': 'nitro', 'label_karpenter_k8s_aws_instance_local_nvme': '15000', 'label_karpenter_k8s_aws_instance_memory': '196608', 'label_karpenter_k8s_aws_instance_network_bandwidth': '25000', 'label_karpenter_k8s_aws_instance_pods': '234', 'label_karpenter_k8s_aws_instance_size': '6xlarge', 'label_karpenter_sh_capacity_type': 'spot', 'label_karpenter_sh_initialized': 'true', 'label_karpenter_sh_provisioner_name': 'glr-x86-64-v4', 'label_kubernetes_io_arch': 'amd64', 'label_kubernetes_io_hostname': 'ip-192-168-86-107.ec2.internal', 'label_kubernetes_io_os': 'linux', 'label_node_kubernetes_io_instance_type': 'i3en.6xlarge', 'label_spack_io_pipeline': 'true', 'label_spack_io_x86_64': 'v4', 'label_topology_ebs_csi_aws_com_zone': 'us-east-1c', 'label_topology_kubernetes_io_region': 'us-east-1', 'label_topology_kubernetes_io_zone': 'us-east-1c', 'namespace': 'monitoring', 'node': 'ip-192-168-86-107.ec2.internal', 'pod': 'kube-prometheus-stack-kube-state-metrics-dbd66d8c7-6ftw8', 'service': 'kube-prometheus-stack-kube-state-metrics'}, 'value': [1706117733, '1']}]}}

# modified version of VALID_MEMORY_USAGE to make the mean/stddev 0
INVALID_MEMORY_USAGE = {'status': 'success', 'data': {'resultType': 'matrix', 'result': [{'metric': {'__name__': 'container_memory_working_set_bytes', 'container': 'build', 'endpoint': 'https-metrics', 'id': '/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7aa13e0_998c_4f21_b1d6_62781f4980b0.slice/cri-containerd-48a5e9e7d46655e73ba119fa16b65fa94ceed23c55157db8269b0b12f18f55d1.scope', 'image': 'ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01', 'instance': '192.168.86.107:10250', 'job': 'kubelet', 'metrics_path': '/metrics/cadvisor', 'name': '48a5e9e7d46655e73ba119fa16b65fa94ceed23c55157db8269b0b12f18f55d1', 'namespace': 'pipeline', 'node': 'ip-192-168-86-107.ec2.internal', 'pod': 'runner-hwwb-i3u-project-2-concurrent-1-s10tq41z', 'service': 'kube-prometheus-stack-kubelet'}, 'values': [[1706117115, '0']]}]}}
5 changes: 5 additions & 0 deletions gantry/tests/defs/db.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# flake8: noqa
# fmt: off

# valid input into insert_node
# NOTE(review): keys appear to mirror the columns of the `nodes` table
# (minus the autoincrement id) — confirm against db/schema.sql
NODE_INSERT_DICT = {"uuid": "ec253b04-b1dc-f08b-acac-e23df83b3602", "hostname": "ip-192-168-86-107.ec2.internal", "cores": 24.0, "mem": 196608000000.0, "arch": "amd64", "os": "linux", "instance_type": "i3en.6xlarge"}
12 changes: 12 additions & 0 deletions gantry/tests/defs/prometheus.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# flake8: noqa
# fmt: off

# sample Prometheus queries: a structured dict and a raw PromQL string
# fix: dropped the stray `query=` chained assignment, which silently leaked an
# extra module-level name `query` aliasing the same dict as QUERY_DICT
QUERY_DICT = {"metric": "kube_pod_annotations", "filters": {"annotation_gitlab_ci_job_id": 1}}
QUERY_STR = "rate(container_cpu_usage_seconds_total{pod='1', container='build'}[90s])"

# encoded versions of the above that were put through the original version of process_query
ENCODED_QUERY_DICT = "kube_pod_annotations%7Bannotation_gitlab_ci_job_id%3D%221%22%7D"
ENCODED_QUERY_STR = "rate%28container_cpu_usage_seconds_total%7Bpod%3D%271%27%2C%20container%3D%27build%27%7D%5B90s%5D%29"

# this will not be parsed as a query (neither a str nor a dict)
INVALID_QUERY = 1
1 change: 1 addition & 0 deletions gantry/tests/sql/insert_job.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
-- NOTE(review): presumably seeds one pre-existing job row (referencing node id 2)
-- so tests can exercise the "job already exists" path — confirm against test usage
INSERT INTO jobs VALUES(1,'runner-hwwb-i3u-project-2-concurrent-1-s10tq41z',2,1706117046,1706118420,9892514,'success','pr42264_bugfix/mathomp4/hdf5-appleclang15','gmsh','4.8.4','{"alglib": true, "cairo": false, "cgns": true, "compression": true, "eigen": false, "external": false, "fltk": true, "gmp": true, "hdf5": false, "ipo": false, "med": true, "metis": true, "mmg": true, "mpi": true, "netgen": true, "oce": true, "opencascade": false, "openmp": false, "petsc": false, "privateapi": false, "shared": true, "slepc": false, "tetgen": true, "voropp": true, "build_system": "cmake", "build_type": "Release", "generator": "make"}','gcc','11.4.0','linux-ubuntu20.04-x86_64_v3','e4s',16,0.75,NULL,4.12532286694540495,3.15805864677520409,11.6038107294648877,0.248374361826775191,3.34888880339475214,2000000000.0,48000000000.0,1649868862.72588062,999763968.0,5679742976.0,2785280.0,1378705563.21018671);
Loading

0 comments on commit 31a0aa1

Please sign in to comment.