fix incorrect id, should be instance_id
nguu0123 committed Aug 28, 2024
1 parent 5d73862 commit 8f28843
Showing 4 changed files with 25 additions and 22 deletions.
11 changes: 5 additions & 6 deletions src/qoa4ml/qoa_client.py
@@ -11,10 +11,6 @@
 import requests
 from pydantic import create_model

-from qoa4ml.probes.docker_monitoring_probe import DockerMonitoringProbe
-from qoa4ml.probes.process_monitoring_probe import ProcessMonitoringProbe
-from qoa4ml.probes.system_monitoring_probe import SystemMonitoringProbe
-
 # from .connector.mqtt_connector import Mqtt_Connector
 from .config.configs import (
     AMQPConnectorConfig,
@@ -37,9 +33,12 @@
     ServiceAPIEnum,
     ServiceMetricNameEnum,
 )
+from .probes.docker_monitoring_probe import DockerMonitoringProbe
 from .probes.probe import Probe
+from .probes.process_monitoring_probe import ProcessMonitoringProbe
+from .probes.system_monitoring_probe import SystemMonitoringProbe
 from .reports.abstract_report import AbstractReport
-from .reports.rohe_reports import RoheReport
+from .reports.ml_reports import MLReport
 from .utils.logger import qoa_logger
 from .utils.qoa_utils import (
     load_config,
@@ -57,7 +56,7 @@ class QoaClient(Generic[T]):
     def __init__(
         self,
         # NOTE: use text, number, enum
-        report_cls: type[T] = RoheReport,
+        report_cls: type[T] = MLReport,
         config_dict: Optional[dict] = None,
         config_path: Optional[str] = None,
         registration_url: Optional[str] = None,
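The last hunk above changes the constructor's default report class, so a minimal usage sketch may help (the config file name is hypothetical; report_cls and config_path are the constructor parameters visible in the hunk):

from qoa4ml.qoa_client import QoaClient
from qoa4ml.reports.rohe_reports import RoheReport

# After this commit, omitting report_cls yields an MLReport-backed client.
client = QoaClient(config_path="client_config.yaml")

# The previous default can still be requested explicitly.
rohe_client = QoaClient(report_cls=RoheReport, config_path="client_config.yaml")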
2 changes: 1 addition & 1 deletion src/qoa4ml/reports/general_application_report.py
@@ -23,7 +23,7 @@ def reset(self):
         self.report = GeneralApplicationReportModel()

         self.execution_instance = MicroserviceInstance(
-            id=UUID(self.client_config.id),
+            id=UUID(self.client_config.instance_id),
             name=self.client_config.name,
             functionality=self.client_config.functionality,
             stage=self.client_config.stage_id,
28 changes: 16 additions & 12 deletions src/qoa4ml/reports/ml_reports.py
@@ -74,7 +74,7 @@ def observe_metric(self, report_type, stage, metric: Metric):
                 self.report.service[stage].metrics[metric.metric_name] = {}

             self.report.service[stage].metrics[metric.metric_name] |= {
-                UUID(self.client_config.id): metric
+                UUID(self.client_config.instance_id): metric
             }

         elif report_type == ReportTypeEnum.data:
@@ -84,32 +84,36 @@ def observe_metric(self, report_type, stage, metric: Metric):
                 self.report.data[stage].metrics[metric.metric_name] = {}

             self.report.data[stage].metrics[metric.metric_name] |= {
-                UUID(self.client_config.id): metric
+                UUID(self.client_config.instance_id): metric
             }
         else:
             raise ValueError(f"Can't handle report type {report_type}")

     def observe_inference(self, inference_value):
-        if self.client_config.id in self.report.ml_inference:
+        if self.client_config.instance_id in self.report.ml_inference:
             raise RuntimeWarning(
                 "Inference existed, will override the existing inference"
             )
-        self.report.ml_inference[self.client_config.id] = InferenceInstance(
+        self.report.ml_inference[self.client_config.instance_id] = InferenceInstance(
             inference_id=uuid4(),
-            instance_id=UUID(self.client_config.id),
+            instance_id=UUID(self.client_config.instance_id),
             functionality=self.client_config.functionality,
             prediction=inference_value,
         )

     def observe_inference_metric(self, metric: Metric):
-        if self.client_config.id in self.report.ml_inference:
-            self.report.ml_inference[self.client_config.id].metrics.append(metric)
+        if self.client_config.instance_id in self.report.ml_inference:
+            self.report.ml_inference[self.client_config.instance_id].metrics.append(
+                metric
+            )
         else:
-            self.report.ml_inference[self.client_config.id] = InferenceInstance(
-                inference_id=uuid4(),
-                instance_id=UUID(self.client_config.id),
-                functionality=self.client_config.functionality,
-                metrics=[metric],
+            self.report.ml_inference[self.client_config.instance_id] = (
+                InferenceInstance(
+                    inference_id=uuid4(),
+                    instance_id=UUID(self.client_config.instance_id),
+                    functionality=self.client_config.functionality,
+                    metrics=[metric],
+                )
             )

     def generate_report(
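The renamed attribute implies that the client configuration object handed to these reports must expose instance_id as a UUID-formatted string. A hypothetical sketch of that expectation (the real model lives in qoa4ml.config.configs; every name here except instance_id is a placeholder):

from uuid import UUID, uuid4
from pydantic import BaseModel

class ClientConfigSketch(BaseModel):
    # Illustrative stand-in for the real client config model.
    name: str
    instance_id: str  # must parse as a UUID, e.g. str(uuid4())

cfg = ClientConfigSketch(name="ml-service", instance_id=str(uuid4()))
key = UUID(cfg.instance_id)  # mirrors UUID(self.client_config.instance_id) in the hunks above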
6 changes: 3 additions & 3 deletions src/qoa4ml/reports/rohe_reports.py
@@ -36,7 +36,7 @@ def reset(self):
         self.report = RoheReportModel()
         self.previous_microservice_instance = []
         self.execution_instance = MicroserviceInstance(
-            id=UUID(self.client_config.id),
+            id=UUID(self.client_config.instance_id),
             name=self.client_config.name,
             functionality=self.client_config.functionality,
             stage=self.client_config.stage_id,
@@ -163,7 +163,7 @@ def observe_metric(self, report_type: ReportTypeEnum, stage: str, metric: Metric
                 self.inference_report.service[stage].metrics[metric.metric_name] = {}

             self.inference_report.service[stage].metrics[metric.metric_name] |= {
-                UUID(self.client_config.id): metric
+                UUID(self.client_config.instance_id): metric
             }

         elif report_type == ReportTypeEnum.data:
@@ -173,7 +173,7 @@ def observe_metric(self, report_type: ReportTypeEnum, stage: str, metric: Metric
                 self.inference_report.data[stage].metrics[metric.metric_name] = {}

             self.inference_report.data[stage].metrics[metric.metric_name] |= {
-                UUID(self.client_config.id): metric
+                UUID(self.client_config.instance_id): metric
             }
         else:
             raise ValueError(f"Can't handle report type {report_type}")
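Both MLReport and RoheReport key recorded metrics by the reporting instance's UUID, so reports from several instances can be merged without overwriting each other. A standalone sketch of that dict-merge pattern with made-up values:

from uuid import uuid4

metrics: dict = {}
metric_name = "response_time"  # hypothetical metric name
instance_key = uuid4()

if metric_name not in metrics:
    metrics[metric_name] = {}
metrics[metric_name] |= {instance_key: 0.42}  # same |= merge idiom as observe_metric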
