From ce04099b038a07016e71b58d7e6906917a92fa69 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 17 May 2024 12:17:20 +0200 Subject: [PATCH 01/84] first version of tsp report + changes (many more requirements added) --- cornflow-dags/DAG/tsp/core/experiment.py | 33 +++-- cornflow-dags/DAG/tsp/core/instance.py | 29 +++- cornflow-dags/DAG/tsp/core/solution.py | 16 +++ cornflow-dags/DAG/tsp/report/report.qmd | 171 +++++++++++++++++++++++ cornflow-dags/requirements.txt | 8 ++ cornflow-dags/tests/test_dags.py | 7 +- 6 files changed, 245 insertions(+), 19 deletions(-) create mode 100644 cornflow-dags/DAG/tsp/report/report.qmd diff --git a/cornflow-dags/DAG/tsp/core/experiment.py b/cornflow-dags/DAG/tsp/core/experiment.py index 27e217a38..680594cfc 100644 --- a/cornflow-dags/DAG/tsp/core/experiment.py +++ b/cornflow-dags/DAG/tsp/core/experiment.py @@ -25,31 +25,38 @@ def solution(self, value): self._solution = value def get_objective(self) -> float: + # if solution is empty, we return 0 + if len(self.solution.data["route"]) == 0: + return 0 # we get a sorted list of nodes by position - route = ( - TupList(self.solution.data["route"]) - .sorted(key=lambda v: v["pos"]) - .vapply(lambda v: v["node"]) - ) - weight = {(el["n1"], el["n2"]): el["w"] for el in self.instance.data["arcs"]} - # we sum all arcs in the solution - return ( - sum([weight[n1, n2] for n1, n2 in zip(route, route[1:])]) - + weight[route[-1], route[0]] - ) + arcs = self.solution.get_used_arcs() + + # we sum all arc weights in the solution + return sum(self.get_used_arc_weights().values()) + + def get_used_arc_weights(self) -> dict: + arcs = self.solution.get_used_arcs() + weight = self.instance.get_indexed_arcs() + return arcs.to_dict(None).kapply(lambda k: weight[k]["w"]) def check_missing_nodes(self): nodes_in = TupList(v["n1"] for v in self.instance.data["arcs"]).to_set() nodes_out = TupList(n["node"] for n in self.solution.data["route"]).to_set() - return [{"node": n} for n in (nodes_in - nodes_out)] + return TupList({"node": n} for n in (nodes_in - nodes_out)) def check_missing_positions(self): nodes_in = TupList(v["n1"] for v in self.instance.data["arcs"]).to_set() positions = TupList(n["pos"] for n in self.solution.data["route"]).to_set() - return [{"position": p} for p in set(range(len(nodes_in))) - positions] + return TupList({"position": p} for p in set(range(len(nodes_in))) - positions) def check_solution(self, *args, **kwargs) -> SuperDict: return SuperDict( missing_nodes=self.check_missing_nodes(), missing_positions=self.check_missing_positions(), ) + + def get_report(self): + # get positions (explicit, or implicitly via distances) + # get graph of solution + # + pass diff --git a/cornflow-dags/DAG/tsp/core/instance.py b/cornflow-dags/DAG/tsp/core/instance.py index c77d90dea..2f82110ee 100644 --- a/cornflow-dags/DAG/tsp/core/instance.py +++ b/cornflow-dags/DAG/tsp/core/instance.py @@ -4,23 +4,24 @@ from cornflow_client import InstanceCore, get_empty_schema from cornflow_client.core.tools import load_json from pytups import TupList, SuperDict +import networkx as nx class Instance(InstanceCore): schema = load_json(os.path.join(os.path.dirname(__file__), "../schemas/input.json")) schema_checks = get_empty_schema() - def __init__(self, data): + def __init__(self, data: dict): data = SuperDict(data) data["arcs"] = TupList(data["arcs"]) super().__init__(data) @classmethod - def from_tsplib_file(cls, path): + def from_tsplib_file(cls, path: str): return cls.from_tsplib95(tsp.load(path)) @classmethod - def from_tsplib95(cls, 
problem): + def from_tsplib95(cls, problem: tsp.models.StandardProblem): nodes = list(problem.get_nodes()) edge_to_dict = lambda e: dict( n1=nodes[e[0]], n2=nodes[e[1]], w=problem.get_weight(*e) @@ -29,7 +30,7 @@ def from_tsplib95(cls, problem): return cls(dict(arcs=arcs)) def to_tsplib95(self): - arcs = TupList(self.data["arcs"]) + arcs = TupList(self.get_arcs()) nodes = (arcs.take("n1") + arcs.take("n2")).unique() pos = {k: v for v, k in enumerate(nodes)} arc_dict = arcs.to_dict( @@ -57,7 +58,27 @@ def to_tsplib95(self): edge_weight_format=edge_weight_format, edge_weights=arc_weights, ) + return tsp.models.StandardProblem(**dict_data) def get_arcs(self) -> TupList: return self.data["arcs"] + + def get_indexed_arcs(self) -> TupList: + return self.data["arcs"].to_dict( + result_col=None, indices=["n1", "n2"], is_list=False + ) + + def get_nodes(self) -> TupList: + arcs = self.get_arcs() + return (arcs.take("n1") + arcs.take("n2")).unique() + + def get_graph(self) -> nx.Graph: + nodes = self.get_nodes() + arcs = self.get_arcs() + G = nx.DiGraph() + for node in nodes: + G.add_node(node) + for arc in arcs: + G.add_edge(arc["n1"], arc["n2"], weight=arc["w"]) + return G diff --git a/cornflow-dags/DAG/tsp/core/solution.py b/cornflow-dags/DAG/tsp/core/solution.py index d4e03922f..4bdb4e389 100644 --- a/cornflow-dags/DAG/tsp/core/solution.py +++ b/cornflow-dags/DAG/tsp/core/solution.py @@ -1,9 +1,25 @@ import os from cornflow_client import SolutionCore from cornflow_client.core.tools import load_json +import pytups as pt class Solution(SolutionCore): schema = load_json( os.path.join(os.path.dirname(__file__), "../schemas/output.json") ) + + def get_route(self): + return self.data["route"] + + def get_tour(self): + return pt.TupList(self.get_route()).sorted(key=lambda v: v["pos"]).take("node") + + def get_used_arcs(self): + tour = self.get_tour() + + if len(tour) <= 1: + return [] + edges = pt.TupList(zip(tour, tour[1:])) + edges.append((tour[-1], tour[0])) + return edges diff --git a/cornflow-dags/DAG/tsp/report/report.qmd b/cornflow-dags/DAG/tsp/report/report.qmd new file mode 100644 index 000000000..fc07f062d --- /dev/null +++ b/cornflow-dags/DAG/tsp/report/report.qmd @@ -0,0 +1,171 @@ +--- +title: "TSP report" +execute: + echo: false + warning: false +format: + html: + embed-resources: true +editor_options: + chunk_output_type: console +--- + +```{python} +#| tags: [parameters] + +file_name = "/home/pchtsp/Documents/projects/baobab/cornflow/corn/cornflow-dags/DAG/tsp/data/gr17.tsp" +``` + +## TSP + +From [wikipedia](https://en.wikipedia.org/wiki/Travelling_salesman_problem): + +> The travelling salesman problem, also known as the travelling salesperson problem (TSP), asks the following question: "Given a list of cities and the distances between each pair of cities, what is the shortest possible route that visits each city exactly once and returns to the origin city?" It is an NP-hard problem in combinatorial optimization, important in theoretical computer science and operations research. 
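For orientation, here is a minimal sketch (not part of the patch itself) of the data shapes the tsp app expects, inferred from the `Instance` and `Solution` classes added above: an instance holds an `arcs` list of `{n1, n2, w}` entries, and a solution holds a `route` list of `{node, pos}` entries. The three node names and the weights below are invented for illustration.

```python
from tsp import TspApp  # assumes cornflow-dags/DAG is on sys.path, as in the setup cell below

# toy instance: three nodes, each arc carries its weight "w"
instance = TspApp.instance(
    dict(
        arcs=[
            {"n1": "a", "n2": "b", "w": 3},
            {"n1": "b", "n2": "c", "w": 4},
            {"n1": "c", "n2": "a", "w": 5},
        ]
    )
)

# a solution stores the visiting position of each node
solution = TspApp.solution(
    dict(route=[{"node": "a", "pos": 0}, {"node": "b", "pos": 1}, {"node": "c", "pos": 2}])
)

print(solution.get_tour())       # ['a', 'b', 'c']
print(solution.get_used_arcs())  # [('a', 'b'), ('b', 'c'), ('c', 'a')]
```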
+ +```{python} +#| echo: false +import os +import sys +module_path = os.path.abspath(os.path.join('../..')) +if module_path not in sys.path: + sys.path.append(module_path) + +from tsp import TspApp +import networkx as nx +import numpy as np +import pytups as pt +import matplotlib.pyplot as plt + +my_instance = TspApp.instance.from_tsplib_file(file_name) +my_experiment = TspApp.solvers['cpsat'](instance=my_instance, solution=TspApp.solution(dict(route=[]))) + +status = my_experiment.solve({'timeLimit': 2}) +pop_element = my_experiment.solution.data['route'].pop() + +``` + + +## Instance statistics + +The problem has `{python} len(my_instance.get_nodes())` nodes and `{python} len(my_instance.get_arcs())` arcs. + +The distance distribution is the following: + +```{python} +#| label: fig-distance-dist +#| fig-cap: "Distance distribution" + +# Import seaborn +import seaborn as sns +import pandas as pd + +# Apply the default theme +sns.set_theme(style="whitegrid") + +# Load an example dataset +distances = my_instance.get_arcs().take('w') +my_df = pd.DataFrame(dict(distance=distances)) + +# Create a visualization +ax = sns.histplot( + data=my_df, + x="distance", + binwidth=50 +) +ax.set(xlabel='Distance', ylabel='Number of edges') +plt.show() + +``` + +## The network + +See @fig-distances for a representation of the network distances. + + +```{python} +#| echo: false +#| label: fig-distances +#| fig-cap: "Network. Green lines mean shorter distances, red ones, longer ones." +G = my_instance.get_graph() +pos = nx.kamada_kawai_layout(G) + +weights = pt.SuperDict(nx.get_edge_attributes(G, 'weight')).vfilter(lambda v: v > 0) + +low, mid, high = np.quantile(weights.values_tl(), [0.25, 0.5, 0.75]) + +def get_color(value): + if value <= low: + return 'green' + if value > high: + return 'red' + return 'black' + +colors = weights.vapply(get_color) + +__nodes = nx.draw_networkx_nodes(G, pos=pos) +__edges = nx.draw_networkx_edges(G, edgelist=weights.keys_l(), pos=pos, edge_color=colors.values_tl()) + +# nx.draw_networkx_edge_labels(G, pos=pos, edge_labels=weights) + +__labels = nx.draw_networkx_labels(G, pos=pos, font_color='white') + +ax = plt.gca() +ax.margins(0.05) +plt.axis("off") +plt.tight_layout() +plt.show() + +``` + +## Solution statistics + +```{python} +objective = my_experiment.get_objective() +checks = my_experiment.check_solution() +feasible = len(checks['missing_nodes']) == 0 and len(checks['missing_positions']) == 0 + +``` + +```{python} +#| output: asis + +from IPython.display import display, Markdown + +if feasible: + print("::: {{.callout-tip}}\n\n## Solution is feasible\n\nThe shortest tour has length {}.\n\n:::".format(objective)) +else: + my_text = "" + if checks['missing_nodes']: + my_text += 'The solution is missing the following nodes: {}\n\n'.format(checks['missing_nodes'].take('node')) + if checks['missing_positions']: + my_text += 'The solution is missing the following positions: {}\n\n'.format(checks['missing_positions'].take('position')) + + print("::: {{.callout-important}}\n\n## Solution is infeasible\n\n{}\n\n:::".format(my_text)) + +``` + +## Solution + +```{python} +#| label: fig-tour +#| fig-cap: "Shortest tour that passes through each node once" +#| code-fold: true + +my_solution = my_experiment.solution + +my_arcs = my_experiment.get_used_arc_weights() +my_arcs_keys = my_arcs.keys_tl() +my_colors = my_arcs_keys.vapply(lambda v: colors[v]) + +__nodes = nx.draw_networkx_nodes(G, pos=pos) +__edges = nx.draw_networkx_edges(G, edgelist=my_arcs_keys, pos=pos, 
edge_color=my_colors) + +nx.draw_networkx_edge_labels(G, pos=pos, edge_labels=my_arcs, font_size=8) + +__labels = nx.draw_networkx_labels(G, pos=pos, font_color='white') + +plt.axis("off") +plt.tight_layout() +plt.show() + +``` diff --git a/cornflow-dags/requirements.txt b/cornflow-dags/requirements.txt index a67663a50..c01275b91 100644 --- a/cornflow-dags/requirements.txt +++ b/cornflow-dags/requirements.txt @@ -9,3 +9,11 @@ xmltodict<=0.13.0 openpyxl<=3.1.2 pyomo<=6.6.2 tsplib95<=0.7.1 +networkx +scipy +numpy + +# quarto +matplotlib +seaborn +jupyter \ No newline at end of file diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index 57dedced2..98497bf1b 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -1,7 +1,10 @@ import os, sys -prev_dir = os.path.join(os.path.dirname(__file__), "../DAG") -sys.path.insert(1, prev_dir) +prev_dir = os.path.join(os.path.dirname(__file__), "../") +my_paths = [prev_dir, os.path.join(prev_dir, 'DAG')] +for __my_path in my_paths: + sys.path.insert(1, __my_path) + import unittest from unittest.mock import patch, Mock, MagicMock From dad9f0e6f7dd9d4302427ede7ab9ff04060f9ec7 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 17 May 2024 15:04:06 +0200 Subject: [PATCH 02/84] add how-to reports to README --- cornflow-dags/DAG/tsp/core/solution.py | 2 +- cornflow-dags/DAG/tsp/report/report.qmd | 30 +++++++++++++++---------- cornflow-dags/README.rst | 16 +++++++++++++ 3 files changed, 35 insertions(+), 13 deletions(-) diff --git a/cornflow-dags/DAG/tsp/core/solution.py b/cornflow-dags/DAG/tsp/core/solution.py index 4bdb4e389..b056c9b8f 100644 --- a/cornflow-dags/DAG/tsp/core/solution.py +++ b/cornflow-dags/DAG/tsp/core/solution.py @@ -19,7 +19,7 @@ def get_used_arcs(self): tour = self.get_tour() if len(tour) <= 1: - return [] + return pt.TupList() edges = pt.TupList(zip(tour, tour[1:])) edges.append((tour[-1], tour[0])) return edges diff --git a/cornflow-dags/DAG/tsp/report/report.qmd b/cornflow-dags/DAG/tsp/report/report.qmd index fc07f062d..5b99ad5f3 100644 --- a/cornflow-dags/DAG/tsp/report/report.qmd +++ b/cornflow-dags/DAG/tsp/report/report.qmd @@ -13,7 +13,7 @@ editor_options: ```{python} #| tags: [parameters] -file_name = "/home/pchtsp/Documents/projects/baobab/cornflow/corn/cornflow-dags/DAG/tsp/data/gr17.tsp" +file_name = "../data/gr17.tsp" ``` ## TSP @@ -36,11 +36,18 @@ import numpy as np import pytups as pt import matplotlib.pyplot as plt -my_instance = TspApp.instance.from_tsplib_file(file_name) +extension = os.path.splitext(file_name)[1] +if extension=='.tsp': + my_instance = TspApp.instance.from_tsplib_file(file_name) +elif extension=='.json': + my_instance = TspApp.instance.from_json(file_name) +else: + raise ValueError("Unknown extension: {}".format(extension)) my_experiment = TspApp.solvers['cpsat'](instance=my_instance, solution=TspApp.solution(dict(route=[]))) -status = my_experiment.solve({'timeLimit': 2}) -pop_element = my_experiment.solution.data['route'].pop() +# TODO: Ideally, we should not solve the problem inside the report, we should already have an example of problem solved. +# status = my_experiment.solve({'timeLimit': 2}) +# pop_element = my_experiment.solution.data['route'].pop() ``` @@ -49,7 +56,7 @@ pop_element = my_experiment.solution.data['route'].pop() The problem has `{python} len(my_instance.get_nodes())` nodes and `{python} len(my_instance.get_arcs())` arcs. 
-The distance distribution is the following: +The distance distribution is shown in the histogram of @fig-distance-dist. ```{python} #| label: fig-distance-dist @@ -79,12 +86,12 @@ plt.show() ## The network -See @fig-distances for a representation of the network distances. +See @fig-network for a representation of the network distances. ```{python} #| echo: false -#| label: fig-distances +#| label: fig-network #| fig-cap: "Network. Green lines mean shorter distances, red ones, longer ones." G = my_instance.get_graph() pos = nx.kamada_kawai_layout(G) @@ -105,8 +112,6 @@ colors = weights.vapply(get_color) __nodes = nx.draw_networkx_nodes(G, pos=pos) __edges = nx.draw_networkx_edges(G, edgelist=weights.keys_l(), pos=pos, edge_color=colors.values_tl()) -# nx.draw_networkx_edge_labels(G, pos=pos, edge_labels=weights) - __labels = nx.draw_networkx_labels(G, pos=pos, font_color='white') ax = plt.gca() @@ -129,7 +134,7 @@ feasible = len(checks['missing_nodes']) == 0 and len(checks['missing_positions'] ```{python} #| output: asis -from IPython.display import display, Markdown +# The following code shows (1) a box with feasibility + objective function OR (2) root cause of infeasibility. if feasible: print("::: {{.callout-tip}}\n\n## Solution is feasible\n\nThe shortest tour has length {}.\n\n:::".format(objective)) @@ -146,13 +151,14 @@ else: ## Solution + +See @fig-tour for a representation of the solution tour. Number in links show the actual distance. Colors are kept from the network representation of the instance. + ```{python} #| label: fig-tour #| fig-cap: "Shortest tour that passes through each node once" #| code-fold: true -my_solution = my_experiment.solution - my_arcs = my_experiment.get_used_arc_weights() my_arcs_keys = my_arcs.keys_tl() my_colors = my_arcs_keys.vapply(lambda v: colors[v]) diff --git a/cornflow-dags/README.rst b/cornflow-dags/README.rst index 95e0a9e19..7b5b14518 100644 --- a/cornflow-dags/README.rst +++ b/cornflow-dags/README.rst @@ -297,3 +297,19 @@ and add a reference to your solver:: Then, you can execute the unittests for your solver with the following command:: python -m unittest tests.test_dags.GraphColor + +The reports +-------------- + +The generation of reports needs to have the `quarto` app installed in the system. +To downloda and install quarto, check here: https://quarto.org/docs/download/. + +A report is a static/ self-contained view of an Experiment (solved or not). + +For example, to generate the `tsp` report, you execute:: + + quarto render cornflow-dags/DAG/tsp/report/report.qmd + +By default, it uses an example instance. If a new instance is needed, the path to it is required:: + + quarto render cornflow-dags/DAG/tsp/report/report.qmd -P file_name:PATH_TO_JSON.json From c6f4c0f6234314ae986339ae9be8a1fd82a913ea Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 17 May 2024 15:10:50 +0200 Subject: [PATCH 03/84] added documentation on developing reports --- cornflow-dags/README.rst | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/cornflow-dags/README.rst b/cornflow-dags/README.rst index 7b5b14518..066c62862 100644 --- a/cornflow-dags/README.rst +++ b/cornflow-dags/README.rst @@ -27,6 +27,11 @@ There are several things that are needed when submitting a new solver. 4. a `Solver` class 5. an `Application class` +A few recommended additions: + +1. unit tests. +2. a report. + In its most minimalistic form: an app constitutes one file that contains all of this. 
In the following lines we will explain each of these concepts while using the graph-coloring example dag. This example can be found in the `DAG/graph_coloring` directory. @@ -302,7 +307,7 @@ The reports -------------- The generation of reports needs to have the `quarto` app installed in the system. -To downloda and install quarto, check here: https://quarto.org/docs/download/. +To downlodad and install quarto, check here: https://quarto.org/docs/download/. A report is a static/ self-contained view of an Experiment (solved or not). @@ -313,3 +318,10 @@ For example, to generate the `tsp` report, you execute:: By default, it uses an example instance. If a new instance is needed, the path to it is required:: quarto render cornflow-dags/DAG/tsp/report/report.qmd -P file_name:PATH_TO_JSON.json + +Developing reports +******************** + +Quarto reports are easier to create using VS-code with the following extensions: `Python`, `Quarto`, `Jupyter`, `black (Microsoft)`. + +VS-code offers an interactive window to execute cells, and automatic re-run of the report by watching for changes. \ No newline at end of file From 5780bff031ac764d15a66042ac545a996ed1b850 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 7 Jun 2024 18:31:14 +0200 Subject: [PATCH 04/84] unsuccessful tests with quarto library to call quarto and generate report from python --- cornflow-dags/DAG/tsp/core/experiment.py | 15 ++++++++++----- cornflow-dags/requirements.txt | 4 +++- cornflow-dags/tests/test_dags.py | 9 ++++++++- 3 files changed, 21 insertions(+), 7 deletions(-) diff --git a/cornflow-dags/DAG/tsp/core/experiment.py b/cornflow-dags/DAG/tsp/core/experiment.py index 680594cfc..33631e9c2 100644 --- a/cornflow-dags/DAG/tsp/core/experiment.py +++ b/cornflow-dags/DAG/tsp/core/experiment.py @@ -6,6 +6,8 @@ from .instance import Instance from .solution import Solution +from quarto import render + class Experiment(ExperimentCore): schema_checks = load_json( @@ -55,8 +57,11 @@ def check_solution(self, *args, **kwargs) -> SuperDict: missing_positions=self.check_missing_positions(), ) - def get_report(self): - # get positions (explicit, or implicitly via distances) - # get graph of solution - # - pass + def get_report(self) -> None: + path_to_report = os.path.join(os.path.dirname(__file__), "../report/test.qmd") + # by default, it creates the report next to the quarto file. 
+ + return render( + input=path_to_report, + execute_params=dict(file_name="MY_FILE_NAME"), + ) diff --git a/cornflow-dags/requirements.txt b/cornflow-dags/requirements.txt index c01275b91..ef78d6ded 100644 --- a/cornflow-dags/requirements.txt +++ b/cornflow-dags/requirements.txt @@ -16,4 +16,6 @@ numpy # quarto matplotlib seaborn -jupyter \ No newline at end of file +jupyter +quarto +papermill \ No newline at end of file diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index 98497bf1b..d3baeccf2 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -1,7 +1,7 @@ import os, sys prev_dir = os.path.join(os.path.dirname(__file__), "../") -my_paths = [prev_dir, os.path.join(prev_dir, 'DAG')] +my_paths = [prev_dir, os.path.join(prev_dir, "DAG")] for __my_path in my_paths: sys.path.insert(1, __my_path) @@ -168,6 +168,13 @@ def setUp(self): def test_solve_cpsat(self): return self.test_try_solving_testcase(dict(solver="cpsat", **self.config)) + def test_report(self): + tests = self.app.test_cases + my_experim = self.app.solvers["cpsat"](self.app.instance(tests[0])) + my_experim.solve(dict()) + my_experim.get_report() + # check the file is created. + class Vrp(BaseDAGTests.SolvingTests): def setUp(self): From f6b470473428d4c30cdfa2e158bf04f5d3c80594 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Tue, 11 Jun 2024 16:05:46 +0200 Subject: [PATCH 05/84] added reports model, endpoint, schemas. Each report has an execution_id associated. --- cornflow-dags/DAG/activate_dags.py | 46 ++++-- .../cornflow/endpoints/execution.py | 2 +- cornflow-server/cornflow/endpoints/reports.py | 133 ++++++++++++++++++ cornflow-server/cornflow/models/__init__.py | 2 + cornflow-server/cornflow/models/execution.py | 8 ++ cornflow-server/cornflow/models/reports.py | 102 ++++++++++++++ cornflow-server/cornflow/schemas/execution.py | 2 + cornflow-server/cornflow/schemas/reports.py | 33 +++++ .../cornflow_client/airflow/dag_utilities.py | 74 ++++++++-- 9 files changed, 375 insertions(+), 27 deletions(-) create mode 100644 cornflow-server/cornflow/endpoints/reports.py create mode 100644 cornflow-server/cornflow/models/reports.py create mode 100644 cornflow-server/cornflow/schemas/reports.py diff --git a/cornflow-dags/DAG/activate_dags.py b/cornflow-dags/DAG/activate_dags.py index 837952081..1c701b2c3 100644 --- a/cornflow-dags/DAG/activate_dags.py +++ b/cornflow-dags/DAG/activate_dags.py @@ -1,14 +1,14 @@ import cornflow_client.airflow.dag_utilities as utils -from airflow import DAG from airflow.operators.python import PythonOperator from airflow.secrets.environment_variables import EnvironmentVariablesBackend +from airflow.decorators import dag, task +from airflow.models.baseoperator import chain +from airflow.exceptions import AirflowSkipException from update_all_schemas import get_new_apps def create_dag(app): - def solve(**kwargs): - return utils.cf_solve_app(app, EnvironmentVariablesBackend(), **kwargs) if app.default_args is not None: default_args = app.default_args @@ -19,7 +19,7 @@ def solve(**kwargs): if app.extra_args is not None: kwargs = app.extra_args - dag = DAG( + @dag( app.name, description=app.description, default_args=default_args, @@ -27,18 +27,36 @@ def solve(**kwargs): tags=["model"], **kwargs ) - with dag: - notify = getattr(app, "notify", True) - if not notify: - t1 = PythonOperator(task_id=app.name, python_callable=solve) - else: - t1 = PythonOperator( - task_id=app.name, - python_callable=solve, - on_failure_callback=utils.callback_email, + def 
taskflow_dag(): + @task + def solve_app(): + def solve(**kwargs): + return utils.cf_solve_app(app, EnvironmentVariablesBackend(), **kwargs) + + notify = getattr(app, "notify", True) + if not notify: + return PythonOperator(task_id=app.name, python_callable=solve) + else: + return PythonOperator( + task_id=app.name, + python_callable=solve, + on_failure_callback=utils.callback_email, + ) + + @task + def run_report(): + def run(**kwargs): + return utils.cf_report(app, EnvironmentVariablesBackend(), **kwargs) + + file_name = PythonOperator( + task_id=app.name + "_report", python_callable=run ) + return dict(file_name=file_name) - return dag + # Define dependencies and call task functions + chain(solve_app(), run_report()) + + return taskflow_dag for app in get_new_apps(): diff --git a/cornflow-server/cornflow/endpoints/execution.py b/cornflow-server/cornflow/endpoints/execution.py index af6f3eee5..f7d4c8d5d 100644 --- a/cornflow-server/cornflow/endpoints/execution.py +++ b/cornflow-server/cornflow/endpoints/execution.py @@ -1,7 +1,7 @@ """ External endpoints to manage the executions: create new ones, list all of them, get one in particular or check the status of an ongoing one -These endpoints hve different access url, but manage the same data entities +These endpoints have different access url, but manage the same data entities """ # Import from libraries diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py new file mode 100644 index 000000000..173beee5e --- /dev/null +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -0,0 +1,133 @@ +""" +External endpoints to manage the reports: create new ones, list all of them, get one in particular +These endpoints have different access url, but manage the same data entities +""" + +# Import from libraries +from flask import current_app +from flask_apispec import marshal_with, use_kwargs, doc + +# Import from internal modules +from cornflow.endpoints.meta_resource import BaseMetaResource +from cornflow.models import ExecutionModel, ReportsModel + +from cornflow.schemas.reports import ReportSchema, ReportEditRequest, QueryFiltersReports, ReportRequest +from cornflow.shared.authentication import Auth, authenticate + + +class ReportEndpoint(BaseMetaResource): + """ + Endpoint used to create a new report or get all the reports and their information back + """ + + def __init__(self): + super().__init__() + self.model = ReportsModel + self.data_model = ReportsModel + self.foreign_data = {"execution_id": ExecutionModel} + + + @doc(description="Get all reports", tags=["Reports"]) + @authenticate(auth_class=Auth()) + @marshal_with(ReportSchema(many=True)) + @use_kwargs(QueryFiltersReports, location="query") + def get(self, **kwargs): + """ + API method to get all the reports created by the user and its related info + It requires authentication to be passed in the form of a token that has to be linked to + an existing session (login) made by a user + + :return: A dictionary with a message (error if authentication failed or a list with all the reports + created by the authenticated user) and a integer with the HTTP status code + :rtype: Tuple(dict, integer) + """ + # TODO: filter by execution_id + reports = self.get_list(user=self.get_user(), **kwargs) + current_app.logger.info(f"User {self.get_user()} gets list of reports") + return reports + + @doc(description="Create an report", tags=["Reports"]) + @authenticate(auth_class=Auth()) + @Auth.dag_permission_required + @marshal_with(ReportSchema) + 
@use_kwargs(ReportRequest, location="json") + def post(self, **kwargs): + """ + API method to create a new report linked to an already existing report + It requires authentication to be passed in the form of a token that has to be linked to + an existing session (login) made by a user + + :return: A dictionary with a message (error if authentication failed, error if data is not validated or + the reference_id for the newly created report if successful) and a integer wit the HTTP status code + :rtype: Tuple(dict, integer) + """ + # TODO: not sure if it should be possible to generate a report from the REST API + # and if so, should we let them generate a new report file? + report, status_code = self.post_list(data=kwargs) + + return report, 201 + + +class ReportDetailsEndpointBase(BaseMetaResource): + """ + Endpoint used to get the information of a certain report. But not the data! + """ + + def __init__(self): + super().__init__() + self.data_model = ReportsModel + self.foreign_data = {"execution_id": ExecutionModel} + + +class ReportDetailsEndpoint(ReportDetailsEndpointBase): + @doc(description="Get details of a report", tags=["Reports"], inherit=False) + @authenticate(auth_class=Auth()) + @marshal_with(ReportSchema) + @BaseMetaResource.get_data_or_404 + def get(self, idx): + """ + API method to get a report created by the user and its related info. + It requires authentication to be passed in the form of a token that has to be linked to + an existing session (login) made by a user. + + :param str idx: ID of the report. + :return: A dictionary with a message (error if authentication failed, or the report does not exist or + the data of the report) and an integer with the HTTP status code. + :rtype: Tuple(dict, integer) + """ + current_app.logger.info( + f"User {self.get_user()} gets details of report {idx}" + ) + return self.get_detail(user=self.get_user(), idx=idx) + + @doc(description="Edit a report", tags=["Reports"], inherit=False) + @authenticate(auth_class=Auth()) + @use_kwargs(ReportEditRequest, location="json") + def put(self, idx, **data): + """ + Edit an existing report + + :param string idx: ID of the report. + :return: A dictionary with a message (error if authentication failed, or the report does not exist or + a message) and an integer with the HTTP status code. + :rtype: Tuple(dict, integer) + """ + current_app.logger.info(f"User {self.get_user()} edits report {idx}") + return self.put_detail(data, user=self.get_user(), idx=idx) + + @doc(description="Delete a report", tags=["Reports"], inherit=False) + @authenticate(auth_class=Auth()) + def delete(self, idx): + """ + API method to delete a report created by the user and its related info. + It requires authentication to be passed in the form of a token that has to be linked to + an existing session (login) made by a user. + + :param string idx: ID of the report. + :return: A dictionary with a message (error if authentication failed, or the report does not exist or + a message) and an integer with the HTTP status code. 
+ :rtype: Tuple(dict, integer) + """ + current_app.logger.info(f"User {self.get_user()} deleted report {idx}") + return self.delete_detail(user=self.get_user(), idx=idx) + diff --git a/cornflow-server/cornflow/models/__init__.py b/cornflow-server/cornflow/models/__init__.py index d36c34ffa..de0a507b5 100644 --- a/cornflow-server/cornflow/models/__init__.py +++ b/cornflow-server/cornflow/models/__init__.py @@ -1,6 +1,7 @@ """ Initialization file for the models module """ + from .action import ActionModel from .alarms import AlarmsModel from .case import CaseModel @@ -14,3 +15,4 @@ from .user import UserModel from .user_role import UserRoleModel from .view import ViewModel +from .reports import ReportsModel diff --git a/cornflow-server/cornflow/models/execution.py b/cornflow-server/cornflow/models/execution.py index e55151684..bc7ac0210 100644 --- a/cornflow-server/cornflow/models/execution.py +++ b/cornflow-server/cornflow/models/execution.py @@ -64,6 +64,14 @@ class ExecutionModel(BaseDataModel): default=EXECUTION_STATE_MESSAGE_DICT[DEFAULT_EXECUTION_CODE], nullable=True, ) + reports = db.relationship( + "ReportModel", + backref="executions", + lazy=True, + primaryjoin="and_(ExecutionModel.id==ReportModel.execution_id, " + "ReportModel.deleted_at==None)", + cascade="all,delete", + ) def __init__(self, data): super().__init__(data) diff --git a/cornflow-server/cornflow/models/reports.py b/cornflow-server/cornflow/models/reports.py new file mode 100644 index 000000000..cc8c4ca75 --- /dev/null +++ b/cornflow-server/cornflow/models/reports.py @@ -0,0 +1,102 @@ +""" +Model for the executions +""" + +# Import from libraries +from sqlalchemy.dialects.postgresql import TEXT +from sqlalchemy.ext.declarative import declared_attr + +# Imports from internal modules +from cornflow.models.base_data_model import TraceAttributesModel +from cornflow.shared import db + + +class ReportsModel(TraceAttributesModel): + """ + Model class for the Reports. + It inherits from :class:`TraceAttributesModel` to have the trace fields and user field. + + - **id**: str, the primary key for the executions, a hash generated upon creation of the execution + and the id given back to the user. + The hash is generated from the creation date, the user and the id of the parent instance. + - **execution_id**: str, the foreign key for the execution (:class:`ExecutionModel`). It links the report to its + parent execution. + - **report_link**: str, the link with the actual report. It should be a valid url to a cloud storage bucket. + - **name**: str, the name of the report given by the user. + - **description**: str, the description of the report given by the user. It is optional. + - **user_id**: int, the foreign key for the user (:class:`UserModel`). It links the execution to its owner. + - **created_at**: datetime, the datetime when the execution was created (in UTC). + This datetime is generated automatically, the user does not need to provide it. + - **updated_at**: datetime, the datetime when the execution was last updated (in UTC). + This datetime is generated automatically, the user does not need to provide it. + - **deleted_at**: datetime, the datetime when the execution was deleted (in UTC). Even though it is deleted, + actually, it is not deleted from the database, in order to have a command that cleans up deleted data + after a certain time of its deletion. + This datetime is generated automatically, the user does not need to provide it. 
+ + """ + + # Table name in the database + __tablename__ = "reports" + + # Model fields + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + execution_id = db.Column( + db.String(256), db.ForeignKey("execution.id"), nullable=False + ) + name = db.Column(db.String(256), nullable=False) + description = db.Column(TEXT, nullable=True) + report_link = db.Column(db.String(256), nullable=False) + + @declared_attr + def user_id(self): + """ + The foreign key for the user (:class:`UserModel`). + """ + return db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False) + + def __init__(self, data: dict): + super().__init__() + self.user_id = data.get("user_id") + self.execution_id = data.get("execution_id") + self.name = data.get("name") + self.description = data.get("description") + self.report_link = data.get("report_link") + + def update(self, data): + """ + Method used to update a report from the database + + :param dict data: the data of the object + :return: None + :rtype: None + """ + super().update(data) + + def update_link(self, link: str): + """ + Method to update the report link + + :param str link: new URL for the report + :return: nothing + """ + self.report_link = link + super().update({}) + + def __repr__(self): + """ + Method to represent the class :class:`ReportModel` + + :return: The representation of the :class:`ReportModel` + :rtype: str + """ + return f"" + + def __str__(self): + """ + Method to print a string representation of the :class:`ReportModel` + + :return: The string for the :class:`ReportModel` + :rtype: str + """ + return self.__repr__() diff --git a/cornflow-server/cornflow/schemas/execution.py b/cornflow-server/cornflow/schemas/execution.py index 8b74aa95a..96f8987d4 100644 --- a/cornflow-server/cornflow/schemas/execution.py +++ b/cornflow-server/cornflow/schemas/execution.py @@ -5,6 +5,7 @@ from cornflow.shared.const import MIN_EXECUTION_STATUS_CODE, MAX_EXECUTION_STATUS_CODE from .common import QueryFilters, BaseDataEndpointResponse from .solution_log import LogSchema, BasicLogSchema +from .reports import ReportSchema class QueryFiltersExecution(QueryFilters): @@ -95,6 +96,7 @@ class ExecutionDagPostRequest(ExecutionRequest, ExecutionDagRequest): class ExecutionDetailsEndpointResponse(BaseDataEndpointResponse): + reports = fields.Nested(ReportSchema, many=True) config = fields.Nested(ConfigSchemaResponse) instance_id = fields.Str() state = fields.Int() diff --git a/cornflow-server/cornflow/schemas/reports.py b/cornflow-server/cornflow/schemas/reports.py new file mode 100644 index 000000000..7e77c588e --- /dev/null +++ b/cornflow-server/cornflow/schemas/reports.py @@ -0,0 +1,33 @@ +# Imports from libraries +from marshmallow import fields, Schema + +# Imports from internal modules +from .common import BaseQueryFilters + + +class QueryFiltersReports(BaseQueryFilters): + execution_id = fields.Str(required=False) + + +class ReportSchema(Schema): + id = fields.Str(dump_only=True) + user_id = fields.Int(required=False, load_only=True) + execution_id = fields.Str(required=True) + name = fields.Str() + description = fields.Str() + created_at = fields.DateTime(dump_only=True) + updated_at = fields.DateTime(dump_only=True) + deleted_at = fields.DateTime(dump_only=True) + + +class ReportEditRequest(Schema): + name = fields.Str() + description = fields.Str() + report_link = fields.Str() + + +class ReportRequest(Schema): + name = fields.Str(required=True) + description = fields.Str(required=False) + execution_id = fields.Str(required=True) + report_link = 
fields.Str(required=True) diff --git a/libs/client/cornflow_client/airflow/dag_utilities.py b/libs/client/cornflow_client/airflow/dag_utilities.py index a0bd5d984..599550eac 100644 --- a/libs/client/cornflow_client/airflow/dag_utilities.py +++ b/libs/client/cornflow_client/airflow/dag_utilities.py @@ -7,7 +7,8 @@ from datetime import datetime, timedelta from urllib.parse import urlparse, urljoin -from cornflow_client import CornFlow, CornFlowApiError +from cornflow_client import CornFlow, CornFlowApiError, ApplicationCore +from airflow.secrets import BaseSecretsBackend # TODO: convert everything to an object that encapsulates everything # to make it clear and avoid all the arguments. @@ -38,7 +39,7 @@ } -def get_schemas_from_file(_dir, dag_name): +def get_schemas_from_file(_dir: str, dag_name: str): # TODO: check if in use with open(os.path.join(_dir, dag_name + "_input.json"), "r") as f: instance = json.load(f) @@ -47,7 +48,7 @@ def get_schemas_from_file(_dir, dag_name): return instance, solution -def get_requirements(path): +def get_requirements(path: str): """ Read requirements.txt from a project and return a list of packages. @@ -67,7 +68,7 @@ def get_requirements(path): return req_list -def connect_to_cornflow(secrets): +def connect_to_cornflow(secrets: BaseSecretsBackend): """ Create a connection to cornflow and log in with airflow admin user. @@ -90,7 +91,7 @@ def connect_to_cornflow(secrets): return airflow_user -def try_to_save_error(client, exec_id, state=-1): +def try_to_save_error(client: CornFlow, exec_id: str, state=-1): """ Attempt at saving that the execution failed """ @@ -100,7 +101,7 @@ def try_to_save_error(client, exec_id, state=-1): print(f"An exception trying to register the failed status: {e}") -def try_to_save_airflow_log(client, exec_id, ti, base_log_folder): +def try_to_save_airflow_log(client: CornFlow, exec_id: str, ti, base_log_folder: str): log_file = os.path.join( base_log_folder, f"{ti.dag_id}", @@ -119,7 +120,7 @@ def try_to_save_airflow_log(client, exec_id, ti, base_log_folder): print(f"An exception occurred while trying to register airflow log: {e}") -def try_to_write_solution(client, exec_id, payload): +def try_to_write_solution(client: CornFlow, exec_id: str, payload: dict): """ Tries to write the payload into cornflow If it fails tries to write again that it failed. 
@@ -147,7 +148,7 @@ def try_to_write_solution(client, exec_id, payload): raise AirflowDagException("The writing of the instance checks failed") -def get_schema(dag_name): +def get_schema(dag_name: str): # TODO: check if in use _file = os.path.join(os.path.dirname(__file__), f"{dag_name}_output.json") with open(_file, "r") as f: @@ -155,14 +156,14 @@ def get_schema(dag_name): return schema -def cf_solve_app(app, secrets, **kwargs): +def cf_solve_app(app: ApplicationCore, secrets: BaseSecretsBackend, **kwargs): if kwargs["dag_run"].conf.get("checks_only"): return cf_check(app.check, app.name, secrets, **kwargs) else: return cf_solve(app.solve, app.name, secrets, **kwargs) -def cf_solve(fun, dag_name, secrets, **kwargs): +def cf_solve(fun: callable, dag_name: str, secrets: BaseSecretsBackend, **kwargs): """ Connect to cornflow, ask for data, solve the problem and write the solution in cornflow @@ -239,7 +240,7 @@ def cf_solve(fun, dag_name, secrets, **kwargs): raise AirflowDagException("There was an error during the solving") -def cf_check(fun, dag_name, secrets, **kwargs): +def cf_check(fun: callable, dag_name: str, secrets: BaseSecretsBackend, **kwargs): """ Connect to cornflow, ask for data, check the solution data and write the checks in cornflow :param fun: The function to use to check the data @@ -311,7 +312,56 @@ def cf_check(fun, dag_name, secrets, **kwargs): ) -def callback_email(context): +def cf_report( + app: ApplicationCore, + secrets: BaseSecretsBackend, + **kwargs, +): + """ + Connect to cornflow, ask for data, generate the report for the execution + + :param app: the application from which to generate the report + :param secrets: Environment variables + :param kwargs: other kwargs passed to the dag task. + :return: + """ + ti = kwargs["ti"] + try: + client = connect_to_cornflow(secrets) + exec_id = kwargs["dag_run"].conf["exec_id"] + # TODO: why not client.get_results? or get_status but for the config? 
+ # I just want to check the config as a first step + execution_data = client.get_data(exec_id) + config = execution_data["config"] + report_config = config.get("report", {}) + if not report_config: + # no need to write report since it's not requested + return None + + execution_data = client.get_data(exec_id) + input_data = execution_data["data"] + solution_data = execution_data["solution_data"] + + report_name = report_config.get("name") + # maybe all of this should be abstracted inside the app + experiment = app.get_solver(app.get_default_solver_name()) + my_experiment = experiment( + app.instance(input_data), app.solution(solution_data) + ) + # this should return the path to the generated file + # TODO: add a get_report method in ExperimentCore + file_name = my_experiment.get_report(report_name) + # TODO: store it in AWS/GCD/Azure bucket + # TODO: update execution with link to bucket + payload = dict(report_link="") + client.put_one_execution(exec_id, payload) + return True + + except Exception as e: + raise AirflowDagException("There was an error during the solving") + + +def callback_email(context: dict): from airflow.utils.email import send_email from airflow.secrets.environment_variables import EnvironmentVariablesBackend From 17e917367705888e30730d23b002c605bc6dfd4e Mon Sep 17 00:00:00 2001 From: pchtsp Date: Tue, 11 Jun 2024 16:28:54 +0200 Subject: [PATCH 06/84] fixed some wrong comments --- cornflow-server/cornflow/endpoints/reports.py | 2 +- cornflow-server/cornflow/models/reports.py | 14 ++++++-------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index 173beee5e..dcf4b46c9 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -66,7 +66,7 @@ def post(self, **kwargs): report, status_code = self.post_list(data=kwargs) return report, 201 - + class ReportDetailsEndpointBase(BaseMetaResource): """ diff --git a/cornflow-server/cornflow/models/reports.py b/cornflow-server/cornflow/models/reports.py index cc8c4ca75..229075834 100644 --- a/cornflow-server/cornflow/models/reports.py +++ b/cornflow-server/cornflow/models/reports.py @@ -1,5 +1,5 @@ """ -Model for the executions +Model for the reports """ # Import from libraries @@ -16,20 +16,18 @@ class ReportsModel(TraceAttributesModel): Model class for the Reports. It inherits from :class:`TraceAttributesModel` to have the trace fields and user field. - - **id**: str, the primary key for the executions, a hash generated upon creation of the execution - and the id given back to the user. - The hash is generated from the creation date, the user and the id of the parent instance. + - **id**: int, the report id, primary key for the reports. - **execution_id**: str, the foreign key for the execution (:class:`ExecutionModel`). It links the report to its parent execution. - **report_link**: str, the link with the actual report. It should be a valid url to a cloud storage bucket. - **name**: str, the name of the report given by the user. - **description**: str, the description of the report given by the user. It is optional. - - **user_id**: int, the foreign key for the user (:class:`UserModel`). It links the execution to its owner. - - **created_at**: datetime, the datetime when the execution was created (in UTC). + - **user_id**: int, the foreign key for the user (:class:`UserModel`). It links the report to its owner. 
+ - **created_at**: datetime, the datetime when the report was created (in UTC). This datetime is generated automatically, the user does not need to provide it. - - **updated_at**: datetime, the datetime when the execution was last updated (in UTC). + - **updated_at**: datetime, the datetime when the report was last updated (in UTC). This datetime is generated automatically, the user does not need to provide it. - - **deleted_at**: datetime, the datetime when the execution was deleted (in UTC). Even though it is deleted, + - **deleted_at**: datetime, the datetime when the report was deleted (in UTC). Even though it is deleted, actually, it is not deleted from the database, in order to have a command that cleans up deleted data after a certain time of its deletion. This datetime is generated automatically, the user does not need to provide it. From 4496e11d2157a6ea6beb2787cce18330d7744811 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 12 Jun 2024 18:48:17 +0200 Subject: [PATCH 07/84] migrations to database for reports --- .../versions/96f00d0961d1_reports_table.py | 41 +++++++++++++++++++ cornflow-server/cornflow/models/reports.py | 2 +- 2 files changed, 42 insertions(+), 1 deletion(-) create mode 100644 cornflow-server/cornflow/migrations/versions/96f00d0961d1_reports_table.py diff --git a/cornflow-server/cornflow/migrations/versions/96f00d0961d1_reports_table.py b/cornflow-server/cornflow/migrations/versions/96f00d0961d1_reports_table.py new file mode 100644 index 000000000..d26addf92 --- /dev/null +++ b/cornflow-server/cornflow/migrations/versions/96f00d0961d1_reports_table.py @@ -0,0 +1,41 @@ +"""reports table + +Revision ID: 96f00d0961d1 +Revises: 991b98e24225 +Create Date: 2024-06-12 18:47:06.366487 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '96f00d0961d1' +down_revision = '991b98e24225' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('reports', + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.Column('deleted_at', sa.DateTime(), nullable=True), + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('execution_id', sa.String(length=256), nullable=False), + sa.Column('name', sa.String(length=256), nullable=False), + sa.Column('description', sa.TEXT(), nullable=True), + sa.Column('report_link', sa.String(length=256), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['execution_id'], ['executions.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('reports') + # ### end Alembic commands ### diff --git a/cornflow-server/cornflow/models/reports.py b/cornflow-server/cornflow/models/reports.py index 229075834..f03fcfb44 100644 --- a/cornflow-server/cornflow/models/reports.py +++ b/cornflow-server/cornflow/models/reports.py @@ -40,7 +40,7 @@ class ReportsModel(TraceAttributesModel): # Model fields id = db.Column(db.Integer, primary_key=True, autoincrement=True) execution_id = db.Column( - db.String(256), db.ForeignKey("execution.id"), nullable=False + db.String(256), db.ForeignKey("executions.id"), nullable=False ) name = db.Column(db.String(256), nullable=False) description = db.Column(TEXT, nullable=True) From a3bcdaf3932b49291cecb2a71498e3e562764057 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 12 Jun 2024 20:18:47 +0200 Subject: [PATCH 08/84] pass executions tests --- cornflow-server/cornflow/endpoints/reports.py | 23 ++++++++++--------- cornflow-server/cornflow/models/__init__.py | 2 +- cornflow-server/cornflow/models/reports.py | 2 +- .../cornflow/tests/unit/test_executions.py | 5 ++++ .../cornflow/tests/unit/test_users.py | 7 ++++-- 5 files changed, 24 insertions(+), 15 deletions(-) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index dcf4b46c9..2f0b769d7 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -9,9 +9,14 @@ # Import from internal modules from cornflow.endpoints.meta_resource import BaseMetaResource -from cornflow.models import ExecutionModel, ReportsModel - -from cornflow.schemas.reports import ReportSchema, ReportEditRequest, QueryFiltersReports, ReportRequest +from cornflow.models import ExecutionModel, ReportModel + +from cornflow.schemas.reports import ( + ReportSchema, + ReportEditRequest, + QueryFiltersReports, + ReportRequest, +) from cornflow.shared.authentication import Auth, authenticate @@ -22,11 +27,10 @@ class ReportEndpoint(BaseMetaResource): def __init__(self): super().__init__() - self.model = ReportsModel - self.data_model = ReportsModel + self.model = ReportModel + self.data_model = ReportModel self.foreign_data = {"execution_id": ExecutionModel} - @doc(description="Get all reports", tags=["Reports"]) @authenticate(auth_class=Auth()) @marshal_with(ReportSchema(many=True)) @@ -75,7 +79,7 @@ class ReportDetailsEndpointBase(BaseMetaResource): def __init__(self): super().__init__() - self.data_model = ReportsModel + self.data_model = ReportModel self.foreign_data = {"execution_id": ExecutionModel} @@ -95,9 +99,7 @@ def get(self, idx): the data of the report) and an integer with the HTTP status code. 
:rtype: Tuple(dict, integer) """ - current_app.logger.info( - f"User {self.get_user()} gets details of report {idx}" - ) + current_app.logger.info(f"User {self.get_user()} gets details of report {idx}") return self.get_detail(user=self.get_user(), idx=idx) @doc(description="Edit a report", tags=["Reports"], inherit=False) @@ -130,4 +132,3 @@ def delete(self, idx): """ current_app.logger.info(f"User {self.get_user()} deleted report {idx}") return self.delete_detail(user=self.get_user(), idx=idx) - diff --git a/cornflow-server/cornflow/models/__init__.py b/cornflow-server/cornflow/models/__init__.py index de0a507b5..e9c35a3a6 100644 --- a/cornflow-server/cornflow/models/__init__.py +++ b/cornflow-server/cornflow/models/__init__.py @@ -15,4 +15,4 @@ from .user import UserModel from .user_role import UserRoleModel from .view import ViewModel -from .reports import ReportsModel +from .reports import ReportModel diff --git a/cornflow-server/cornflow/models/reports.py b/cornflow-server/cornflow/models/reports.py index f03fcfb44..7b969e9cf 100644 --- a/cornflow-server/cornflow/models/reports.py +++ b/cornflow-server/cornflow/models/reports.py @@ -11,7 +11,7 @@ from cornflow.shared import db -class ReportsModel(TraceAttributesModel): +class ReportModel(TraceAttributesModel): """ Model class for the Reports. It inherits from :class:`TraceAttributesModel` to have the trace fields and user field. diff --git a/cornflow-server/cornflow/tests/unit/test_executions.py b/cornflow-server/cornflow/tests/unit/test_executions.py index 6e67ee71f..e6a772763 100644 --- a/cornflow-server/cornflow/tests/unit/test_executions.py +++ b/cornflow-server/cornflow/tests/unit/test_executions.py @@ -57,6 +57,7 @@ def load_file_fk(_file): "instance_id", "name", "indicators", + "reports", ] def test_new_execution(self): @@ -260,6 +261,7 @@ def setUp(self): "schema", "user_id", "indicators", + "reports", } # we only check the following because this endpoint does not return data self.items_to_check = ["name", "description"] @@ -303,6 +305,7 @@ def test_create_delete_instance_load(self): "name", "created_at", "state", + "reports", ] execution = self.get_one_row( self.url + idx, @@ -408,6 +411,7 @@ def setUp(self): "state", "name", "id", + "reports", ] def test_get_one_execution(self): @@ -450,6 +454,7 @@ def setUp(self): "user_id", "config", "indicators", + "reports", ] def test_get_one_execution(self): diff --git a/cornflow-server/cornflow/tests/unit/test_users.py b/cornflow-server/cornflow/tests/unit/test_users.py index c87d0d4ce..490acbc5c 100644 --- a/cornflow-server/cornflow/tests/unit/test_users.py +++ b/cornflow-server/cornflow/tests/unit/test_users.py @@ -363,7 +363,11 @@ def test_viewer_user_change_password(self): def test_change_password_rotation(self): current_app.config["PWD_ROTATION_TIME"] = 1 # in days - payload = {"pwd_last_change": (datetime.utcnow() - timedelta(days=2)).strftime("%Y-%m-%dT%H:%M:%SZ")} + payload = { + "pwd_last_change": (datetime.utcnow() - timedelta(days=2)).strftime( + "%Y-%m-%dT%H:%M:%SZ" + ) + } self.modify_info(self.planner, self.planner, payload) response = self.log_in(self.planner) self.assertEqual(True, response.json["change_password"]) @@ -371,7 +375,6 @@ def test_change_password_rotation(self): payload = {"password": "Newtestpassword1!"} self.modify_info(self.planner, self.planner, payload) self.planner.update(payload) - print(self.planner) response = self.log_in(self.planner) self.assertEqual(False, response.json["change_password"]) From 82f0aa998d7b14f9cecd862c00ef8e96d7e34b1f Mon Sep 
17 00:00:00 2001 From: pchtsp Date: Wed, 12 Jun 2024 20:23:31 +0200 Subject: [PATCH 09/84] delete airflow dependency --- libs/client/cornflow_client/airflow/dag_utilities.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/libs/client/cornflow_client/airflow/dag_utilities.py b/libs/client/cornflow_client/airflow/dag_utilities.py index 599550eac..ecd39e09d 100644 --- a/libs/client/cornflow_client/airflow/dag_utilities.py +++ b/libs/client/cornflow_client/airflow/dag_utilities.py @@ -8,7 +8,6 @@ from urllib.parse import urlparse, urljoin from cornflow_client import CornFlow, CornFlowApiError, ApplicationCore -from airflow.secrets import BaseSecretsBackend # TODO: convert everything to an object that encapsulates everything # to make it clear and avoid all the arguments. @@ -68,7 +67,7 @@ def get_requirements(path: str): return req_list -def connect_to_cornflow(secrets: BaseSecretsBackend): +def connect_to_cornflow(secrets): """ Create a connection to cornflow and log in with airflow admin user. @@ -156,14 +155,14 @@ def get_schema(dag_name: str): return schema -def cf_solve_app(app: ApplicationCore, secrets: BaseSecretsBackend, **kwargs): +def cf_solve_app(app: ApplicationCore, secrets, **kwargs): if kwargs["dag_run"].conf.get("checks_only"): return cf_check(app.check, app.name, secrets, **kwargs) else: return cf_solve(app.solve, app.name, secrets, **kwargs) -def cf_solve(fun: callable, dag_name: str, secrets: BaseSecretsBackend, **kwargs): +def cf_solve(fun: callable, dag_name: str, secrets, **kwargs): """ Connect to cornflow, ask for data, solve the problem and write the solution in cornflow @@ -240,7 +239,7 @@ def cf_solve(fun: callable, dag_name: str, secrets: BaseSecretsBackend, **kwargs raise AirflowDagException("There was an error during the solving") -def cf_check(fun: callable, dag_name: str, secrets: BaseSecretsBackend, **kwargs): +def cf_check(fun: callable, dag_name: str, secrets, **kwargs): """ Connect to cornflow, ask for data, check the solution data and write the checks in cornflow :param fun: The function to use to check the data @@ -314,7 +313,7 @@ def cf_check(fun: callable, dag_name: str, secrets: BaseSecretsBackend, **kwargs def cf_report( app: ApplicationCore, - secrets: BaseSecretsBackend, + secrets, **kwargs, ): """ From f30ee6d45599c86cc401625a63996253ab30eb0a Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 21 Jun 2024 14:35:19 +0200 Subject: [PATCH 10/84] added some additional I/O methods to the Experiment and a `generate_report` base method. --- .../client/cornflow_client/core/experiment.py | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/libs/client/cornflow_client/core/experiment.py b/libs/client/cornflow_client/core/experiment.py index 9676e9ded..cb893791a 100644 --- a/libs/client/cornflow_client/core/experiment.py +++ b/libs/client/cornflow_client/core/experiment.py @@ -4,6 +4,7 @@ from abc import ABC, abstractmethod from typing import Union, Dict +import json from cornflow_client.constants import ( PARAMETER_SOLVER_TRANSLATING_MAPPING, @@ -150,3 +151,33 @@ def get_solver_config( ) return conf + + def generate_report(self, report_path: str, report_name="report") -> None: + """ + this method should write a report file into report_path, using the template in report_name. 
+ + :param report_path: the path of the report to export + :param report_name: the name of the template for the report + """ + raise NotImplementedError() + + @classmethod + def from_dict(cls, data: dict): + return cls( + InstanceCore.from_dict(data["instance"]), + SolutionCore.from_dict(data["solution"]), + ) + + def to_dict(self) -> dict: + return dict(instance=self.instance.to_dict(), solution=self.solution.to_dict()) + + @classmethod + def from_json(cls, path: str) -> "ExperimentCore": + with open(path, "r") as f: + data_json = json.load(f) + return cls.from_dict(data_json) + + def to_json(self, path: str) -> None: + data = self.to_dict() + with open(path, "w") as f: + json.dump(data, f, indent=4, sort_keys=True) From 0b1619f192baffc534270eb59b457b81b6394e5d Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 21 Jun 2024 14:36:39 +0200 Subject: [PATCH 11/84] now it's possible to run the create the report from python using a json format of the Experiment as bridge. --- cornflow-dags/DAG/tsp/core/experiment.py | 46 ++++++++++++++++++++---- cornflow-dags/DAG/tsp/report/.gitignore | 1 + cornflow-dags/DAG/tsp/report/report.qmd | 10 +++--- cornflow-dags/tests/test_dags.py | 7 ++-- 4 files changed, 50 insertions(+), 14 deletions(-) create mode 100644 cornflow-dags/DAG/tsp/report/.gitignore diff --git a/cornflow-dags/DAG/tsp/core/experiment.py b/cornflow-dags/DAG/tsp/core/experiment.py index 33631e9c2..904c9e943 100644 --- a/cornflow-dags/DAG/tsp/core/experiment.py +++ b/cornflow-dags/DAG/tsp/core/experiment.py @@ -6,6 +6,7 @@ from .instance import Instance from .solution import Solution +import json, tempfile from quarto import render @@ -14,6 +15,26 @@ class Experiment(ExperimentCore): os.path.join(os.path.dirname(__file__), "../schemas/solution_checks.json") ) + def to_dict(self) -> dict: + return dict(instance=self.instance.to_dict(), solution=self.solution.to_dict()) + + @classmethod + def from_dict(cls, data: dict): + return cls( + Instance.from_dict(data["instance"]), Solution.from_dict(data["solution"]) + ) + + @classmethod + def from_json(cls, path: str) -> "Experiment": + with open(path, "r") as f: + data_json = json.load(f) + return cls.from_dict(data_json) + + def to_json(self, path: str) -> None: + data = self.to_dict() + with open(path, "w") as f: + json.dump(data, f, indent=4, sort_keys=True) + @property def instance(self) -> Instance: return super().instance @@ -57,11 +78,22 @@ def check_solution(self, *args, **kwargs) -> SuperDict: missing_positions=self.check_missing_positions(), ) - def get_report(self) -> None: - path_to_report = os.path.join(os.path.dirname(__file__), "../report/test.qmd") - # by default, it creates the report next to the quarto file. - - return render( - input=path_to_report, - execute_params=dict(file_name="MY_FILE_NAME"), + def generate_report(self, report_path: str, report_name="report") -> None: + # a user may give the full "report.qmd" name. 
+ # We want to take out the extension + report_base = os.path.splitext(report_name)[0] + path_without_ext = os.path.join( + os.path.dirname(__file__), "../report/", report_base ) + path_to_qmd = path_without_ext + ".qmd" + path_to_output = path_without_ext + ".html" + with tempfile.TemporaryDirectory() as tmp: + path = os.path.join(tmp, "experiment.json") + # write a json with instance and solution to temp file + self.to_json(path) + # pass the path to the report to render + # it generates a report with path = path_to_output + render(input=path_to_qmd, execute_params=dict(file_name=path)) + # quarto always writes the report in the .qmd directory. + # thus, we need to move it where we want to: + os.replace(path_to_output, report_path) diff --git a/cornflow-dags/DAG/tsp/report/.gitignore b/cornflow-dags/DAG/tsp/report/.gitignore new file mode 100644 index 000000000..075b2542a --- /dev/null +++ b/cornflow-dags/DAG/tsp/report/.gitignore @@ -0,0 +1 @@ +/.quarto/ diff --git a/cornflow-dags/DAG/tsp/report/report.qmd b/cornflow-dags/DAG/tsp/report/report.qmd index 5b99ad5f3..8ae87f470 100644 --- a/cornflow-dags/DAG/tsp/report/report.qmd +++ b/cornflow-dags/DAG/tsp/report/report.qmd @@ -38,16 +38,16 @@ import matplotlib.pyplot as plt extension = os.path.splitext(file_name)[1] if extension=='.tsp': + # it's an instance, so we should solve it, I guess my_instance = TspApp.instance.from_tsplib_file(file_name) + my_experiment = TspApp.solvers['cpsat'](instance=my_instance, solution=TspApp.solution(dict(route=[]))) + status = my_experiment.solve({'timeLimit': 2}) elif extension=='.json': - my_instance = TspApp.instance.from_json(file_name) + my_experiment = TspApp.solvers['cpsat'].from_json(file_name) + my_instance = my_experiment.instance else: raise ValueError("Unknown extension: {}".format(extension)) -my_experiment = TspApp.solvers['cpsat'](instance=my_instance, solution=TspApp.solution(dict(route=[]))) -# TODO: Ideally, we should not solve the problem inside the report, we should already have an example of problem solved. -# status = my_experiment.solve({'timeLimit': 2}) -# pop_element = my_experiment.solution.data['route'].pop() ``` diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index 5f09783bd..ffcfcd5b9 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -164,9 +164,12 @@ def test_solve_cpsat(self): def test_report(self): tests = self.app.test_cases - my_experim = self.app.solvers["cpsat"](self.app.instance(tests[0])) + my_experim = self.app.solvers["cpsat"](self.app.instance(tests[0]["instance"])) my_experim.solve(dict()) - my_experim.get_report() + report_path = "./my_report.html" + my_experim.generate_report(report_path=report_path) + self.assertTrue(os.path.exists(report_path)) + os.remove(report_path) # check the file is created. 
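With patch 11 in place, the report can be produced straight from Python: the experiment serializes itself to a temporary JSON file, Quarto renders the report.qmd template against that file, and the resulting HTML is moved to the requested path. A rough caller-side sketch of that round trip follows; the TspApp import path is an assumption, and the data file, time limit and output path are placeholders:

    # Illustrative only: solve a TSP instance, then render its report through the
    # JSON bridge introduced above. Assumes the quarto CLI and the TSP DAG package
    # are installed; the import path below is an assumption.
    from DAG.tsp import TspApp

    instance = TspApp.instance.from_tsplib_file("gr17.tsp")  # placeholder data file
    experiment = TspApp.solvers["cpsat"](
        instance=instance, solution=TspApp.solution(dict(route=[]))
    )
    experiment.solve({"timeLimit": 2})
    # internally: to_json() -> quarto.render(execute_params=...) -> os.replace()
    experiment.generate_report(report_path="./my_report.html", report_name="report")
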
From 409662f9a67cf6897ca111d144f0763ce9ff9391 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Guillermo=20Gonz=C3=A1lez-Santander=20de=20la=20Cruz?= Date: Fri, 28 Jun 2024 10:44:26 +0200 Subject: [PATCH 12/84] New activate_dags (#543) --- cornflow-dags/DAG/activate_dags.py | 60 +++++++++++++----------------- 1 file changed, 26 insertions(+), 34 deletions(-) diff --git a/cornflow-dags/DAG/activate_dags.py b/cornflow-dags/DAG/activate_dags.py index 1c701b2c3..6c8e9223c 100644 --- a/cornflow-dags/DAG/activate_dags.py +++ b/cornflow-dags/DAG/activate_dags.py @@ -1,14 +1,18 @@ import cornflow_client.airflow.dag_utilities as utils + +from airflow import DAG from airflow.operators.python import PythonOperator from airflow.secrets.environment_variables import EnvironmentVariablesBackend -from airflow.decorators import dag, task -from airflow.models.baseoperator import chain -from airflow.exceptions import AirflowSkipException from update_all_schemas import get_new_apps def create_dag(app): + def solve(**kwargs): + return utils.cf_solve_app(app, EnvironmentVariablesBackend(), **kwargs) + + def run(**kwargs): + return utils.cf_report(app, EnvironmentVariablesBackend(), **kwargs) if app.default_args is not None: default_args = app.default_args @@ -19,7 +23,7 @@ def create_dag(app): if app.extra_args is not None: kwargs = app.extra_args - @dag( + dag = DAG( app.name, description=app.description, default_args=default_args, @@ -27,36 +31,24 @@ def create_dag(app): tags=["model"], **kwargs ) - def taskflow_dag(): - @task - def solve_app(): - def solve(**kwargs): - return utils.cf_solve_app(app, EnvironmentVariablesBackend(), **kwargs) - - notify = getattr(app, "notify", True) - if not notify: - return PythonOperator(task_id=app.name, python_callable=solve) - else: - return PythonOperator( - task_id=app.name, - python_callable=solve, - on_failure_callback=utils.callback_email, - ) - - @task - def run_report(): - def run(**kwargs): - return utils.cf_report(app, EnvironmentVariablesBackend(), **kwargs) - - file_name = PythonOperator( - task_id=app.name + "_report", python_callable=run - ) - return dict(file_name=file_name) - - # Define dependencies and call task functions - chain(solve_app(), run_report()) - - return taskflow_dag + + notify = getattr(app, "notify", True) + if not notify: + t1 = PythonOperator(task_id=app.name, python_callable=solve, dag=dag) + else: + t1 = PythonOperator( + task_id=app.name, + python_callable=solve, + on_failure_callback=utils.callback_email, + dag=dag + ) + + t2 = PythonOperator(task_id=f"{app.name}_report", python_callable=run, dag=dag) + + + t1 >> t2 + + return dag for app in get_new_apps(): From 9f83a970b77af717a92b0eb0f84d248519b4f3c9 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Fri, 28 Jun 2024 11:06:55 +0200 Subject: [PATCH 13/84] Fixed numpy and pandas conoined error --- cornflow-dags/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cornflow-dags/requirements.txt b/cornflow-dags/requirements.txt index 8d32d0717..f31e931f7 100644 --- a/cornflow-dags/requirements.txt +++ b/cornflow-dags/requirements.txt @@ -11,7 +11,7 @@ pyomo<=6.6.2 tsplib95<=0.7.1 networkx scipy -numpy +numpy<2.0.0 # quarto matplotlib From 5c0fb2d0a2fa8f73c098f63f38812c0804da07e5 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 28 Jun 2024 15:04:51 +0200 Subject: [PATCH 14/84] make tests valid with env. 
var CF_ALARMS_ENDPOINT=0 --- cornflow-server/cornflow/cli/utils.py | 2 +- .../cornflow/tests/unit/test_alarms.py | 27 ++++++++++++------- .../cornflow/tests/unit/test_cli.py | 15 +++++++---- .../cornflow/tests/unit/test_commands.py | 8 ++++-- .../cornflow/tests/unit/test_main_alarms.py | 4 +++ .../cornflow_client/airflow/dag_utilities.py | 1 + 6 files changed, 40 insertions(+), 17 deletions(-) diff --git a/cornflow-server/cornflow/cli/utils.py b/cornflow-server/cornflow/cli/utils.py index c7c6ce767..29d6fe9f7 100644 --- a/cornflow-server/cornflow/cli/utils.py +++ b/cornflow-server/cornflow/cli/utils.py @@ -6,7 +6,7 @@ def get_app(): env = os.getenv("FLASK_ENV", "development") - data_conn = os.getenv("DATABASE_URL", "sqlite:///cornflow.db") + data_conn = os.getenv("DATABASE_URL") if env == "production": warnings.filterwarnings("ignore") external = int(os.getenv("EXTERNAL_APP", 0)) diff --git a/cornflow-server/cornflow/tests/unit/test_alarms.py b/cornflow-server/cornflow/tests/unit/test_alarms.py index d0c317a67..39e5b9af9 100644 --- a/cornflow-server/cornflow/tests/unit/test_alarms.py +++ b/cornflow-server/cornflow/tests/unit/test_alarms.py @@ -1,6 +1,10 @@ """ """ + +import unittest +import os + # Imports from internal modules from cornflow.models import AlarmsModel from cornflow.tests.const import ALARMS_URL @@ -15,25 +19,30 @@ def setUp(self): self.response_items = {"id", "name", "description", "criticality", "schema"} self.items_to_check = ["name", "description", "schema", "criticality"] + @unittest.skipUnless(os.getenv("CF_ALARMS_ENDPOINT") == 1, "No alarms implemented") def test_post_alarm(self): - payload = {"name": "Alarm 1", "description": "Description Alarm 1", "criticality": 1} + payload = { + "name": "Alarm 1", + "description": "Description Alarm 1", + "criticality": 1, + } self.create_new_row(self.url, self.model, payload) + @unittest.skipUnless(os.getenv("CF_ALARMS_ENDPOINT") == 1, "No alarms implemented") def test_get_alarms(self): data = [ {"name": "Alarm 1", "description": "Description Alarm 1", "criticality": 1}, - {"name": "Alarm 2", "description": "Description Alarm 2", "criticality": 2, "schema": "solve_model_dag"}, + { + "name": "Alarm 2", + "description": "Description Alarm 2", + "criticality": 2, + "schema": "solve_model_dag", + }, ] - rows = self.get_rows( - self.url, - data, - check_data=False - ) + rows = self.get_rows(self.url, data, check_data=False) rows_data = list(rows.json) for i in range(len(data)): for key in self.get_keys_to_check(data[i]): self.assertIn(key, rows_data[i]) if key in data[i]: self.assertEqual(rows_data[i][key], data[i][key]) - - diff --git a/cornflow-server/cornflow/tests/unit/test_cli.py b/cornflow-server/cornflow/tests/unit/test_cli.py index 47ff47abe..d1301e4a8 100644 --- a/cornflow-server/cornflow/tests/unit/test_cli.py +++ b/cornflow-server/cornflow/tests/unit/test_cli.py @@ -20,6 +20,11 @@ class CLITests(TestCase): def setUp(self): db.create_all() + self.numberOfViews = 49 + self.numberOfPermissions = 546 + if os.getenv("CF_ALARMS_ENDPOINT") != 1: + self.numberOfViews = 47 + self.numberOfPermissions = 514 def tearDown(self): db.session.remove() @@ -131,7 +136,7 @@ def test_views_init_command(self): result = runner.invoke(cli, ["views", "init", "-v"]) self.assertEqual(result.exit_code, 0) views = ViewModel.get_all_objects().all() - self.assertEqual(len(views), 49) + self.assertEqual(len(views), self.numberOfViews) def test_permissions_entrypoint(self): runner = CliRunner() @@ -155,8 +160,8 @@ def test_permissions_init(self): 
permissions = PermissionViewRoleModel.get_all_objects().all() self.assertEqual(len(actions), 5) self.assertEqual(len(roles), 4) - self.assertEqual(len(views), 49) - self.assertEqual(len(permissions), 546) + self.assertEqual(len(views), self.numberOfViews) + self.assertEqual(len(permissions), self.numberOfPermissions) def test_permissions_base_command(self): runner = CliRunner() @@ -171,8 +176,8 @@ def test_permissions_base_command(self): permissions = PermissionViewRoleModel.get_all_objects().all() self.assertEqual(len(actions), 5) self.assertEqual(len(roles), 4) - self.assertEqual(len(views), 49) - self.assertEqual(len(permissions), 546) + self.assertEqual(len(views), self.numberOfViews) + self.assertEqual(len(permissions), self.numberOfPermissions) def test_service_entrypoint(self): runner = CliRunner() diff --git a/cornflow-server/cornflow/tests/unit/test_commands.py b/cornflow-server/cornflow/tests/unit/test_commands.py index 223bc9766..cb80dab02 100644 --- a/cornflow-server/cornflow/tests/unit/test_commands.py +++ b/cornflow-server/cornflow/tests/unit/test_commands.py @@ -1,5 +1,5 @@ import json - +import os from flask_testing import TestCase from cornflow.app import ( @@ -48,7 +48,11 @@ def setUp(self): "email": "testemail@test.org", "password": "Testpassword1!", } - self.resources = resources + alarms_resources + + if os.getenv("CF_ALARMS_ENDPOINT") == 1: + self.resources = resources + alarms_resources + else: + self.resources = resources self.runner = self.create_app().test_cli_runner() self.runner.invoke(register_roles, ["-v"]) diff --git a/cornflow-server/cornflow/tests/unit/test_main_alarms.py b/cornflow-server/cornflow/tests/unit/test_main_alarms.py index 2e9ab0c18..e442b5071 100644 --- a/cornflow-server/cornflow/tests/unit/test_main_alarms.py +++ b/cornflow-server/cornflow/tests/unit/test_main_alarms.py @@ -2,6 +2,8 @@ """ +import unittest +import os import json # Imports from internal modules @@ -29,6 +31,7 @@ def setUp(self): headers=self.get_header_with_auth(self.token), ).json["id"] + @unittest.skipUnless(os.getenv("CF_ALARMS_ENDPOINT") == 1, "No alarms implemented") def test_post_main_alarm(self): payload = { "message": "Message Main Alarm 1", @@ -37,6 +40,7 @@ def test_post_main_alarm(self): } self.create_new_row(self.url, self.model, payload) + @unittest.skipUnless(os.getenv("CF_ALARMS_ENDPOINT") == 1, "No alarms implemented") def test_get_main_alarms(self): data = [ { diff --git a/libs/client/cornflow_client/airflow/dag_utilities.py b/libs/client/cornflow_client/airflow/dag_utilities.py index ecd39e09d..8ea2906a1 100644 --- a/libs/client/cornflow_client/airflow/dag_utilities.py +++ b/libs/client/cornflow_client/airflow/dag_utilities.py @@ -324,6 +324,7 @@ def cf_report( :param kwargs: other kwargs passed to the dag task. 
:return: """ + # TODO: if this task fails, the dagrun should still be valid ti = kwargs["ti"] try: client = connect_to_cornflow(secrets) From 132a0c3f280485d4788deb7c1d90e481149e07f9 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 28 Jun 2024 15:07:07 +0200 Subject: [PATCH 15/84] added quarto to github actions --- .github/workflows/test_cornflow_server.yml | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test_cornflow_server.yml b/.github/workflows/test_cornflow_server.yml index b603ca435..08e605403 100644 --- a/.github/workflows/test_cornflow_server.yml +++ b/.github/workflows/test_cornflow_server.yml @@ -101,8 +101,15 @@ jobs: AIRFLOW__WEBSERVER__SECRET_KEY: e9adafa751fd35adfc1fdd3285019be15eea0758f76e38e1e37a1154fb36 AIRFLOW__CORE__LOAD_EXAMPLES: 0 AIRFLOW_CONN_CF_URI: http://airflow:Airflow_test_password1@localhost:5050 + - name: Set up Quarto + uses: quarto-dev/quarto-actions/setup@v2 + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + # To install LaTeX to build PDF book + tinytex: true - name: Run unit tests - run: | + run: | coverage run --source=./cornflow/ --rcfile=./.coveragerc -m unittest discover -s cornflow/tests/unit coverage report -m env: @@ -114,7 +121,7 @@ jobs: AIRFLOW_PWD: notadmin CF_ALARMS_ENDPOINT: 1 - name: Run ldap unit tests - run: | + run: | coverage run -a --source=./cornflow/ --rcfile=./.coveragerc -m unittest discover -s cornflow/tests/ldap coverage report -m env: @@ -142,7 +149,7 @@ jobs: LOG_LEVEL: 30 CORNFLOW_SERVICE_USER: cornflow - name: Run postgres unit tests - run: | + run: | coverage run -a --source=./cornflow/ --rcfile=./.coveragerc -m unittest cornflow/tests/unit/test_commands.py coverage report -m env: From e0737aa43850d1a49a3fb6a0a3b1ec5c41b9ee0d Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 28 Jun 2024 15:13:19 +0200 Subject: [PATCH 16/84] quarto is needed in test_dags --- .github/workflows/test_cornflow_dags.yml | 7 +++++++ .github/workflows/test_cornflow_server.yml | 7 ------- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/test_cornflow_dags.yml b/.github/workflows/test_cornflow_dags.yml index 879c87e2d..4e69f8fb6 100644 --- a/.github/workflows/test_cornflow_dags.yml +++ b/.github/workflows/test_cornflow_dags.yml @@ -35,6 +35,13 @@ jobs: uses: actions/setup-python@v1 with: python-version: ${{ matrix.python-version }} + - name: Set up Quarto + uses: quarto-dev/quarto-actions/setup@v2 + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + # To install LaTeX to build PDF book + tinytex: true - name: Install cbc run: | sudo apt-get install -y coinor-cbc diff --git a/.github/workflows/test_cornflow_server.yml b/.github/workflows/test_cornflow_server.yml index 08e605403..4e8fc4576 100644 --- a/.github/workflows/test_cornflow_server.yml +++ b/.github/workflows/test_cornflow_server.yml @@ -101,13 +101,6 @@ jobs: AIRFLOW__WEBSERVER__SECRET_KEY: e9adafa751fd35adfc1fdd3285019be15eea0758f76e38e1e37a1154fb36 AIRFLOW__CORE__LOAD_EXAMPLES: 0 AIRFLOW_CONN_CF_URI: http://airflow:Airflow_test_password1@localhost:5050 - - name: Set up Quarto - uses: quarto-dev/quarto-actions/setup@v2 - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - # To install LaTeX to build PDF book - tinytex: true - name: Run unit tests run: | coverage run --source=./cornflow/ --rcfile=./.coveragerc -m unittest discover -s cornflow/tests/unit From 83610f8f5c1970d2f09cb8f46b816134fe07ff2f Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Fri, 28 Jun 2024 17:56:58 +0200 Subject: 
[PATCH 17/84] Small adjustments to pass unit tests on local and on github actions --- cornflow-dags/requirements.txt | 1 - cornflow-server/cornflow/app.py | 1 + .../cornflow/endpoints/__init__.py | 7 +++++++ .../cornflow/tests/unit/test_alarms.py | 8 ++++++-- .../cornflow/tests/unit/test_cli.py | 20 +++++++++---------- .../cornflow/tests/unit/test_commands.py | 2 +- .../cornflow/tests/unit/test_main_alarms.py | 8 ++++++-- 7 files changed, 31 insertions(+), 16 deletions(-) diff --git a/cornflow-dags/requirements.txt b/cornflow-dags/requirements.txt index f31e931f7..bb358b1a9 100644 --- a/cornflow-dags/requirements.txt +++ b/cornflow-dags/requirements.txt @@ -13,7 +13,6 @@ networkx scipy numpy<2.0.0 -# quarto matplotlib seaborn jupyter diff --git a/cornflow-server/cornflow/app.py b/cornflow-server/cornflow/app.py index 314a7627b..d8125a931 100644 --- a/cornflow-server/cornflow/app.py +++ b/cornflow-server/cornflow/app.py @@ -74,6 +74,7 @@ def _fk_pragma_on_connect(dbapi_con, con_record): api = Api(app) for res in resources: api.add_resource(res["resource"], res["urls"], endpoint=res["endpoint"]) + if app.config["ALARMS_ENDPOINTS"]: for res in alarms_resources: api.add_resource(res["resource"], res["urls"], endpoint=res["endpoint"]) diff --git a/cornflow-server/cornflow/endpoints/__init__.py b/cornflow-server/cornflow/endpoints/__init__.py index b5cfc2edc..baf963e00 100644 --- a/cornflow-server/cornflow/endpoints/__init__.py +++ b/cornflow-server/cornflow/endpoints/__init__.py @@ -47,6 +47,7 @@ from .licenses import LicensesEndpoint from .main_alarms import MainAlarmsEndpoint from .permission import PermissionsViewRoleEndpoint, PermissionsViewRoleDetailEndpoint +from .reports import ReportEndpoint, ReportDetailsEndpoint from .roles import RolesListEndpoint, RoleDetailEndpoint from .schemas import SchemaDetailsEndpoint, SchemaEndpoint from .tables import TablesEndpoint, TablesDetailsEndpoint @@ -216,6 +217,12 @@ urls="/table///", endpoint="tables-detail", ), + dict(resource=ReportEndpoint, urls="/report/", endpoint="report"), + dict( + resource=ReportDetailsEndpoint, + urls="/report//", + endpoint="report-detail", + ), ] diff --git a/cornflow-server/cornflow/tests/unit/test_alarms.py b/cornflow-server/cornflow/tests/unit/test_alarms.py index 39e5b9af9..7752fd9c0 100644 --- a/cornflow-server/cornflow/tests/unit/test_alarms.py +++ b/cornflow-server/cornflow/tests/unit/test_alarms.py @@ -19,7 +19,9 @@ def setUp(self): self.response_items = {"id", "name", "description", "criticality", "schema"} self.items_to_check = ["name", "description", "schema", "criticality"] - @unittest.skipUnless(os.getenv("CF_ALARMS_ENDPOINT") == 1, "No alarms implemented") + @unittest.skipUnless( + int(os.getenv("CF_ALARMS_ENDPOINT")) == 1, "No alarms implemented" + ) def test_post_alarm(self): payload = { "name": "Alarm 1", @@ -28,7 +30,9 @@ def test_post_alarm(self): } self.create_new_row(self.url, self.model, payload) - @unittest.skipUnless(os.getenv("CF_ALARMS_ENDPOINT") == 1, "No alarms implemented") + @unittest.skipUnless( + int(os.getenv("CF_ALARMS_ENDPOINT")) == 1, "No alarms implemented" + ) def test_get_alarms(self): data = [ {"name": "Alarm 1", "description": "Description Alarm 1", "criticality": 1}, diff --git a/cornflow-server/cornflow/tests/unit/test_cli.py b/cornflow-server/cornflow/tests/unit/test_cli.py index d1301e4a8..0cdc94a91 100644 --- a/cornflow-server/cornflow/tests/unit/test_cli.py +++ b/cornflow-server/cornflow/tests/unit/test_cli.py @@ -20,11 +20,11 @@ class CLITests(TestCase): def setUp(self): 
db.create_all() - self.numberOfViews = 49 - self.numberOfPermissions = 546 - if os.getenv("CF_ALARMS_ENDPOINT") != 1: - self.numberOfViews = 47 - self.numberOfPermissions = 514 + self.number_of_views = 51 + self.number_of_permissions = 578 + if int(os.getenv("CF_ALARMS_ENDPOINT")) != 1: + self.number_of_views = 49 + self.number_of_permissions = 514 def tearDown(self): db.session.remove() @@ -136,7 +136,7 @@ def test_views_init_command(self): result = runner.invoke(cli, ["views", "init", "-v"]) self.assertEqual(result.exit_code, 0) views = ViewModel.get_all_objects().all() - self.assertEqual(len(views), self.numberOfViews) + self.assertEqual(len(views), self.number_of_views) def test_permissions_entrypoint(self): runner = CliRunner() @@ -160,8 +160,8 @@ def test_permissions_init(self): permissions = PermissionViewRoleModel.get_all_objects().all() self.assertEqual(len(actions), 5) self.assertEqual(len(roles), 4) - self.assertEqual(len(views), self.numberOfViews) - self.assertEqual(len(permissions), self.numberOfPermissions) + self.assertEqual(len(views), self.number_of_views) + self.assertEqual(len(permissions), self.number_of_permissions) def test_permissions_base_command(self): runner = CliRunner() @@ -176,8 +176,8 @@ def test_permissions_base_command(self): permissions = PermissionViewRoleModel.get_all_objects().all() self.assertEqual(len(actions), 5) self.assertEqual(len(roles), 4) - self.assertEqual(len(views), self.numberOfViews) - self.assertEqual(len(permissions), self.numberOfPermissions) + self.assertEqual(len(views), self.number_of_views) + self.assertEqual(len(permissions), self.number_of_permissions) def test_service_entrypoint(self): runner = CliRunner() diff --git a/cornflow-server/cornflow/tests/unit/test_commands.py b/cornflow-server/cornflow/tests/unit/test_commands.py index cb80dab02..ab28499e9 100644 --- a/cornflow-server/cornflow/tests/unit/test_commands.py +++ b/cornflow-server/cornflow/tests/unit/test_commands.py @@ -49,7 +49,7 @@ def setUp(self): "password": "Testpassword1!", } - if os.getenv("CF_ALARMS_ENDPOINT") == 1: + if int(os.getenv("CF_ALARMS_ENDPOINT")) == 1: self.resources = resources + alarms_resources else: self.resources = resources diff --git a/cornflow-server/cornflow/tests/unit/test_main_alarms.py b/cornflow-server/cornflow/tests/unit/test_main_alarms.py index e442b5071..44c3c3462 100644 --- a/cornflow-server/cornflow/tests/unit/test_main_alarms.py +++ b/cornflow-server/cornflow/tests/unit/test_main_alarms.py @@ -31,7 +31,9 @@ def setUp(self): headers=self.get_header_with_auth(self.token), ).json["id"] - @unittest.skipUnless(os.getenv("CF_ALARMS_ENDPOINT") == 1, "No alarms implemented") + @unittest.skipUnless( + int(os.getenv("CF_ALARMS_ENDPOINT")) == 1, "No alarms implemented" + ) def test_post_main_alarm(self): payload = { "message": "Message Main Alarm 1", @@ -40,7 +42,9 @@ def test_post_main_alarm(self): } self.create_new_row(self.url, self.model, payload) - @unittest.skipUnless(os.getenv("CF_ALARMS_ENDPOINT") == 1, "No alarms implemented") + @unittest.skipUnless( + int(os.getenv("CF_ALARMS_ENDPOINT")) == 1, "No alarms implemented" + ) def test_get_main_alarms(self): data = [ { From 0427605060bd28536bc6c33e9969ba5700e29cf8 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Mon, 1 Jul 2024 19:45:01 +0200 Subject: [PATCH 18/84] some working tests on reports. 
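This patch wires the report entity end to end on the server side: the /report/ endpoints, adjustments to ReportModel and its migration (renaming report_link to file_url), the marshmallow schemas and a first set of unit tests. As a rough illustration of the intended use at this point in the series (the base URL, token and execution id are placeholders, and file upload only arrives in a later patch), a client would register a report for an existing execution roughly like this:

    # Illustrative request against the new endpoint; the body is plain JSON here.
    import requests

    payload = {
        "name": "report_1",
        "description": "This is a test for the reports",
        "execution_id": "<existing execution id>",    # placeholder
        "file_url": "http://LINK_TO_MY_REPORT",
    }
    response = requests.post(
        "http://localhost:5000/report/",              # placeholder base URL
        json=payload,
        headers={"Authorization": "Bearer <token>"},  # placeholder token
    )
    assert response.status_code == 201
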
--- cornflow-dags/DAG/tsp/core/experiment.py | 14 ++- .../cornflow/endpoints/__init__.py | 8 ++ cornflow-server/cornflow/endpoints/reports.py | 10 +- .../versions/96f00d0961d1_reports_table.py | 40 ++++---- cornflow-server/cornflow/models/reports.py | 12 +-- cornflow-server/cornflow/schemas/execution.py | 4 +- cornflow-server/cornflow/schemas/reports.py | 14 ++- cornflow-server/cornflow/tests/const.py | 4 + .../cornflow/tests/custom_test_case.py | 1 - .../cornflow/tests/data/bad_report.json | 4 + .../cornflow/tests/data/new_report.json | 5 + .../cornflow/tests/unit/test_reports.py | 91 +++++++++++++++++++ .../cornflow_client/airflow/dag_utilities.py | 2 +- 13 files changed, 170 insertions(+), 39 deletions(-) create mode 100644 cornflow-server/cornflow/tests/data/bad_report.json create mode 100644 cornflow-server/cornflow/tests/data/new_report.json create mode 100644 cornflow-server/cornflow/tests/unit/test_reports.py diff --git a/cornflow-dags/DAG/tsp/core/experiment.py b/cornflow-dags/DAG/tsp/core/experiment.py index 904c9e943..45a4e618e 100644 --- a/cornflow-dags/DAG/tsp/core/experiment.py +++ b/cornflow-dags/DAG/tsp/core/experiment.py @@ -81,11 +81,17 @@ def check_solution(self, *args, **kwargs) -> SuperDict: def generate_report(self, report_path: str, report_name="report") -> None: # a user may give the full "report.qmd" name. # We want to take out the extension - report_base = os.path.splitext(report_name)[0] - path_without_ext = os.path.join( - os.path.dirname(__file__), "../report/", report_base - ) + path_without_ext = os.path.splitext(report_name)[0] + + # if someone gives the absolute path: we use that. + # otherwise we assume it's a file on the report/ directory: + if not os.path.isabs(path_without_ext): + path_without_ext = os.path.join( + os.path.dirname(__file__), "../report/", path_without_ext + ) path_to_qmd = path_without_ext + ".qmd" + if not os.path.exists(path_to_qmd): + raise FileNotFoundError(f"Report with path {path_to_qmd} does not exist.") path_to_output = path_without_ext + ".html" with tempfile.TemporaryDirectory() as tmp: path = os.path.join(tmp, "experiment.json") diff --git a/cornflow-server/cornflow/endpoints/__init__.py b/cornflow-server/cornflow/endpoints/__init__.py index b5cfc2edc..fa5986e7d 100644 --- a/cornflow-server/cornflow/endpoints/__init__.py +++ b/cornflow-server/cornflow/endpoints/__init__.py @@ -37,6 +37,8 @@ ExecutionLogEndpoint, ExecutionRelaunchEndpoint, ) + +from .reports import ReportEndpoint, ReportDetailsEndpoint from .health import HealthEndpoint from .instance import ( InstanceEndpoint, @@ -216,6 +218,12 @@ urls="/table///", endpoint="tables-detail", ), + dict( + resource=ReportDetailsEndpoint, + urls="/report//", + endpoint="report-detail", + ), + dict(resource=ReportEndpoint, urls="/report/", endpoint="report"), ] diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index 2f0b769d7..c8c52c0ba 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -45,7 +45,6 @@ def get(self, **kwargs): created by the authenticated user) and a integer with the HTTP status code :rtype: Tuple(dict, integer) """ - # TODO: filter by execution_id reports = self.get_list(user=self.get_user(), **kwargs) current_app.logger.info(f"User {self.get_user()} gets list of reports") return reports @@ -66,10 +65,13 @@ def post(self, **kwargs): :rtype: Tuple(dict, integer) """ # TODO: not sure if it should be possible to generate a report from the REST API - 
# and if so, should we let them generate a new report file? - report, status_code = self.post_list(data=kwargs) + # and if so, should we let them upload a new report file? - return report, 201 + response = self.post_list(data=kwargs) + current_app.logger.info( + f"User {self.get_user()} creates report {response[0].id}" + ) + return response class ReportDetailsEndpointBase(BaseMetaResource): diff --git a/cornflow-server/cornflow/migrations/versions/96f00d0961d1_reports_table.py b/cornflow-server/cornflow/migrations/versions/96f00d0961d1_reports_table.py index d26addf92..7419aed45 100644 --- a/cornflow-server/cornflow/migrations/versions/96f00d0961d1_reports_table.py +++ b/cornflow-server/cornflow/migrations/versions/96f00d0961d1_reports_table.py @@ -5,37 +5,45 @@ Create Date: 2024-06-12 18:47:06.366487 """ + from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. -revision = '96f00d0961d1' -down_revision = '991b98e24225' +revision = "96f00d0961d1" +down_revision = "991b98e24225" branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('reports', - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime(), nullable=False), - sa.Column('deleted_at', sa.DateTime(), nullable=True), - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('execution_id', sa.String(length=256), nullable=False), - sa.Column('name', sa.String(length=256), nullable=False), - sa.Column('description', sa.TEXT(), nullable=True), - sa.Column('report_link', sa.String(length=256), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['execution_id'], ['executions.id'], ), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table( + "reports", + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("deleted_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column("execution_id", sa.String(length=256), nullable=False), + sa.Column("name", sa.String(length=256), nullable=False), + sa.Column("description", sa.TEXT(), nullable=True), + sa.Column("file_url", sa.String(length=256), nullable=False), + sa.Column("user_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["execution_id"], + ["executions.id"], + ), + sa.ForeignKeyConstraint( + ["user_id"], + ["users.id"], + ), + sa.PrimaryKeyConstraint("id"), ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.drop_table('reports') + op.drop_table("reports") # ### end Alembic commands ### diff --git a/cornflow-server/cornflow/models/reports.py b/cornflow-server/cornflow/models/reports.py index 7b969e9cf..8eed71808 100644 --- a/cornflow-server/cornflow/models/reports.py +++ b/cornflow-server/cornflow/models/reports.py @@ -19,7 +19,7 @@ class ReportModel(TraceAttributesModel): - **id**: int, the report id, primary key for the reports. - **execution_id**: str, the foreign key for the execution (:class:`ExecutionModel`). It links the report to its parent execution. - - **report_link**: str, the link with the actual report. It should be a valid url to a cloud storage bucket. + - **file_url**: str, the link with the actual report. It should be a valid url to a cloud storage bucket. 
- **name**: str, the name of the report given by the user. - **description**: str, the description of the report given by the user. It is optional. - **user_id**: int, the foreign key for the user (:class:`UserModel`). It links the report to its owner. @@ -44,7 +44,7 @@ class ReportModel(TraceAttributesModel): ) name = db.Column(db.String(256), nullable=False) description = db.Column(TEXT, nullable=True) - report_link = db.Column(db.String(256), nullable=False) + file_url = db.Column(db.String(256), nullable=False) @declared_attr def user_id(self): @@ -59,7 +59,7 @@ def __init__(self, data: dict): self.execution_id = data.get("execution_id") self.name = data.get("name") self.description = data.get("description") - self.report_link = data.get("report_link") + self.file_url = data.get("file_url") def update(self, data): """ @@ -71,14 +71,14 @@ def update(self, data): """ super().update(data) - def update_link(self, link: str): + def update_link(self, file_url: str): """ Method to update the report link - :param str link: new URL for the report + :param str file_url: new URL for the report :return: nothing """ - self.report_link = link + self.file_url = file_url super().update({}) def __repr__(self): diff --git a/cornflow-server/cornflow/schemas/execution.py b/cornflow-server/cornflow/schemas/execution.py index 96f8987d4..1528fbd30 100644 --- a/cornflow-server/cornflow/schemas/execution.py +++ b/cornflow-server/cornflow/schemas/execution.py @@ -5,7 +5,7 @@ from cornflow.shared.const import MIN_EXECUTION_STATUS_CODE, MAX_EXECUTION_STATUS_CODE from .common import QueryFilters, BaseDataEndpointResponse from .solution_log import LogSchema, BasicLogSchema -from .reports import ReportSchema +from .reports import ReportSchemaBase class QueryFiltersExecution(QueryFilters): @@ -96,7 +96,7 @@ class ExecutionDagPostRequest(ExecutionRequest, ExecutionDagRequest): class ExecutionDetailsEndpointResponse(BaseDataEndpointResponse): - reports = fields.Nested(ReportSchema, many=True) + reports = fields.Nested(ReportSchemaBase, many=True) config = fields.Nested(ConfigSchemaResponse) instance_id = fields.Str() state = fields.Int() diff --git a/cornflow-server/cornflow/schemas/reports.py b/cornflow-server/cornflow/schemas/reports.py index 7e77c588e..cb03d96a7 100644 --- a/cornflow-server/cornflow/schemas/reports.py +++ b/cornflow-server/cornflow/schemas/reports.py @@ -9,11 +9,15 @@ class QueryFiltersReports(BaseQueryFilters): execution_id = fields.Str(required=False) -class ReportSchema(Schema): - id = fields.Str(dump_only=True) +class ReportSchemaBase(Schema): + id = fields.Int(dump_only=True) + file_url = fields.Str(required=True) + name = fields.Str(required=True) + + +class ReportSchema(ReportSchemaBase): user_id = fields.Int(required=False, load_only=True) execution_id = fields.Str(required=True) - name = fields.Str() description = fields.Str() created_at = fields.DateTime(dump_only=True) updated_at = fields.DateTime(dump_only=True) @@ -23,11 +27,11 @@ class ReportSchema(Schema): class ReportEditRequest(Schema): name = fields.Str() description = fields.Str() - report_link = fields.Str() + file_url = fields.Str() class ReportRequest(Schema): name = fields.Str(required=True) description = fields.Str(required=False) execution_id = fields.Str(required=True) - report_link = fields.Str(required=True) + file_url = fields.Str(required=True) diff --git a/cornflow-server/cornflow/tests/const.py b/cornflow-server/cornflow/tests/const.py index 4f774b228..91844803f 100644 --- a/cornflow-server/cornflow/tests/const.py +++ 
b/cornflow-server/cornflow/tests/const.py @@ -34,6 +34,10 @@ def _get_file(relative_path): FULL_CASE_PATH = _get_file("./data/full_case_raw.json") FULL_CASE_LIST = [FULL_CASE_PATH, _get_file("./data/full_case_raw_2.json")] +REPORT_PATH = _get_file("./data/new_report.json") +BAD_REPORT_PATH = _get_file("./data/bad_report.json") +REPORT_URL = PREFIX + "/report/" + JSON_PATCH_GOOD_PATH = _get_file("./data/json_patch_good.json") JSON_PATCH_BAD_PATH = _get_file("./data/json_patch_bad.json") FULL_CASE_JSON_PATCH_1 = _get_file("./data/full_case_patch.json") diff --git a/cornflow-server/cornflow/tests/custom_test_case.py b/cornflow-server/cornflow/tests/custom_test_case.py index 14fb47d93..1214904f9 100644 --- a/cornflow-server/cornflow/tests/custom_test_case.py +++ b/cornflow-server/cornflow/tests/custom_test_case.py @@ -169,7 +169,6 @@ def create_new_row( self.assertEqual(row.id, response.json["id"]) for key in self.get_keys_to_check(payload): - getattr(row, key) if key in payload: self.assertEqual(getattr(row, key), payload[key]) return row.id diff --git a/cornflow-server/cornflow/tests/data/bad_report.json b/cornflow-server/cornflow/tests/data/bad_report.json new file mode 100644 index 000000000..de738f85b --- /dev/null +++ b/cornflow-server/cornflow/tests/data/bad_report.json @@ -0,0 +1,4 @@ +{ +"name": "report_1", +"description": "This is a test for the reports" +} \ No newline at end of file diff --git a/cornflow-server/cornflow/tests/data/new_report.json b/cornflow-server/cornflow/tests/data/new_report.json new file mode 100644 index 000000000..468eaad87 --- /dev/null +++ b/cornflow-server/cornflow/tests/data/new_report.json @@ -0,0 +1,5 @@ +{ +"name": "report_1", +"description": "This is a test for the reports", +"file_url": "http://LINK_TO_MY_REPORT" +} \ No newline at end of file diff --git a/cornflow-server/cornflow/tests/unit/test_reports.py b/cornflow-server/cornflow/tests/unit/test_reports.py new file mode 100644 index 000000000..29991d066 --- /dev/null +++ b/cornflow-server/cornflow/tests/unit/test_reports.py @@ -0,0 +1,91 @@ +""" +Unit test for the reports endpoints +""" + +# Import from libraries +import json +from unittest.mock import patch + +# Import from internal modules +from cornflow.models import ReportModel, InstanceModel, ExecutionModel +from cornflow.tests.const import ( + INSTANCE_PATH, + REPORT_PATH, + REPORT_URL, + INSTANCE_URL, + EXECUTION_PATH, + DAG_URL, + BAD_REPORT_PATH, + EXECUTION_URL_NORUN, +) +from cornflow.tests.custom_test_case import CustomTestCase, BaseTestCases +from cornflow.tests.unit.tools import patch_af_client + + +class TestReportsListEndpoint(BaseTestCases.ListFilters): + def setUp(self): + # we create an instance, and an execution + super().setUp() + + # instance: + with open(INSTANCE_PATH) as f: + payload = json.load(f) + fk_id = self.create_new_row(INSTANCE_URL, InstanceModel, payload) + + def load_file_fk(_file, **kwargs): + with open(_file) as f: + temp = json.load(f) + temp.update(kwargs) + return temp + + # execution: + fk_id = self.create_new_row( + EXECUTION_URL_NORUN, + ExecutionModel, + payload=load_file_fk(EXECUTION_PATH, instance_id=fk_id), + ) + + self.payload = load_file_fk(REPORT_PATH, execution_id=fk_id) + + self.url = REPORT_URL + self.model = ReportModel + + # self.payloads = [load_file_fk(f) for f in REPORTS_LIST] + # self.solution = load_file_fk(EXECUTION_SOLUTION_PATH) + self.keys_to_check = [ + "id", + "file_url", + "name", + "user_id", + "execution_id", + "description", + "created_at", + "updated_at", + "deleted_at", + ] + 
+ def test_new_report(self): + self.create_new_row(self.url, self.model, payload=self.payload) + + def test_new_report_no_execution(self): + payload = dict(self.payload) + payload["execution_id"] = "bad_id" + response = self.client.post( + self.url, + data=json.dumps(payload), + follow_redirects=True, + headers=self.get_header_with_auth(self.token), + ) + self.assertEqual(404, response.status_code) + self.assertTrue("error" in response.json) + + def test_get_no_reports(self): + self.get_no_rows(self.url) + + def test_repr_method(self): + idx = self.create_new_row(self.url, self.model, self.payload) + self.repr_method(idx, f"") + + def test_str_method(self): + idx = self.create_new_row(self.url, self.model, self.payload) + self.str_method(idx, f"") diff --git a/libs/client/cornflow_client/airflow/dag_utilities.py b/libs/client/cornflow_client/airflow/dag_utilities.py index 8ea2906a1..01f71005a 100644 --- a/libs/client/cornflow_client/airflow/dag_utilities.py +++ b/libs/client/cornflow_client/airflow/dag_utilities.py @@ -350,7 +350,7 @@ def cf_report( ) # this should return the path to the generated file # TODO: add a get_report method in ExperimentCore - file_name = my_experiment.get_report(report_name) + file_name = my_experiment.generate_report(report_name) # TODO: store it in AWS/GCD/Azure bucket # TODO: update execution with link to bucket payload = dict(report_link="") From 4050939f4f006fbb1efd947f4cdfc881ef43ca22 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Tue, 2 Jul 2024 16:55:21 +0200 Subject: [PATCH 19/84] Adjustments to file storage. File storage on local developed. Upload of files done. --- cornflow-server/cornflow/config.py | 5 ++ cornflow-server/cornflow/endpoints/reports.py | 51 +++++++++++++---- .../versions/96f00d0961d1_reports_table.py | 3 +- .../cornflow/models/meta_models.py | 35 ++++++++---- cornflow-server/cornflow/schemas/reports.py | 6 +- cornflow-server/cornflow/tests/const.py | 1 + .../cornflow/tests/custom_test_case.py | 4 +- .../cornflow/tests/data/new_report.html | 7 +++ .../cornflow/tests/data/new_report.json | 5 +- .../cornflow/tests/unit/test_reports.py | 55 ++++++++++++------- 10 files changed, 121 insertions(+), 51 deletions(-) create mode 100644 cornflow-server/cornflow/tests/data/new_report.html diff --git a/cornflow-server/cornflow/config.py b/cornflow-server/cornflow/config.py index 75d26e259..54cc01828 100644 --- a/cornflow-server/cornflow/config.py +++ b/cornflow-server/cornflow/config.py @@ -22,6 +22,11 @@ class DefaultConfig(object): SIGNUP_ACTIVATED = int(os.getenv("SIGNUP_ACTIVATED", 1)) CORNFLOW_SERVICE_USER = os.getenv("CORNFLOW_SERVICE_USER", "service_user") + # file support for reports + FILE_BACKEND = os.getenv("FILE_BACKEND", "local") + UPLOAD_FOLDER = os.getenv("UPLOAD_FOLDER", "../../static") + ALLOWED_EXTENSIONS = os.getenv("ALLOWED_EXTENSIONS", ["pdf", "html"]) + # Open deployment (all dags accessible to all users) OPEN_DEPLOYMENT = os.getenv("OPEN_DEPLOYMENT", 1) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index c8c52c0ba..b5ef4b632 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -2,10 +2,12 @@ External endpoints to manage the reports: create new ones, list all of them, get one in particular These endpoints have different access url, but manage the same data entities """ +import os # Import from libraries -from flask import current_app +from flask import current_app, request from 
flask_apispec import marshal_with, use_kwargs, doc +from werkzeug.utils import secure_filename # Import from internal modules from cornflow.endpoints.meta_resource import BaseMetaResource @@ -18,6 +20,7 @@ ReportRequest, ) from cornflow.shared.authentication import Auth, authenticate +from cornflow.shared.exceptions import InvalidData class ReportEndpoint(BaseMetaResource): @@ -49,11 +52,10 @@ def get(self, **kwargs): current_app.logger.info(f"User {self.get_user()} gets list of reports") return reports - @doc(description="Create an report", tags=["Reports"]) + @doc(description="Create a report", tags=["Reports"]) @authenticate(auth_class=Auth()) @Auth.dag_permission_required - @marshal_with(ReportSchema) - @use_kwargs(ReportRequest, location="json") + @use_kwargs(ReportRequest, location="form") def post(self, **kwargs): """ API method to create a new report linked to an already existing report @@ -64,14 +66,43 @@ def post(self, **kwargs): the reference_id for the newly created report if successful) and a integer wit the HTTP status code :rtype: Tuple(dict, integer) """ - # TODO: not sure if it should be possible to generate a report from the REST API - # and if so, should we let them upload a new report file? - response = self.post_list(data=kwargs) - current_app.logger.info( - f"User {self.get_user()} creates report {response[0].id}" + if "file" not in request.files: + return {"message": "No file part"}, 400 + + file = request.files["file"] + filename = secure_filename(file.filename) + filename_extension = filename.split(".")[-1] + + if filename_extension not in current_app.config["ALLOWED_EXTENSIONS"]: + return { + "message": f"Invalid file extension. Valid extensions are: {current_app.config['ALLOWED_EXTENSIONS']}" + }, 400 + + save_path = os.path.join( + __file__, + f"{current_app.config['UPLOAD_FOLDER']}/{kwargs['name']}.{filename_extension}", ) - return response + + try: + file.save(save_path) + + report = ReportModel( + { + "name": kwargs["name"], + "file_url": save_path, + "execution_id": kwargs["execution_id"], + "user_id": self.get_user().id, + "description": kwargs.get("description", ""), + } + ) + + report.save() + + return {"message": "Report created"}, 201 + except InvalidData as error: + os.remove(save_path) + raise error class ReportDetailsEndpointBase(BaseMetaResource): diff --git a/cornflow-server/cornflow/migrations/versions/96f00d0961d1_reports_table.py b/cornflow-server/cornflow/migrations/versions/96f00d0961d1_reports_table.py index 7419aed45..d47a6cd4b 100644 --- a/cornflow-server/cornflow/migrations/versions/96f00d0961d1_reports_table.py +++ b/cornflow-server/cornflow/migrations/versions/96f00d0961d1_reports_table.py @@ -1,4 +1,5 @@ -"""reports table +""" +Adds reports table to database Revision ID: 96f00d0961d1 Revises: 991b98e24225 diff --git a/cornflow-server/cornflow/models/meta_models.py b/cornflow-server/cornflow/models/meta_models.py index 119cf7b2c..63e3652d8 100644 --- a/cornflow-server/cornflow/models/meta_models.py +++ b/cornflow-server/cornflow/models/meta_models.py @@ -33,17 +33,29 @@ def commit_changes(self, action: str = None): try: db.session.commit() - current_app.logger.debug(f"Transaction type: {action}, performed correctly on {self}") + current_app.logger.debug( + f"Transaction type: {action}, performed correctly on {self}" + ) + except IntegrityError as err: db.session.rollback() current_app.logger.error(f"Integrity error on {action} data: {err}") current_app.logger.error(f"Data: {self.__dict__}") - raise InvalidData(f"Integrity error on 
{action} with data {self}") + + if "FOREIGN KEY" in str(err): + message = f"Foreign key constraint error while {action} on {self.__class__.__tablename__} table" + raise InvalidData(message) + else: + raise InvalidData(f"Integrity error on {action} with data {self}") + except DBAPIError as err: db.session.rollback() - current_app.logger.error(f"Unknown database error on {action} data: {err}") + current_app.logger.error( + f"Unknown database error on {action} data: {type(err)}" + ) current_app.logger.error(f"Data: {self.__dict__}") raise InvalidData(f"Unknown database error on {action} with data {self}") + except Exception as err: db.session.rollback() current_app.logger.error(f"Unknown error on {action} data: {err}") @@ -99,7 +111,9 @@ def create_bulk(cls, data: List): action = "bulk create" try: db.session.commit() - current_app.logger.debug(f"Transaction type: {action}, performed correctly on {cls}") + current_app.logger.debug( + f"Transaction type: {action}, performed correctly on {cls}" + ) except IntegrityError as err: db.session.rollback() current_app.logger.error(f"Integrity error on {action} data: {err}") @@ -120,7 +134,9 @@ def create_update_bulk(cls, instances): action = "bulk create update" try: db.session.commit() - current_app.logger.debug(f"Transaction type: {action}, performed correctly on {cls}") + current_app.logger.debug( + f"Transaction type: {action}, performed correctly on {cls}" + ) except IntegrityError as err: db.session.rollback() current_app.logger.error(f"Integrity error on {action} data: {err}") @@ -136,12 +152,7 @@ def create_update_bulk(cls, instances): return instances @classmethod - def get_all_objects( - cls, - offset=0, - limit=None, - **kwargs - ): + def get_all_objects(cls, offset=0, limit=None, **kwargs): """ Method to get all the objects from the database applying the filters passed as keyword arguments @@ -261,7 +272,7 @@ def get_all_objects( update_date_lte=None, offset=0, limit=None, - **kwargs + **kwargs, ): """ Method to get all the objects from the database applying the filters passed as keyword arguments diff --git a/cornflow-server/cornflow/schemas/reports.py b/cornflow-server/cornflow/schemas/reports.py index cb03d96a7..806d7c5fe 100644 --- a/cornflow-server/cornflow/schemas/reports.py +++ b/cornflow-server/cornflow/schemas/reports.py @@ -1,5 +1,5 @@ # Imports from libraries -from marshmallow import fields, Schema +from marshmallow import fields, Schema, INCLUDE # Imports from internal modules from .common import BaseQueryFilters @@ -31,7 +31,9 @@ class ReportEditRequest(Schema): class ReportRequest(Schema): + class META: + unknown = INCLUDE + name = fields.Str(required=True) description = fields.Str(required=False) execution_id = fields.Str(required=True) - file_url = fields.Str(required=True) diff --git a/cornflow-server/cornflow/tests/const.py b/cornflow-server/cornflow/tests/const.py index 91844803f..8022db1b5 100644 --- a/cornflow-server/cornflow/tests/const.py +++ b/cornflow-server/cornflow/tests/const.py @@ -35,6 +35,7 @@ def _get_file(relative_path): FULL_CASE_LIST = [FULL_CASE_PATH, _get_file("./data/full_case_raw_2.json")] REPORT_PATH = _get_file("./data/new_report.json") +REPORT_FILE_PATH = _get_file("./data/new_report.html") BAD_REPORT_PATH = _get_file("./data/bad_report.json") REPORT_URL = PREFIX + "/report/" diff --git a/cornflow-server/cornflow/tests/custom_test_case.py b/cornflow-server/cornflow/tests/custom_test_case.py index 1214904f9..3d2ca2c79 100644 --- a/cornflow-server/cornflow/tests/custom_test_case.py +++ 
b/cornflow-server/cornflow/tests/custom_test_case.py @@ -90,8 +90,8 @@ def setUp(self): self.roles_with_access = [] @staticmethod - def get_header_with_auth(token): - return {"Content-Type": "application/json", "Authorization": "Bearer " + token} + def get_header_with_auth(token, content_type="application/json"): + return {"Content-Type": content_type, "Authorization": "Bearer " + token} def create_user(self, data): return self.client.post( diff --git a/cornflow-server/cornflow/tests/data/new_report.html b/cornflow-server/cornflow/tests/data/new_report.html new file mode 100644 index 000000000..d7baad82f --- /dev/null +++ b/cornflow-server/cornflow/tests/data/new_report.html @@ -0,0 +1,7 @@ + + + + Test Report + + + diff --git a/cornflow-server/cornflow/tests/data/new_report.json b/cornflow-server/cornflow/tests/data/new_report.json index 468eaad87..04e2c8654 100644 --- a/cornflow-server/cornflow/tests/data/new_report.json +++ b/cornflow-server/cornflow/tests/data/new_report.json @@ -1,5 +1,4 @@ { -"name": "report_1", -"description": "This is a test for the reports", -"file_url": "http://LINK_TO_MY_REPORT" + "name": "report_1", + "description": "This is a test for the reports" } \ No newline at end of file diff --git a/cornflow-server/cornflow/tests/unit/test_reports.py b/cornflow-server/cornflow/tests/unit/test_reports.py index 29991d066..6a02b5fef 100644 --- a/cornflow-server/cornflow/tests/unit/test_reports.py +++ b/cornflow-server/cornflow/tests/unit/test_reports.py @@ -1,28 +1,24 @@ """ Unit test for the reports endpoints """ - -# Import from libraries import json -from unittest.mock import patch -# Import from internal modules +from flask import current_app + from cornflow.models import ReportModel, InstanceModel, ExecutionModel from cornflow.tests.const import ( INSTANCE_PATH, REPORT_PATH, + REPORT_FILE_PATH, REPORT_URL, INSTANCE_URL, EXECUTION_PATH, - DAG_URL, - BAD_REPORT_PATH, EXECUTION_URL_NORUN, ) -from cornflow.tests.custom_test_case import CustomTestCase, BaseTestCases -from cornflow.tests.unit.tools import patch_af_client +from cornflow.tests.custom_test_case import CustomTestCase -class TestReportsListEndpoint(BaseTestCases.ListFilters): +class TestReportsListEndpoint(CustomTestCase): def setUp(self): # we create an instance, and an execution super().setUp() @@ -65,27 +61,44 @@ def load_file_fk(_file, **kwargs): ] def test_new_report(self): - self.create_new_row(self.url, self.model, payload=self.payload) + response = self.client.post( + self.url, + data=dict(file=(open(REPORT_FILE_PATH, "rb")), **self.payload), + follow_redirects=True, + headers=self.get_header_with_auth( + self.token, content_type="multipart/form-data" + ), + ) + + self.assertEqual(201, response.status_code) + self.assertTrue("message" in response.json) + + # check that the file in the test folder and the one generated on the static fodler are equal + with open(REPORT_FILE_PATH, "rb") as f: + file = f.read() + with open( + f"{current_app.config['UPLOAD_FOLDER']}/{self.payload['name']}.html", "rb" + ) as f: + file2 = f.read() + + self.assertEqual(file, file2) def test_new_report_no_execution(self): payload = dict(self.payload) payload["execution_id"] = "bad_id" response = self.client.post( self.url, - data=json.dumps(payload), + data=dict(file=(open(REPORT_FILE_PATH, "rb")), **payload), follow_redirects=True, - headers=self.get_header_with_auth(self.token), + headers=self.get_header_with_auth( + self.token, content_type="multipart/form-data" + ), ) - self.assertEqual(404, response.status_code) + + 
print(response.json) + + self.assertEqual(400, response.status_code) self.assertTrue("error" in response.json) def test_get_no_reports(self): self.get_no_rows(self.url) - - def test_repr_method(self): - idx = self.create_new_row(self.url, self.model, self.payload) - self.repr_method(idx, f"") - - def test_str_method(self): - idx = self.create_new_row(self.url, self.model, self.payload) - self.str_method(idx, f"") From 082eff94f3b9a5fa6285a1a10f922911c0bdde13 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Tue, 2 Jul 2024 17:04:46 +0200 Subject: [PATCH 20/84] Added test for main get --- cornflow-server/cornflow/endpoints/reports.py | 3 ++- .../cornflow/tests/unit/test_reports.py | 21 +++++++++++++++---- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index b5ef4b632..2b62a87f9 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -56,6 +56,7 @@ def get(self, **kwargs): @authenticate(auth_class=Auth()) @Auth.dag_permission_required @use_kwargs(ReportRequest, location="form") + @marshal_with(ReportSchema) def post(self, **kwargs): """ API method to create a new report linked to an already existing report @@ -99,7 +100,7 @@ def post(self, **kwargs): report.save() - return {"message": "Report created"}, 201 + return report, 201 except InvalidData as error: os.remove(save_path) raise error diff --git a/cornflow-server/cornflow/tests/unit/test_reports.py b/cornflow-server/cornflow/tests/unit/test_reports.py index 6a02b5fef..6592caf54 100644 --- a/cornflow-server/cornflow/tests/unit/test_reports.py +++ b/cornflow-server/cornflow/tests/unit/test_reports.py @@ -46,13 +46,10 @@ def load_file_fk(_file, **kwargs): self.url = REPORT_URL self.model = ReportModel - # self.payloads = [load_file_fk(f) for f in REPORTS_LIST] - # self.solution = load_file_fk(EXECUTION_SOLUTION_PATH) self.keys_to_check = [ "id", "file_url", "name", - "user_id", "execution_id", "description", "created_at", @@ -71,7 +68,12 @@ def test_new_report(self): ) self.assertEqual(201, response.status_code) - self.assertTrue("message" in response.json) + + for key in self.keys_to_check: + self.assertTrue(key in response.json) + + for key, value in self.payload.items(): + self.assertEqual(response.json[key], value) # check that the file in the test folder and the one generated on the static fodler are equal with open(REPORT_FILE_PATH, "rb") as f: @@ -83,6 +85,8 @@ def test_new_report(self): self.assertEqual(file, file2) + return response.json + def test_new_report_no_execution(self): payload = dict(self.payload) payload["execution_id"] = "bad_id" @@ -102,3 +106,12 @@ def test_new_report_no_execution(self): def test_get_no_reports(self): self.get_no_rows(self.url) + + def test_get_all_reports(self): + item = self.test_new_report() + response = self.client.get( + self.url, headers=self.get_header_with_auth(self.token) + ) + + self.assertEqual(1, len(response.json)) + self.assertEqual(item, response.json[0]) From 3da87718f04939c0981e13a9ab2b91917d9247d7 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 3 Jul 2024 11:57:52 +0200 Subject: [PATCH 21/84] fix uploading tests: add static and use absolute path for e.v. 
UPLOAD_FOLDER, using execution id in path and cleaning after tests --- cornflow-server/cornflow/config.py | 6 ++++-- cornflow-server/cornflow/endpoints/reports.py | 9 +++++---- .../cornflow/tests/unit/test_reports.py | 18 ++++++++++++++---- cornflow-server/static/__init__.py | 0 4 files changed, 23 insertions(+), 10 deletions(-) create mode 100644 cornflow-server/static/__init__.py diff --git a/cornflow-server/cornflow/config.py b/cornflow-server/cornflow/config.py index 54cc01828..4dba2613e 100644 --- a/cornflow-server/cornflow/config.py +++ b/cornflow-server/cornflow/config.py @@ -24,7 +24,10 @@ class DefaultConfig(object): # file support for reports FILE_BACKEND = os.getenv("FILE_BACKEND", "local") - UPLOAD_FOLDER = os.getenv("UPLOAD_FOLDER", "../../static") + UPLOAD_FOLDER = os.getenv( + "UPLOAD_FOLDER", + os.path.abspath(os.path.join(os.path.dirname(__file__), "../static")), + ) ALLOWED_EXTENSIONS = os.getenv("ALLOWED_EXTENSIONS", ["pdf", "html"]) # Open deployment (all dags accessible to all users) @@ -89,7 +92,6 @@ class DefaultConfig(object): class Development(DefaultConfig): - """ """ ENV = "development" diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index 2b62a87f9..e7041ec91 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -80,10 +80,11 @@ def post(self, **kwargs): "message": f"Invalid file extension. Valid extensions are: {current_app.config['ALLOWED_EXTENSIONS']}" }, 400 - save_path = os.path.join( - __file__, - f"{current_app.config['UPLOAD_FOLDER']}/{kwargs['name']}.{filename_extension}", - ) + my_directory = f"{current_app.config['UPLOAD_FOLDER']}/{kwargs['execution_id']}" + # we create a directory for the execution + if not os.path.exists(my_directory): + os.mkdir(my_directory) + save_path = f"{my_directory}/{kwargs['name']}.{filename_extension}" try: file.save(save_path) diff --git a/cornflow-server/cornflow/tests/unit/test_reports.py b/cornflow-server/cornflow/tests/unit/test_reports.py index 6592caf54..03b30008c 100644 --- a/cornflow-server/cornflow/tests/unit/test_reports.py +++ b/cornflow-server/cornflow/tests/unit/test_reports.py @@ -1,6 +1,9 @@ """ Unit test for the reports endpoints """ + +import shutil +import os import json from flask import current_app @@ -57,6 +60,15 @@ def load_file_fk(_file, **kwargs): "deleted_at", ] + def tearDown(self): + super().tearDown() + my_directories = os.listdir(current_app.config["UPLOAD_FOLDER"]) + for _dir in my_directories: + try: + shutil.rmtree(os.path.join(current_app.config["UPLOAD_FOLDER"], _dir)) + except OSError: + pass + def test_new_report(self): response = self.client.post( self.url, @@ -78,13 +90,11 @@ def test_new_report(self): # check that the file in the test folder and the one generated on the static fodler are equal with open(REPORT_FILE_PATH, "rb") as f: file = f.read() - with open( - f"{current_app.config['UPLOAD_FOLDER']}/{self.payload['name']}.html", "rb" - ) as f: + my_upload_path = f"{current_app.config['UPLOAD_FOLDER']}/{self.payload['execution_id']}/{self.payload['name']}.html" + with open(my_upload_path, "rb") as f: file2 = f.read() self.assertEqual(file, file2) - return response.json def test_new_report_no_execution(self): diff --git a/cornflow-server/static/__init__.py b/cornflow-server/static/__init__.py new file mode 100644 index 000000000..e69de29bb From 0be825aa42800f6204893abe7377ea3ff0daadae Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 3 Jul 2024 12:06:10 +0200 
Subject: [PATCH 22/84] some comments --- cornflow-server/cornflow/endpoints/reports.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index e7041ec91..d096108b4 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -59,12 +59,12 @@ def get(self, **kwargs): @marshal_with(ReportSchema) def post(self, **kwargs): """ - API method to create a new report linked to an already existing report + API method to create a new report linked to an existing execution It requires authentication to be passed in the form of a token that has to be linked to an existing session (login) made by a user :return: A dictionary with a message (error if authentication failed, error if data is not validated or - the reference_id for the newly created report if successful) and a integer wit the HTTP status code + the reference_id for the newly created report if successful) and a integer with the HTTP status code :rtype: Tuple(dict, integer) """ @@ -79,11 +79,14 @@ def post(self, **kwargs): return { "message": f"Invalid file extension. Valid extensions are: {current_app.config['ALLOWED_EXTENSIONS']}" }, 400 + # TODO: before writing, maybe we want to check for the execution existing/ being valid? my_directory = f"{current_app.config['UPLOAD_FOLDER']}/{kwargs['execution_id']}" # we create a directory for the execution if not os.path.exists(my_directory): os.mkdir(my_directory) + # TODO: maybe we want to generate a random string for the storage? + # i.e., what happens if the file already exists? save_path = f"{my_directory}/{kwargs['name']}.{filename_extension}" try: From 3a47e1316f3b97b9354a906f7e3eb4d5077dd87c Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Wed, 3 Jul 2024 20:34:51 +0200 Subject: [PATCH 23/84] Added some more funtionalities --- cornflow-server/cornflow/endpoints/reports.py | 69 +++++++++++-------- cornflow-server/cornflow/schemas/reports.py | 1 - cornflow-server/cornflow/shared/exceptions.py | 24 ++++--- .../cornflow/tests/unit/test_reports.py | 30 +++++++- 4 files changed, 86 insertions(+), 38 deletions(-) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index d096108b4..b2bbe40ed 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -3,16 +3,14 @@ These endpoints have different access url, but manage the same data entities """ import os +from datetime import datetime -# Import from libraries -from flask import current_app, request +from flask import current_app, request, send_from_directory from flask_apispec import marshal_with, use_kwargs, doc from werkzeug.utils import secure_filename -# Import from internal modules from cornflow.endpoints.meta_resource import BaseMetaResource from cornflow.models import ExecutionModel, ReportModel - from cornflow.schemas.reports import ( ReportSchema, ReportEditRequest, @@ -20,7 +18,7 @@ ReportRequest, ) from cornflow.shared.authentication import Auth, authenticate -from cornflow.shared.exceptions import InvalidData +from cornflow.shared.exceptions import FileError, InvalidData, ObjectDoesNotExist class ReportEndpoint(BaseMetaResource): @@ -68,6 +66,11 @@ def post(self, **kwargs): :rtype: Tuple(dict, integer) """ + execution = ExecutionModel.get_one_object(id=kwargs["execution_id"]) + + if execution is None: + raise ObjectDoesNotExist("The execution does 
not exist") + if "file" not in request.files: return {"message": "No file part"}, 400 @@ -77,37 +80,40 @@ def post(self, **kwargs): if filename_extension not in current_app.config["ALLOWED_EXTENSIONS"]: return { - "message": f"Invalid file extension. Valid extensions are: {current_app.config['ALLOWED_EXTENSIONS']}" + "message": f"Invalid file extension. " + f"Valid extensions are: {current_app.config['ALLOWED_EXTENSIONS']}" }, 400 - # TODO: before writing, maybe we want to check for the execution existing/ being valid? my_directory = f"{current_app.config['UPLOAD_FOLDER']}/{kwargs['execution_id']}" + # we create a directory for the execution if not os.path.exists(my_directory): + current_app.logger.info(f"Creating directory {my_directory}") os.mkdir(my_directory) - # TODO: maybe we want to generate a random string for the storage? - # i.e., what happens if the file already exists? - save_path = f"{my_directory}/{kwargs['name']}.{filename_extension}" - try: - file.save(save_path) + save_path = f"{my_directory}/{kwargs['name']}.{filename_extension}" - report = ReportModel( - { - "name": kwargs["name"], - "file_url": save_path, - "execution_id": kwargs["execution_id"], - "user_id": self.get_user().id, - "description": kwargs.get("description", ""), - } - ) + report = ReportModel( + { + "name": kwargs["name"], + "file_url": save_path, + "execution_id": kwargs["execution_id"], + "user_id": execution.user_id, + "description": kwargs.get("description", ""), + } + ) - report.save() + report.save() + try: + # We try to save the file, if an error is raised then we delete the record on the database + file.save(save_path) return report, 201 - except InvalidData as error: - os.remove(save_path) - raise error + + except Exception as error: + report.delete() + current_app.logger.error(error) + raise FileError class ReportDetailsEndpointBase(BaseMetaResource): @@ -138,7 +144,16 @@ def get(self, idx): :rtype: Tuple(dict, integer) """ current_app.logger.info(f"User {self.get_user()} gets details of report {idx}") - return self.get_detail(user=self.get_user(), idx=idx) + report = self.get_detail(user_id=self.get_user_id(), idx=idx) + if report is None: + raise ObjectDoesNotExist + + directory, file = report.file_url.split(report.name) + file = f"{report.name}{file}" + directory = directory[:-1] + current_app.logger.debug(f"Directory {directory}") + current_app.logger.debug(f"File {file}") + return send_from_directory(directory, file) @doc(description="Edit a report", tags=["Reports"], inherit=False) @authenticate(auth_class=Auth()) @@ -169,4 +184,4 @@ def delete(self, idx): :rtype: Tuple(dict, integer) """ current_app.logger.info(f"User {self.get_user()} deleted report {idx}") - return self.delete_detail(user=self.get_user(), idx=idx) + return self.delete_detail(user_id=self.get_user_id(), idx=idx) diff --git a/cornflow-server/cornflow/schemas/reports.py b/cornflow-server/cornflow/schemas/reports.py index 806d7c5fe..90158d04a 100644 --- a/cornflow-server/cornflow/schemas/reports.py +++ b/cornflow-server/cornflow/schemas/reports.py @@ -27,7 +27,6 @@ class ReportSchema(ReportSchemaBase): class ReportEditRequest(Schema): name = fields.Str() description = fields.Str() - file_url = fields.Str() class ReportRequest(Schema): diff --git a/cornflow-server/cornflow/shared/exceptions.py b/cornflow-server/cornflow/shared/exceptions.py index de1099009..c3f4da63a 100644 --- a/cornflow-server/cornflow/shared/exceptions.py +++ b/cornflow-server/cornflow/shared/exceptions.py @@ -122,10 +122,21 @@ class 
ConfigurationError(InvalidUsage): error = "No authentication method configured on the server" +class FileError(InvalidUsage): + """ + Exception used when there is an error regarding the upload of a file to the server + """ + + status_code = 400 + error = "Error uploading the file" + + INTERNAL_SERVER_ERROR_MESSAGE = "500 Internal Server Error" -INTERNAL_SERVER_ERROR_MESSAGE_DETAIL = "The server encountered an internal error and was unable " \ - "to complete your request. Either the server is overloaded or " \ - "there is an error in the application." +INTERNAL_SERVER_ERROR_MESSAGE_DETAIL = ( + "The server encountered an internal error and was unable " + "to complete your request. Either the server is overloaded or " + "there is an error in the application." +) def initialize_errorhandlers(app): @@ -187,10 +198,7 @@ def handle_internal_server_error(error): status_code = error.code or status_code error_msg = f"{status_code} {error.name or INTERNAL_SERVER_ERROR_MESSAGE}" error_str = f"{error_msg}. {str(error.description or '') or INTERNAL_SERVER_ERROR_MESSAGE_DETAIL}" - response_dict = { - "message": error_msg, - "error": error_str - } + response_dict = {"message": error_msg, "error": error_str} response = jsonify(response_dict) elif app.config["ENV"] == "production": @@ -202,7 +210,7 @@ def handle_internal_server_error(error): response_dict = { "message": INTERNAL_SERVER_ERROR_MESSAGE, - "error": INTERNAL_SERVER_ERROR_MESSAGE_DETAIL + "error": INTERNAL_SERVER_ERROR_MESSAGE_DETAIL, } response = jsonify(response_dict) else: diff --git a/cornflow-server/cornflow/tests/unit/test_reports.py b/cornflow-server/cornflow/tests/unit/test_reports.py index 03b30008c..af76d53cc 100644 --- a/cornflow-server/cornflow/tests/unit/test_reports.py +++ b/cornflow-server/cornflow/tests/unit/test_reports.py @@ -109,8 +109,6 @@ def test_new_report_no_execution(self): ), ) - print(response.json) - self.assertEqual(400, response.status_code) self.assertTrue("error" in response.json) @@ -125,3 +123,31 @@ def test_get_all_reports(self): self.assertEqual(1, len(response.json)) self.assertEqual(item, response.json[0]) + + def test_get_one_report(self): + item = self.test_new_report() + response = self.client.get( + f"{self.url}{item['id']}/", headers=self.get_header_with_auth(self.token) + ) + + content = response.get_data() + + with open(REPORT_FILE_PATH, "rb") as f: + file = f.read() + + self.assertEqual(200, response.status_code) + self.assertEqual(content, file) + + def test_delete_report(self): + item = self.test_new_report() + response = self.client.delete( + f"{self.url}{item['id']}/", headers=self.get_header_with_auth(self.token) + ) + + self.assertEqual(200, response.status_code) + self.assertTrue("message" in response.json) + + response = self.client.get( + f"{self.url}{item['id']}/", headers=self.get_header_with_auth(self.token) + ) + self.assertEqual(404, response.status_code) From a6ecb0d06ff06ca2a2db85139ed082b3d27cc862 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 3 Jul 2024 21:43:03 +0200 Subject: [PATCH 24/84] added endpoint to client and finished code in dag. 
--- cornflow-dags/DAG/solve_model_dag/__init__.py | 28 +++++++++++++++ cornflow-dags/DAG/solve_model_dag/report.qmd | 0 cornflow-server/cornflow/models/reports.py | 2 +- .../tests/integration/test_cornflowclient.py | 1 + .../cornflow_client/airflow/dag_utilities.py | 35 +++++++++++-------- .../client/cornflow_client/cornflow_client.py | 2 ++ .../cornflow_client/raw_cornflow_client.py | 27 +++++++++++++- 7 files changed, 78 insertions(+), 17 deletions(-) create mode 100644 cornflow-dags/DAG/solve_model_dag/report.qmd diff --git a/cornflow-dags/DAG/solve_model_dag/__init__.py b/cornflow-dags/DAG/solve_model_dag/__init__.py index 60821e4da..421354926 100644 --- a/cornflow-dags/DAG/solve_model_dag/__init__.py +++ b/cornflow-dags/DAG/solve_model_dag/__init__.py @@ -15,6 +15,8 @@ ) import cornflow_client.airflow.dag_utilities as utils +import tempfile +from quarto import render import pulp as pl import orloge as ol import os @@ -70,6 +72,32 @@ def get_objective(self) -> float: def check_solution(self, *args, **kwargs) -> dict: return dict() + def generate_report(self, report_path: str, report_name="report") -> None: + # a user may give the full "report.qmd" name. + # We want to take out the extension + path_without_ext = os.path.splitext(report_name)[0] + + # if someone gives the absolute path: we use that. + # otherwise we assume it's a file on the report/ directory: + if not os.path.isabs(path_without_ext): + path_without_ext = os.path.join( + os.path.dirname(__file__), "../report/", path_without_ext + ) + path_to_qmd = path_without_ext + ".qmd" + if not os.path.exists(path_to_qmd): + raise FileNotFoundError(f"Report with path {path_to_qmd} does not exist.") + path_to_output = path_without_ext + ".html" + with tempfile.TemporaryDirectory() as tmp: + path = os.path.join(tmp, "experiment.json") + # write a json with instance and solution to temp file + self.to_json(path) + # pass the path to the report to render + # it generates a report with path = path_to_output + render(input=path_to_qmd, execute_params=dict(file_name=path)) + # quarto always writes the report in the .qmd directory. + # thus, we need to move it where we want to: + os.replace(path_to_output, report_path) + class PuLP(ApplicationCore): name = "solve_model_dag" diff --git a/cornflow-dags/DAG/solve_model_dag/report.qmd b/cornflow-dags/DAG/solve_model_dag/report.qmd new file mode 100644 index 000000000..e69de29bb diff --git a/cornflow-server/cornflow/models/reports.py b/cornflow-server/cornflow/models/reports.py index 8eed71808..73ef15c8f 100644 --- a/cornflow-server/cornflow/models/reports.py +++ b/cornflow-server/cornflow/models/reports.py @@ -19,7 +19,7 @@ class ReportModel(TraceAttributesModel): - **id**: int, the report id, primary key for the reports. - **execution_id**: str, the foreign key for the execution (:class:`ExecutionModel`). It links the report to its parent execution. - - **file_url**: str, the link with the actual report. It should be a valid url to a cloud storage bucket. + - **file_url**: str, the link with the actual report. It should be a valid url to a cloud storage bucket or a file. - **name**: str, the name of the report given by the user. - **description**: str, the description of the report given by the user. It is optional. - **user_id**: int, the foreign key for the user (:class:`UserModel`). It links the report to its owner. 
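For reference, a minimal sketch of how the generate_report hook added above is meant to be driven from user code; here `app`, `input_data` and `solution_data` are placeholders for an instance of the PuLP application and schema-valid instance/solution dicts, not names defined by this patch:

    # assuming `app` is the PuLP application defined above and that
    # input_data / solution_data are schema-valid instance and solution dicts
    solver_class = app.get_solver(app.get_default_solver_name())
    experiment = solver_class(app.instance(input_data), app.solution(solution_data))

    # renders the .qmd template with quarto against a temporary experiment.json
    # and moves the resulting HTML to report_path
    experiment.generate_report(
        report_path="/tmp/solve_model_dag_report.html", report_name="report"
    )

The same sequence is what cf_report performs on the Airflow side in the dag_utilities changes below.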
diff --git a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py index 20bcac4d1..9b6c235a1 100644 --- a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py +++ b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py @@ -1,6 +1,7 @@ """ """ + # Full imports import json import pulp diff --git a/libs/client/cornflow_client/airflow/dag_utilities.py b/libs/client/cornflow_client/airflow/dag_utilities.py index 01f71005a..6d8f88e75 100644 --- a/libs/client/cornflow_client/airflow/dag_utilities.py +++ b/libs/client/cornflow_client/airflow/dag_utilities.py @@ -329,10 +329,8 @@ def cf_report( try: client = connect_to_cornflow(secrets) exec_id = kwargs["dag_run"].conf["exec_id"] - # TODO: why not client.get_results? or get_status but for the config? - # I just want to check the config as a first step - execution_data = client.get_data(exec_id) - config = execution_data["config"] + execution_info = client.get_results(exec_id) + config = execution_info["config"] report_config = config.get("report", {}) if not report_config: # no need to write report since it's not requested @@ -342,23 +340,30 @@ def cf_report( input_data = execution_data["data"] solution_data = execution_data["solution_data"] - report_name = report_config.get("name") - # maybe all of this should be abstracted inside the app + report_name = report_config.get("name", "report") + # maybe all of this should be abstracted inside the app? + # maybe the app should return an Experiment? experiment = app.get_solver(app.get_default_solver_name()) my_experiment = experiment( app.instance(input_data), app.solution(solution_data) ) - # this should return the path to the generated file - # TODO: add a get_report method in ExperimentCore - file_name = my_experiment.generate_report(report_name) - # TODO: store it in AWS/GCD/Azure bucket - # TODO: update execution with link to bucket - payload = dict(report_link="") - client.put_one_execution(exec_id, payload) - return True + report_path = os.path.abspath("./my_report.html") + my_experiment.generate_report(report_path=report_path, report_name=report_name) + if not os.path.exists(report_path): + raise AirflowDagException("The generation of the report failed") + # we assume the contents of the config match name + description + payload = dict( + filename=report_path, + execution_id=exec_id, + name=report_name, + description=report_config.get("description"), + ) + client.create_report(**payload) + except CornFlowApiError: + raise AirflowDagException("The writing of the report failed") except Exception as e: - raise AirflowDagException("There was an error during the solving") + raise AirflowDagException("An unknown error occurred: " + str(e)) def callback_email(context: dict): diff --git a/libs/client/cornflow_client/cornflow_client.py b/libs/client/cornflow_client/cornflow_client.py index 7cb254f99..1321f17ec 100644 --- a/libs/client/cornflow_client/cornflow_client.py +++ b/libs/client/cornflow_client/cornflow_client.py @@ -16,6 +16,8 @@ def __init__(self, url, token=None): self.create_execution_data_check = self.expect_status( self.raw.create_execution_data_check, 201 ) + self.create_report = self.expect_status(self.raw.create_report, 201) + self.create_instance_data_check = self.expect_status( self.raw.create_instance_data_check, 201 ) diff --git a/libs/client/cornflow_client/raw_cornflow_client.py b/libs/client/cornflow_client/raw_cornflow_client.py index b222419c8..7787c4234 100644 --- 
a/libs/client/cornflow_client/raw_cornflow_client.py +++ b/libs/client/cornflow_client/raw_cornflow_client.py @@ -309,15 +309,17 @@ def create_instance_file( :param str filename: path to filename to upload :param str name: name for instance :param str description: description of the instance + :param bool minimize: minimize the problem if True :param str encoding: the type of encoding used in the call. Defaults to 'br' """ with open(filename, "rb") as file: - return self.create_api( + result = self.create_api( "instancefile/", data=dict(name=name, description=description, minimize=minimize), files=dict(file=file), encoding=encoding, ) + return result @log_call @ask_token @@ -496,6 +498,29 @@ def write_solution(self, execution_id, encoding=None, **kwargs): "dag/", id=execution_id, encoding=encoding, payload=kwargs ) + @ask_token + @prepare_encoding + def create_report(self, name, filename, execution_id, encoding=None, **kwargs): + """ + Edits an execution + + :param str execution_id: id for the execution + :param str name: the name of the report + :param file file: the file object with the report (e.g., open(REPORT_FILE_PATH, "rb")) + :param kwargs: optional data to write (description) + :param str encoding: the type of encoding used in the call. Defaults to 'br' + """ + with open(filename, "rb") as _file: + payload = ( + dict(file=_file, name=name, execution_id=execution_id, **kwargs), + ) + result = self.create_api( + "report/", + data=payload, + encoding=encoding, + ) + return result + @ask_token @prepare_encoding def write_instance_checks(self, instance_id, encoding=None, **kwargs): From b8d711fde127eae27e8c2c4534f2bbd1c7773e0d Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Mon, 15 Jul 2024 11:04:01 +0200 Subject: [PATCH 25/84] Small change for CodeQL --- cornflow-server/cornflow/endpoints/reports.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index b2bbe40ed..f8cb811c9 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -84,7 +84,7 @@ def post(self, **kwargs): f"Valid extensions are: {current_app.config['ALLOWED_EXTENSIONS']}" }, 400 - my_directory = f"{current_app.config['UPLOAD_FOLDER']}/{kwargs['execution_id']}" + my_directory = f"{current_app.config['UPLOAD_FOLDER']}/{execution.id}" # we create a directory for the execution if not os.path.exists(my_directory): From 35d9cc83753131b3cf3ae438b86bd6ace1234fc2 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Mon, 15 Jul 2024 11:30:45 +0200 Subject: [PATCH 26/84] Some more checks on path building to restrict where the reports can be saved following the proposals of CodeQL --- cornflow-server/cornflow/endpoints/reports.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index f8cb811c9..3f5aad99d 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -18,7 +18,12 @@ ReportRequest, ) from cornflow.shared.authentication import Auth, authenticate -from cornflow.shared.exceptions import FileError, InvalidData, ObjectDoesNotExist +from cornflow.shared.exceptions import ( + FileError, + InvalidData, + ObjectDoesNotExist, + NoPermission, +) class ReportEndpoint(BaseMetaResource): @@ -91,7 +96,12 @@ def post(self, **kwargs): 
current_app.logger.info(f"Creating directory {my_directory}") os.mkdir(my_directory) - save_path = f"{my_directory}/{kwargs['name']}.{filename_extension}" + report_name = f"{secure_filename(kwargs['name'])}.{filename_extension}" + + save_path = os.path.normpath(os.path.join(my_directory, report_name)) + + if "static" not in save_path and ".." in save_path: + raise NoPermission("Invalid file name") report = ReportModel( { From b1d79db4271aee3bf6389d628e6de83d3db291e0 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Tue, 16 Jul 2024 08:33:08 +0200 Subject: [PATCH 27/84] Made delete endpoint to delete report file. Changed permissions to create a report Tested pdf report storage --- cornflow-server/cornflow/endpoints/reports.py | 23 +++-- cornflow-server/cornflow/shared/const.py | 2 + cornflow-server/cornflow/tests/const.py | 3 +- .../cornflow/tests/data/new_report_2.pdf | Bin 0 -> 6111 bytes .../cornflow/tests/unit/test_cli.py | 2 +- .../cornflow/tests/unit/test_reports.py | 80 +++++++++++++++--- .../client/cornflow_client/cornflow_client.py | 4 + 7 files changed, 92 insertions(+), 22 deletions(-) create mode 100644 cornflow-server/cornflow/tests/data/new_report_2.pdf diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index 3f5aad99d..e6c034ccf 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -3,7 +3,6 @@ These endpoints have different access url, but manage the same data entities """ import os -from datetime import datetime from flask import current_app, request, send_from_directory from flask_apispec import marshal_with, use_kwargs, doc @@ -18,9 +17,9 @@ ReportRequest, ) from cornflow.shared.authentication import Auth, authenticate +from cornflow.shared.const import SERVICE_ROLE from cornflow.shared.exceptions import ( FileError, - InvalidData, ObjectDoesNotExist, NoPermission, ) @@ -31,6 +30,8 @@ class ReportEndpoint(BaseMetaResource): Endpoint used to create a new report or get all the reports and their information back """ + ROLES_WITH_ACCESS = [SERVICE_ROLE] + def __init__(self): super().__init__() self.model = ReportModel @@ -57,7 +58,6 @@ def get(self, **kwargs): @doc(description="Create a report", tags=["Reports"]) @authenticate(auth_class=Auth()) - @Auth.dag_permission_required @use_kwargs(ReportRequest, location="form") @marshal_with(ReportSchema) def post(self, **kwargs): @@ -70,7 +70,6 @@ def post(self, **kwargs): the reference_id for the newly created report if successful) and a integer with the HTTP status code :rtype: Tuple(dict, integer) """ - execution = ExecutionModel.get_one_object(id=kwargs["execution_id"]) if execution is None: @@ -100,7 +99,7 @@ def post(self, **kwargs): save_path = os.path.normpath(os.path.join(my_directory, report_name)) - if "static" not in save_path and ".." in save_path: + if "static" not in save_path or ".." in save_path: raise NoPermission("Invalid file name") report = ReportModel( @@ -161,8 +160,7 @@ def get(self, idx): directory, file = report.file_url.split(report.name) file = f"{report.name}{file}" directory = directory[:-1] - current_app.logger.debug(f"Directory {directory}") - current_app.logger.debug(f"File {file}") + return send_from_directory(directory, file) @doc(description="Edit a report", tags=["Reports"], inherit=False) @@ -193,5 +191,14 @@ def delete(self, idx): a message) and an integer with the HTTP status code. 
:rtype: Tuple(dict, integer) """ - current_app.logger.info(f"User {self.get_user()} deleted report {idx}") + + # get report objet + report = self.get_detail(user_id=self.get_user_id(), idx=idx) + + if report is None: + raise ObjectDoesNotExist + + # delete file + os.remove(os.path.join(report.file_url)) + return self.delete_detail(user_id=self.get_user_id(), idx=idx) diff --git a/cornflow-server/cornflow/shared/const.py b/cornflow-server/cornflow/shared/const.py index e8fd2d591..20b59ede7 100644 --- a/cornflow-server/cornflow/shared/const.py +++ b/cornflow-server/cornflow/shared/const.py @@ -122,4 +122,6 @@ EXTRA_PERMISSION_ASSIGNATION = [ (VIEWER_ROLE, PUT_ACTION, "user-detail"), + (VIEWER_ROLE, GET_ACTION, "report"), + (PLANNER_ROLE, GET_ACTION, "report"), ] diff --git a/cornflow-server/cornflow/tests/const.py b/cornflow-server/cornflow/tests/const.py index 8022db1b5..8060938d4 100644 --- a/cornflow-server/cornflow/tests/const.py +++ b/cornflow-server/cornflow/tests/const.py @@ -35,7 +35,8 @@ def _get_file(relative_path): FULL_CASE_LIST = [FULL_CASE_PATH, _get_file("./data/full_case_raw_2.json")] REPORT_PATH = _get_file("./data/new_report.json") -REPORT_FILE_PATH = _get_file("./data/new_report.html") +REPORT_HTML_FILE_PATH = _get_file("./data/new_report.html") +REPORT_PDF_FILE_PATH = _get_file("./data/new_report_2.pdf") BAD_REPORT_PATH = _get_file("./data/bad_report.json") REPORT_URL = PREFIX + "/report/" diff --git a/cornflow-server/cornflow/tests/data/new_report_2.pdf b/cornflow-server/cornflow/tests/data/new_report_2.pdf new file mode 100644 index 0000000000000000000000000000000000000000..444662e83bc2db2dade7efc327c22fa4d133a178 GIT binary patch literal 6111 zcmbuDbxa)Ew#Kod#Rr$-4l}qz8Qk5qxRrsyy-?h>xE6PZ;!+9}hryjfafc#B-nlm~ zx##Ab+;@}r-_Fj?+S$LIeBW9hy_%E^J0}M(I{nW1$sM`?6^IIAZjUY^0+g|HbhCm2 zWgOqRSxH%0KrF3*N>)xbZnjihJUn7z=&o*1t9S3wy_Rzld+ZT8Uw^w53pLpUJk?u~ zea8&tnX2BbcKaecQ!cp?SiUMyke0*{B22BAzi~Sf1~j93Ax^qe1qJ#vU){B(=dn2E zk_hA@gUKC9KG>*>1O^#T%kJP2~LtF}uMM2lDfj1#gmhbjU1$>wN>a=2%^ zaoMwTeJ!?8dmqJDIQ&;WmYx>7x6RF-jHz;( zN_lLKKf`%Z>K5}utbX%J7v0Lq@_(QI$Lb%vxVZl-aDrTa24_62?(h+W1GniJI4J57 zQ&>NDDiNZ+m(Xljeu6ER+=y?FSLnarlQ$y&hEVMen)U;V7TsQ@%@6jfGvE;buQ1!W zOnI{6aCn_)^_nN-5Zm1M?0-S)H=i8mKf~>h|Djv|+i&e8zI`uhaJ0I6WcOaH9zfTU z;^RXc;^kq?7ys^Sw@!5!8wOne4=r!v6GXx@4b$_%lPA5%7EN8f?C5W2Jf0R#i=RoU z(JxoiEMQqvcqjv}3~FkKEvK8H!xeVB@GSShiJul}I*-t!8GX)TS@rMGQZMN!c>=qW z<|uhIySOgz>eiwe)^|nUSz|l-w784)8fetV$E%W3?&kL>*XURG^OA`?9ZJ>jZZ9M~ zuJ!?9c~iDORPN8PD?i1e{0(TH{{}QSi2vU~oBk8@xu>`^rLfHOR~=oPf?0wPce3VB zT_qz2A*7P9@POYL+1CZ|p{7vJd)EM+oltw(?U!_u6i`yT_Dt`Tstnv0@GMI_7mXn& z2Q+Zn<>Y=T@d&sMe=(VMcxl)bK&k3KM1VQZXj?+Ye{|@VkDI7waeZGCR5vJXvL24x z4xSU0$BV}aRB;HOULCOq{$}l5`~sovYpS5GWMN%X614OGhy{~F04f^`Mh)S2g-E!< zhgBkX(xuBPBn^mYV5j&Qlv0-%J6sIKVwRq{df_&Rq5lof|HM%^UvSm zu8pmwrA-&=r5V${vyBF!w`T0Xx0*%Em z1t>wB0W?NMJ+*?GFkLh_{yi>Lz0|nXb>5@ZLtn*eKJwbPW5Ly9BcwyW!*|mAw#&1} zck*n~^Xy3uT+JYt)XB*EKDupGtA&Nb`lFsb8W#PC;*Ok@E0^rB2r~xtZ^t&Y#2MpBiE(^Uz>!v2puwJe^y0B^OEX90Hc-1ErsT#I5s@rUPKnfX&fU6G?auDzbeQ^DuYt> zLsImp*>&WISX0%r>WtX%$L7s7?Fy&}(e>xa`*!P=DZq)wZmGgLU(xjk`NGwhmlctA z-?x4;P{yj1c23o=^61ldBuHwpwnmFI&(M@>K562j8nX;C6;C5kG$|>Q(}0+h0@0T6E^}D)dCS&Io^5RV*a2bK zZN--Xf*R;=;0V%#EGNFx2ISNdYP*(G!j>w-HSDbi==}m2as$0$4~i?E``|YsT5EhE z!V4@pQYIdUO*gm!jG;fA{ls)gBn~GjkD53?MGygM^x=4v=JB4lZ=Y&!AabdlF1_G74N^jeO zbQA^?u?^9QeuR(=%V*!!%MT)^eV72Huy{Dk@u**b``HM%RGn}cZ~c<=UWx=SlwJ2- zY`O+7J2w%+^M)hrzHN-X&KagWWzHi=MaHw6riwCdG!c~-^PoNRceN#C0qTSncX1$iV56R}ccl?*$m_b_1 zeX(9}~b4G}M#rKIjh7fu}wb-sV z1)9g@8-=u8oeljm@E745oxh_d6MMIw^S%!B=J(zxP9n)5de~vH2cGInacaIqWDaE4 
zpUlng>cjK$PtIZ!Lt4^2L~Y+KeO`cb&uyav>OP0ogOb2QMlw+&di&OibV(^gQT&YT zv;zYIMTuG>d@OkAZ$r@NLL!(sV{t{$N@zDBY)Hr8Hv>nh_9~gp(J|l&i)vbCZT!SL zPC0XYnbkX6uI++IW`aa(wvumBpvCRGDGTwbb3fm+Od)Ihz+dRWL-Oy*PD+nz_lWWW zd)hiV7UU)LK^#}Gl?l)0#v}a;H9j{(tL93NE_H+T6|wFLvtO2j75s`p#u+O&)Z^C4 zE6sb97pvi$=T&LiE%5z!xX180t9@mRcN40IdugpbhmE3?bBb@6>Aqu!Tfb4GJtseP z!;oBmYshEn*y7_DDsd_fzDo~)~!#6Lj#U?MsW#Squ5@Q?UV7HUJ%t#Ml* zeRiN+7cO!aEKV@W70S0stB;|Z`Sy86+;2(jP}*D5((FC%4~X%W`zJOhh*Qr7xf2+H z>MNgW#Y+&=PkMEMC!mx%qc=^c;7>HhUVOxt64XQQewA~noT3Q7k2H+azSd}rLu^~e zh-`ucPoJfxUudHCoi4Cb%esUAszcm=idX-sLwXuk) Date: Tue, 16 Jul 2024 09:02:43 +0200 Subject: [PATCH 28/84] Added test for creation of report --- .../client/cornflow_client/cornflow_client.py | 2 + .../cornflow_client/raw_cornflow_client.py | 64 +++++++++++++++---- libs/client/cornflow_client/tests/const.py | 1 + .../tests/data/new_report.html | 7 ++ .../test_raw_cornflow_integration.py | 23 ++++++- 5 files changed, 82 insertions(+), 15 deletions(-) create mode 100644 libs/client/cornflow_client/tests/data/new_report.html diff --git a/libs/client/cornflow_client/cornflow_client.py b/libs/client/cornflow_client/cornflow_client.py index cc5ffb245..178943f68 100644 --- a/libs/client/cornflow_client/cornflow_client.py +++ b/libs/client/cornflow_client/cornflow_client.py @@ -21,6 +21,8 @@ def __init__(self, url, token=None): self.raw.create_execution_data_check, 201 ) self.create_report = self.expect_status(self.raw.create_report, 201) + self.get_reports = self.expect_status(self.raw.get_reports, 200) + self.get_one_report = self.expect_status(self.raw.get_one_report, 200) self.create_instance_data_check = self.expect_status( self.raw.create_instance_data_check, 201 diff --git a/libs/client/cornflow_client/raw_cornflow_client.py b/libs/client/cornflow_client/raw_cornflow_client.py index 7787c4234..4946e4e79 100644 --- a/libs/client/cornflow_client/raw_cornflow_client.py +++ b/libs/client/cornflow_client/raw_cornflow_client.py @@ -3,6 +3,7 @@ """ import logging as log +import os import re from functools import wraps from urllib.parse import urljoin @@ -105,13 +106,19 @@ def api_for_id( for key, value in query_args.items(): url = f"{url}{key}={value}&" url = url[:-1] - return requests.request( - method=method, - url=url, - headers={ + + headers = { + **{ "Authorization": "access_token " + self.token, "Content-Encoding": encoding, }, + **kwargs.pop("headers", {"content_type": "application/json"}), + } + + return requests.request( + method=method, + url=url, + headers=headers, **kwargs, ) @@ -180,12 +187,17 @@ def post_api_for_id(self, api, id, encoding=None, **kwargs): @ask_token @prepare_encoding def create_api(self, api, encoding=None, **kwargs): - return requests.post( - urljoin(self.url, api), - headers={ + headers = { + **{ "Authorization": "access_token " + self.token, "Content-Encoding": encoding, }, + **kwargs.pop("headers", {"content_type": "application/json"}), + } + + return requests.post( + urljoin(self.url, api), + headers=headers, **kwargs, ) @@ -498,15 +510,22 @@ def write_solution(self, execution_id, encoding=None, **kwargs): "dag/", id=execution_id, encoding=encoding, payload=kwargs ) + @ask_token + @log_call + @prepare_encoding + def get_reports(self, params=None, encoding=None): + """ """ + return self.get_api("report", params=params, encoding=encoding) + @ask_token @prepare_encoding def create_report(self, name, filename, execution_id, encoding=None, **kwargs): """ - Edits an execution + Creates a report for an execution :param str execution_id: id for the 
execution :param str name: the name of the report - :param file file: the file object with the report (e.g., open(REPORT_FILE_PATH, "rb")) + :param file filename: the file object with the report (e.g., open(REPORT_FILE_PATH, "rb")) :param kwargs: optional data to write (description) :param str encoding: the type of encoding used in the call. Defaults to 'br' """ @@ -514,11 +533,28 @@ def create_report(self, name, filename, execution_id, encoding=None, **kwargs): payload = ( dict(file=_file, name=name, execution_id=execution_id, **kwargs), ) - result = self.create_api( - "report/", - data=payload, - encoding=encoding, - ) + result = self.create_api( + "report/", + data=payload, + encoding=encoding, + headers={"content_type": "multipart/form-data"}, + ) + return result + + @ask_token + @prepare_encoding + def get_one_report(self, reference_id, folder_destination, encoding=None): + result = self.get_api_for_id(api="report", id=reference_id, encoding=encoding) + content = result.content() + + file_name = result.headers["Content-Disposition"].split["="][1] + + path = os.path.normpath(os.path.join(folder_destination, file_name)) + + # write content to disk on path + with open(path, "wb") as f: + f.write(content) + return result @ask_token diff --git a/libs/client/cornflow_client/tests/const.py b/libs/client/cornflow_client/tests/const.py index a1fae3517..264968a36 100644 --- a/libs/client/cornflow_client/tests/const.py +++ b/libs/client/cornflow_client/tests/const.py @@ -195,6 +195,7 @@ def _get_file(relative_path): ) PULP_EXAMPLE = _get_file("./data/pulp_example_data.json") +HTML_REPORT = "./data/new_report.html" PUBLIC_DAGS = [ "solve_model_dag", diff --git a/libs/client/cornflow_client/tests/data/new_report.html b/libs/client/cornflow_client/tests/data/new_report.html new file mode 100644 index 000000000..d7baad82f --- /dev/null +++ b/libs/client/cornflow_client/tests/data/new_report.html @@ -0,0 +1,7 @@ + + + + Test Report + + + diff --git a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py index c4dcf147f..2c1e7626f 100644 --- a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py +++ b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py @@ -14,7 +14,7 @@ from cornflow_client import CornFlow from cornflow_client.constants import STATUS_OPTIMAL, STATUS_NOT_SOLVED, STATUS_QUEUED from cornflow_client.schema.tools import get_pulp_jsonschema -from cornflow_client.tests.const import PUBLIC_DAGS, PULP_EXAMPLE +from cornflow_client.tests.const import PUBLIC_DAGS, PULP_EXAMPLE, HTML_REPORT # Constants path_to_tests_dir = os.path.dirname(os.path.abspath(__file__)) @@ -732,3 +732,24 @@ def test_post_deployed_dag(self): self.assertIn(item, response.keys()) self.assertEqual("test_dag_2", response["id"]) self.assertEqual("test_dag_2_description", response["description"]) + + def test_post_report_html(self): + client = CornFlow(url="http://127.0.0.1:5050/") + _ = client.login("user", "UserPassword1!") + + data = _load_file(PULP_EXAMPLE) + + instance = client.raw.create_instance( + data, "test_example", "test_description" + ).json() + + execution = client.raw.create_execution( + instance_id=instance["id"], + config={"solver": "PULP_CBC_CMD", "timeLimit": 60}, + name="test_execution", + description="execution_description", + schema="solve_model_dag", + run=False, + ).json() + + client.raw.create_report("new_report", HTML_REPORT, execution["id"]) 
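As a usage note, the client surface added in this patch can be exercised end to end roughly as follows. This is only a sketch mirroring the test_post_report_html integration test above: the server URL, credentials and file paths are placeholders, and pulp_example_data.json stands in for any schema-valid instance payload.

    import json

    from cornflow_client import CornFlow

    client = CornFlow(url="http://127.0.0.1:5050/")
    client.login("user", "UserPassword1!")

    # any valid instance payload for solve_model_dag, e.g. the PuLP example data
    with open("pulp_example_data.json") as f:
        data = json.load(f)

    instance = client.raw.create_instance(data, "test_example", "test_description").json()
    execution = client.raw.create_execution(
        instance_id=instance["id"],
        config={"solver": "PULP_CBC_CMD", "timeLimit": 60},
        name="test_execution",
        description="execution_description",
        schema="solve_model_dag",
        run=False,
    ).json()

    # attach an HTML file on disk as a report of that execution,
    # then list the reports and download one into the current folder
    client.raw.create_report("new_report", "new_report.html", execution["id"])
    reports = client.get_reports()
    client.raw.get_one_report(reports[0]["id"], "./")

Note that get_one_report saves the downloaded file inside folder_destination under the filename taken from the Content-Disposition header, as implemented above.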
From 6077e20866fb33fd358479f6c8808df1b921d7e6 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Tue, 16 Jul 2024 09:19:51 +0200 Subject: [PATCH 29/84] Fixed wrong path --- libs/client/cornflow_client/tests/const.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/client/cornflow_client/tests/const.py b/libs/client/cornflow_client/tests/const.py index 264968a36..ba21f4a1c 100644 --- a/libs/client/cornflow_client/tests/const.py +++ b/libs/client/cornflow_client/tests/const.py @@ -195,7 +195,7 @@ def _get_file(relative_path): ) PULP_EXAMPLE = _get_file("./data/pulp_example_data.json") -HTML_REPORT = "./data/new_report.html" +HTML_REPORT = "../data/new_report.html" PUBLIC_DAGS = [ "solve_model_dag", From 0171fd8de23d832c5327cf3609f78e2bc26dee74 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Tue, 16 Jul 2024 14:01:50 +0200 Subject: [PATCH 30/84] added some unittests and integration tests for reports (incomplete) --- cornflow-dags/DAG/dag_timer.py | 15 + cornflow-dags/tests/test_dags.py | 40 +- .../cornflow/tests/data/tsp_instance.json | 1449 +++++++++++++++++ .../tests/integration/test_cornflowclient.py | 44 + 4 files changed, 1546 insertions(+), 2 deletions(-) create mode 100644 cornflow-server/cornflow/tests/data/tsp_instance.json diff --git a/cornflow-dags/DAG/dag_timer.py b/cornflow-dags/DAG/dag_timer.py index f7ef66b57..3058f3da5 100644 --- a/cornflow-dags/DAG/dag_timer.py +++ b/cornflow-dags/DAG/dag_timer.py @@ -4,6 +4,8 @@ from cornflow_client.constants import SOLUTION_STATUS_FEASIBLE, STATUS_OPTIMAL import logging +from xml.etree import ElementTree as ET + class Instance(InstanceCore): schema = get_empty_schema() @@ -33,6 +35,19 @@ def get_objective(self) -> float: def check_solution(self, *args, **kwargs): return dict() + def generate_report(self, report_path: str, report_name="report") -> None: + + html = ET.Element("html") + body = ET.Element("body") + html.append(body) + div = ET.Element("div", attrib={"class": "foo"}) + body.append(div) + span = ET.Element("span", attrib={"class": "bar"}) + div.append(span) + with open(report_path, "w") as f: + ET.ElementTree(html).write(f, encoding="unicode", method="html") + return + class Timer(ApplicationCore): name = "timer" diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index ffcfcd5b9..d0bb4cf43 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -7,6 +7,8 @@ import unittest from unittest.mock import patch, Mock, MagicMock +import html +import xml.etree.ElementTree as ET # we mock everything that's airflow related: mymodule = MagicMock() @@ -168,9 +170,27 @@ def test_report(self): my_experim.solve(dict()) report_path = "./my_report.html" my_experim.generate_report(report_path=report_path) - self.assertTrue(os.path.exists(report_path)) - os.remove(report_path) # check the file is created. 
+ self.assertTrue(os.path.exists(report_path)) + + tree = ET.parse(report_path) + elements = [elem.tag for elem in tree.iter()] + self.assertSetEqual(set(elements), {"html", "div", "span", "body"}) + + # try: + # os.remove(report_path) + # except FileNotFoundError: + # pass + + def test_export(self): + tests = self.app.test_cases + my_file_path = "export.json" + self.app.instance(tests[0]["instance"]).to_json(my_file_path) + self.assertTrue(os.path.exists(my_file_path)) + try: + os.remove(my_file_path) + except FileNotFoundError: + pass class Vrp(BaseDAGTests.SolvingTests): @@ -299,3 +319,19 @@ def setUp(self): self.app = Timer() self.config.update(dict(solver="default", seconds=10)) + + def test_report(self): + my_experim = self.app.solvers["default"](self.app.instance({})) + my_experim.solve(dict(timeLimit=0)) + report_path = "./my_report.html" + my_experim.generate_report(report_path=report_path) + # check the file is created. + self.assertTrue(os.path.exists(report_path)) + tree = ET.parse(report_path) + elements = [elem.tag for elem in tree.iter()] + self.assertSetEqual(set(elements), {"html", "div", "span", "body"}) + + try: + os.remove(report_path) + except FileNotFoundError: + pass diff --git a/cornflow-server/cornflow/tests/data/tsp_instance.json b/cornflow-server/cornflow/tests/data/tsp_instance.json new file mode 100644 index 000000000..03df1729d --- /dev/null +++ b/cornflow-server/cornflow/tests/data/tsp_instance.json @@ -0,0 +1,1449 @@ +{ + "arcs": [ + { + "n1": 0, + "n2": 0, + "w": 0 + }, + { + "n1": 0, + "n2": 1, + "w": 633 + }, + { + "n1": 0, + "n2": 2, + "w": 257 + }, + { + "n1": 0, + "n2": 3, + "w": 91 + }, + { + "n1": 0, + "n2": 4, + "w": 412 + }, + { + "n1": 0, + "n2": 5, + "w": 150 + }, + { + "n1": 0, + "n2": 6, + "w": 80 + }, + { + "n1": 0, + "n2": 7, + "w": 134 + }, + { + "n1": 0, + "n2": 8, + "w": 259 + }, + { + "n1": 0, + "n2": 9, + "w": 505 + }, + { + "n1": 0, + "n2": 10, + "w": 353 + }, + { + "n1": 0, + "n2": 11, + "w": 324 + }, + { + "n1": 0, + "n2": 12, + "w": 70 + }, + { + "n1": 0, + "n2": 13, + "w": 211 + }, + { + "n1": 0, + "n2": 14, + "w": 268 + }, + { + "n1": 0, + "n2": 15, + "w": 246 + }, + { + "n1": 0, + "n2": 16, + "w": 121 + }, + { + "n1": 1, + "n2": 0, + "w": 633 + }, + { + "n1": 1, + "n2": 1, + "w": 0 + }, + { + "n1": 1, + "n2": 2, + "w": 390 + }, + { + "n1": 1, + "n2": 3, + "w": 661 + }, + { + "n1": 1, + "n2": 4, + "w": 227 + }, + { + "n1": 1, + "n2": 5, + "w": 488 + }, + { + "n1": 1, + "n2": 6, + "w": 572 + }, + { + "n1": 1, + "n2": 7, + "w": 530 + }, + { + "n1": 1, + "n2": 8, + "w": 555 + }, + { + "n1": 1, + "n2": 9, + "w": 289 + }, + { + "n1": 1, + "n2": 10, + "w": 282 + }, + { + "n1": 1, + "n2": 11, + "w": 638 + }, + { + "n1": 1, + "n2": 12, + "w": 567 + }, + { + "n1": 1, + "n2": 13, + "w": 466 + }, + { + "n1": 1, + "n2": 14, + "w": 420 + }, + { + "n1": 1, + "n2": 15, + "w": 745 + }, + { + "n1": 1, + "n2": 16, + "w": 518 + }, + { + "n1": 2, + "n2": 0, + "w": 257 + }, + { + "n1": 2, + "n2": 1, + "w": 390 + }, + { + "n1": 2, + "n2": 2, + "w": 0 + }, + { + "n1": 2, + "n2": 3, + "w": 228 + }, + { + "n1": 2, + "n2": 4, + "w": 169 + }, + { + "n1": 2, + "n2": 5, + "w": 112 + }, + { + "n1": 2, + "n2": 6, + "w": 196 + }, + { + "n1": 2, + "n2": 7, + "w": 154 + }, + { + "n1": 2, + "n2": 8, + "w": 372 + }, + { + "n1": 2, + "n2": 9, + "w": 262 + }, + { + "n1": 2, + "n2": 10, + "w": 110 + }, + { + "n1": 2, + "n2": 11, + "w": 437 + }, + { + "n1": 2, + "n2": 12, + "w": 191 + }, + { + "n1": 2, + "n2": 13, + "w": 74 + }, + { + "n1": 2, + "n2": 14, + "w": 53 + }, 
+ { + "n1": 2, + "n2": 15, + "w": 472 + }, + { + "n1": 2, + "n2": 16, + "w": 142 + }, + { + "n1": 3, + "n2": 0, + "w": 91 + }, + { + "n1": 3, + "n2": 1, + "w": 661 + }, + { + "n1": 3, + "n2": 2, + "w": 228 + }, + { + "n1": 3, + "n2": 3, + "w": 0 + }, + { + "n1": 3, + "n2": 4, + "w": 383 + }, + { + "n1": 3, + "n2": 5, + "w": 120 + }, + { + "n1": 3, + "n2": 6, + "w": 77 + }, + { + "n1": 3, + "n2": 7, + "w": 105 + }, + { + "n1": 3, + "n2": 8, + "w": 175 + }, + { + "n1": 3, + "n2": 9, + "w": 476 + }, + { + "n1": 3, + "n2": 10, + "w": 324 + }, + { + "n1": 3, + "n2": 11, + "w": 240 + }, + { + "n1": 3, + "n2": 12, + "w": 27 + }, + { + "n1": 3, + "n2": 13, + "w": 182 + }, + { + "n1": 3, + "n2": 14, + "w": 239 + }, + { + "n1": 3, + "n2": 15, + "w": 237 + }, + { + "n1": 3, + "n2": 16, + "w": 84 + }, + { + "n1": 4, + "n2": 0, + "w": 412 + }, + { + "n1": 4, + "n2": 1, + "w": 227 + }, + { + "n1": 4, + "n2": 2, + "w": 169 + }, + { + "n1": 4, + "n2": 3, + "w": 383 + }, + { + "n1": 4, + "n2": 4, + "w": 0 + }, + { + "n1": 4, + "n2": 5, + "w": 267 + }, + { + "n1": 4, + "n2": 6, + "w": 351 + }, + { + "n1": 4, + "n2": 7, + "w": 309 + }, + { + "n1": 4, + "n2": 8, + "w": 338 + }, + { + "n1": 4, + "n2": 9, + "w": 196 + }, + { + "n1": 4, + "n2": 10, + "w": 61 + }, + { + "n1": 4, + "n2": 11, + "w": 421 + }, + { + "n1": 4, + "n2": 12, + "w": 346 + }, + { + "n1": 4, + "n2": 13, + "w": 243 + }, + { + "n1": 4, + "n2": 14, + "w": 199 + }, + { + "n1": 4, + "n2": 15, + "w": 528 + }, + { + "n1": 4, + "n2": 16, + "w": 297 + }, + { + "n1": 5, + "n2": 0, + "w": 150 + }, + { + "n1": 5, + "n2": 1, + "w": 488 + }, + { + "n1": 5, + "n2": 2, + "w": 112 + }, + { + "n1": 5, + "n2": 3, + "w": 120 + }, + { + "n1": 5, + "n2": 4, + "w": 267 + }, + { + "n1": 5, + "n2": 5, + "w": 0 + }, + { + "n1": 5, + "n2": 6, + "w": 63 + }, + { + "n1": 5, + "n2": 7, + "w": 34 + }, + { + "n1": 5, + "n2": 8, + "w": 264 + }, + { + "n1": 5, + "n2": 9, + "w": 360 + }, + { + "n1": 5, + "n2": 10, + "w": 208 + }, + { + "n1": 5, + "n2": 11, + "w": 329 + }, + { + "n1": 5, + "n2": 12, + "w": 83 + }, + { + "n1": 5, + "n2": 13, + "w": 105 + }, + { + "n1": 5, + "n2": 14, + "w": 123 + }, + { + "n1": 5, + "n2": 15, + "w": 364 + }, + { + "n1": 5, + "n2": 16, + "w": 35 + }, + { + "n1": 6, + "n2": 0, + "w": 80 + }, + { + "n1": 6, + "n2": 1, + "w": 572 + }, + { + "n1": 6, + "n2": 2, + "w": 196 + }, + { + "n1": 6, + "n2": 3, + "w": 77 + }, + { + "n1": 6, + "n2": 4, + "w": 351 + }, + { + "n1": 6, + "n2": 5, + "w": 63 + }, + { + "n1": 6, + "n2": 6, + "w": 0 + }, + { + "n1": 6, + "n2": 7, + "w": 29 + }, + { + "n1": 6, + "n2": 8, + "w": 232 + }, + { + "n1": 6, + "n2": 9, + "w": 444 + }, + { + "n1": 6, + "n2": 10, + "w": 292 + }, + { + "n1": 6, + "n2": 11, + "w": 297 + }, + { + "n1": 6, + "n2": 12, + "w": 47 + }, + { + "n1": 6, + "n2": 13, + "w": 150 + }, + { + "n1": 6, + "n2": 14, + "w": 207 + }, + { + "n1": 6, + "n2": 15, + "w": 332 + }, + { + "n1": 6, + "n2": 16, + "w": 29 + }, + { + "n1": 7, + "n2": 0, + "w": 134 + }, + { + "n1": 7, + "n2": 1, + "w": 530 + }, + { + "n1": 7, + "n2": 2, + "w": 154 + }, + { + "n1": 7, + "n2": 3, + "w": 105 + }, + { + "n1": 7, + "n2": 4, + "w": 309 + }, + { + "n1": 7, + "n2": 5, + "w": 34 + }, + { + "n1": 7, + "n2": 6, + "w": 29 + }, + { + "n1": 7, + "n2": 7, + "w": 0 + }, + { + "n1": 7, + "n2": 8, + "w": 249 + }, + { + "n1": 7, + "n2": 9, + "w": 402 + }, + { + "n1": 7, + "n2": 10, + "w": 250 + }, + { + "n1": 7, + "n2": 11, + "w": 314 + }, + { + "n1": 7, + "n2": 12, + "w": 68 + }, + { + "n1": 7, + "n2": 13, + "w": 108 + }, + { + "n1": 7, + 
"n2": 14, + "w": 165 + }, + { + "n1": 7, + "n2": 15, + "w": 349 + }, + { + "n1": 7, + "n2": 16, + "w": 36 + }, + { + "n1": 8, + "n2": 0, + "w": 259 + }, + { + "n1": 8, + "n2": 1, + "w": 555 + }, + { + "n1": 8, + "n2": 2, + "w": 372 + }, + { + "n1": 8, + "n2": 3, + "w": 175 + }, + { + "n1": 8, + "n2": 4, + "w": 338 + }, + { + "n1": 8, + "n2": 5, + "w": 264 + }, + { + "n1": 8, + "n2": 6, + "w": 232 + }, + { + "n1": 8, + "n2": 7, + "w": 249 + }, + { + "n1": 8, + "n2": 8, + "w": 0 + }, + { + "n1": 8, + "n2": 9, + "w": 495 + }, + { + "n1": 8, + "n2": 10, + "w": 352 + }, + { + "n1": 8, + "n2": 11, + "w": 95 + }, + { + "n1": 8, + "n2": 12, + "w": 189 + }, + { + "n1": 8, + "n2": 13, + "w": 326 + }, + { + "n1": 8, + "n2": 14, + "w": 383 + }, + { + "n1": 8, + "n2": 15, + "w": 202 + }, + { + "n1": 8, + "n2": 16, + "w": 236 + }, + { + "n1": 9, + "n2": 0, + "w": 505 + }, + { + "n1": 9, + "n2": 1, + "w": 289 + }, + { + "n1": 9, + "n2": 2, + "w": 262 + }, + { + "n1": 9, + "n2": 3, + "w": 476 + }, + { + "n1": 9, + "n2": 4, + "w": 196 + }, + { + "n1": 9, + "n2": 5, + "w": 360 + }, + { + "n1": 9, + "n2": 6, + "w": 444 + }, + { + "n1": 9, + "n2": 7, + "w": 402 + }, + { + "n1": 9, + "n2": 8, + "w": 495 + }, + { + "n1": 9, + "n2": 9, + "w": 0 + }, + { + "n1": 9, + "n2": 10, + "w": 154 + }, + { + "n1": 9, + "n2": 11, + "w": 578 + }, + { + "n1": 9, + "n2": 12, + "w": 439 + }, + { + "n1": 9, + "n2": 13, + "w": 336 + }, + { + "n1": 9, + "n2": 14, + "w": 240 + }, + { + "n1": 9, + "n2": 15, + "w": 685 + }, + { + "n1": 9, + "n2": 16, + "w": 390 + }, + { + "n1": 10, + "n2": 0, + "w": 353 + }, + { + "n1": 10, + "n2": 1, + "w": 282 + }, + { + "n1": 10, + "n2": 2, + "w": 110 + }, + { + "n1": 10, + "n2": 3, + "w": 324 + }, + { + "n1": 10, + "n2": 4, + "w": 61 + }, + { + "n1": 10, + "n2": 5, + "w": 208 + }, + { + "n1": 10, + "n2": 6, + "w": 292 + }, + { + "n1": 10, + "n2": 7, + "w": 250 + }, + { + "n1": 10, + "n2": 8, + "w": 352 + }, + { + "n1": 10, + "n2": 9, + "w": 154 + }, + { + "n1": 10, + "n2": 10, + "w": 0 + }, + { + "n1": 10, + "n2": 11, + "w": 435 + }, + { + "n1": 10, + "n2": 12, + "w": 287 + }, + { + "n1": 10, + "n2": 13, + "w": 184 + }, + { + "n1": 10, + "n2": 14, + "w": 140 + }, + { + "n1": 10, + "n2": 15, + "w": 542 + }, + { + "n1": 10, + "n2": 16, + "w": 238 + }, + { + "n1": 11, + "n2": 0, + "w": 324 + }, + { + "n1": 11, + "n2": 1, + "w": 638 + }, + { + "n1": 11, + "n2": 2, + "w": 437 + }, + { + "n1": 11, + "n2": 3, + "w": 240 + }, + { + "n1": 11, + "n2": 4, + "w": 421 + }, + { + "n1": 11, + "n2": 5, + "w": 329 + }, + { + "n1": 11, + "n2": 6, + "w": 297 + }, + { + "n1": 11, + "n2": 7, + "w": 314 + }, + { + "n1": 11, + "n2": 8, + "w": 95 + }, + { + "n1": 11, + "n2": 9, + "w": 578 + }, + { + "n1": 11, + "n2": 10, + "w": 435 + }, + { + "n1": 11, + "n2": 11, + "w": 0 + }, + { + "n1": 11, + "n2": 12, + "w": 254 + }, + { + "n1": 11, + "n2": 13, + "w": 391 + }, + { + "n1": 11, + "n2": 14, + "w": 448 + }, + { + "n1": 11, + "n2": 15, + "w": 157 + }, + { + "n1": 11, + "n2": 16, + "w": 301 + }, + { + "n1": 12, + "n2": 0, + "w": 70 + }, + { + "n1": 12, + "n2": 1, + "w": 567 + }, + { + "n1": 12, + "n2": 2, + "w": 191 + }, + { + "n1": 12, + "n2": 3, + "w": 27 + }, + { + "n1": 12, + "n2": 4, + "w": 346 + }, + { + "n1": 12, + "n2": 5, + "w": 83 + }, + { + "n1": 12, + "n2": 6, + "w": 47 + }, + { + "n1": 12, + "n2": 7, + "w": 68 + }, + { + "n1": 12, + "n2": 8, + "w": 189 + }, + { + "n1": 12, + "n2": 9, + "w": 439 + }, + { + "n1": 12, + "n2": 10, + "w": 287 + }, + { + "n1": 12, + "n2": 11, + "w": 254 + }, + { + "n1": 12, + "n2": 
12, + "w": 0 + }, + { + "n1": 12, + "n2": 13, + "w": 145 + }, + { + "n1": 12, + "n2": 14, + "w": 202 + }, + { + "n1": 12, + "n2": 15, + "w": 289 + }, + { + "n1": 12, + "n2": 16, + "w": 55 + }, + { + "n1": 13, + "n2": 0, + "w": 211 + }, + { + "n1": 13, + "n2": 1, + "w": 466 + }, + { + "n1": 13, + "n2": 2, + "w": 74 + }, + { + "n1": 13, + "n2": 3, + "w": 182 + }, + { + "n1": 13, + "n2": 4, + "w": 243 + }, + { + "n1": 13, + "n2": 5, + "w": 105 + }, + { + "n1": 13, + "n2": 6, + "w": 150 + }, + { + "n1": 13, + "n2": 7, + "w": 108 + }, + { + "n1": 13, + "n2": 8, + "w": 326 + }, + { + "n1": 13, + "n2": 9, + "w": 336 + }, + { + "n1": 13, + "n2": 10, + "w": 184 + }, + { + "n1": 13, + "n2": 11, + "w": 391 + }, + { + "n1": 13, + "n2": 12, + "w": 145 + }, + { + "n1": 13, + "n2": 13, + "w": 0 + }, + { + "n1": 13, + "n2": 14, + "w": 57 + }, + { + "n1": 13, + "n2": 15, + "w": 426 + }, + { + "n1": 13, + "n2": 16, + "w": 96 + }, + { + "n1": 14, + "n2": 0, + "w": 268 + }, + { + "n1": 14, + "n2": 1, + "w": 420 + }, + { + "n1": 14, + "n2": 2, + "w": 53 + }, + { + "n1": 14, + "n2": 3, + "w": 239 + }, + { + "n1": 14, + "n2": 4, + "w": 199 + }, + { + "n1": 14, + "n2": 5, + "w": 123 + }, + { + "n1": 14, + "n2": 6, + "w": 207 + }, + { + "n1": 14, + "n2": 7, + "w": 165 + }, + { + "n1": 14, + "n2": 8, + "w": 383 + }, + { + "n1": 14, + "n2": 9, + "w": 240 + }, + { + "n1": 14, + "n2": 10, + "w": 140 + }, + { + "n1": 14, + "n2": 11, + "w": 448 + }, + { + "n1": 14, + "n2": 12, + "w": 202 + }, + { + "n1": 14, + "n2": 13, + "w": 57 + }, + { + "n1": 14, + "n2": 14, + "w": 0 + }, + { + "n1": 14, + "n2": 15, + "w": 483 + }, + { + "n1": 14, + "n2": 16, + "w": 153 + }, + { + "n1": 15, + "n2": 0, + "w": 246 + }, + { + "n1": 15, + "n2": 1, + "w": 745 + }, + { + "n1": 15, + "n2": 2, + "w": 472 + }, + { + "n1": 15, + "n2": 3, + "w": 237 + }, + { + "n1": 15, + "n2": 4, + "w": 528 + }, + { + "n1": 15, + "n2": 5, + "w": 364 + }, + { + "n1": 15, + "n2": 6, + "w": 332 + }, + { + "n1": 15, + "n2": 7, + "w": 349 + }, + { + "n1": 15, + "n2": 8, + "w": 202 + }, + { + "n1": 15, + "n2": 9, + "w": 685 + }, + { + "n1": 15, + "n2": 10, + "w": 542 + }, + { + "n1": 15, + "n2": 11, + "w": 157 + }, + { + "n1": 15, + "n2": 12, + "w": 289 + }, + { + "n1": 15, + "n2": 13, + "w": 426 + }, + { + "n1": 15, + "n2": 14, + "w": 483 + }, + { + "n1": 15, + "n2": 15, + "w": 0 + }, + { + "n1": 15, + "n2": 16, + "w": 336 + }, + { + "n1": 16, + "n2": 0, + "w": 121 + }, + { + "n1": 16, + "n2": 1, + "w": 518 + }, + { + "n1": 16, + "n2": 2, + "w": 142 + }, + { + "n1": 16, + "n2": 3, + "w": 84 + }, + { + "n1": 16, + "n2": 4, + "w": 297 + }, + { + "n1": 16, + "n2": 5, + "w": 35 + }, + { + "n1": 16, + "n2": 6, + "w": 29 + }, + { + "n1": 16, + "n2": 7, + "w": 36 + }, + { + "n1": 16, + "n2": 8, + "w": 236 + }, + { + "n1": 16, + "n2": 9, + "w": 390 + }, + { + "n1": 16, + "n2": 10, + "w": 238 + }, + { + "n1": 16, + "n2": 11, + "w": 301 + }, + { + "n1": 16, + "n2": 12, + "w": 55 + }, + { + "n1": 16, + "n2": 13, + "w": 96 + }, + { + "n1": 16, + "n2": 14, + "w": 153 + }, + { + "n1": 16, + "n2": 15, + "w": 336 + }, + { + "n1": 16, + "n2": 16, + "w": 0 + } + ] +} \ No newline at end of file diff --git a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py index 9b6c235a1..46b95f66c 100644 --- a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py +++ b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py @@ -141,6 +141,25 @@ def create_instance_and_execution(self): ) 
return self.create_new_execution(payload) + def create_instance_and_execution_report( + self, schema="tsp", solver="cpsat", data=None, timeLimit=10 + ): + name = "test_instance_1" + description = "description123" + if data is None: + data = load_file("./cornflow/tests/data/tsp_instance.json") + payload = dict(data=data, name=name, description=description, schema=schema) + one_instance = self.create_new_instance_payload(payload) + payload = dict( + instance_id=one_instance["id"], + config=dict(solver=solver, timeLimit=timeLimit), + description="test_execution_description_123", + name="test_execution_123", + schema=schema, + report=dict(name="report"), + ) + return self.create_new_execution(payload) + def create_timer_instance_and_execution(self, seconds=5): payload = dict( data=dict(seconds=seconds), @@ -190,6 +209,31 @@ def test_delete_instance(self): def test_new_execution(self): return self.create_instance_and_execution() + def test_new_execution_with_tsp_report(self): + return self.create_instance_and_execution_report() + + def test_new_execution_with_tsp_report_wait(self): + execution = self.create_instance_and_execution_report() + time.sleep(10) + execution = self.client.raw.get_results(execution["id"]) + id_report = execution["reports"][0]["id"] + my_report = self.client.raw.get_report(id_report) + with open("my_report.html", "wb") as f: + f.write(my_report) + return + # read header of file? we can parse it with beatifulsoup + + def test_new_execution_with_timer_report_wait(self): + payload = dict(solver="default", schema="timer", data={}, timeLimit=1) + execution = self.create_instance_and_execution_report(**payload) + time.sleep(5) + execution = self.client.raw.get_results(execution["id"]) + id_report = execution["reports"][0]["id"] + my_report = self.client.raw.get_report(id_report) + with open("my_report.html", "wb") as f: + f.write(my_report) + return + def test_delete_execution(self): execution = self.test_new_execution() response = self.client.raw.get_api_for_id("execution/", execution["id"]) From bdfb141e8e80fc974cc32fe0c20aa92e2753a27a Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Tue, 16 Jul 2024 15:05:19 +0200 Subject: [PATCH 31/84] Modified default location of reports. 
Modified MANIFEST to include static folder --- cornflow-server/MANIFEST.in | 3 ++- cornflow-server/cornflow/config.py | 2 +- cornflow-server/static/__init__.py | 0 3 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 cornflow-server/static/__init__.py diff --git a/cornflow-server/MANIFEST.in b/cornflow-server/MANIFEST.in index 4f0473188..3816e709f 100644 --- a/cornflow-server/MANIFEST.in +++ b/cornflow-server/MANIFEST.in @@ -3,4 +3,5 @@ include MANIFEST.in include README.rst include setup.py include cornflow/migrations/* -include cornflow/migrations/versions/* \ No newline at end of file +include cornflow/migrations/versions/* +include cornflow/static/* \ No newline at end of file diff --git a/cornflow-server/cornflow/config.py b/cornflow-server/cornflow/config.py index 4dba2613e..117d34770 100644 --- a/cornflow-server/cornflow/config.py +++ b/cornflow-server/cornflow/config.py @@ -26,7 +26,7 @@ class DefaultConfig(object): FILE_BACKEND = os.getenv("FILE_BACKEND", "local") UPLOAD_FOLDER = os.getenv( "UPLOAD_FOLDER", - os.path.abspath(os.path.join(os.path.dirname(__file__), "../static")), + os.path.abspath(os.path.join(os.path.dirname(__file__), "./static")), ) ALLOWED_EXTENSIONS = os.getenv("ALLOWED_EXTENSIONS", ["pdf", "html"]) diff --git a/cornflow-server/static/__init__.py b/cornflow-server/static/__init__.py deleted file mode 100644 index e69de29bb..000000000 From b2ca5c1727c73c5eb0e8e446e9458c30c5eb7053 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Tue, 16 Jul 2024 16:01:14 +0200 Subject: [PATCH 32/84] More fixes to tests --- cornflow-server/cornflow/config.py | 2 + cornflow-server/cornflow/endpoints/reports.py | 2 +- .../cornflow_client/raw_cornflow_client.py | 18 ++--- libs/client/cornflow_client/tests/const.py | 1 + .../integration/test_cornflow_integration.py | 13 +++- .../test_raw_cornflow_integration.py | 76 ++++++++++++++++--- 6 files changed, 87 insertions(+), 25 deletions(-) diff --git a/cornflow-server/cornflow/config.py b/cornflow-server/cornflow/config.py index 117d34770..a33e7d735 100644 --- a/cornflow-server/cornflow/config.py +++ b/cornflow-server/cornflow/config.py @@ -95,6 +95,7 @@ class Development(DefaultConfig): """ """ ENV = "development" + UPLOAD_FOLDER = os.getenv("UPLOAD_FOLDER", "/usr/src/app/static") class Testing(DefaultConfig): @@ -126,6 +127,7 @@ class Production(DefaultConfig): # needs to be on to avoid getting only 500 codes: # and https://medium.com/@johanesriandy/flask-error-handler-not-working-on-production-mode-3adca4c7385c PROPAGATE_EXCEPTIONS = True + UPLOAD_FOLDER = os.getenv("UPLOAD_FOLDER", "/usr/src/app/static") app_config = {"development": Development, "testing": Testing, "production": Production} diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index e6c034ccf..0e1e073f7 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -70,7 +70,7 @@ def post(self, **kwargs): the reference_id for the newly created report if successful) and a integer with the HTTP status code :rtype: Tuple(dict, integer) """ - execution = ExecutionModel.get_one_object(id=kwargs["execution_id"]) + execution = ExecutionModel.get_one_object(idx=kwargs["execution_id"]) if execution is None: raise ObjectDoesNotExist("The execution does not exist") diff --git a/libs/client/cornflow_client/raw_cornflow_client.py b/libs/client/cornflow_client/raw_cornflow_client.py index 4946e4e79..2d4be87fb 100644 --- 
a/libs/client/cornflow_client/raw_cornflow_client.py +++ b/libs/client/cornflow_client/raw_cornflow_client.py @@ -530,24 +530,22 @@ def create_report(self, name, filename, execution_id, encoding=None, **kwargs): :param str encoding: the type of encoding used in the call. Defaults to 'br' """ with open(filename, "rb") as _file: - payload = ( - dict(file=_file, name=name, execution_id=execution_id, **kwargs), + result = self.create_api( + "report/", + data=dict(name=name, execution_id=execution_id, **kwargs), + files=dict(file=_file), + encoding=encoding, + headers={"content_type": "multipart/form-data"}, ) - result = self.create_api( - "report/", - data=payload, - encoding=encoding, - headers={"content_type": "multipart/form-data"}, - ) return result @ask_token @prepare_encoding def get_one_report(self, reference_id, folder_destination, encoding=None): result = self.get_api_for_id(api="report", id=reference_id, encoding=encoding) - content = result.content() + content = result.content - file_name = result.headers["Content-Disposition"].split["="][1] + file_name = result.headers["Content-Disposition"].split("=")[1] path = os.path.normpath(os.path.join(folder_destination, file_name)) diff --git a/libs/client/cornflow_client/tests/const.py b/libs/client/cornflow_client/tests/const.py index ba21f4a1c..930e75fea 100644 --- a/libs/client/cornflow_client/tests/const.py +++ b/libs/client/cornflow_client/tests/const.py @@ -196,6 +196,7 @@ def _get_file(relative_path): PULP_EXAMPLE = _get_file("./data/pulp_example_data.json") HTML_REPORT = "../data/new_report.html" +TEST_FOLDER = "./" PUBLIC_DAGS = [ "solve_model_dag", diff --git a/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py b/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py index 38bf1be2a..742d553b1 100644 --- a/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py +++ b/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py @@ -11,7 +11,7 @@ import pulp as pl -from cornflow_client import CornFlow +from cornflow_client import CornFlow, CornFlowApiError from cornflow_client.constants import STATUS_OPTIMAL, STATUS_NOT_SOLVED, STATUS_QUEUED from cornflow_client.schema.tools import get_pulp_jsonschema from cornflow_client.tests.const import PUBLIC_DAGS, PULP_EXAMPLE @@ -551,9 +551,14 @@ def setUp(self): login_result = self.client.login("admin", "Adminpassword1!") self.assertIn("id", login_result.keys()) self.assertIn("token", login_result.keys()) - self.base_user_id = CornFlow(url="http://127.0.0.1:5050/").login( - "user", "UserPassword1!" - )["id"] + try: + self.base_user_id = CornFlow(url="http://127.0.0.1:5050/").login( + "user", "UserPassword1!" 
+ )["id"] + except CornFlowApiError: + self.base_user_id = CornFlow(url="http://127.0.0.1:5050/").sign_up( + username="user", pwd="UserPassword1!", email="user@cornflow.org" + )["id"] def tearDown(self): pass diff --git a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py index 2c1e7626f..2f1c168e9 100644 --- a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py +++ b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py @@ -11,10 +11,15 @@ import pulp as pl -from cornflow_client import CornFlow +from cornflow_client import CornFlow, CornFlowApiError from cornflow_client.constants import STATUS_OPTIMAL, STATUS_NOT_SOLVED, STATUS_QUEUED from cornflow_client.schema.tools import get_pulp_jsonschema -from cornflow_client.tests.const import PUBLIC_DAGS, PULP_EXAMPLE, HTML_REPORT +from cornflow_client.tests.const import ( + PUBLIC_DAGS, + PULP_EXAMPLE, + HTML_REPORT, + TEST_FOLDER, +) # Constants path_to_tests_dir = os.path.dirname(os.path.abspath(__file__)) @@ -35,7 +40,12 @@ def _get_file(relative_path): class TestRawCornflowClientUser(TestCase): def setUp(self): self.client = CornFlow(url="http://127.0.0.1:5050/") - login_result = self.client.raw.login("user", "UserPassword1!") + try: + login_result = self.client.raw.login("user", "UserPassword1!") + except CornFlowApiError: + login_result = self.client.raw.sign_up( + username="user", pwd="UserPassword1!", email="user@cornflow.org" + ) data = login_result.json() self.assertEqual(login_result.status_code, 200) self.assertIn("id", data.keys()) @@ -46,9 +56,9 @@ def tearDown(self): pass def check_execution_statuses( - self, execution_id, end_state=STATUS_OPTIMAL, initial_state=None + self, execution_id, end_state=STATUS_OPTIMAL, initial_state=STATUS_QUEUED ): - if initial_state is None: + if initial_state is not None: statuses = [initial_state] else: statuses = [] @@ -332,7 +342,9 @@ def test_get_execution_log(self): def test_get_execution_solution(self): execution = self.test_create_execution() - statuses = self.check_execution_statuses(execution["id"]) + statuses = self.check_execution_statuses( + execution["id"], initial_state=STATUS_QUEUED + ) response = self.client.raw.get_solution(execution["id"]) self.assertEqual(response.status_code, 200) @@ -612,9 +624,14 @@ def setUp(self): login_result = self.client.login("admin", "Adminpassword1!") self.assertIn("id", login_result.keys()) self.assertIn("token", login_result.keys()) - self.base_user_id = CornFlow(url="http://127.0.0.1:5050/").login( - "user", "UserPassword1!" - )["id"] + try: + self.base_user_id = CornFlow(url="http://127.0.0.1:5050/").login( + "user", "UserPassword1!" + )["id"] + except CornFlowApiError: + self.base_user_id = CornFlow(url="http://127.0.0.1:5050/").sign_up( + username="user", pwd="UserPassword1!", email="user@cornflow.org" + )["id"] def tearDown(self): pass @@ -642,6 +659,14 @@ def setUp(self): login_result = self.client.login("airflow", "Airflow_test_password1") self.assertIn("id", login_result.keys()) self.assertIn("token", login_result.keys()) + try: + self.base_user_id = CornFlow(url="http://127.0.0.1:5050/").login( + "user", "UserPassword1!" 
+ )["id"] + except CornFlowApiError: + self.base_user_id = CornFlow(url="http://127.0.0.1:5050/").sign_up( + username="user", pwd="UserPassword1!", email="user@cornflow.org" + )["id"] def tearDown(self): pass @@ -752,4 +777,35 @@ def test_post_report_html(self): run=False, ).json() - client.raw.create_report("new_report", HTML_REPORT, execution["id"]) + print(execution["id"]) + + response = self.client.raw.create_report( + "new_report", HTML_REPORT, execution["id"] + ) + + self.assertEqual(response.status_code, 201) + + return response + + def test_get_one_report(self): + response = self.test_post_report_html() + print(response.json()) + report_id = response.json()["id"] + + client = CornFlow(url="http://127.0.0.1:5050/") + _ = client.login("user", "UserPassword1!") + + response = client.get_one_report( + reference_id=report_id, folder_destination=TEST_FOLDER + ) + self.assertEqual(response.status_code, 200) + + # read from TEST FOLDER + with open(os.path.join(TEST_FOLDER, "new_report.html"), "r") as f: + file = f.read() + + # read from test/data folder + with open(HTML_REPORT, "r") as f: + file_2 = f.read() + + self.assertEqual(file, file_2) From 778aad271bd96478b3a28b6b21bd12a333403884 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Tue, 16 Jul 2024 16:56:58 +0200 Subject: [PATCH 33/84] Modified cornflow dockerfile --- cornflow-server/Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/cornflow-server/Dockerfile b/cornflow-server/Dockerfile index 851f6e307..bbf9bc501 100644 --- a/cornflow-server/Dockerfile +++ b/cornflow-server/Dockerfile @@ -36,6 +36,9 @@ RUN pip install "cornflow==${CORNFLOW_VERSION}" # create folder for logs RUN mkdir -p /usr/src/app/log +# create folder for object storage +RUN mkdir -p /usr/src/app/static + # create folder for custom ssh keys RUN mkdir /usr/src/app/.ssh From 68f49897c9d929eff833d8dbe1bdc973441afaca Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Tue, 16 Jul 2024 19:02:50 +0200 Subject: [PATCH 34/84] Modified default location of reports. 
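The try/except blocks added to these setUp methods implement a "log in, or sign the user up if it does not exist yet" pattern, so the integration suites can run against a freshly initialised server where the base user has not been created. A condensed sketch using the same fixture values as the tests:

    from cornflow_client import CornFlow, CornFlowApiError

    client = CornFlow(url="http://127.0.0.1:5050/")
    try:
        user_info = client.login("user", "UserPassword1!")
    except CornFlowApiError:
        # first run against an empty database: the user does not exist yet
        user_info = client.sign_up(
            username="user", pwd="UserPassword1!", email="user@cornflow.org"
        )
    base_user_id = user_info["id"]
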
Modified MANIFEST to include static folder --- .github/workflows/test_cornflow_server.yml | 2 +- libs/client/cornflow_client/tests/const.py | 2 +- .../tests/integration/test_raw_cornflow_integration.py | 5 ++++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test_cornflow_server.yml b/.github/workflows/test_cornflow_server.yml index 4e8fc4576..6769fd2e2 100644 --- a/.github/workflows/test_cornflow_server.yml +++ b/.github/workflows/test_cornflow_server.yml @@ -49,7 +49,7 @@ jobs: steps: - uses: actions/checkout@v1 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Copy DAG files diff --git a/libs/client/cornflow_client/tests/const.py b/libs/client/cornflow_client/tests/const.py index 930e75fea..e6f39412a 100644 --- a/libs/client/cornflow_client/tests/const.py +++ b/libs/client/cornflow_client/tests/const.py @@ -195,7 +195,7 @@ def _get_file(relative_path): ) PULP_EXAMPLE = _get_file("./data/pulp_example_data.json") -HTML_REPORT = "../data/new_report.html" +HTML_REPORT = _get_file("./data/new_report.html") TEST_FOLDER = "./" PUBLIC_DAGS = [ diff --git a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py index 2f1c168e9..1e856953c 100644 --- a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py +++ b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py @@ -795,7 +795,7 @@ def test_get_one_report(self): client = CornFlow(url="http://127.0.0.1:5050/") _ = client.login("user", "UserPassword1!") - response = client.get_one_report( + response = client.raw.get_one_report( reference_id=report_id, folder_destination=TEST_FOLDER ) self.assertEqual(response.status_code, 200) @@ -809,3 +809,6 @@ def test_get_one_report(self): file_2 = f.read() self.assertEqual(file, file_2) + + # remove file from TEST_FOLDER + os.remove(os.path.join(TEST_FOLDER, "new_report.html")) From 824d4729ce5766a3a37698d1cfb13e7f23380b05 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Wed, 17 Jul 2024 09:17:39 +0200 Subject: [PATCH 35/84] Some print and error catching to get the error on Github actions --- cornflow-server/cornflow/endpoints/reports.py | 69 ++++++++++--------- .../test_raw_cornflow_integration.py | 6 +- 2 files changed, 41 insertions(+), 34 deletions(-) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index 0e1e073f7..85f2f9569 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -22,6 +22,7 @@ FileError, ObjectDoesNotExist, NoPermission, + InvalidUsage, ) @@ -70,49 +71,53 @@ def post(self, **kwargs): the reference_id for the newly created report if successful) and a integer with the HTTP status code :rtype: Tuple(dict, integer) """ - execution = ExecutionModel.get_one_object(idx=kwargs["execution_id"]) + try: + execution = ExecutionModel.get_one_object(idx=kwargs["execution_id"]) - if execution is None: - raise ObjectDoesNotExist("The execution does not exist") + if execution is None: + raise ObjectDoesNotExist("The execution does not exist") - if "file" not in request.files: - return {"message": "No file part"}, 400 + if "file" not in request.files: + return {"message": "No file part"}, 400 - file = request.files["file"] - filename = secure_filename(file.filename) - 
filename_extension = filename.split(".")[-1] + file = request.files["file"] + filename = secure_filename(file.filename) + filename_extension = filename.split(".")[-1] - if filename_extension not in current_app.config["ALLOWED_EXTENSIONS"]: - return { - "message": f"Invalid file extension. " - f"Valid extensions are: {current_app.config['ALLOWED_EXTENSIONS']}" - }, 400 + if filename_extension not in current_app.config["ALLOWED_EXTENSIONS"]: + return { + "message": f"Invalid file extension. " + f"Valid extensions are: {current_app.config['ALLOWED_EXTENSIONS']}" + }, 400 - my_directory = f"{current_app.config['UPLOAD_FOLDER']}/{execution.id}" + my_directory = f"{current_app.config['UPLOAD_FOLDER']}/{execution.id}" - # we create a directory for the execution - if not os.path.exists(my_directory): - current_app.logger.info(f"Creating directory {my_directory}") - os.mkdir(my_directory) + # we create a directory for the execution + if not os.path.exists(my_directory): + current_app.logger.info(f"Creating directory {my_directory}") + os.mkdir(my_directory) - report_name = f"{secure_filename(kwargs['name'])}.{filename_extension}" + report_name = f"{secure_filename(kwargs['name'])}.{filename_extension}" - save_path = os.path.normpath(os.path.join(my_directory, report_name)) + save_path = os.path.normpath(os.path.join(my_directory, report_name)) - if "static" not in save_path or ".." in save_path: - raise NoPermission("Invalid file name") + if "static" not in save_path or ".." in save_path: + raise NoPermission("Invalid file name") - report = ReportModel( - { - "name": kwargs["name"], - "file_url": save_path, - "execution_id": kwargs["execution_id"], - "user_id": execution.user_id, - "description": kwargs.get("description", ""), - } - ) + report = ReportModel( + { + "name": kwargs["name"], + "file_url": save_path, + "execution_id": kwargs["execution_id"], + "user_id": execution.user_id, + "description": kwargs.get("description", ""), + } + ) - report.save() + report.save() + except Exception as error: + current_app.logger.error(error) + raise InvalidUsage("Error on POST report") try: # We try to save the file, if an error is raised then we delete the record on the database diff --git a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py index 1e856953c..dacb5e87b 100644 --- a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py +++ b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py @@ -777,12 +777,14 @@ def test_post_report_html(self): run=False, ).json() - print(execution["id"]) - response = self.client.raw.create_report( "new_report", HTML_REPORT, execution["id"] ) + print(execution["id"]) + print(response.status_code) + print(response.json()) + self.assertEqual(response.status_code, 201) return response From 485315988cde969a090f8a6cbcc5f93fa5d033fe Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Wed, 17 Jul 2024 09:29:41 +0200 Subject: [PATCH 36/84] More debugging --- cornflow-server/cornflow/endpoints/reports.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index 85f2f9569..9f42dcdc6 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -117,7 +117,7 @@ def post(self, **kwargs): report.save() except Exception as error: 
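The POST handler above expects a multipart/form-data request with the file under the "file" key plus name and execution_id form fields, which is what the raw client's create_report now builds. A sketch of the call and of where the upload lands under the default configuration (the execution id, file path and description are illustrative):

    from cornflow_client import CornFlow

    client = CornFlow(url="http://127.0.0.1:5050/")
    client.login("user", "UserPassword1!")
    response = client.raw.create_report(
        "new_report",           # report name; the extension is taken from the uploaded file
        "./my_report.html",     # an already rendered html (or pdf) file
        execution_id,           # id of an existing execution owned by this user (hypothetical)
        description="TSP run",  # extra keyword arguments travel as additional form fields
    )
    assert response.status_code == 201
    # server side the file is stored as <UPLOAD_FOLDER>/<execution_id>/new_report.html
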
current_app.logger.error(error) - raise InvalidUsage("Error on POST report") + raise InvalidUsage(error=error) try: # We try to save the file, if an error is raised then we delete the record on the database From 3529616b0e1b6e5a8560bcb8bfe18d0866e7ba4a Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Wed, 17 Jul 2024 09:41:45 +0200 Subject: [PATCH 37/84] Modified the way the error is built to be able to decode it as a JSON --- cornflow-server/cornflow/endpoints/reports.py | 2 +- cornflow-server/cornflow/shared/exceptions.py | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index 9f42dcdc6..1c81e7058 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -117,7 +117,7 @@ def post(self, **kwargs): report.save() except Exception as error: current_app.logger.error(error) - raise InvalidUsage(error=error) + raise InvalidUsage(error=str(error)) try: # We try to save the file, if an error is raised then we delete the record on the database diff --git a/cornflow-server/cornflow/shared/exceptions.py b/cornflow-server/cornflow/shared/exceptions.py index c3f4da63a..605a8e753 100644 --- a/cornflow-server/cornflow/shared/exceptions.py +++ b/cornflow-server/cornflow/shared/exceptions.py @@ -21,7 +21,10 @@ class InvalidUsage(Exception): def __init__(self, error=None, status_code=None, payload=None, log_txt=None): Exception.__init__(self, error) if error is not None: - self.error = error + if isinstance(error, Exception): + self.error = str(error) + else: + self.error = error if status_code is not None: self.status_code = status_code self.payload = payload From fba74181e82df87d409a97a6687c46dd26e2f402 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Wed, 17 Jul 2024 09:53:08 +0200 Subject: [PATCH 38/84] The problem was that the destination folder for the files was not correct --- cornflow-server/cornflow/app.py | 3 +++ cornflow-server/cornflow/config.py | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/cornflow-server/cornflow/app.py b/cornflow-server/cornflow/app.py index d8125a931..2301af0c8 100644 --- a/cornflow-server/cornflow/app.py +++ b/cornflow-server/cornflow/app.py @@ -46,6 +46,9 @@ def create_app(env_name="development", dataconn=None): :return: the application that is going to be running :class:`Flask` :rtype: :class:`Flask` """ + if os.getenv("FLASK_ENV", None) is not None: + env_name = os.getenv("FLASK_ENV") + dictConfig(log_config(app_config[env_name].LOG_LEVEL)) app = Flask(__name__) diff --git a/cornflow-server/cornflow/config.py b/cornflow-server/cornflow/config.py index a33e7d735..627b19c39 100644 --- a/cornflow-server/cornflow/config.py +++ b/cornflow-server/cornflow/config.py @@ -115,6 +115,10 @@ class Testing(DefaultConfig): AIRFLOW_PWD = os.getenv("AIRFLOW_PWD", "admin") OPEN_DEPLOYMENT = 1 LOG_LEVEL = int(os.getenv("LOG_LEVEL", 10)) + UPLOAD_FOLDER = os.getenv( + "UPLOAD_FOLDER", + os.path.abspath(os.path.join(os.path.dirname(__file__), "./static")), + ) class Production(DefaultConfig): From 8a2502a10b324681cb772b13a44bcfd3cffd91e5 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Wed, 17 Jul 2024 12:20:45 +0200 Subject: [PATCH 39/84] Added test for the PUT of reports --- cornflow-server/cornflow/endpoints/reports.py | 74 ++++++------- .../client/cornflow_client/cornflow_client.py | 1 - .../cornflow_client/raw_cornflow_client.py | 103 +++++++++++------- 
.../test_raw_cornflow_integration.py | 79 +++++++++++++- 4 files changed, 172 insertions(+), 85 deletions(-) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index 1c81e7058..b5605701f 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -22,7 +22,6 @@ FileError, ObjectDoesNotExist, NoPermission, - InvalidUsage, ) @@ -71,53 +70,50 @@ def post(self, **kwargs): the reference_id for the newly created report if successful) and a integer with the HTTP status code :rtype: Tuple(dict, integer) """ - try: - execution = ExecutionModel.get_one_object(idx=kwargs["execution_id"]) - if execution is None: - raise ObjectDoesNotExist("The execution does not exist") + execution = ExecutionModel.get_one_object(idx=kwargs["execution_id"]) - if "file" not in request.files: - return {"message": "No file part"}, 400 + if execution is None: + raise ObjectDoesNotExist("The execution does not exist") - file = request.files["file"] - filename = secure_filename(file.filename) - filename_extension = filename.split(".")[-1] + if "file" not in request.files: + return {"message": "No file part"}, 400 - if filename_extension not in current_app.config["ALLOWED_EXTENSIONS"]: - return { - "message": f"Invalid file extension. " - f"Valid extensions are: {current_app.config['ALLOWED_EXTENSIONS']}" - }, 400 + file = request.files["file"] + filename = secure_filename(file.filename) + filename_extension = filename.split(".")[-1] - my_directory = f"{current_app.config['UPLOAD_FOLDER']}/{execution.id}" + if filename_extension not in current_app.config["ALLOWED_EXTENSIONS"]: + return { + "message": f"Invalid file extension. " + f"Valid extensions are: {current_app.config['ALLOWED_EXTENSIONS']}" + }, 400 - # we create a directory for the execution - if not os.path.exists(my_directory): - current_app.logger.info(f"Creating directory {my_directory}") - os.mkdir(my_directory) + my_directory = f"{current_app.config['UPLOAD_FOLDER']}/{execution.id}" - report_name = f"{secure_filename(kwargs['name'])}.{filename_extension}" + # we create a directory for the execution + if not os.path.exists(my_directory): + current_app.logger.info(f"Creating directory {my_directory}") + os.mkdir(my_directory) - save_path = os.path.normpath(os.path.join(my_directory, report_name)) + report_name = f"{secure_filename(kwargs['name'])}.{filename_extension}" - if "static" not in save_path or ".." in save_path: - raise NoPermission("Invalid file name") + save_path = os.path.normpath(os.path.join(my_directory, report_name)) - report = ReportModel( - { - "name": kwargs["name"], - "file_url": save_path, - "execution_id": kwargs["execution_id"], - "user_id": execution.user_id, - "description": kwargs.get("description", ""), - } - ) + if "static" not in save_path or ".." 
in save_path: + raise NoPermission("Invalid file name") - report.save() - except Exception as error: - current_app.logger.error(error) - raise InvalidUsage(error=str(error)) + report = ReportModel( + { + "name": kwargs["name"], + "file_url": save_path, + "execution_id": kwargs["execution_id"], + "user_id": execution.user_id, + "description": kwargs.get("description", ""), + } + ) + + report.save() try: # We try to save the file, if an error is raised then we delete the record on the database @@ -127,7 +123,7 @@ def post(self, **kwargs): except Exception as error: report.delete() current_app.logger.error(error) - raise FileError + raise FileError(error=str(error)) class ReportDetailsEndpointBase(BaseMetaResource): @@ -181,7 +177,7 @@ def put(self, idx, **data): :rtype: Tuple(dict, integer) """ current_app.logger.info(f"User {self.get_user()} edits report {idx}") - return self.put_detail(data, user=self.get_user(), idx=idx) + return self.put_detail(data, user_id=self.get_user_id(), idx=idx) @doc(description="Delete a report", tags=["Reports"], inherit=False) @authenticate(auth_class=Auth()) diff --git a/libs/client/cornflow_client/cornflow_client.py b/libs/client/cornflow_client/cornflow_client.py index 178943f68..ea8aea81e 100644 --- a/libs/client/cornflow_client/cornflow_client.py +++ b/libs/client/cornflow_client/cornflow_client.py @@ -1,7 +1,6 @@ from .raw_cornflow_client import RawCornFlow, CornFlowApiError # TODO: review the standard calls for the reports. -# TODO: modify the headers on the calls that require a file. # TODO: have the download report method to receive the path to save it on the local machine. diff --git a/libs/client/cornflow_client/raw_cornflow_client.py b/libs/client/cornflow_client/raw_cornflow_client.py index 2d4be87fb..bb07bc08b 100644 --- a/libs/client/cornflow_client/raw_cornflow_client.py +++ b/libs/client/cornflow_client/raw_cornflow_client.py @@ -1,5 +1,5 @@ """ - +Code for the main class to interact to cornflow from python code. 
""" import logging as log @@ -11,6 +11,45 @@ import requests +def ask_token(func: callable): + @wraps(func) + def wrapper(self, *args, **kwargs): + if not self.token: + raise CornFlowApiError("Need to login first!") + return func(self, *args, **kwargs) + + return wrapper + + +def log_call(func: callable): + @wraps(func) + def wrapper(*args, **kwargs): + result = func(*args, **kwargs) + log.debug(result.json()) + return result + + return wrapper + + +def prepare_encoding(func: callable): + @wraps(func) + def wrapper(*args, **kwargs): + encoding = kwargs.get("encoding", "br") + if encoding not in [ + "gzip", + "compress", + "deflate", + "br", + "identity", + ]: + encoding = "br" + kwargs["encoding"] = encoding + result = func(*args, **kwargs) + return result + + return wrapper + + class RawCornFlow(object): """ Base class to access cornflow-server @@ -20,42 +59,6 @@ def __init__(self, url, token=None): self.url = url self.token = token - def ask_token(func): - @wraps(func) - def wrapper(self, *args, **kwargs): - if not self.token: - raise CornFlowApiError("Need to login first!") - return func(self, *args, **kwargs) - - return wrapper - - def log_call(func): - @wraps(func) - def wrapper(*args, **kwargs): - result = func(*args, **kwargs) - log.debug(result.json()) - return result - - return wrapper - - def prepare_encoding(func): - @wraps(func) - def wrapper(*args, **kwargs): - encoding = kwargs.get("encoding", "br") - if encoding not in [ - "gzip", - "compress", - "deflate", - "br", - "identity", - ]: - encoding = "br" - kwargs["encoding"] = encoding - result = func(*args, **kwargs) - return result - - return wrapper - # def expect_201(func): # return partial(expect_status, status=201) # @@ -352,7 +355,7 @@ def create_execution( :param str instance_id: id for the instance :param str name: name for the execution :param str description: description of the execution - :param dict config: execution configuration + :param dict config: configuration for the execution :param str schema: name of the problem to solve :param str encoding: the type of encoding used in the call. Defaults to 'br' :param bool run: if the execution should be run or not @@ -542,6 +545,7 @@ def create_report(self, name, filename, execution_id, encoding=None, **kwargs): @ask_token @prepare_encoding def get_one_report(self, reference_id, folder_destination, encoding=None): + """""" result = self.get_api_for_id(api="report", id=reference_id, encoding=encoding) content = result.content @@ -555,6 +559,25 @@ def get_one_report(self, reference_id, folder_destination, encoding=None): return result + @ask_token + @log_call + @prepare_encoding + def delete_one_report(self, reference_id, encoding=None): + """""" + return self.delete_api_for_id(api="report", id=reference_id, encoding=encoding) + + @ask_token + @log_call + @prepare_encoding + def put_one_report(self, reference_id, payload, encoding=None): + """ + + """ + return self.put_api_for_id( + api="report", id=reference_id, payload=payload, encoding=encoding + ) + + @ask_token @prepare_encoding def write_instance_checks(self, instance_id, encoding=None, **kwargs): @@ -934,7 +957,7 @@ def create_deployed_dag( encoding=None, ): if name is None: - return {"error": "No dag name was given"} + raise CornFlowApiError("No dag name was given") payload = dict( id=name, description=description, @@ -1017,7 +1040,7 @@ def group_variables_by_name(_vars, names_list, **kwargs): # 2. key can be a tuple or a single string. # 3. if a tuple, they can be an integer or a string. 
# - # it dos not permit the nested dictionary format of variables + # it does not permit the nested dictionary format of variables # we copy it because we will be taking out already seen variables _vars = dict(_vars) __vars = {k: {} for k in names_list} diff --git a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py index dacb5e87b..c4741ba43 100644 --- a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py +++ b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py @@ -617,6 +617,11 @@ def test_get_all_schemas(self): for schema in PUBLIC_DAGS: self.assertIn(schema, read_schemas) + def test_log_in_first(self): + client = CornFlow(url="http://127.0.0.1:5050/") + + self.assertRaises(CornFlowApiError, client.raw.get_all_instances) + class TestRawCornflowClientAdmin(TestCase): def setUp(self): @@ -758,6 +763,19 @@ def test_post_deployed_dag(self): self.assertEqual("test_dag_2", response["id"]) self.assertEqual("test_dag_2_description", response["description"]) + def test_raises_post_deployed_dag(self): + self.assertRaises( + CornFlowApiError, + self.client.raw.create_deployed_dag, + name=None, + description="test_dag_2_description", + instance_schema=dict(), + instance_checks_schema=dict(), + solution_schema=dict(), + solution_checks_schema=dict(), + config_schema=dict(), + ) + def test_post_report_html(self): client = CornFlow(url="http://127.0.0.1:5050/") _ = client.login("user", "UserPassword1!") @@ -781,17 +799,12 @@ def test_post_report_html(self): "new_report", HTML_REPORT, execution["id"] ) - print(execution["id"]) - print(response.status_code) - print(response.json()) - self.assertEqual(response.status_code, 201) return response def test_get_one_report(self): response = self.test_post_report_html() - print(response.json()) report_id = response.json()["id"] client = CornFlow(url="http://127.0.0.1:5050/") @@ -814,3 +827,59 @@ def test_get_one_report(self): # remove file from TEST_FOLDER os.remove(os.path.join(TEST_FOLDER, "new_report.html")) + + def test_get_all_reports(self): + report_1 = self.test_post_report_html().json()["id"] + report_2 = self.test_post_report_html().json()["id"] + + client = CornFlow(url="http://127.0.0.1:5050/") + _ = client.login("user", "UserPassword1!") + + response = client.raw.get_reports() + + self.assertEqual(response.status_code, 200) + self.assertGreaterEqual(len(response.json()), 2) + + client.raw.delete_one_report(reference_id=report_1) + client.raw.delete_one_report(reference_id=report_2) + + def test_put_one_report(self): + response = self.test_post_report_html() + report_id = response.json()["id"] + + client = CornFlow(url="http://127.0.0.1:5050/") + _ = client.login("user", "UserPassword1!") + + payload = {"name": "new_name", "description": "some_description"} + + response = client.raw.put_one_report(reference_id=report_id, payload=payload) + + self.assertEqual(response.status_code, 200) + + new_report = client.raw.get_one_report(reference_id=report_id) + + self.assertEqual(new_report.json()["name"], paylaod["name"]) + self.assertEqual(new_report.json()["description"], payload["description"]) + self.assertNotEqual(new_report.json()["name"], "new_report") + self.assertNotEqual(new_report.json()["description"], "") + + delete = client.raw.delete_one_report(reference_id=report_id) + self.assertEqual(delete.status_code, 200) + + def test_delete_one_report(self): + response = 
self.test_post_report_html() + report_id = response.json()["id"] + + client = CornFlow(url="http://127.0.0.1:5050/") + _ = client.login("user", "UserPassword1!") + + reports_before = client.raw.get_reports() + + self.assertEqual(reports_before.status_code, 200) + + response = client.raw.delete_one_report(reference_id=report_id) + self.assertEqual(response.status_code, 200) + + reports_after = client.raw.get_reports() + + self.assertLess(len(reports_after.json()), len(reports_before.json())) From a3e58716a6403ab07d4945e3deecd4f96b7d82d0 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Wed, 17 Jul 2024 12:48:42 +0200 Subject: [PATCH 40/84] Changed the way get one report reports back the name of the report and the description --- cornflow-server/cornflow/endpoints/reports.py | 35 +++++++++++++++++-- cornflow-server/cornflow/models/reports.py | 4 +++ .../cornflow/tests/unit/test_reports.py | 21 +++++++++++ .../test_raw_cornflow_integration.py | 8 ++--- 4 files changed, 61 insertions(+), 7 deletions(-) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index b5605701f..b2d46421d 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -154,7 +154,7 @@ def get(self, idx): :rtype: Tuple(dict, integer) """ current_app.logger.info(f"User {self.get_user()} gets details of report {idx}") - report = self.get_detail(user_id=self.get_user_id(), idx=idx) + report = self.get_detail(user=self.get_user(), idx=idx) if report is None: raise ObjectDoesNotExist @@ -162,7 +162,10 @@ def get(self, idx): file = f"{report.name}{file}" directory = directory[:-1] - return send_from_directory(directory, file) + response = send_from_directory(directory, file) + response.headers["File-Description"] = report.description + response.headers["File-Name"] = report.name + return response @doc(description="Edit a report", tags=["Reports"], inherit=False) @authenticate(auth_class=Auth()) @@ -177,7 +180,33 @@ def put(self, idx, **data): :rtype: Tuple(dict, integer) """ current_app.logger.info(f"User {self.get_user()} edits report {idx}") - return self.put_detail(data, user_id=self.get_user_id(), idx=idx) + + report = self.get_detail(user=self.get_user(), idx=idx) + + try: + if report.name != data["name"]: + directory, file = report.file_url.split(report.name) + + new_location = ( + f"{os.path.join(directory, secure_filename(data['name']))}{file}" + ) + old_location = report.file_url + + current_app.logger.debug(f"Old location: {old_location}") + current_app.logger.debug(f"New location: {new_location}") + + os.rename(old_location, new_location) + data["file_url"] = new_location + + except Exception as error: + current_app.logger.error(error) + return {"error": "Error moving file"}, 400 + + report.update(data) + + report.save() + + return {"message": "Updated correctly"}, 200 @doc(description="Delete a report", tags=["Reports"], inherit=False) @authenticate(auth_class=Auth()) diff --git a/cornflow-server/cornflow/models/reports.py b/cornflow-server/cornflow/models/reports.py index 73ef15c8f..d1eaa2188 100644 --- a/cornflow-server/cornflow/models/reports.py +++ b/cornflow-server/cornflow/models/reports.py @@ -53,6 +53,10 @@ def user_id(self): """ return db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False) + @declared_attr + def user(self): + return db.relationship("UserModel") + def __init__(self, data: dict): super().__init__() self.user_id = data.get("user_id") diff --git 
a/cornflow-server/cornflow/tests/unit/test_reports.py b/cornflow-server/cornflow/tests/unit/test_reports.py index a142f93ce..945e1aadd 100644 --- a/cornflow-server/cornflow/tests/unit/test_reports.py +++ b/cornflow-server/cornflow/tests/unit/test_reports.py @@ -187,6 +187,27 @@ def test_get_one_report(self): self.assertEqual(200, response.status_code) self.assertEqual(content, file) + def test_modify_report(self): + item = self.test_new_report_html() + + payload = {"name": "new_name", "description": "some_description"} + + response = self.client.put( + f"{self.url}{item['id']}/", + headers=self.get_header_with_auth(self.token), + json=payload, + ) + + self.assertEqual(response.status_code, 200) + + response = self.client.get( + f"{self.url}{item['id']}/", headers=self.get_header_with_auth(self.token) + ) + + self.assertEqual(200, response.status_code) + self.assertEqual("new_name", dict(response.headers)["File-Name"]) + self.assertEqual("some_description", dict(response.headers)["File-Description"]) + def test_delete_report(self): item = self.test_new_report_html() response = self.client.delete( diff --git a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py index c4741ba43..90621e96e 100644 --- a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py +++ b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py @@ -858,10 +858,10 @@ def test_put_one_report(self): new_report = client.raw.get_one_report(reference_id=report_id) - self.assertEqual(new_report.json()["name"], paylaod["name"]) - self.assertEqual(new_report.json()["description"], payload["description"]) - self.assertNotEqual(new_report.json()["name"], "new_report") - self.assertNotEqual(new_report.json()["description"], "") + self.assertEqual(new_report.headers["File-Name"], payload["name"]) + self.assertEqual(new_report.headers["File-Description"], payload["description"]) + self.assertNotEqual(new_report.headers["File-Name"], "new_report") + self.assertNotEqual(new_report.headers["File-Description"], "") delete = client.raw.delete_one_report(reference_id=report_id) self.assertEqual(delete.status_code, 200) From f955e62db5d0b8f04a3598f71c8de3287c6c25e1 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Wed, 17 Jul 2024 12:50:10 +0200 Subject: [PATCH 41/84] Fixed status code on failing test --- cornflow-server/cornflow/tests/unit/test_reports.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cornflow-server/cornflow/tests/unit/test_reports.py b/cornflow-server/cornflow/tests/unit/test_reports.py index 945e1aadd..abab00834 100644 --- a/cornflow-server/cornflow/tests/unit/test_reports.py +++ b/cornflow-server/cornflow/tests/unit/test_reports.py @@ -158,7 +158,7 @@ def test_new_report_no_execution(self): ), ) - self.assertEqual(400, response.status_code) + self.assertEqual(404, response.status_code) self.assertTrue("error" in response.json) def test_get_no_reports(self): From c698a1743581103e48391266ec045ba6a2171ed3 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Wed, 17 Jul 2024 12:57:30 +0200 Subject: [PATCH 42/84] Fixed error on client test --- .../tests/integration/test_raw_cornflow_integration.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py 
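With the detail endpoint now attaching the report metadata to the response headers, a client can rename a report and read the new name and description back without parsing the body. A compact sketch (the report id is a hypothetical placeholder):

    from cornflow_client import CornFlow

    client = CornFlow(url="http://127.0.0.1:5050/")
    client.login("user", "UserPassword1!")
    client.raw.put_one_report(
        reference_id=report_id,  # id of an existing report (hypothetical)
        payload={"name": "new_name", "description": "some_description"},
    )
    # the PUT also renames the stored file; the GET exposes the metadata as headers
    response = client.raw.get_one_report(reference_id=report_id, folder_destination="./")
    assert response.headers["File-Name"] == "new_name"
    assert response.headers["File-Description"] == "some_description"
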
index 90621e96e..f0a32fae7 100644 --- a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py +++ b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py @@ -856,7 +856,9 @@ def test_put_one_report(self): self.assertEqual(response.status_code, 200) - new_report = client.raw.get_one_report(reference_id=report_id) + new_report = client.raw.get_one_report( + reference_id=report_id, folder_destination=TEST_FOLDER + ) self.assertEqual(new_report.headers["File-Name"], payload["name"]) self.assertEqual(new_report.headers["File-Description"], payload["description"]) From 35913d5654cf0a4a157cf8e58afdda6ef7230565 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Wed, 17 Jul 2024 13:08:12 +0200 Subject: [PATCH 43/84] Added tests for base client --- .../client/cornflow_client/cornflow_client.py | 17 ++- .../integration/test_cornflow_integration.py | 108 +++++++++++++++++- 2 files changed, 121 insertions(+), 4 deletions(-) diff --git a/libs/client/cornflow_client/cornflow_client.py b/libs/client/cornflow_client/cornflow_client.py index ea8aea81e..3f2df4890 100644 --- a/libs/client/cornflow_client/cornflow_client.py +++ b/libs/client/cornflow_client/cornflow_client.py @@ -21,7 +21,11 @@ def __init__(self, url, token=None): ) self.create_report = self.expect_status(self.raw.create_report, 201) self.get_reports = self.expect_status(self.raw.get_reports, 200) - self.get_one_report = self.expect_status(self.raw.get_one_report, 200) + self.get_one_report = self.expect_status( + self.raw.get_one_report, 200, json=False + ) + self.put_one_report = self.expect_status(self.raw.put_one_report, 200) + self.delete_one_report = self.expect_status(self.raw.delete_one_report, 200) self.create_instance_data_check = self.expect_status( self.raw.create_instance_data_check, 201 @@ -91,10 +95,13 @@ def token(self, token): self.raw.token = token @staticmethod - def expect_status(func, expected_status=None): + def expect_status(func, expected_status=None, json=True): """ Gets the response of the call and raise an exception if the status of the response is not the expected + + The response of the call is the json in the body for those calls that are application/json + For the calls that are form/data the response of the call is the content and the headers """ def decorator(*args, **kwargs): @@ -103,7 +110,11 @@ def decorator(*args, **kwargs): raise CornFlowApiError( f"Expected a code {expected_status}, got a {response.status_code} error instead: {response.text}" ) - return response.json() + + if json: + return response.json() + else: + return response.content, response.headers return decorator diff --git a/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py b/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py index 742d553b1..d78e6e4d9 100644 --- a/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py +++ b/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py @@ -14,7 +14,12 @@ from cornflow_client import CornFlow, CornFlowApiError from cornflow_client.constants import STATUS_OPTIMAL, STATUS_NOT_SOLVED, STATUS_QUEUED from cornflow_client.schema.tools import get_pulp_jsonschema -from cornflow_client.tests.const import PUBLIC_DAGS, PULP_EXAMPLE +from cornflow_client.tests.const import ( + PUBLIC_DAGS, + PULP_EXAMPLE, + HTML_REPORT, + TEST_FOLDER, +) # Constants path_to_tests_dir = os.path.dirname(os.path.abspath(__file__)) @@ -665,3 +670,104 @@ def 
test_post_deployed_dag(self): self.assertIn(item, response.keys()) self.assertEqual("test_dag", response["id"]) self.assertEqual("test_dag_description", response["description"]) + + def test_post_report_html(self): + client = CornFlow(url="http://127.0.0.1:5050/") + _ = client.login("user", "UserPassword1!") + + data = _load_file(PULP_EXAMPLE) + + instance = client.create_instance(data, "test_example", "test_description") + + execution = client.create_execution( + instance_id=instance["id"], + config={"solver": "PULP_CBC_CMD", "timeLimit": 60}, + name="test_execution", + description="execution_description", + schema="solve_model_dag", + run=False, + ) + + response = self.client.create_report("new_report", HTML_REPORT, execution["id"]) + + self.assertEqual(response["execution_id"], execution["id"]) + + return response + + def test_get_one_report(self): + response = self.test_post_report_html() + report_id = response["id"] + + client = CornFlow(url="http://127.0.0.1:5050/") + _ = client.login("user", "UserPassword1!") + + content, headers = client.get_one_report( + reference_id=report_id, folder_destination=TEST_FOLDER + ) + + self.assertEqual(headers["File-Name"], response["name"]) + self.assertEqual(headers["File-Description"], response["description"]) + + # read from TEST FOLDER + with open(os.path.join(TEST_FOLDER, "new_report.html"), "r") as f: + file = f.read() + + # read from test/data folder + with open(HTML_REPORT, "r") as f: + file_2 = f.read() + + self.assertEqual(file, file_2) + + # remove file from TEST_FOLDER + os.remove(os.path.join(TEST_FOLDER, "new_report.html")) + + def test_get_all_reports(self): + report_1 = self.test_post_report_html()["id"] + report_2 = self.test_post_report_html()["id"] + + client = CornFlow(url="http://127.0.0.1:5050/") + _ = client.login("user", "UserPassword1!") + + response = client.get_reports() + + self.assertGreaterEqual(len(response), 2) + + client.delete_one_report(reference_id=report_1) + client.delete_one_report(reference_id=report_2) + + def test_put_one_report(self): + response = self.test_post_report_html() + report_id = response["id"] + + client = CornFlow(url="http://127.0.0.1:5050/") + _ = client.login("user", "UserPassword1!") + + payload = {"name": "new_name", "description": "some_description"} + + _ = client.put_one_report(reference_id=report_id, payload=payload) + + content, headers = client.get_one_report( + reference_id=report_id, folder_destination=TEST_FOLDER + ) + + self.assertEqual(headers["File-Name"], payload["name"]) + self.assertEqual(headers["File-Description"], payload["description"]) + self.assertNotEqual(headers["File-Name"], "new_report") + self.assertNotEqual(headers["File-Description"], "") + + _ = client.delete_one_report(reference_id=report_id) + + def test_delete_one_report(self): + response = self.test_post_report_html() + report_id = response["id"] + + client = CornFlow(url="http://127.0.0.1:5050/") + _ = client.login("user", "UserPassword1!") + + reports_before = client.get_reports() + + _ = client.delete_one_report(reference_id=report_id) + + reports_after = client.get_reports() + + self.assertLess(len(reports_after), len(reports_before)) From e4bdf9ecd9f3c454f556de508b28b6d5ca733314 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Wed, 17 Jul 2024 13:18:56 +0200 Subject: [PATCH 44/84] Added some typing --- .../cornflow_client/raw_cornflow_client.py | 87 ++++++++++++++----- 1 file changed, 63 insertions(+), 24 deletions(-) diff --git a/libs/client/cornflow_client/raw_cornflow_client.py 
b/libs/client/cornflow_client/raw_cornflow_client.py index bb07bc08b..ec1273fcb 100644 --- a/libs/client/cornflow_client/raw_cornflow_client.py +++ b/libs/client/cornflow_client/raw_cornflow_client.py @@ -6,9 +6,11 @@ import os import re from functools import wraps +from typing import Union, Dict from urllib.parse import urljoin import requests +from requests import Response def ask_token(func: callable): @@ -55,7 +57,7 @@ class RawCornFlow(object): Base class to access cornflow-server """ - def __init__(self, url, token=None): + def __init__(self, url: str, token=None): self.url = url self.token = token @@ -67,25 +69,25 @@ def __init__(self, url, token=None): def api_for_id( self, - api, - id=None, - method="GET", - post_url=None, - query_args=None, - encoding=None, + api: str, + id: Union[str, int] = None, + method: str = "GET", + post_url: str = None, + query_args: Dict = None, + encoding: str = None, **kwargs, - ): + ) -> Response: """ - :param api: the resource in the server + :param str api: the resource in the server :param id: the id of the particular object - :param method: HTTP method to apply - :param post_url: optional action to apply - :param query_args: query arguments for the request - :param encoding: optional string with the type of encoding, if it is not specified it uses br encoding, + :param str method: HTTP method to apply + :param str post_url: optional action to apply + :param Dict query_args: query arguments for the request + :param str encoding: optional string with the type of encoding, if it is not specified it uses br encoding, options are: gzip, compress, deflate, br or identity :param kwargs: other arguments to requests.request - :return: requests.request + :return: :class:`requests.Response` """ if api[0] == "/" and self.url[-1] == "/": api = api[1:] @@ -125,7 +127,9 @@ def api_for_id( **kwargs, ) - def get_api(self, api, method="GET", encoding=None, **kwargs): + def get_api( + self, api: str, method: str = "GET", encoding: str = None, **kwargs + ) -> Response: return requests.request( method=method, url=urljoin(self.url, api) + "/", @@ -138,7 +142,14 @@ def get_api(self, api, method="GET", encoding=None, **kwargs): @ask_token @prepare_encoding - def get_api_for_id(self, api, id=None, post_url=None, encoding=None, **kwargs): + def get_api_for_id( + self, + api: str, + id: Union[str, id] = None, + post_url: str = None, + encoding: str = None, + **kwargs, + ) -> Response: """ api_for_id with a GET request """ @@ -506,8 +517,8 @@ def write_solution(self, execution_id, encoding=None, **kwargs): Edits an execution :param str execution_id: id for the execution - :param kwargs: optional data to edit :param str encoding: the type of encoding used in the call. Defaults to 'br' + :param kwargs: optional data to edit """ return self.put_api_for_id( "dag/", id=execution_id, encoding=encoding, payload=kwargs @@ -517,7 +528,14 @@ def write_solution(self, execution_id, encoding=None, **kwargs): @log_call @prepare_encoding def get_reports(self, params=None, encoding=None): - """ """ + """ + Gets all reports for a given user + + :param dict params: optional filters + :param str encoding: the type of encoding used in the call. 
Defaults to 'br' + :return: the response object + :rtype: :class:`Response` + """ return self.get_api("report", params=params, encoding=encoding) @ask_token @@ -529,8 +547,8 @@ def create_report(self, name, filename, execution_id, encoding=None, **kwargs): :param str execution_id: id for the execution :param str name: the name of the report :param file filename: the file object with the report (e.g., open(REPORT_FILE_PATH, "rb")) - :param kwargs: optional data to write (description) :param str encoding: the type of encoding used in the call. Defaults to 'br' + :param kwargs: optional data to write (description) """ with open(filename, "rb") as _file: result = self.create_api( @@ -544,8 +562,18 @@ def create_report(self, name, filename, execution_id, encoding=None, **kwargs): @ask_token @prepare_encoding - def get_one_report(self, reference_id, folder_destination, encoding=None): - """""" + def get_one_report( + self, reference_id, folder_destination, encoding=None + ) -> Response: + """ + Gets one specific report and downloads it to disk + + :param int reference_id: id of the report to download + :param str folder_destination: Path on the local system where to save the downloaded report + :param str encoding: the type of encoding used in the call. Defaults to 'br' + :return: the response object + :rtype: :class:`Response` + """ result = self.get_api_for_id(api="report", id=reference_id, encoding=encoding) content = result.content @@ -563,21 +591,32 @@ def get_one_report(self, reference_id, folder_destination, encoding=None): @log_call @prepare_encoding def delete_one_report(self, reference_id, encoding=None): - """""" + """ + Deletes a report + + :param int reference_id: id of the report to download + :param str encoding: the type of encoding used in the call. Defaults to 'br' + :return: the response object + :rtype: :class:`Response` + """ return self.delete_api_for_id(api="report", id=reference_id, encoding=encoding) @ask_token @log_call @prepare_encoding - def put_one_report(self, reference_id, payload, encoding=None): + def put_one_report(self, reference_id, payload, encoding=None) -> Response: """ + Edits one specific report and downloads it to disk + :param int reference_id: id of the report to download + :param str encoding: the type of encoding used in the call. 
Defaults to 'br' + :return: the response object + :rtype: :class:`Response` """ return self.put_api_for_id( api="report", id=reference_id, payload=payload, encoding=encoding ) - @ask_token @prepare_encoding def write_instance_checks(self, instance_id, encoding=None, **kwargs): From ffcef7cec24304d400e490c1362cf7bd2225ad62 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Thu, 18 Jul 2024 14:26:43 +0200 Subject: [PATCH 45/84] integration tests passing for tsp and timer reports --- cornflow-dags/tests/test_dags.py | 75 ++++++++++++++++--- cornflow-server/cornflow/schemas/execution.py | 1 + cornflow-server/cornflow/tests/const.py | 1 + .../cornflow/tests/custom_liveServer.py | 4 +- .../tests/integration/test_cornflowclient.py | 73 +++++++++++------- .../cornflow_client/airflow/dag_utilities.py | 5 +- .../cornflow_client/raw_cornflow_client.py | 25 ++++--- 7 files changed, 131 insertions(+), 53 deletions(-) diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index d0bb4cf43..24d82f669 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -7,8 +7,7 @@ import unittest from unittest.mock import patch, Mock, MagicMock -import html -import xml.etree.ElementTree as ET +from html.parser import HTMLParser # we mock everything that's airflow related: mymodule = MagicMock() @@ -22,6 +21,8 @@ from jsonschema import Draft7Validator from pytups import SuperDict +from typing import Dict, List, Tuple + class BaseDAGTests: class SolvingTests(unittest.TestCase): @@ -173,14 +174,24 @@ def test_report(self): # check the file is created. self.assertTrue(os.path.exists(report_path)) - tree = ET.parse(report_path) - elements = [elem.tag for elem in tree.iter()] - self.assertSetEqual(set(elements), {"html", "div", "span", "body"}) + # let's just check for an element inside the html that we know should exist + # in this case a few 'section' tags with an attribute with a specific id + things_to_look = dict( + section=[ + ("id", "solution"), + ("id", "instance-statistics"), + ("id", "tsp"), + ] + ) + parser = HTMLCheckTags(things_to_look) + with open(report_path, "r") as f: + content = f.read() - # try: - # os.remove(report_path) - # except FileNotFoundError: - # pass + try: + os.remove(report_path) + except FileNotFoundError: + pass + self.assertRaises(StopIteration, parser.feed, content) def test_export(self): tests = self.app.test_cases @@ -327,11 +338,51 @@ def test_report(self): my_experim.generate_report(report_path=report_path) # check the file is created. 
self.assertTrue(os.path.exists(report_path)) - tree = ET.parse(report_path) - elements = [elem.tag for elem in tree.iter()] - self.assertSetEqual(set(elements), {"html", "div", "span", "body"}) + # let's just check for an element inside the html that we know should exist + # a 'div' tag with a 'foo' attribute + + # class MyHTMLParser(HTMLParser): + # + # def handle_starttag(self, tag, attrs): + # print("Start tag:", tag) + # for attr in attrs: + # print(" attr:", attr) + + parser = HTMLCheckTags(dict(div=[("class", "foo")], span=[("class", "bar")])) + with open(report_path, "r") as f: + content = f.read() + # parser.feed(content) try: os.remove(report_path) except FileNotFoundError: pass + self.assertRaises(StopIteration, parser.feed, content) + + +class HTMLCheckTags(HTMLParser): + things_to_check: Dict[str, List[Tuple[str, str]]] + + def __init__(self, things_to_check: Dict[str, List[Tuple[str, str]]]): + HTMLParser.__init__(self) + self.things_to_check = SuperDict(things_to_check).copy_deep() + + def handle_starttag(self, tag: str, attrs: List[Tuple[str, str]]): + # print("Start tag:", tag) + if tag not in self.things_to_check: + return + for attr in attrs: + # print(" attr:", attr) + try: + # we find the element in the list and remove it + index = self.things_to_check[tag].index(attr) + self.things_to_check[tag].pop(index) + except ValueError: + continue + # if the list is empty, we take out the key + if not len(self.things_to_check[tag]): + self.things_to_check.pop(tag) + # if we have nothing else to check, + # we stop searching + if not (self.things_to_check): + raise StopIteration diff --git a/cornflow-server/cornflow/schemas/execution.py b/cornflow-server/cornflow/schemas/execution.py index 1528fbd30..a03aafb07 100644 --- a/cornflow-server/cornflow/schemas/execution.py +++ b/cornflow-server/cornflow/schemas/execution.py @@ -31,6 +31,7 @@ class ConfigSchema(Schema): threads = fields.Int(required=False) logPath = fields.Str(required=False) MIPGap = fields.Float(required=False) + report = fields.Raw(required=False) class ConfigSchemaResponse(ConfigSchema): diff --git a/cornflow-server/cornflow/tests/const.py b/cornflow-server/cornflow/tests/const.py index 8060938d4..b8ba1312f 100644 --- a/cornflow-server/cornflow/tests/const.py +++ b/cornflow-server/cornflow/tests/const.py @@ -14,6 +14,7 @@ def _get_file(relative_path): INSTANCE_MPS = _get_file("./data/test_mps.mps") INSTANCE_GC_20 = _get_file("./data/gc_20_7.json") INSTANCE_FILE_FAIL = _get_file("./unit/test_instances.py") +INSTANCE_TSP = _get_file("./data/tsp_instance.json") EXECUTION_PATH = _get_file("./data/new_execution.json") BAD_EXECUTION_PATH = _get_file("./data/bad_execution.json") diff --git a/cornflow-server/cornflow/tests/custom_liveServer.py b/cornflow-server/cornflow/tests/custom_liveServer.py index 256950c82..5c6e1c44c 100644 --- a/cornflow-server/cornflow/tests/custom_liveServer.py +++ b/cornflow-server/cornflow/tests/custom_liveServer.py @@ -39,7 +39,9 @@ def setUp(self, create_all=True): if create_all: db.create_all() access_init_command(False) - register_deployed_dags_command_test(verbose=False) + register_deployed_dags_command_test( + verbose=False, dags=["solve_model_dag", "gc", "timer", "tsp"] + ) user_data = dict( username="testname", email="test@test.com", diff --git a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py index 46b95f66c..f304a0dcd 100644 --- a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py +++ 
b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py @@ -4,6 +4,8 @@ # Full imports import json +import os + import pulp import logging as log import time @@ -21,7 +23,7 @@ EXEC_STATE_QUEUED, STATUS_HEALTHY, ) -from cornflow.tests.const import INSTANCE_PATH, CASE_PATH +from cornflow.tests.const import INSTANCE_PATH, CASE_PATH, INSTANCE_MPS, INSTANCE_TSP from cornflow.tests.custom_liveServer import CustomTestCaseLive @@ -128,7 +130,7 @@ def create_new_execution(self, payload): return execution def create_instance_and_execution(self): - one_instance = self.create_new_instance("./cornflow/tests/data/test_mps.mps") + one_instance = self.create_new_instance(INSTANCE_MPS) name = "test_execution_name_123" description = "test_execution_description_123" schema = "solve_model_dag" @@ -147,16 +149,15 @@ def create_instance_and_execution_report( name = "test_instance_1" description = "description123" if data is None: - data = load_file("./cornflow/tests/data/tsp_instance.json") + data = load_file(INSTANCE_TSP) payload = dict(data=data, name=name, description=description, schema=schema) one_instance = self.create_new_instance_payload(payload) payload = dict( instance_id=one_instance["id"], - config=dict(solver=solver, timeLimit=timeLimit), + config=dict(solver=solver, timeLimit=timeLimit, report=dict(name="report")), description="test_execution_description_123", name="test_execution_123", schema=schema, - report=dict(name="report"), ) return self.create_new_execution(payload) @@ -183,10 +184,10 @@ class TestCornflowClientOpen(TestCornflowClientBasic): # TODO: infeasible execution def test_new_instance_file(self): - self.create_new_instance_file("./cornflow/tests/data/test_mps.mps") + self.create_new_instance_file(INSTANCE_MPS) def test_new_instance(self): - return self.create_new_instance("./cornflow/tests/data/test_mps.mps") + return self.create_new_instance(INSTANCE_MPS) # TODO: reactivate test with new version of cornflow client which allows to pass # optional arguments for the headers of the request @@ -212,27 +213,45 @@ def test_new_execution(self): def test_new_execution_with_tsp_report(self): return self.create_instance_and_execution_report() + # def test_new_execution_with_tsp_report_wait(self): execution = self.create_instance_and_execution_report() - time.sleep(10) - execution = self.client.raw.get_results(execution["id"]) - id_report = execution["reports"][0]["id"] - my_report = self.client.raw.get_report(id_report) - with open("my_report.html", "wb") as f: - f.write(my_report) - return - # read header of file? we can parse it with beatifulsoup + reports_info = [] + for _ in range(6): + time.sleep(5) + execution_info = self.client.raw.get_results(execution["id"]) + reports_info = execution_info.json()["reports"] + if len(reports_info) > 0: + break + id_report = reports_info[0]["id"] + my_name = "./my_report.html" + self.client.raw.get_one_report(id_report, "./", my_name) + self.assertTrue(os.path.exists(my_name)) + try: + os.remove(my_name) + except OSError: + pass + + # read header of file? 
we can parse it with beatifulsoup def test_new_execution_with_timer_report_wait(self): - payload = dict(solver="default", schema="timer", data={}, timeLimit=1) + payload = dict(solver="default", schema="timer", data={"a": 1}, timeLimit=1) execution = self.create_instance_and_execution_report(**payload) - time.sleep(5) - execution = self.client.raw.get_results(execution["id"]) - id_report = execution["reports"][0]["id"] - my_report = self.client.raw.get_report(id_report) - with open("my_report.html", "wb") as f: - f.write(my_report) - return + reports_info = [] + for _ in range(6): + time.sleep(5) + execution_info = self.client.raw.get_results(execution["id"]) + reports_info = execution_info.json()["reports"] + if len(reports_info) > 0: + break + id_report = reports_info[0]["id"] + my_name = "./my_report.html" + self.client.raw.get_one_report(id_report, "./", my_name) + self.assertTrue(os.path.exists(my_name)) + try: + os.remove(my_name) + except OSError: + pass def test_delete_execution(self): execution = self.test_new_execution() @@ -258,7 +277,7 @@ def test_get_dag_schema_no_schema(self): self.assertTrue("error" in response.json()) def test_new_execution_bad_dag_name(self): - one_instance = self.create_new_instance("./cornflow/tests/data/test_mps.mps") + one_instance = self.create_new_instance(INSTANCE_MPS) name = "test_execution_name_123" description = "test_execution_description_123" payload = dict( @@ -272,7 +291,7 @@ def test_new_execution_bad_dag_name(self): self.assertRaises(CornFlowApiError, _bad_func) def test_new_execution_with_schema(self): - one_instance = self.create_new_instance("./cornflow/tests/data/test_mps.mps") + one_instance = self.create_new_instance(INSTANCE_MPS) name = "test_execution_name_123" description = "test_execution_description_123" payload = dict( @@ -449,7 +468,7 @@ def test_manual_execution_2(self): self.assertIsNone(execution_data["data"]) def test_edit_one_execution(self): - one_instance = self.create_new_instance("./cornflow/tests/data/test_mps.mps") + one_instance = self.create_new_instance(INSTANCE_MPS) payload = dict( name="bla", config=dict(solver="CBC"), @@ -500,7 +519,7 @@ def test_solve_and_relaunch_too_soon(self): self.assertRaises(CornFlowApiError, _launch_too_soon_func) def test_check_instance(self): - instance = self.create_new_instance("./cornflow/tests/data/test_mps.mps") + instance = self.create_new_instance(INSTANCE_MPS) data_check_execution = self.client.create_instance_data_check(instance["id"]) self.assertEqual(data_check_execution["instance_id"], instance["id"]) status = self.client.get_status(data_check_execution["id"]) diff --git a/libs/client/cornflow_client/airflow/dag_utilities.py b/libs/client/cornflow_client/airflow/dag_utilities.py index 6d8f88e75..87977bf71 100644 --- a/libs/client/cornflow_client/airflow/dag_utilities.py +++ b/libs/client/cornflow_client/airflow/dag_utilities.py @@ -334,8 +334,9 @@ def cf_report( report_config = config.get("report", {}) if not report_config: # no need to write report since it's not requested + print("We did not find a report config") return None - + print("Starting to write the report") execution_data = client.get_data(exec_id) input_data = execution_data["data"] solution_data = execution_data["solution_data"] @@ -348,6 +349,7 @@ def cf_report( app.instance(input_data), app.solution(solution_data) ) report_path = os.path.abspath("./my_report.html") + print("Preparing to write the report") my_experiment.generate_report(report_path=report_path, report_name=report_name) if not 
os.path.exists(report_path): raise AirflowDagException("The generation of the report failed") @@ -359,6 +361,7 @@ def cf_report( name=report_name, description=report_config.get("description"), ) + print("Saving the report in cornflow") client.create_report(**payload) except CornFlowApiError: raise AirflowDagException("The writing of the report failed") diff --git a/libs/client/cornflow_client/raw_cornflow_client.py b/libs/client/cornflow_client/raw_cornflow_client.py index 4946e4e79..9428fe3c6 100644 --- a/libs/client/cornflow_client/raw_cornflow_client.py +++ b/libs/client/cornflow_client/raw_cornflow_client.py @@ -530,25 +530,26 @@ def create_report(self, name, filename, execution_id, encoding=None, **kwargs): :param str encoding: the type of encoding used in the call. Defaults to 'br' """ with open(filename, "rb") as _file: - payload = ( - dict(file=_file, name=name, execution_id=execution_id, **kwargs), + payload = dict(name=name, execution_id=execution_id, **kwargs) + result = self.create_api( + "report/", + data=payload, + files=dict(file=_file), + encoding=encoding, + headers={"content_type": "multipart/form-data"}, ) - result = self.create_api( - "report/", - data=payload, - encoding=encoding, - headers={"content_type": "multipart/form-data"}, - ) return result @ask_token @prepare_encoding - def get_one_report(self, reference_id, folder_destination, encoding=None): + def get_one_report( + self, reference_id, folder_destination, file_name, encoding=None + ): result = self.get_api_for_id(api="report", id=reference_id, encoding=encoding) - content = result.content() - - file_name = result.headers["Content-Disposition"].split["="][1] + content = result.content + if file_name is None: + file_name = result.headers["Content-Disposition"].split("=")[1] path = os.path.normpath(os.path.join(folder_destination, file_name)) # write content to disk on path From 06dd07cb726540d2ce44db1c01afc8189607b280 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 19 Jul 2024 10:34:20 +0200 Subject: [PATCH 46/84] added more time to tests for github actions --- .../tests/integration/test_cornflowclient.py | 39 ++++++++++++------- 1 file changed, 24 insertions(+), 15 deletions(-) diff --git a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py index f304a0dcd..eda062d22 100644 --- a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py +++ b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py @@ -26,8 +26,10 @@ from cornflow.tests.const import INSTANCE_PATH, CASE_PATH, INSTANCE_MPS, INSTANCE_TSP from cornflow.tests.custom_liveServer import CustomTestCaseLive +from typing import Callable, Any -def load_file(_file): + +def load_file(_file: str): with open(_file) as f: temp = json.load(f) return temp @@ -216,13 +218,10 @@ def test_new_execution_with_tsp_report(self): # def test_new_execution_with_tsp_report_wait(self): execution = self.create_instance_and_execution_report() - reports_info = [] - for _ in range(6): - time.sleep(5) - execution_info = self.client.raw.get_results(execution["id"]) - reports_info = execution_info.json()["reports"] - if len(reports_info) > 0: - break + + func = lambda: self.client.raw.get_results(execution["id"]).json()["reports"] + condition = lambda v: len(v) > 0 + reports_info = try_until_condition(func, condition, 10, 10) id_report = reports_info[0]["id"] my_name = "./my_report.html" self.client.raw.get_one_report(id_report, "./", my_name) @@ -237,13 +236,9 @@ def 
test_new_execution_with_tsp_report_wait(self): def test_new_execution_with_timer_report_wait(self): payload = dict(solver="default", schema="timer", data={"a": 1}, timeLimit=1) execution = self.create_instance_and_execution_report(**payload) - reports_info = [] - for _ in range(6): - time.sleep(5) - execution_info = self.client.raw.get_results(execution["id"]) - reports_info = execution_info.json()["reports"] - if len(reports_info) > 0: - break + func = lambda: self.client.raw.get_results(execution["id"]).json()["reports"] + condition = lambda v: len(v) > 0 + reports_info = try_until_condition(func, condition, 10, 10) id_report = reports_info[0]["id"] my_name = "./my_report.html" self.client.raw.get_one_report(id_report, "./", my_name) self.assertTrue(os.path.exists(my_name)) @@ -567,3 +562,17 @@ def test_check_case(self): api="case", id=case["id"], post_url="data", encoding="br" ).json() self.assertIsNotNone(response["checks"]) + + +def try_until_condition( + func: Callable, + condition: Callable[[Any], bool], + number_of_times: int = 10, + sleep_time: float = 10, +): + for i in range(number_of_times): + time.sleep(sleep_time) + result = func() + if condition(result): + return result + raise TimeoutError("Timed out after {} seconds".format(number_of_times)) From b9a97c5ec9accdb0c0d29cbedad696ea41d9aadb Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 24 Jul 2024 15:15:48 +0200 Subject: [PATCH 47/84] reports can now be created and edited with a file by the service_user and store a state and state_message. The presence of quarto in the system is checked before running the report. The name of the uploaded file no longer corresponds to the name attribute of the report. Added many unit and integration tests for reports --- .../migrations/versions/83164be03c23_.py | 40 ++++++ cornflow-server/cornflow/models/reports.py | 17 ++- cornflow-server/cornflow/schemas/reports.py | 12 +- cornflow-server/cornflow/shared/const.py | 19 +++ .../tests/integration/test_cornflowclient.py | 63 ++++++++-- .../cornflow/tests/unit/test_reports.py | 118 +++++++++++++++--- .../cornflow_client/airflow/dag_utilities.py | 82 +++++++++--- .../cornflow_client/raw_cornflow_client.py | 57 ++++++--- .../integration/test_airflow_integration.py | 84 ------------- .../integration/test_cornflow_integration.py | 48 ++++--- .../test_raw_cornflow_integration.py | 48 ++++++- 11 files changed, 405 insertions(+), 183 deletions(-) create mode 100644 cornflow-server/cornflow/migrations/versions/83164be03c23_.py delete mode 100644 libs/client/cornflow_client/tests/integration/test_airflow_integration.py diff --git a/cornflow-server/cornflow/migrations/versions/83164be03c23_.py b/cornflow-server/cornflow/migrations/versions/83164be03c23_.py new file mode 100644 index 000000000..02fb8345a --- /dev/null +++ b/cornflow-server/cornflow/migrations/versions/83164be03c23_.py @@ -0,0 +1,40 @@ +"""empty message + +Revision ID: 83164be03c23 +Revises: 96f00d0961d1 +Create Date: 2024-07-23 13:18:47.748324 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '83164be03c23' +down_revision = '96f00d0961d1' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust!
### + with op.batch_alter_table('reports', schema=None) as batch_op: + batch_op.add_column(sa.Column('state', sa.SmallInteger(), nullable=True)) + batch_op.add_column(sa.Column('state_message', sa.TEXT(), nullable=True)) + batch_op.alter_column('file_url', + existing_type=sa.VARCHAR(length=256), + nullable=True) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('reports', schema=None) as batch_op: + batch_op.alter_column('file_url', + existing_type=sa.VARCHAR(length=256), + nullable=False) + batch_op.drop_column('state_message') + batch_op.drop_column('state') + + # ### end Alembic commands ### diff --git a/cornflow-server/cornflow/models/reports.py b/cornflow-server/cornflow/models/reports.py index d1eaa2188..5a53b803c 100644 --- a/cornflow-server/cornflow/models/reports.py +++ b/cornflow-server/cornflow/models/reports.py @@ -9,6 +9,7 @@ # Imports from internal modules from cornflow.models.base_data_model import TraceAttributesModel from cornflow.shared import db +from cornflow.shared.const import REPORT_STATE, REPORT_STATE_MSG class ReportModel(TraceAttributesModel): @@ -44,7 +45,9 @@ class ReportModel(TraceAttributesModel): ) name = db.Column(db.String(256), nullable=False) description = db.Column(TEXT, nullable=True) - file_url = db.Column(db.String(256), nullable=False) + file_url = db.Column(db.String(256), nullable=True) + state = db.Column(db.SmallInteger, default=REPORT_STATE.CORRECT) + state_message = db.Column(TEXT, default=REPORT_STATE_MSG[REPORT_STATE.CORRECT]) @declared_attr def user_id(self): @@ -64,6 +67,15 @@ def __init__(self, data: dict): self.name = data.get("name") self.description = data.get("description") self.file_url = data.get("file_url") + self.state = data.get("state") + if self.state is None: + if self.file_url is None: + self.state = REPORT_STATE.UNKNOWN + else: + self.state = REPORT_STATE.CORRECT + self.state_message = data.get("state_message") + if self.state_message is None: + self.state_message = REPORT_STATE_MSG.get(self.state) def update(self, data): """ @@ -73,6 +85,9 @@ def update(self, data): :return: None :rtype: None """ + # we try to keep the state_message synced, by default + if "state" in data and "state_message" not in data: + data["state_message"] = REPORT_STATE_MSG[data["state"]] super().update(data) def update_link(self, file_url: str): diff --git a/cornflow-server/cornflow/schemas/reports.py b/cornflow-server/cornflow/schemas/reports.py index 90158d04a..fd0135e68 100644 --- a/cornflow-server/cornflow/schemas/reports.py +++ b/cornflow-server/cornflow/schemas/reports.py @@ -11,22 +11,30 @@ class QueryFiltersReports(BaseQueryFilters): class ReportSchemaBase(Schema): id = fields.Int(dump_only=True) - file_url = fields.Str(required=True) + file_url = fields.Str(required=False) name = fields.Str(required=True) + state = fields.Int() class ReportSchema(ReportSchemaBase): user_id = fields.Int(required=False, load_only=True) execution_id = fields.Str(required=True) description = fields.Str() + state_message = fields.Str() created_at = fields.DateTime(dump_only=True) updated_at = fields.DateTime(dump_only=True) deleted_at = fields.DateTime(dump_only=True) class ReportEditRequest(Schema): + class META: + unknown = INCLUDE + name = fields.Str() description = fields.Str() + file_url = fields.Str(required=False) + state = fields.Int() + state_message = fields.Str() class ReportRequest(Schema): @@ -36,3 +44,5 @@ class META: name = fields.Str(required=True) 
description = fields.Str(required=False) execution_id = fields.Str(required=True) + state = fields.Int() + state_message = fields.Str() diff --git a/cornflow-server/cornflow/shared/const.py b/cornflow-server/cornflow/shared/const.py index 20b59ede7..7a81ec94a 100644 --- a/cornflow-server/cornflow/shared/const.py +++ b/cornflow-server/cornflow/shared/const.py @@ -43,6 +43,25 @@ queued=EXEC_STATE_QUEUED, ) +# Reports codes + + +class REPORT_STATE: + RUNNING = 0 + CORRECT = 1 + ERROR = -1 + UNKNOWN = -5 + ERROR_NO_QUARTO = -10 + + +REPORT_STATE_MSG = { + REPORT_STATE.RUNNING: "The report is currently running.", + REPORT_STATE.CORRECT: "The report has been solved correctly.", + REPORT_STATE.ERROR: "The report has an error.", + REPORT_STATE.UNKNOWN: "The report has an unknown error.", + REPORT_STATE.ERROR_NO_QUARTO: "The report failed because Quarto was not found.", +} + # These codes and names are inherited from flask app builder in order to have the same names and values # as this library that is the base of airflow AUTH_DB = 1 diff --git a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py index eda062d22..0751bc6ad 100644 --- a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py +++ b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py @@ -22,6 +22,7 @@ EXEC_STATE_RUNNING, EXEC_STATE_QUEUED, STATUS_HEALTHY, + REPORT_STATE, ) from cornflow.tests.const import INSTANCE_PATH, CASE_PATH, INSTANCE_MPS, INSTANCE_TSP from cornflow.tests.custom_liveServer import CustomTestCaseLive @@ -146,7 +147,12 @@ def create_instance_and_execution(self): return self.create_new_execution(payload) def create_instance_and_execution_report( - self, schema="tsp", solver="cpsat", data=None, timeLimit=10 + self, + schema="tsp", + solver="cpsat", + data=None, + timeLimit=10, + report_name="report", ): name = "test_instance_1" description = "description123" @@ -156,7 +162,9 @@ def create_instance_and_execution_report( one_instance = self.create_new_instance_payload(payload) payload = dict( instance_id=one_instance["id"], - config=dict(solver=solver, timeLimit=timeLimit, report=dict(name="report")), + config=dict( + solver=solver, timeLimit=timeLimit, report=dict(name=report_name) + ), description="test_execution_description_123", name="test_execution_123", schema=schema, @@ -218,11 +226,9 @@ def test_new_execution_with_tsp_report(self): # def test_new_execution_with_tsp_report_wait(self): execution = self.create_instance_and_execution_report() - - func = lambda: self.client.raw.get_results(execution["id"]).json()["reports"] - condition = lambda v: len(v) > 0 - reports_info = try_until_condition(func, condition, 10, 10) - id_report = reports_info[0]["id"] + func = wait_until_report_finishes(self.client, execution["id"]) + reports_info = try_until_condition(func, lambda v: v is not None, 10, 5) + id_report = reports_info["id"] my_name = "./my_report.html" self.client.raw.get_one_report(id_report, "./", my_name) self.assertTrue(os.path.exists(my_name)) @@ -236,10 +242,26 @@ def test_new_execution_with_tsp_report_wait(self): def test_new_execution_with_timer_report_wait(self): payload = dict(solver="default", schema="timer", data={"a": 1}, timeLimit=1) execution = self.create_instance_and_execution_report(**payload) - func = lambda: self.client.raw.get_results(execution["id"]).json()["reports"] - condition = lambda v: len(v) > 0 - reports_info = try_until_condition(func, condition, 10, 10) - id_report = 
reports_info[0]["id"] + func = wait_until_report_finishes(self.client, execution["id"]) + reports_info = try_until_condition(func, lambda v: v is not None, 10, 5) + id_report = reports_info["id"] + my_name = "./my_report.html" + self.client.raw.get_one_report(id_report, "./", my_name) + self.assertTrue(os.path.exists(my_name)) + try: + os.remove(my_name) + except OSError: + pass + + def test_new_execution_with_tsp_report_error(self): + payload = dict(report_name="wrong_name") + execution = self.create_instance_and_execution_report(**payload) + func = wait_until_report_finishes( + self.client, execution["id"], REPORT_STATE.ERROR + ) + reports_info = try_until_condition(func, lambda v: v is not None, 10, 5) + self.assertEqual(REPORT_STATE.ERROR, reports_info["state"]) + id_report = reports_info["id"] my_name = "./my_report.html" self.client.raw.get_one_report(id_report, "./", my_name) self.assertTrue(os.path.exists(my_name)) @@ -575,4 +597,21 @@ def try_until_condition( result = func() if condition(result): return result - raise TimeoutError("Timed out after {} seconds".format(number_of_times)) + raise TimeoutError( + "Timed out after {} seconds".format(number_of_times * sleep_time) + ) + + +def wait_until_report_finishes( + client, execution_id, report_status=REPORT_STATE.CORRECT +): + def func(): + my_reports = client.raw.get_results(execution_id).json()["reports"] + if len(my_reports) == 0: + return None + first = my_reports[0] + if first["state"] != report_status: + return None + return first + + return func diff --git a/cornflow-server/cornflow/tests/unit/test_reports.py b/cornflow-server/cornflow/tests/unit/test_reports.py index abab00834..4243aedd2 100644 --- a/cornflow-server/cornflow/tests/unit/test_reports.py +++ b/cornflow-server/cornflow/tests/unit/test_reports.py @@ -9,6 +9,7 @@ from flask import current_app from cornflow.models import ReportModel, InstanceModel, ExecutionModel +from cornflow.endpoints.reports import get_report_path from cornflow.tests.const import ( INSTANCE_PATH, REPORT_PATH, @@ -73,9 +74,40 @@ def tearDown(self): pass def test_new_report_html(self): + with open(REPORT_HTML_FILE_PATH, "rb") as _file: + response = self.client.post( + self.url, + data=dict(file=_file, **self.payload), + follow_redirects=True, + headers=self.get_header_with_auth( + self.service_token, content_type="multipart/form-data" + ), + ) + + self.assertEqual(201, response.status_code) + + for key in self.keys_to_check: + self.assertTrue(key in response.json) + + for key, value in self.payload.items(): + self.assertEqual(response.json[key], value) + + # check that the file in the test folder and the one generated on the static fodler are equal + with open(REPORT_HTML_FILE_PATH, "rb") as f: + file = f.read() + + my_upload_path = get_report_path(response.json) + + with open(my_upload_path, "rb") as f: + file2 = f.read() + + self.assertEqual(file, file2) + return response.json + + def test_new_report_empty(self): response = self.client.post( self.url, - data=dict(file=(open(REPORT_HTML_FILE_PATH, "rb")), **self.payload), + data=dict(**self.payload), follow_redirects=True, headers=self.get_header_with_auth( self.service_token, content_type="multipart/form-data" @@ -90,17 +122,13 @@ def test_new_report_html(self): for key, value in self.payload.items(): self.assertEqual(response.json[key], value) - # check that the file in the test folder and the one generated on the static fodler are equal - with open(REPORT_HTML_FILE_PATH, "rb") as f: - file = f.read() + # check that we did not save any file 
my_upload_path = ( f"{current_app.config['UPLOAD_FOLDER']}/" f"{self.payload['execution_id']}/{self.payload['name']}.html" ) - with open(my_upload_path, "rb") as f: - file2 = f.read() + self.assertFalse(os.path.exists(my_upload_path)) - self.assertEqual(file, file2) return response.json def test_new_report_pdf(self): @@ -124,10 +152,8 @@ def test_new_report_pdf(self): # check that the file in the test folder and the one generated on the static fodler are equal with open(REPORT_PDF_FILE_PATH, "rb") as f: file = f.read() - my_upload_path = ( - f"{current_app.config['UPLOAD_FOLDER']}/" - f"{self.payload['execution_id']}/{self.payload['name']}.pdf" - ) + + my_upload_path = get_report_path(response.json) with open(my_upload_path, "rb") as f: file2 = f.read() @@ -135,9 +161,24 @@ def test_new_report_pdf(self): return response.json def test_new_report_not_allowed(self): - response = self.client.post( - self.url, - data=dict(file=(open(REPORT_HTML_FILE_PATH, "rb")), **self.payload), + with open(REPORT_HTML_FILE_PATH, "rb") as _file: + response = self.client.post( + self.url, + data=dict(file=_file, **self.payload), + follow_redirects=True, + headers=self.get_header_with_auth( + self.token, content_type="multipart/form-data" + ), + ) + + self.assertEqual(response.status_code, 403) + + def test_edit_report_not_allowed(self): + item = self.test_new_report_html() + payload = dict(name="new name2") + response = self.client.put( + f"{self.url}{item['id']}/edit/", + data=payload, follow_redirects=True, headers=self.get_header_with_auth( self.token, content_type="multipart/form-data" @@ -193,9 +234,41 @@ def test_modify_report(self): payload = {"name": "new_name", "description": "some_description"} response = self.client.put( + f"{self.url}{item['id']}/edit/", + headers=self.get_header_with_auth( + self.service_token, content_type="multipart/form-data" + ), + follow_redirects=True, + data=payload, + ) + + self.assertEqual(response.status_code, 200) + + response = self.client.get( + f"{self.url}{item['id']}/", headers=self.get_header_with_auth(self.token) + ) + # response.json + self.assertEqual(200, response.status_code) + self.assertEqual("some_description", dict(response.headers)["File-Description"]) + + def test_modify_report_file(self): + item = self.test_new_report_empty() + + payload = {"name": "new_name", "description": "some_description"} + response = self.client.get( f"{self.url}{item['id']}/", headers=self.get_header_with_auth(self.token), - json=payload, + ) + self.assertEqual(response.status_code, 200) + + self.assertIsNone(response.json["file_url"]) + + response = self.client.put( + f"{self.url}{item['id']}/edit/", + data=dict(file=(open(REPORT_HTML_FILE_PATH, "rb")), **payload), + headers=self.get_header_with_auth( + self.service_token, content_type="multipart/form-data" + ), ) self.assertEqual(response.status_code, 200) @@ -205,9 +278,17 @@ def test_modify_report(self): ) self.assertEqual(200, response.status_code) - self.assertEqual("new_name", dict(response.headers)["File-Name"]) self.assertEqual("some_description", dict(response.headers)["File-Description"]) + # check that the file in the test folder and the one generated on the static folder are equal + with open(REPORT_HTML_FILE_PATH, "rb") as f: + file = f.read() + + content = response.get_data() + + self.assertEqual(file, content) + return response.json + def test_delete_report(self): item = self.test_new_report_html() response = self.client.delete( @@ -217,10 +298,7 @@ def test_delete_report(self): self.assertEqual(200, 
response.status_code) self.assertTrue("message" in response.json) - my_upload_path = ( - f"{current_app.config['UPLOAD_FOLDER']}/" - f"{self.payload['execution_id']}/{self.payload['name']}.html" - ) + my_upload_path = get_report_path(item) self.assertFalse(os.path.exists(my_upload_path)) diff --git a/libs/client/cornflow_client/airflow/dag_utilities.py b/libs/client/cornflow_client/airflow/dag_utilities.py index 87977bf71..5c11ea0a5 100644 --- a/libs/client/cornflow_client/airflow/dag_utilities.py +++ b/libs/client/cornflow_client/airflow/dag_utilities.py @@ -90,12 +90,33 @@ def connect_to_cornflow(secrets): return airflow_user -def try_to_save_error(client: CornFlow, exec_id: str, state=-1): +def try_to_save_error(client: CornFlow, exec_id: str, state=-1, api="dag/", **kwargs): """ Attempt at saving that the execution failed """ try: - client.raw.put_api_for_id("dag/", id=exec_id, payload=dict(state=state)) + client.raw.put_api_for_id(api, id=exec_id, payload=dict(state=state), **kwargs) + except Exception as e: + print(f"An exception trying to register the failed status: {e}") + + +def try_to_save_error_report( + client: CornFlow, id: int, state=-1, api="report/", **kwargs +): + """ + Attempt at saving that the report failed + It's different than the execution, because put uses form data in reports + """ + try: + client.raw.put_api_for_id( + api, + id=id, + payload=None, + data=dict(state=state), + headers={"content_type": "multipart/form-data"}, + post_url="edit", + **kwargs, + ) except Exception as e: print(f"An exception trying to register the failed status: {e}") @@ -112,7 +133,7 @@ def try_to_save_airflow_log(client: CornFlow, exec_id: str, ti, base_log_folder: with open(log_file, "r") as fd: log_file_txt = fd.read() try: - client.raw.put_api_for_id( + client.put_api_for_id( "dag/", id=exec_id, payload=dict(log_text=log_file_txt) ) except Exception as e: @@ -325,11 +346,17 @@ def cf_report( :return: """ # TODO: if this task fails, the dagrun should still be valid - ti = kwargs["ti"] try: client = connect_to_cornflow(secrets) exec_id = kwargs["dag_run"].conf["exec_id"] + execution_info = client.get_results(exec_id) + # TODO: if execution does not have results, quit: + if execution_info["state"] < 1: + # no need to write report since it's not requested + print("Execution has not been solved") + return None + config = execution_info["config"] report_config = config.get("report", {}) if not report_config: @@ -337,11 +364,25 @@ def cf_report( print("We did not find a report config") return None print("Starting to write the report") + report_name = report_config.get("name", "report") + # we assume the contents of the config match name + description + payload = dict( + execution_id=exec_id, + name=report_name, + description=report_config.get("description"), + state=0, # RUNNING + ) + # we create an empty report + report = client.create_report(**payload) + + def my_try_to_save(state): + return try_to_save_error_report(client, report["id"], state) + + # now we want to fill it with the report file execution_data = client.get_data(exec_id) input_data = execution_data["data"] solution_data = execution_data["solution_data"] - report_name = report_config.get("name", "report") # maybe all of this should be abstracted inside the app? # maybe the app should return an Experiment? 
experiment = app.get_solver(app.get_default_solver_name()) @@ -349,20 +390,27 @@ def cf_report( app.instance(input_data), app.solution(solution_data) ) report_path = os.path.abspath("./my_report.html") - print("Preparing to write the report") - my_experiment.generate_report(report_path=report_path, report_name=report_name) + if os.path.exists(report_path): + try: + os.remove(report_path) + except: + pass + print(f"Preparing to write the report: {report_name}") + try: + my_experiment.generate_report( + report_path=report_path, report_name=report_name + ) + except ModuleNotFoundError as e: + my_try_to_save(-10) + raise AirflowDagException("The generation of the report failed") + except: + my_try_to_save(-1) + raise AirflowDagException("The generation of the report failed") if not os.path.exists(report_path): + my_try_to_save(-1) raise AirflowDagException("The generation of the report failed") - - # we assume the contents of the config match name + description - payload = dict( - filename=report_path, - execution_id=exec_id, - name=report_name, - description=report_config.get("description"), - ) - print("Saving the report in cornflow") - client.create_report(**payload) + print("Saving the actual report in cornflow") + client.put_one_report(report["id"], payload=dict(state=1), filename=report_path) except CornFlowApiError: raise AirflowDagException("The writing of the report failed") except Exception as e: diff --git a/libs/client/cornflow_client/raw_cornflow_client.py b/libs/client/cornflow_client/raw_cornflow_client.py index 86fa6a729..972c5f215 100644 --- a/libs/client/cornflow_client/raw_cornflow_client.py +++ b/libs/client/cornflow_client/raw_cornflow_client.py @@ -540,30 +540,33 @@ def get_reports(self, params=None, encoding=None): @ask_token @prepare_encoding - def create_report(self, name, filename, execution_id, encoding=None, **kwargs): + def create_report(self, name, execution_id, filename=None, encoding=None, **kwargs): """ Creates a report for an execution :param str execution_id: id for the execution :param str name: the name of the report - :param file filename: the file object with the report (e.g., open(REPORT_FILE_PATH, "rb")) + :param file filename: the file name with the report :param str encoding: the type of encoding used in the call. 
Defaults to 'br' :param kwargs: optional data to write (description) """ - with open(filename, "rb") as _file: - result = self.create_api( - "report/", - data=dict(name=name, execution_id=execution_id, **kwargs), - files=dict(file=_file), - encoding=encoding, - headers={"content_type": "multipart/form-data"}, - ) + arguments = dict( + api="report/", + data=dict(name=name, execution_id=execution_id, **kwargs), + encoding=encoding, + headers={"content_type": "multipart/form-data"}, + ) + if filename is None: + result = self.create_api(**arguments) + else: + with open(filename, "rb") as _file: + result = self.create_api(**arguments, files=dict(file=_file)) return result @ask_token @prepare_encoding def get_one_report( - self, reference_id, folder_destination, file_name=None, encoding=None + self, reference_id, folder_destination=None, file_name=None, encoding=None ) -> Response: """ Gets one specific report and downloads it to disk @@ -576,8 +579,16 @@ def get_one_report( :rtype: :class:`Response` """ result = self.get_api_for_id(api="report", id=reference_id, encoding=encoding) + # TODO: if the report does not have a file, we just return the result object content = result.content - + if not content: + return result + if result.status_code != 200: + return result + if folder_destination is None: + raise ValueError( + "Argument folder_destination needs to be filled when there's a file" + ) if file_name is None: file_name = result.headers["Content-Disposition"].split("=")[1] path = os.path.normpath(os.path.join(folder_destination, file_name)) @@ -605,18 +616,32 @@ def delete_one_report(self, reference_id, encoding=None): @ask_token @log_call @prepare_encoding - def put_one_report(self, reference_id, payload, encoding=None) -> Response: + def put_one_report( + self, reference_id, payload, filename=None, encoding=None, **kwargs + ) -> Response: """ - Edits one specific report and downloads it to disk + Edits one specific report, potentially uploading a new file :param int reference_id: id of the report to download :param str encoding: the type of encoding used in the call. 
Defaults to 'br' :return: the response object :rtype: :class:`Response` """ - return self.put_api_for_id( - api="report", id=reference_id, payload=payload, encoding=encoding + arguments = dict( + api="report/", + id=reference_id, + payload=None, + data=payload, + encoding=encoding, + headers={"content_type": "multipart/form-data"}, + post_url="edit", ) + if filename is None: + result = self.put_api_for_id(**arguments) + else: + with open(filename, "rb") as _file: + result = self.put_api_for_id(**arguments, files=dict(file=_file)) + return result @ask_token @prepare_encoding diff --git a/libs/client/cornflow_client/tests/integration/test_airflow_integration.py b/libs/client/cornflow_client/tests/integration/test_airflow_integration.py deleted file mode 100644 index 3348fd96f..000000000 --- a/libs/client/cornflow_client/tests/integration/test_airflow_integration.py +++ /dev/null @@ -1,84 +0,0 @@ -""" - -""" -import json -import os -import time -from unittest import TestCase - -from cornflow_client import CornFlow -from cornflow_client.airflow.api import Airflow -from cornflow_client.constants import STATUS_OPTIMAL, STATUS_NOT_SOLVED -from cornflow_client.tests.const import PULP_EXAMPLE - -# Constants -path_to_tests_dir = os.path.dirname(os.path.abspath(__file__)) - - -# Helper functions -def _load_file(_file): - with open(_file) as f: - temp = json.load(f) - return temp - - -def _get_file(relative_path): - return os.path.join(path_to_tests_dir, relative_path) - - -class TestAirflowClient(TestCase): - def setUp(self): - self.client = Airflow(url="http://127.0.0.1:8080", user="admin", pwd="admin") - - def test_alive(self): - self.assertTrue(self.client.is_alive()) - - def test_connect_from_config(self): - client = Airflow.from_config( - { - "AIRFLOW_URL": "http://127.0.0.1:8080", - "AIRFLOW_USER": "admin", - "AIRFLOW_PWD": "admin", - } - ) - self.assertTrue(client.is_alive()) - - def test_bad_connection(self): - client = Airflow(url="http://127.0.0.1:8088", user="admin", pwd="admin!") - self.assertFalse(client.is_alive()) - - def test_update_schemas(self): - response = self.client.update_schemas() - self.assertEqual(200, response.status_code) - - def test_update_dag_registry(self): - response = self.client.update_dag_registry() - self.assertEqual(200, response.status_code) - - def test_run_dag(self): - data = _load_file(PULP_EXAMPLE) - cf_client = CornFlow(url="http://127.0.0.1:5050/") - cf_login = cf_client.login("admin", "Adminpassword1!") - instance = cf_client.create_instance(data, "test_example", "test_description") - execution = cf_client.create_execution( - instance_id=instance["id"], - config={"solver": "PULP_CBC_CMD", "timeLimit": 100}, - name="test_execution", - description="execution_description", - schema="solve_model_dag", - run=False, - ) - - # Check that execution is not run - status = cf_client.get_status(execution_id=execution["id"]) - self.assertEqual(-4, status["state"]) - - # Run the execution - response = self.client.run_dag(execution_id=execution["id"]) - self.assertEqual(200, response.status_code) - self.assertIn("dag_run_id", response.json().keys()) - - # Check that is optimal - time.sleep(10) - status = cf_client.get_status(execution_id=execution["id"]) - self.assertEqual(1, status["state"]) diff --git a/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py b/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py index d78e6e4d9..a936f50a7 100644 --- a/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py 
+++ b/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py @@ -688,12 +688,30 @@ def test_post_report_html(self): run=False, ) - response = self.client.create_report("new_report", HTML_REPORT, execution["id"]) + response = self.client.create_report("new_report", execution["id"], HTML_REPORT) self.assertEqual(response["execution_id"], execution["id"]) return response + def test_put_one_report(self): + response = self.test_post_report_html() + report_id = response["id"] + + client = CornFlow(url="http://127.0.0.1:5050/") + _ = client.login("user", "UserPassword1!") + + payload = {"name": "new_name", "description": "some_description"} + self.client.put_one_report(reference_id=report_id, payload=payload) + content, headers = client.get_one_report( + reference_id=report_id, folder_destination=TEST_FOLDER + ) + + self.assertEqual(headers["File-Description"], payload["description"]) + self.assertNotEqual(headers["File-Description"], "") + + _ = client.delete_one_report(reference_id=report_id) + def test_get_one_report(self): response = self.test_post_report_html() report_id = response["id"] @@ -705,11 +723,11 @@ def test_get_one_report(self): reference_id=report_id, folder_destination=TEST_FOLDER ) - self.assertEqual(headers["File-Name"], response["name"]) self.assertEqual(headers["File-Description"], response["description"]) # read from TEST FOLDER - with open(os.path.join(TEST_FOLDER, "new_report.html"), "r") as f: + my_file = os.path.join(TEST_FOLDER, response["file_url"]) + with open(my_file, "r") as f: file = f.read() # read from test/data folder @@ -719,7 +737,7 @@ def test_get_one_report(self): self.assertEqual(file, file_2) # remove file from TEST_FOLDER - os.remove(os.path.join(TEST_FOLDER, "new_report.html")) + os.remove(my_file) def test_get_all_reports(self): report_1 = self.test_post_report_html()["id"] @@ -735,28 +753,6 @@ def test_get_all_reports(self): client.delete_one_report(reference_id=report_1) client.delete_one_report(reference_id=report_2) - def test_put_one_report(self): - response = self.test_post_report_html() - report_id = response["id"] - - client = CornFlow(url="http://127.0.0.1:5050/") - _ = client.login("user", "UserPassword1!") - - payload = {"name": "new_name", "description": "some_description"} - - _ = client.put_one_report(reference_id=report_id, payload=payload) - - content, headers = client.get_one_report( - reference_id=report_id, folder_destination=TEST_FOLDER - ) - - self.assertEqual(headers["File-Name"], payload["name"]) - self.assertEqual(headers["File-Description"], payload["description"]) - self.assertNotEqual(headers["File-Name"], "new_report") - self.assertNotEqual(headers["File-Description"], "") - - _ = client.delete_one_report(reference_id=report_id) - def test_delete_one_report(self): response = self.test_post_report_html() report_id = response["id"] diff --git a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py index f0a32fae7..84597f02e 100644 --- a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py +++ b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py @@ -776,7 +776,7 @@ def test_raises_post_deployed_dag(self): config_schema=dict(), ) - def test_post_report_html(self): + def test_post_report_html(self, uploadFile=True): client = CornFlow(url="http://127.0.0.1:5050/") _ = client.login("user", "UserPassword1!") @@ -794,10 +794,12 @@ def 
test_post_report_html(self): schema="solve_model_dag", run=False, ).json() - - response = self.client.raw.create_report( - "new_report", HTML_REPORT, execution["id"] - ) + if uploadFile: + response = self.client.raw.create_report( + "new_report", execution["id"], HTML_REPORT + ) + else: + response = self.client.raw.create_report("new_report", execution["id"]) self.assertEqual(response.status_code, 201) @@ -844,7 +846,7 @@ def test_get_all_reports(self): client.raw.delete_one_report(reference_id=report_2) def test_put_one_report(self): - response = self.test_post_report_html() + response = self.test_post_report_html(uploadFile=False) report_id = response.json()["id"] client = CornFlow(url="http://127.0.0.1:5050/") @@ -868,6 +870,40 @@ def test_put_one_report(self): delete = client.raw.delete_one_report(reference_id=report_id) self.assertEqual(delete.status_code, 200) + def test_put_one_report_file(self): + response = self.test_post_report_html() + report_id = response.json()["id"] + + client = CornFlow(url="http://127.0.0.1:5050/") + _ = client.login("user", "UserPassword1!") + + with open(HTML_REPORT, "rb") as _file: + payload = {"name": "new_name", "description": "some_description"} + response = client.raw.put_one_report( + reference_id=report_id, + payload=payload, + files=dict(file=_file), + headers={"content_type": "multipart/form-data"}, + ) + + response = client.raw.put_one_report( + reference_id=report_id, payload=payload + ) + + self.assertEqual(response.status_code, 200) + + new_report = client.raw.get_one_report( + reference_id=report_id, folder_destination=TEST_FOLDER + ) + + self.assertEqual(new_report.headers["File-Name"], payload["name"]) + self.assertEqual(new_report.headers["File-Description"], payload["description"]) + self.assertNotEqual(new_report.headers["File-Name"], "new_report") + self.assertNotEqual(new_report.headers["File-Description"], "") + + delete = client.raw.delete_one_report(reference_id=report_id) + self.assertEqual(delete.status_code, 200) + def test_delete_one_report(self): response = self.test_post_report_html() report_id = response.json()["id"] From 82369572ff1cde618ca6f98baceac26331b3f75c Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 24 Jul 2024 15:17:07 +0200 Subject: [PATCH 48/84] (previous was an incomplete commit) --- .github/workflows/test_cornflow_server.yml | 7 + cornflow-dags/DAG/tsp/core/experiment.py | 10 +- cornflow-dags/DAG/tsp/core/instance.py | 2 +- cornflow-dags/tests/test_dags.py | 96 +++++++- cornflow-server/cornflow/config.py | 5 - .../cornflow/endpoints/__init__.py | 7 +- cornflow-server/cornflow/endpoints/reports.py | 216 +++++++++++------- 7 files changed, 249 insertions(+), 94 deletions(-) diff --git a/.github/workflows/test_cornflow_server.yml b/.github/workflows/test_cornflow_server.yml index 6769fd2e2..68ef5af8e 100644 --- a/.github/workflows/test_cornflow_server.yml +++ b/.github/workflows/test_cornflow_server.yml @@ -52,6 +52,13 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} + - name: Set up Quarto + uses: quarto-dev/quarto-actions/setup@v2 + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + # To install LaTeX to build PDF book + tinytex: true - name: Copy DAG files run: | cd .. 
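Taken together, these patches let a client request a report through the execution config and download it once Airflow has rendered it with Quarto. A minimal usage sketch is shown below; the server URL, credentials and instance id are placeholders, and the calls simply mirror the integration tests above rather than defining a new API:

import time
from cornflow_client import CornFlow

# placeholders: a local cornflow server, test credentials, and the id of an
# instance that was already uploaded with the "tsp" schema
client = CornFlow(url="http://127.0.0.1:5050/")
client.login("user", "UserPassword1!")
instance_id = "some-tsp-instance-id"

# asking for a report is done through the "report" entry of the execution config
execution = client.create_execution(
    instance_id=instance_id,
    config=dict(solver="cpsat", timeLimit=10, report=dict(name="report")),
    name="demo_execution",
    description="execution with a report",
    schema="tsp",
)

# poll until the report reaches state 1 (REPORT_STATE.CORRECT), then download it
for _ in range(10):
    time.sleep(5)
    reports = client.raw.get_results(execution["id"]).json()["reports"]
    if reports and reports[0]["state"] == 1:
        client.raw.get_one_report(reports[0]["id"], "./", "my_report.html")
        break
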
diff --git a/cornflow-dags/DAG/tsp/core/experiment.py b/cornflow-dags/DAG/tsp/core/experiment.py index 45a4e618e..277d6300a 100644 --- a/cornflow-dags/DAG/tsp/core/experiment.py +++ b/cornflow-dags/DAG/tsp/core/experiment.py @@ -7,7 +7,7 @@ from .solution import Solution import json, tempfile -from quarto import render +import quarto class Experiment(ExperimentCore): @@ -51,8 +51,6 @@ def get_objective(self) -> float: # if solution is empty, we return 0 if len(self.solution.data["route"]) == 0: return 0 - # we get a sorted list of nodes by position - arcs = self.solution.get_used_arcs() # we sum all arc weights in the solution return sum(self.get_used_arc_weights().values()) @@ -93,13 +91,17 @@ def generate_report(self, report_path: str, report_name="report") -> None: if not os.path.exists(path_to_qmd): raise FileNotFoundError(f"Report with path {path_to_qmd} does not exist.") path_to_output = path_without_ext + ".html" + try: + quarto.quarto.find_quarto() + except FileNotFoundError: + raise ModuleNotFoundError("Quarto is not installed.") with tempfile.TemporaryDirectory() as tmp: path = os.path.join(tmp, "experiment.json") # write a json with instance and solution to temp file self.to_json(path) # pass the path to the report to render # it generates a report with path = path_to_output - render(input=path_to_qmd, execute_params=dict(file_name=path)) + quarto.render(input=path_to_qmd, execute_params=dict(file_name=path)) # quarto always writes the report in the .qmd directory. # thus, we need to move it where we want to: os.replace(path_to_output, report_path) diff --git a/cornflow-dags/DAG/tsp/core/instance.py b/cornflow-dags/DAG/tsp/core/instance.py index 2f82110ee..ab86ad885 100644 --- a/cornflow-dags/DAG/tsp/core/instance.py +++ b/cornflow-dags/DAG/tsp/core/instance.py @@ -64,7 +64,7 @@ def to_tsplib95(self): def get_arcs(self) -> TupList: return self.data["arcs"] - def get_indexed_arcs(self) -> TupList: + def get_indexed_arcs(self) -> SuperDict: return self.data["arcs"].to_dict( result_col=None, indices=["n1", "n2"], is_list=False ) diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index 24d82f669..c9528021a 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -17,7 +17,11 @@ sys.modules["airflow.secrets.environment_variables"] = mymodule from cornflow_client import SchemaManager, ApplicationCore -from cornflow_client.airflow.dag_utilities import cf_solve +from cornflow_client.airflow.dag_utilities import ( + cf_solve, + cf_report, + AirflowDagException, +) from jsonschema import Draft7Validator from pytups import SuperDict @@ -193,6 +197,16 @@ def test_report(self): pass self.assertRaises(StopIteration, parser.feed, content) + def test_report_error(self): + tests = self.app.test_cases + my_experim = self.app.solvers["cpsat"](self.app.instance(tests[0]["instance"])) + my_experim.solve(dict()) + report_path = "./my_report.html" + my_fun = my_experim.generate_report( + report_path=report_path, report_name="wrong_name" + ) + self.assertRaises(FileNotFoundError, my_fun) + def test_export(self): tests = self.app.test_cases my_file_path = "export.json" @@ -203,6 +217,86 @@ def test_export(self): except FileNotFoundError: pass + @patch("cornflow_client.airflow.dag_utilities.connect_to_cornflow") + def test_complete_report(self, connectCornflow, config=None): + config = config or self.config + config = dict(**config, report=dict(name="report")) + tests = self.app.test_cases + for test_case in tests: + instance_data = 
test_case.get("instance") + solution_data = test_case.get("solution", None) + if solution_data is None: + solution_data = dict(route=[]) + + mock = Mock() + mock.get_data.return_value = dict( + data=instance_data, solution_data=solution_data + ) + mock.get_results.return_value = dict(config=config, state=1) + mock.create_report.return_value = dict(id=1) + connectCornflow.return_value = mock + dag_run = Mock() + dag_run.conf = dict(exec_id="exec_id") + cf_report(app=self.app, secrets="", dag_run=dag_run) + mock.create_report.assert_called_once() + mock.put_one_report.assert_called_once() + + @patch("cornflow_client.airflow.dag_utilities.connect_to_cornflow") + def test_complete_report_wrong_data(self, connectCornflow, config=None): + config = config or self.config + config = dict(**config, report=dict(name="report")) + tests = self.app.test_cases + for test_case in tests: + instance_data = test_case.get("instance") + solution_data = None + + mock = Mock() + mock.get_data.return_value = dict( + data=instance_data, solution_data=solution_data + ) + mock.get_results.return_value = dict(config=config, state=1) + mock.create_report.return_value = dict(id=1) + connectCornflow.return_value = mock + dag_run = Mock() + dag_run.conf = dict(exec_id="exec_id") + my_report = lambda: cf_report(app=self.app, secrets="", dag_run=dag_run) + self.assertRaises(AirflowDagException, my_report) + mock.create_report.assert_called_once() + mock.raw.put_api_for_id.assert_called_once() + args, kwargs = mock.raw.put_api_for_id.call_args + self.assertEqual(kwargs["data"], {"state": -1}) + + @patch("quarto.render") + @patch("cornflow_client.airflow.dag_utilities.connect_to_cornflow") + def test_complete_report_no_quarto(self, connectCornflow, render, config=None): + config = config or self.config + config = dict(**config, report=dict(name="report")) + tests = self.app.test_cases + render.side_effect = ModuleNotFoundError() + render.return_value = dict(a=1) + for test_case in tests: + instance_data = test_case.get("instance") + solution_data = test_case.get("solution", None) + if solution_data is None: + solution_data = dict(route=[]) + + mock = Mock() + mock.get_data.return_value = dict( + data=instance_data, + solution_data=solution_data, + ) + mock.get_results.return_value = dict(config=config, state=1) + mock.create_report.return_value = dict(id=1) + connectCornflow.return_value = mock + dag_run = Mock() + dag_run.conf = dict(exec_id="exec_id") + my_report = lambda: cf_report(app=self.app, secrets="", dag_run=dag_run) + self.assertRaises(AirflowDagException, my_report) + mock.create_report.assert_called_once() + mock.raw.put_api_for_id.assert_called_once() + args, kwargs = mock.raw.put_api_for_id.call_args + self.assertEqual(kwargs["data"], {"state": -10}) + class Vrp(BaseDAGTests.SolvingTests): def setUp(self): diff --git a/cornflow-server/cornflow/config.py b/cornflow-server/cornflow/config.py index 627b19c39..102e005f7 100644 --- a/cornflow-server/cornflow/config.py +++ b/cornflow-server/cornflow/config.py @@ -95,7 +95,6 @@ class Development(DefaultConfig): """ """ ENV = "development" - UPLOAD_FOLDER = os.getenv("UPLOAD_FOLDER", "/usr/src/app/static") class Testing(DefaultConfig): @@ -115,10 +114,6 @@ class Testing(DefaultConfig): AIRFLOW_PWD = os.getenv("AIRFLOW_PWD", "admin") OPEN_DEPLOYMENT = 1 LOG_LEVEL = int(os.getenv("LOG_LEVEL", 10)) - UPLOAD_FOLDER = os.getenv( - "UPLOAD_FOLDER", - os.path.abspath(os.path.join(os.path.dirname(__file__), "./static")), - ) class Production(DefaultConfig): diff --git 
a/cornflow-server/cornflow/endpoints/__init__.py b/cornflow-server/cornflow/endpoints/__init__.py index 6028d7d8b..341ed81a7 100644 --- a/cornflow-server/cornflow/endpoints/__init__.py +++ b/cornflow-server/cornflow/endpoints/__init__.py @@ -38,7 +38,7 @@ ExecutionRelaunchEndpoint, ) -from .reports import ReportEndpoint, ReportDetailsEndpoint +from .reports import ReportEndpoint, ReportDetailsEndpoint, ReportDetailsEditEndpoint from .health import HealthEndpoint from .instance import ( InstanceEndpoint, @@ -224,6 +224,11 @@ urls="/report//", endpoint="report-detail", ), + dict( + resource=ReportDetailsEditEndpoint, + urls="/report//edit/", + endpoint="report-detail-edit", + ), dict(resource=ReportEndpoint, urls="/report/", endpoint="report"), ] diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index b2d46421d..66b8f7f4f 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -7,6 +7,7 @@ from flask import current_app, request, send_from_directory from flask_apispec import marshal_with, use_kwargs, doc from werkzeug.utils import secure_filename +import uuid from cornflow.endpoints.meta_resource import BaseMetaResource from cornflow.models import ExecutionModel, ReportModel @@ -75,49 +76,24 @@ def post(self, **kwargs): if execution is None: raise ObjectDoesNotExist("The execution does not exist") - if "file" not in request.files: - return {"message": "No file part"}, 400 - - file = request.files["file"] - filename = secure_filename(file.filename) - filename_extension = filename.split(".")[-1] - - if filename_extension not in current_app.config["ALLOWED_EXTENSIONS"]: - return { - "message": f"Invalid file extension. " - f"Valid extensions are: {current_app.config['ALLOWED_EXTENSIONS']}" - }, 400 - - my_directory = f"{current_app.config['UPLOAD_FOLDER']}/{execution.id}" - - # we create a directory for the execution - if not os.path.exists(my_directory): - current_app.logger.info(f"Creating directory {my_directory}") - os.mkdir(my_directory) + # we're creating an empty report. + # which is possible + report = ReportModel(get_report_info(kwargs, execution, None)) - report_name = f"{secure_filename(kwargs['name'])}.{filename_extension}" - - save_path = os.path.normpath(os.path.join(my_directory, report_name)) + report.save() + return report, 201 - if "static" not in save_path or ".." 
in save_path: - raise NoPermission("Invalid file name") + file = request.files["file"] + report_name = new_file_name(file) - report = ReportModel( - { - "name": kwargs["name"], - "file_url": save_path, - "execution_id": kwargs["execution_id"], - "user_id": execution.user_id, - "description": kwargs.get("description", ""), - } - ) + report = ReportModel(get_report_info(kwargs, execution, report_name)) report.save() + # We try to save the file, if an error is raised then we delete the record on the database try: - # We try to save the file, if an error is raised then we delete the record on the database - file.save(save_path) + write_file(file, execution.id, report_name) return report, 201 except Exception as error: @@ -137,39 +113,13 @@ def __init__(self): self.foreign_data = {"execution_id": ExecutionModel} -class ReportDetailsEndpoint(ReportDetailsEndpointBase): - @doc(description="Get details of a report", tags=["Reports"], inherit=False) - @authenticate(auth_class=Auth()) - @marshal_with(ReportSchema) - @BaseMetaResource.get_data_or_404 - def get(self, idx): - """ - API method to get a report created by the user and its related info. - It requires authentication to be passed in the form of a token that has to be linked to - an existing session (login) made by a user. - - :param str idx: ID of the report. - :return: A dictionary with a message (error if authentication failed, or the report does not exist or - the data of the report) and an integer with the HTTP status code. - :rtype: Tuple(dict, integer) - """ - current_app.logger.info(f"User {self.get_user()} gets details of report {idx}") - report = self.get_detail(user=self.get_user(), idx=idx) - if report is None: - raise ObjectDoesNotExist - - directory, file = report.file_url.split(report.name) - file = f"{report.name}{file}" - directory = directory[:-1] +class ReportDetailsEditEndpoint(ReportDetailsEndpointBase): - response = send_from_directory(directory, file) - response.headers["File-Description"] = report.description - response.headers["File-Name"] = report.name - return response + ROLES_WITH_ACCESS = [SERVICE_ROLE] @doc(description="Edit a report", tags=["Reports"], inherit=False) @authenticate(auth_class=Auth()) - @use_kwargs(ReportEditRequest, location="json") + @use_kwargs(ReportEditRequest, location="form") def put(self, idx, **data): """ Edit an existing report @@ -179,34 +129,82 @@ def put(self, idx, **data): a message) and an integer with the HTTP status code. :rtype: Tuple(dict, integer) """ + # TODO: forbid non-service users from running put current_app.logger.info(f"User {self.get_user()} edits report {idx}") - report = self.get_detail(user=self.get_user(), idx=idx) + report = self.get_detail(idx=idx) + + if "file" not in request.files: + # we're creating an empty report. 
+ # which is possible + report.update(data) + report.save() + return {"message": "Updated correctly"}, 200 + + # there's two cases, + # (1) the report already has a file + # (2) the report doesn't yet have a file + file = request.files["file"] + report_name = new_file_name(file) + old_name = report.file_url + # we update the report with the new content, including the new name + report.update(dict(**data, file_url=report_name)) + # We try to save the file, if an error is raised then we delete the record on the database try: - if report.name != data["name"]: - directory, file = report.file_url.split(report.name) + write_file(file, report.execution_id, report_name) + report.save() - new_location = ( - f"{os.path.join(directory, secure_filename(data['name']))}{file}" - ) - old_location = report.file_url + except Exception as error: + # we do not save the report + current_app.logger.error(error) + raise FileError(error=str(error)) - current_app.logger.debug(f"Old location: {old_location}") - current_app.logger.debug(f"New location: {new_location}") + # if it saves correctly, we delete the old file, if exists + # if unsuccessful, we still return 201 but log the error + if old_name is not None: + try: + os.remove(get_report_path(report)) + except OSError as error: + current_app.logger.error(error) + return {"message": "Updated correctly"}, 200 - os.rename(old_location, new_location) - data["file_url"] = new_location - except Exception as error: - current_app.logger.error(error) - return {"error": "Error moving file"}, 400 +class ReportDetailsEndpoint(ReportDetailsEndpointBase): + @doc(description="Get details of a report", tags=["Reports"], inherit=False) + @authenticate(auth_class=Auth()) + @marshal_with(ReportSchema) + @BaseMetaResource.get_data_or_404 + def get(self, idx): + """ + API method to get a report created by the user and its related info. + It requires authentication to be passed in the form of a token that has to be linked to + an existing session (login) made by a user. - report.update(data) + :param str idx: ID of the report. + :return: A dictionary with a message (error if authentication failed, or the report does not exist or + the data of the report) and an integer with the HTTP status code. + :rtype: Tuple(dict, integer) + """ + # TODO: are we able to download the name in the database and not as part of the file? 
+ current_app.logger.info(f"User {self.get_user()} gets details of report {idx}") + report = self.get_detail(user=self.get_user(), idx=idx) - report.save() + if report is None: + print("error") + raise ObjectDoesNotExist - return {"message": "Updated correctly"}, 200 + # if there's no file, we do not return it: + if report.file_url is None: + return report, 200 + + my_dir = get_report_dir(report.execution_id) + print(my_dir) + print(report.file_url) + response = send_from_directory(my_dir, report.file_url) + response.headers["File-Description"] = report.description + response.headers["File-Name"] = report.file_url + return response @doc(description="Delete a report", tags=["Reports"], inherit=False) @authenticate(auth_class=Auth()) @@ -229,6 +227,60 @@ def delete(self, idx): raise ObjectDoesNotExist # delete file - os.remove(os.path.join(report.file_url)) + os.remove(get_report_path(report)) return self.delete_detail(user_id=self.get_user_id(), idx=idx) + + +def get_report_dir(execution_id): + return f"{current_app.config['UPLOAD_FOLDER']}/{execution_id}" + + +def get_report_path(report): + try: + return f"{get_report_dir(report['execution_id'])}/{report['file_url']}" + except: + return f"{get_report_dir(report.execution_id)}/{report.file_url}" + + +def new_file_name(file): + + filename = secure_filename(file.filename) + filename_extension = filename.split(".")[-1] + + if filename_extension not in current_app.config["ALLOWED_EXTENSIONS"]: + return { + "message": f"Invalid file extension. " + f"Valid extensions are: {current_app.config['ALLOWED_EXTENSIONS']}" + }, 400 + + report_name = f"{uuid.uuid4().hex}.{filename_extension}" + + return report_name + + +def write_file(file, execution_id, file_name): + my_directory = get_report_dir(execution_id) + + # we create a directory for the execution + if not os.path.exists(my_directory): + current_app.logger.info(f"Creating directory {my_directory}") + os.mkdir(my_directory) + + save_path = os.path.normpath(os.path.join(my_directory, file_name)) + + if "static" not in save_path or ".." 
in save_path: + raise NoPermission("Invalid file name") + file.save(save_path) + + +def get_report_info(data, execution, file_url=None): + return { + "name": data["name"], + "file_url": file_url, + "execution_id": execution.id, + "user_id": execution.user_id, + "description": data.get("description", ""), + "state": data.get("state"), + "state_message": data.get("state_message"), + } From 862f8c458bf9e38b7f3121e46f2f39b846592101 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 24 Jul 2024 15:39:20 +0200 Subject: [PATCH 49/84] minor fixes to pass failing tests --- cornflow-dags/tests/test_dags.py | 2 +- cornflow-server/cornflow/endpoints/reports.py | 3 --- .../tests/integration/test_cornflowclient.py | 15 ++++++++++++++- cornflow-server/cornflow/tests/unit/test_cli.py | 2 +- .../client/cornflow_client/raw_cornflow_client.py | 13 +++++++++---- 5 files changed, 25 insertions(+), 10 deletions(-) diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index c9528021a..9239b96f8 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -202,7 +202,7 @@ def test_report_error(self): my_experim = self.app.solvers["cpsat"](self.app.instance(tests[0]["instance"])) my_experim.solve(dict()) report_path = "./my_report.html" - my_fun = my_experim.generate_report( + my_fun = lambda: my_experim.generate_report( report_path=report_path, report_name="wrong_name" ) self.assertRaises(FileNotFoundError, my_fun) diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index 66b8f7f4f..58d202366 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ -191,7 +191,6 @@ def get(self, idx): report = self.get_detail(user=self.get_user(), idx=idx) if report is None: - print("error") raise ObjectDoesNotExist # if there's no file, we do not return it: @@ -199,8 +198,6 @@ def get(self, idx): return report, 200 my_dir = get_report_dir(report.execution_id) - print(my_dir) - print(report.file_url) response = send_from_directory(my_dir, report.file_url) response.headers["File-Description"] = report.description response.headers["File-Name"] = report.file_url diff --git a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py index 0751bc6ad..643ff350f 100644 --- a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py +++ b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py @@ -230,6 +230,10 @@ def test_new_execution_with_tsp_report_wait(self): reports_info = try_until_condition(func, lambda v: v is not None, 10, 5) id_report = reports_info["id"] my_name = "./my_report.html" + try: + os.remove(my_name) + except: + pass self.client.raw.get_one_report(id_report, "./", my_name) self.assertTrue(os.path.exists(my_name)) try: @@ -246,6 +250,10 @@ def test_new_execution_with_timer_report_wait(self): reports_info = try_until_condition(func, lambda v: v is not None, 10, 5) id_report = reports_info["id"] my_name = "./my_report.html" + try: + os.remove(my_name) + except: + pass self.client.raw.get_one_report(id_report, "./", my_name) self.assertTrue(os.path.exists(my_name)) try: @@ -263,8 +271,13 @@ def test_new_execution_with_tsp_report_error(self): self.assertEqual(REPORT_STATE.ERROR, reports_info["state"]) id_report = reports_info["id"] my_name = "./my_report.html" + try: + os.remove(my_name) + except: + pass self.client.raw.get_one_report(id_report, "./", my_name) - 
self.assertTrue(os.path.exists(my_name)) + # if we did not write a file, we should not have it: + self.assertFalse(os.path.exists(my_name)) try: os.remove(my_name) except OSError: diff --git a/cornflow-server/cornflow/tests/unit/test_cli.py b/cornflow-server/cornflow/tests/unit/test_cli.py index 8633959ef..3596d52d6 100644 --- a/cornflow-server/cornflow/tests/unit/test_cli.py +++ b/cornflow-server/cornflow/tests/unit/test_cli.py @@ -20,7 +20,7 @@ class CLITests(TestCase): def setUp(self): db.create_all() - self.number_of_views = 51 + self.number_of_views = 52 self.number_of_permissions = 569 if int(os.getenv("CF_ALARMS_ENDPOINT")) != 1: self.number_of_views = 49 diff --git a/libs/client/cornflow_client/raw_cornflow_client.py b/libs/client/cornflow_client/raw_cornflow_client.py index 972c5f215..476ff1220 100644 --- a/libs/client/cornflow_client/raw_cornflow_client.py +++ b/libs/client/cornflow_client/raw_cornflow_client.py @@ -579,12 +579,17 @@ def get_one_report( :rtype: :class:`Response` """ result = self.get_api_for_id(api="report", id=reference_id, encoding=encoding) - # TODO: if the report does not have a file, we just return the result object - content = result.content - if not content: - return result if result.status_code != 200: return result + + # if it returned a json: we did not get a report + # we just return the result "as is". + try: + content = result.json() + return result + except requests.exceptions.JSONDecodeError: + # if it fails, we assume there's an html file + content = result.content if folder_destination is None: raise ValueError( "Argument folder_destination needs to be filled when there's a file" From fe26094c8a61cec2e4f2eb9a4d12f855766e476f Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 24 Jul 2024 15:58:20 +0200 Subject: [PATCH 50/84] another failing test --- cornflow-server/cornflow/tests/unit/test_cli.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/cornflow-server/cornflow/tests/unit/test_cli.py b/cornflow-server/cornflow/tests/unit/test_cli.py index 3596d52d6..3133f8af8 100644 --- a/cornflow-server/cornflow/tests/unit/test_cli.py +++ b/cornflow-server/cornflow/tests/unit/test_cli.py @@ -21,10 +21,10 @@ class CLITests(TestCase): def setUp(self): db.create_all() self.number_of_views = 52 - self.number_of_permissions = 569 - if int(os.getenv("CF_ALARMS_ENDPOINT")) != 1: - self.number_of_views = 49 - self.number_of_permissions = 514 + self.number_of_permissions = 574 + if int(os.getenv("CF_ALARMS_ENDPOINT", 0)) != 1: + self.number_of_views = 50 + self.number_of_permissions = 519 def tearDown(self): db.session.remove() From eba17e4a0be725fce1dec040932421c1a3218176 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 24 Jul 2024 18:03:29 +0200 Subject: [PATCH 51/84] fixed some errors in tests. 
some print statements for debugging --- .../tests/integration/test_cornflowclient.py | 8 +++-- .../integration/test_cornflow_integration.py | 6 +++- .../test_raw_cornflow_integration.py | 35 +++++++------------ 3 files changed, 23 insertions(+), 26 deletions(-) diff --git a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py index 643ff350f..19ebfedba 100644 --- a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py +++ b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py @@ -227,7 +227,7 @@ def test_new_execution_with_tsp_report(self): def test_new_execution_with_tsp_report_wait(self): execution = self.create_instance_and_execution_report() func = wait_until_report_finishes(self.client, execution["id"]) - reports_info = try_until_condition(func, lambda v: v is not None, 10, 5) + reports_info = try_until_condition(func, lambda v: v is not None, 20, 5) id_report = reports_info["id"] my_name = "./my_report.html" try: @@ -247,7 +247,7 @@ def test_new_execution_with_timer_report_wait(self): payload = dict(solver="default", schema="timer", data={"a": 1}, timeLimit=1) execution = self.create_instance_and_execution_report(**payload) func = wait_until_report_finishes(self.client, execution["id"]) - reports_info = try_until_condition(func, lambda v: v is not None, 10, 5) + reports_info = try_until_condition(func, lambda v: v is not None, 20, 5) id_report = reports_info["id"] my_name = "./my_report.html" try: @@ -267,7 +267,7 @@ def test_new_execution_with_tsp_report_error(self): func = wait_until_report_finishes( self.client, execution["id"], REPORT_STATE.ERROR ) - reports_info = try_until_condition(func, lambda v: v is not None, 10, 5) + reports_info = try_until_condition(func, lambda v: v is not None, 20, 5) self.assertEqual(REPORT_STATE.ERROR, reports_info["state"]) id_report = reports_info["id"] my_name = "./my_report.html" @@ -621,9 +621,11 @@ def wait_until_report_finishes( def func(): my_reports = client.raw.get_results(execution_id).json()["reports"] if len(my_reports) == 0: + print("no reports") return None first = my_reports[0] if first["state"] != report_status: + print(f"report state: {first['state']}") return None return first diff --git a/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py b/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py index a936f50a7..60fd79b1c 100644 --- a/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py +++ b/libs/client/cornflow_client/tests/integration/test_cornflow_integration.py @@ -719,6 +719,11 @@ def test_get_one_report(self): client = CornFlow(url="http://127.0.0.1:5050/") _ = client.login("user", "UserPassword1!") + my_file = os.path.join(TEST_FOLDER, response["file_url"]) + try: + os.remove(my_file) + except OSError: + pass content, headers = client.get_one_report( reference_id=report_id, folder_destination=TEST_FOLDER ) @@ -726,7 +731,6 @@ def test_get_one_report(self): self.assertEqual(headers["File-Description"], response["description"]) # read from TEST FOLDER - my_file = os.path.join(TEST_FOLDER, response["file_url"]) with open(my_file, "r") as f: file = f.read() diff --git a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py index 84597f02e..46a99b9cb 100644 --- a/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py +++ 
b/libs/client/cornflow_client/tests/integration/test_raw_cornflow_integration.py @@ -812,13 +812,14 @@ def test_get_one_report(self): client = CornFlow(url="http://127.0.0.1:5050/") _ = client.login("user", "UserPassword1!") + file_name = "new_report.html" response = client.raw.get_one_report( - reference_id=report_id, folder_destination=TEST_FOLDER + reference_id=report_id, folder_destination=TEST_FOLDER, file_name=file_name ) self.assertEqual(response.status_code, 200) # read from TEST FOLDER - with open(os.path.join(TEST_FOLDER, "new_report.html"), "r") as f: + with open(os.path.join(TEST_FOLDER, file_name), "r") as f: file = f.read() # read from test/data folder @@ -828,7 +829,7 @@ def test_get_one_report(self): self.assertEqual(file, file_2) # remove file from TEST_FOLDER - os.remove(os.path.join(TEST_FOLDER, "new_report.html")) + os.remove(os.path.join(TEST_FOLDER, file_name)) def test_get_all_reports(self): report_1 = self.test_post_report_html().json()["id"] @@ -846,7 +847,7 @@ def test_get_all_reports(self): client.raw.delete_one_report(reference_id=report_2) def test_put_one_report(self): - response = self.test_post_report_html(uploadFile=False) + response = self.test_post_report_html() report_id = response.json()["id"] client = CornFlow(url="http://127.0.0.1:5050/") @@ -854,7 +855,9 @@ def test_put_one_report(self): payload = {"name": "new_name", "description": "some_description"} - response = client.raw.put_one_report(reference_id=report_id, payload=payload) + response = self.client.raw.put_one_report( + reference_id=report_id, payload=payload + ) self.assertEqual(response.status_code, 200) @@ -862,33 +865,23 @@ def test_put_one_report(self): reference_id=report_id, folder_destination=TEST_FOLDER ) - self.assertEqual(new_report.headers["File-Name"], payload["name"]) self.assertEqual(new_report.headers["File-Description"], payload["description"]) - self.assertNotEqual(new_report.headers["File-Name"], "new_report") self.assertNotEqual(new_report.headers["File-Description"], "") delete = client.raw.delete_one_report(reference_id=report_id) self.assertEqual(delete.status_code, 200) def test_put_one_report_file(self): - response = self.test_post_report_html() + response = self.test_post_report_html(uploadFile=False) report_id = response.json()["id"] client = CornFlow(url="http://127.0.0.1:5050/") _ = client.login("user", "UserPassword1!") - with open(HTML_REPORT, "rb") as _file: - payload = {"name": "new_name", "description": "some_description"} - response = client.raw.put_one_report( - reference_id=report_id, - payload=payload, - files=dict(file=_file), - headers={"content_type": "multipart/form-data"}, - ) - - response = client.raw.put_one_report( - reference_id=report_id, payload=payload - ) + payload = {"name": "new_name", "description": "some_description"} + response = self.client.raw.put_one_report( + reference_id=report_id, payload=payload, filename=HTML_REPORT + ) self.assertEqual(response.status_code, 200) @@ -896,9 +889,7 @@ def test_put_one_report_file(self): reference_id=report_id, folder_destination=TEST_FOLDER ) - self.assertEqual(new_report.headers["File-Name"], payload["name"]) self.assertEqual(new_report.headers["File-Description"], payload["description"]) - self.assertNotEqual(new_report.headers["File-Name"], "new_report") self.assertNotEqual(new_report.headers["File-Description"], "") delete = client.raw.delete_one_report(reference_id=report_id) From 9452cbc44946644b71d168c1e715e475dd135020 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Fri, 26 
Jul 2024 10:21:20 +0200 Subject: [PATCH 52/84] Changes to Dockerfile toc reate the reports folder on the container --- cornflow-server/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cornflow-server/Dockerfile b/cornflow-server/Dockerfile index bbf9bc501..0c7e62f04 100644 --- a/cornflow-server/Dockerfile +++ b/cornflow-server/Dockerfile @@ -30,13 +30,13 @@ ENV PYTHONUNBUFFERED 1 # install dependencies RUN pip install --upgrade pip -RUN pip install "cornflow==${CORNFLOW_VERSION}" +RUN pip install "cornflow@git+https://github.com/baobabsoluciones/cornflow@tsp_report#subdirectory=cornflow-server" # create folder for logs RUN mkdir -p /usr/src/app/log -# create folder for object storage +# create folder for static content RUN mkdir -p /usr/src/app/static # create folder for custom ssh keys From ecdebffe1ff259441bcbd4acf5a0c176912f09de Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Fri, 26 Jul 2024 12:07:49 +0200 Subject: [PATCH 53/84] Modified workflows and order of installation of cornflow-client dependency --- .github/workflows/test_cornflow_client.yml | 3 ++- .github/workflows/test_cornflow_server.yml | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test_cornflow_client.yml b/.github/workflows/test_cornflow_client.yml index 07a82601a..4c3daf88b 100644 --- a/.github/workflows/test_cornflow_client.yml +++ b/.github/workflows/test_cornflow_client.yml @@ -91,8 +91,9 @@ jobs: CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-${PYTHON_VERSION}.txt" python -m pip install "apache-airflow==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" python -m pip install -U orloge pulp - python -m pip install -U "git+https://github.com/baobabsoluciones/cornflow@${CLIENT_BRANCH}#subdirectory=libs/client" python -m pip install -U -r airflow_config/requirements.txt + python -m pip uninstall cornflow-client -y + python -m pip install -U "git+https://github.com/baobabsoluciones/cornflow@${CLIENT_BRANCH}#subdirectory=libs/client" airflow db init airflow users create -u admin -f admin -l admin -r Admin -p admin -e admin@example.org airflow webserver -p 8080 & diff --git a/.github/workflows/test_cornflow_server.yml b/.github/workflows/test_cornflow_server.yml index 68ef5af8e..5f3f00558 100644 --- a/.github/workflows/test_cornflow_server.yml +++ b/.github/workflows/test_cornflow_server.yml @@ -86,8 +86,9 @@ jobs: python -m pip install "apache-airflow==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" python -m pip install -U orloge pulp python -m pip uninstall cornflow-client -y - python -m pip install -U "git+https://github.com/baobabsoluciones/cornflow@${CLIENT_BRANCH}#subdirectory=libs/client" python -m pip install -U -r airflow_config/requirements.txt + python -m pip uninstall cornflow-client -y + python -m pip install -U "git+https://github.com/baobabsoluciones/cornflow@${CLIENT_BRANCH}#subdirectory=libs/client" airflow db init airflow users create \ --username admin \ From 8a017c95b19a0e8ca45cc5103f5c42317b877e8c Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Fri, 26 Jul 2024 12:09:04 +0200 Subject: [PATCH 54/84] Undo change on DOCKERFILE --- cornflow-server/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cornflow-server/Dockerfile b/cornflow-server/Dockerfile index 0c7e62f04..7db8eae21 100644 --- a/cornflow-server/Dockerfile +++ b/cornflow-server/Dockerfile @@ -30,7 +30,7 @@ ENV PYTHONUNBUFFERED 1 # install 
dependencies RUN pip install --upgrade pip -RUN pip install "cornflow@git+https://github.com/baobabsoluciones/cornflow@tsp_report#subdirectory=cornflow-server" +RUN pip install "cornflow==${CORNFLOW_VERSION}" # create folder for logs From 45e29881f7dcdde66ca137c47699dbd31a20044b Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Fri, 26 Jul 2024 12:52:42 +0200 Subject: [PATCH 55/84] Debug log to have the name on the test --- .../cornflow/tests/integration/test_cornflowclient.py | 1 + 1 file changed, 1 insertion(+) diff --git a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py index 19ebfedba..1212d0df6 100644 --- a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py +++ b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py @@ -40,6 +40,7 @@ class TestCornflowClientBasic(CustomTestCaseLive): def setUp(self, create_all=False): super().setUp() self.items_to_check = ["name", "description"] + log.debug(f"Start test case name: {self.id()}") def check_status_evolution(self, execution, end_state=EXEC_STATE_CORRECT): statuses = [execution["state"]] From 4ca157b98482f2bebf099db0a45e42e1520ff11e Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Fri, 26 Jul 2024 13:01:47 +0200 Subject: [PATCH 56/84] Activated debug mode on testing configuration --- .github/workflows/test_cornflow_server.yml | 2 +- cornflow-server/cornflow/config.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test_cornflow_server.yml b/.github/workflows/test_cornflow_server.yml index 5f3f00558..078a057a9 100644 --- a/.github/workflows/test_cornflow_server.yml +++ b/.github/workflows/test_cornflow_server.yml @@ -168,7 +168,7 @@ jobs: FLASK_ENV: testing OPEN_DEPLOYMENT: 1 DATABASE_URL: sqlite:///cornflow_test.db - LOG_LEVEL: 30 + LOG_LEVEL: 10 - name: Upload coverage to codecov uses: codecov/codecov-action@v3 with: diff --git a/cornflow-server/cornflow/config.py b/cornflow-server/cornflow/config.py index 102e005f7..bfd0fe761 100644 --- a/cornflow-server/cornflow/config.py +++ b/cornflow-server/cornflow/config.py @@ -102,7 +102,7 @@ class Testing(DefaultConfig): ENV = "testing" SQLALCHEMY_TRACK_MODIFICATIONS = False - DEBUG = False + DEBUG = True TESTING = True PROPAGATE_EXCEPTIONS = True SECRET_TOKEN_KEY = "TESTINGSECRETKEY" From 65eafb251858b6b45a950bbb424c9309aa20b2df Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Fri, 26 Jul 2024 13:43:13 +0200 Subject: [PATCH 57/84] Added error logs on log in --- cornflow-dags/DAG/dag_timer.py | 1 - cornflow-server/cornflow/endpoints/login.py | 32 +++++++++++++++---- .../tests/integration/test_cornflowclient.py | 4 ++- 3 files changed, 29 insertions(+), 8 deletions(-) diff --git a/cornflow-dags/DAG/dag_timer.py b/cornflow-dags/DAG/dag_timer.py index 3058f3da5..82c4e986e 100644 --- a/cornflow-dags/DAG/dag_timer.py +++ b/cornflow-dags/DAG/dag_timer.py @@ -36,7 +36,6 @@ def check_solution(self, *args, **kwargs): return dict() def generate_report(self, report_path: str, report_name="report") -> None: - html = ET.Element("html") body = ET.Element("body") html.append(body) diff --git a/cornflow-server/cornflow/endpoints/login.py b/cornflow-server/cornflow/endpoints/login.py index 4bae849d4..cea0b7254 100644 --- a/cornflow-server/cornflow/endpoints/login.py +++ b/cornflow-server/cornflow/endpoints/login.py @@ -34,6 +34,7 @@ class LoginBaseEndpoint(BaseMetaResource): """ Base endpoint to perform a login 
action from a user """ + def __init__(self): super().__init__() self.ldap_class = LDAPBase @@ -63,6 +64,7 @@ def log_in(self, **kwargs): try: token = self.auth_class.generate_token(user.id) except Exception as e: + current_app.logger.error(f"Error in generating user token: {str(e)}") raise InvalidUsage(f"Error in generating user token: {str(e)}", 400) response.update({"token": token, "id": user.id}) @@ -81,9 +83,11 @@ def auth_db_authenticate(self, username, password): user = self.data_model.get_one_object(username=username) if not user: + current_app.logger.error(f"Error on login user does not exist") raise InvalidCredentials() if not user.check_hash(password): + current_app.logger.error(f"Error on login invalid credentials") raise InvalidCredentials() return user @@ -102,7 +106,9 @@ def auth_ldap_authenticate(self, username, password): raise InvalidCredentials() user = self.data_model.get_one_object(username=username) if not user: - current_app.logger.info(f"LDAP user {username} does not exist and is created") + current_app.logger.info( + f"LDAP user {username} does not exist and is created" + ) email = ldap_obj.get_user_email(username) if not email: email = "" @@ -122,10 +128,14 @@ def auth_ldap_authenticate(self, username, password): except IntegrityError as e: db.session.rollback() - current_app.logger.error(f"Integrity error on user role assignment on log in: {e}") + current_app.logger.error( + f"Integrity error on user role assignment on log in: {e}" + ) except DBAPIError as e: db.session.rollback() - current_app.logger.error(f"Unknown error on user role assignment on log in: {e}") + current_app.logger.error( + f"Unknown error on user role assignment on log in: {e}" + ) return user @@ -163,7 +173,9 @@ def auth_oid_authenticate(self, token): user = self.data_model.get_one_object(username=username) if not user: - current_app.logger.info(f"OpenID user {username} does not exist and is created") + current_app.logger.info( + f"OpenID user {username} does not exist and is created" + ) data = {"username": username, "email": username} @@ -183,7 +195,11 @@ def auth_oid_authenticate(self, token): def check_last_password_change(user): if user.pwd_last_change: - if user.pwd_last_change + timedelta(days=int(current_app.config["PWD_ROTATION_TIME"])) < datetime.utcnow(): + if ( + user.pwd_last_change + + timedelta(days=int(current_app.config["PWD_ROTATION_TIME"])) + < datetime.utcnow() + ): return True return False @@ -210,7 +226,11 @@ def post(self, **kwargs): :rtype: Tuple(dict, integer) """ - return self.log_in(**kwargs) + try: + return self.log_in(**kwargs) + except Exception as e: + current_app.logger.error(f"Final exception: {str(e)}") + raise e class LoginOpenAuthEndpoint(LoginBaseEndpoint): diff --git a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py index 1212d0df6..246f6389b 100644 --- a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py +++ b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py @@ -245,7 +245,9 @@ def test_new_execution_with_tsp_report_wait(self): # read header of file? 
we can parse it with beatifulsoup def test_new_execution_with_timer_report_wait(self): - payload = dict(solver="default", schema="timer", data={"a": 1}, timeLimit=1) + payload = dict( + solver="default", schema="timer", data={"seconds": 1}, timeLimit=1 + ) execution = self.create_instance_and_execution_report(**payload) func = wait_until_report_finishes(self.client, execution["id"]) reports_info = try_until_condition(func, lambda v: v is not None, 20, 5) From de3f6364d34cc521a7932cb42e21d54395b0f0ea Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Fri, 26 Jul 2024 14:08:31 +0200 Subject: [PATCH 58/84] Changed name of service user on cornflow integration testing yaml --- .github/workflows/test_cornflow_server.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test_cornflow_server.yml b/.github/workflows/test_cornflow_server.yml index 078a057a9..a9a02c56c 100644 --- a/.github/workflows/test_cornflow_server.yml +++ b/.github/workflows/test_cornflow_server.yml @@ -108,7 +108,7 @@ jobs: AIRFLOW__API__AUTH_BACKEND: airflow.api.auth.backend.basic_auth AIRFLOW__WEBSERVER__SECRET_KEY: e9adafa751fd35adfc1fdd3285019be15eea0758f76e38e1e37a1154fb36 AIRFLOW__CORE__LOAD_EXAMPLES: 0 - AIRFLOW_CONN_CF_URI: http://airflow:Airflow_test_password1@localhost:5050 + AIRFLOW_CONN_CF_URI: http://service_user:Airflow_test_password1@localhost:5050 - name: Run unit tests run: | coverage run --source=./cornflow/ --rcfile=./.coveragerc -m unittest discover -s cornflow/tests/unit From ec91924420ab259341e31f62514729e0689e8aad Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Fri, 26 Jul 2024 16:16:28 +0200 Subject: [PATCH 59/84] Some small changes to tests to have them more organized --- .github/workflows/test_cornflow_server.yml | 2 +- ...om_liveServer.py => custom_live_server.py} | 0 .../tests/integration/test_commands.py | 2 +- .../tests/integration/test_cornflowclient.py | 109 ++++++++---------- cornflow-server/requirements.txt | 2 +- 5 files changed, 49 insertions(+), 66 deletions(-) rename cornflow-server/cornflow/tests/{custom_liveServer.py => custom_live_server.py} (100%) diff --git a/.github/workflows/test_cornflow_server.yml b/.github/workflows/test_cornflow_server.yml index a9a02c56c..b082b1abf 100644 --- a/.github/workflows/test_cornflow_server.yml +++ b/.github/workflows/test_cornflow_server.yml @@ -168,7 +168,7 @@ jobs: FLASK_ENV: testing OPEN_DEPLOYMENT: 1 DATABASE_URL: sqlite:///cornflow_test.db - LOG_LEVEL: 10 + LOG_LEVEL: 30 - name: Upload coverage to codecov uses: codecov/codecov-action@v3 with: diff --git a/cornflow-server/cornflow/tests/custom_liveServer.py b/cornflow-server/cornflow/tests/custom_live_server.py similarity index 100% rename from cornflow-server/cornflow/tests/custom_liveServer.py rename to cornflow-server/cornflow/tests/custom_live_server.py diff --git a/cornflow-server/cornflow/tests/integration/test_commands.py b/cornflow-server/cornflow/tests/integration/test_commands.py index 192874191..b8c0c6e72 100644 --- a/cornflow-server/cornflow/tests/integration/test_commands.py +++ b/cornflow-server/cornflow/tests/integration/test_commands.py @@ -3,7 +3,7 @@ from cornflow.commands.dag import register_deployed_dags_command from cornflow.models import DeployedDAG from cornflow.tests.const import PUBLIC_DAGS -from cornflow.tests.custom_liveServer import CustomTestCaseLive +from cornflow.tests.custom_live_server import CustomTestCaseLive class TestCornflowCommands(CustomTestCaseLive): diff --git 
a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py index 246f6389b..0bde8310b 100644 --- a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py +++ b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py @@ -1,20 +1,18 @@ """ - +Main script to run the integration tests of cornflow-server """ -# Full imports -import json -import os -import pulp +import json import logging as log +import os import time +from typing import Callable, Any -# Imports from environment +import pulp from cornflow_client import CornFlowApiError from cornflow_client.constants import INSTANCE_SCHEMA, SOLUTION_SCHEMA -# Import internal modules from cornflow.app import create_app from cornflow.shared.const import ( EXEC_STATE_CORRECT, @@ -25,9 +23,7 @@ REPORT_STATE, ) from cornflow.tests.const import INSTANCE_PATH, CASE_PATH, INSTANCE_MPS, INSTANCE_TSP -from cornflow.tests.custom_liveServer import CustomTestCaseLive - -from typing import Callable, Any +from cornflow.tests.custom_live_server import CustomTestCaseLive def load_file(_file: str): @@ -40,7 +36,7 @@ class TestCornflowClientBasic(CustomTestCaseLive): def setUp(self, create_all=False): super().setUp() self.items_to_check = ["name", "description"] - log.debug(f"Start test case name: {self.id()}") + log.info(f"Start test case name: {self.id()}") def check_status_evolution(self, execution, end_state=EXEC_STATE_CORRECT): statuses = [execution["state"]] @@ -189,6 +185,39 @@ def create_timer_instance_and_execution(self, seconds=5): ) return self.create_new_execution(payload) + @staticmethod + def try_until_condition( + func: Callable, + condition: Callable[[Any], bool], + number_of_times: int = 10, + sleep_time: float = 10, + ): + for i in range(number_of_times): + time.sleep(sleep_time) + result = func() + if condition(result): + return result + raise TimeoutError( + "Timed out after {} seconds".format(number_of_times * sleep_time) + ) + + @staticmethod + def wait_until_report_finishes( + client, execution_id, report_status=REPORT_STATE.CORRECT + ): + def func(): + my_reports = client.raw.get_results(execution_id).json()["reports"] + if len(my_reports) == 0: + print("no reports") + return None + first = my_reports[0] + if first["state"] != report_status: + print(f"report state: {first['state']}") + return None + return first + + return func + class TestCornflowClientOpen(TestCornflowClientBasic): # TODO: user management @@ -227,8 +256,8 @@ def test_new_execution_with_tsp_report(self): # def test_new_execution_with_tsp_report_wait(self): execution = self.create_instance_and_execution_report() - func = wait_until_report_finishes(self.client, execution["id"]) - reports_info = try_until_condition(func, lambda v: v is not None, 20, 5) + func = self.wait_until_report_finishes(self.client, execution["id"]) + reports_info = self.try_until_condition(func, lambda v: v is not None, 20, 5) id_report = reports_info["id"] my_name = "./my_report.html" try: @@ -249,8 +278,8 @@ def test_new_execution_with_timer_report_wait(self): solver="default", schema="timer", data={"seconds": 1}, timeLimit=1 ) execution = self.create_instance_and_execution_report(**payload) - func = wait_until_report_finishes(self.client, execution["id"]) - reports_info = try_until_condition(func, lambda v: v is not None, 20, 5) + func = self.wait_until_report_finishes(self.client, execution["id"]) + reports_info = self.try_until_condition(func, lambda v: v is not None, 20, 5) id_report = 
reports_info["id"] my_name = "./my_report.html" try: @@ -267,10 +296,10 @@ def test_new_execution_with_timer_report_wait(self): def test_new_execution_with_tsp_report_error(self): payload = dict(report_name="wrong_name") execution = self.create_instance_and_execution_report(**payload) - func = wait_until_report_finishes( + func = self.wait_until_report_finishes( self.client, execution["id"], REPORT_STATE.ERROR ) - reports_info = try_until_condition(func, lambda v: v is not None, 20, 5) + reports_info = self.try_until_condition(func, lambda v: v is not None, 20, 5) self.assertEqual(REPORT_STATE.ERROR, reports_info["state"]) id_report = reports_info["id"] my_name = "./my_report.html" @@ -406,19 +435,6 @@ class TestCornflowClientAdmin(TestCornflowClientBasic): def setUp(self, create_all=False): super().setUp() - # we create a service user: - self.create_service_user( - dict(username="airflow", pwd="Airflow_test_password1", email="af@cf.com") - ) - - self.create_service_user( - dict( - username="service_user@cornflow.com", - pwd="Serviceuser_1234", - email="service_user@cornflow.com", - ) - ) - # we create an admin user # we guarantee that the admin is there for airflow self.client.token = self.create_admin( @@ -600,36 +616,3 @@ def test_check_case(self): api="case", id=case["id"], post_url="data", encoding="br" ).json() self.assertIsNotNone(response["checks"]) - - -def try_until_condition( - func: Callable, - condition: Callable[[Any], bool], - number_of_times: int = 10, - sleep_time: float = 10, -): - for i in range(number_of_times): - time.sleep(sleep_time) - result = func() - if condition(result): - return result - raise TimeoutError( - "Timed out after {} seconds".format(number_of_times * sleep_time) - ) - - -def wait_until_report_finishes( - client, execution_id, report_status=REPORT_STATE.CORRECT -): - def func(): - my_reports = client.raw.get_results(execution_id).json()["reports"] - if len(my_reports) == 0: - print("no reports") - return None - first = my_reports[0] - if first["state"] != report_status: - print(f"report state: {first['state']}") - return None - return first - - return func diff --git a/cornflow-server/requirements.txt b/cornflow-server/requirements.txt index 493278e62..ff1cdca59 100644 --- a/cornflow-server/requirements.txt +++ b/cornflow-server/requirements.txt @@ -22,7 +22,7 @@ ldap3<=2.9.1 marshmallow<=3.19.0 PuLP<=2.7.0 psycopg2<=2.95 -PyJWT<=2.6.0 +PyJWT>=2.0.0,<=2.6.0 pytups>=0.86.2 requests<=2.31.0 SQLAlchemy==1.3.21 From 94e871c5c8b2c6833acd7370a1354b67cb5415a0 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Mon, 29 Jul 2024 08:58:04 +0200 Subject: [PATCH 60/84] take out prints --- .../cornflow/tests/integration/test_cornflowclient.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py index 0bde8310b..be297d2a1 100644 --- a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py +++ b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py @@ -2,7 +2,6 @@ Main script to run the integration tests of cornflow-server """ - import json import logging as log import os @@ -203,16 +202,14 @@ def try_until_condition( @staticmethod def wait_until_report_finishes( - client, execution_id, report_status=REPORT_STATE.CORRECT + client, execution_id: str, report_status=REPORT_STATE.CORRECT ): def func(): my_reports = client.raw.get_results(execution_id).json()["reports"] if len(my_reports) == 0: - print("no 
reports") return None first = my_reports[0] if first["state"] != report_status: - print(f"report state: {first['state']}") return None return first From fb529b03f6b72bb3ad2fb57c92b29d1bab7d5a8a Mon Sep 17 00:00:00 2001 From: pchtsp Date: Mon, 29 Jul 2024 09:11:16 +0200 Subject: [PATCH 61/84] update swagger REST API doc --- cornflow-server/cornflow/static/v1.json | 5891 ++++++++++++++--------- 1 file changed, 3543 insertions(+), 2348 deletions(-) diff --git a/cornflow-server/cornflow/static/v1.json b/cornflow-server/cornflow/static/v1.json index 86b2017cc..4340639bd 100644 --- a/cornflow-server/cornflow/static/v1.json +++ b/cornflow-server/cornflow/static/v1.json @@ -1,2659 +1,3854 @@ { - "definitions": { - "ActionsResponse": { - "properties": { - "id": { - "type": "integer" - }, - "name": { - "readOnly": true - } - }, - "type": "object" - }, - "ApiViewResponse": { - "properties": { - "description": { - "type": "string" - }, - "id": { - "type": "integer" - }, - "name": { - "readOnly": true - }, - "url_rule": { - "type": "string" - } - }, - "type": "object" - }, - "CaseBase": { - "properties": { - "checks": {}, - "created_at": { - "format": "date-time", - "type": "string" - }, - "data": {}, - "data_hash": { - "type": "string" - }, - "dependents": { - "items": { - "type": "integer" + "paths": { + "/instance/": { + "get": { + "parameters": [ + { + "in": "query", + "name": "limit", + "type": "integer", + "required": false }, - "type": "array" - }, - "description": { - "type": "string" - }, - "id": { - "type": "integer" - }, - "is_dir": {}, - "name": { - "type": "string" - }, - "path": { - "type": "string" - }, - "schema": { - "type": "string" - }, - "solution": {}, - "solution_checks": {}, - "solution_hash": { - "type": "string" - }, - "updated_at": { - "format": "date-time", - "type": "string" + { + "in": "query", + "name": "offset", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "creation_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "creation_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "id", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "deletion_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "deletion_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "update_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "update_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "user_name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "last_name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "email", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "role_id", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "url_rule", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "description", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "schema", + "type": "string", + "required": false + } + ], + "responses": { + "default": { + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/InstanceEndpointResponse" + } 
+ }, + "description": "" + } }, - "user_id": { - "type": "integer" - } + "description": "Get all instances", + "tags": [ + "Instances" + ] }, - "type": "object" + "post": { + "parameters": [], + "responses": {}, + "description": "Create an instance", + "tags": [ + "Instances" + ] + } }, - "CaseCompareResponse": { - "properties": { - "data_patch": { - "items": { - "$ref": "#/definitions/PatchOperation" - }, - "type": "array" + "/instance/{idx}/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/InstanceDetailsEndpointResponse" + }, + "description": "" + } }, - "solution_patch": { - "items": { - "$ref": "#/definitions/PatchOperation" + "description": "Get one instance", + "tags": [ + "Instances" + ] + }, + "put": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/InstanceEditRequest" + } }, - "type": "array" - } + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": {}, + "description": "Edit an instance", + "tags": [ + "Instances" + ] }, - "type": "object" + "delete": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": {}, + "description": "Delete an instance", + "tags": [ + "Instances" + ] + } }, - "CaseEditRequest": { - "properties": { - "description": { - "type": "string" + "/instance/{idx}/data/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/InstanceDataEndpointResponse" + }, + "description": "" + } }, - "name": { - "type": "string" - } - }, - "type": "object" + "description": "Get input data of an instance", + "tags": [ + "Instances" + ] + } }, - "CaseFromInstanceExecution": { - "properties": { - "description": { - "type": "string" - }, - "execution_id": { - "type": "string" - }, - "instance_id": { - "type": "string" - }, - "name": { - "type": "string" - }, - "parent_id": { - "type": "integer", - "x-nullable": true - }, - "schema": { - "type": "string" - } - }, - "required": [ - "name", - "schema" - ], - "type": "object" - }, - "CaseListResponse": { - "properties": { - "created_at": { - "format": "date-time", - "type": "string" - }, - "data_hash": { - "type": "string" - }, - "dependents": { - "items": { - "type": "integer" + "/instancefile/": { + "post": { + "parameters": [ + { + "in": "formData", + "name": "name", + "type": "string", + "required": true }, - "type": "array" - }, - "description": { - "type": "string" - }, - "id": { - "type": "integer" - }, - "is_dir": {}, - "name": { - "type": "string" - }, - "path": { - "type": "string" - }, - "schema": { - "type": "string" - }, - "solution_hash": { - "type": "string" - }, - "updated_at": { - "format": "date-time", - "type": "string" + { + "in": "formData", + "name": "description", + "type": "string", + "required": false + }, + { + "in": "formData", + "name": "minimize", + "type": "boolean", + "required": false + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/InstanceDetailsEndpointResponse" + }, + "description": "" + } }, - "user_id": { - "type": "integer" - } - }, - "type": "object" + "description": "Create an instance from an mps file", + "tags": [ + "Instances" + ] + } }, - "CaseToInstanceResponse": { - "properties": { - "id": { - "type": "string" + 
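As a hedged illustration of the `/instancefile/` entry documented above (the base URL, the token and the multipart field name `"file"` are assumptions, not taken from this spec), creating an instance from an mps file could look like:

```python
import requests

# Placeholder values for a cornflow deployment.
BASE_URL = "http://localhost:5050"
TOKEN = "<user-token>"

# POST /instancefile/ takes its parameters as form data: "name" is required,
# "description" and "minimize" are optional; the mps file goes in the multipart body.
with open("model.mps", "rb") as mps_file:
    response = requests.post(
        f"{BASE_URL}/instancefile/",
        # the exact header scheme depends on the server's authentication setup
        headers={"Authorization": f"Bearer {TOKEN}"},
        data={"name": "my-instance", "description": "instance built from an mps file"},
        files={"file": ("model.mps", mps_file)},
    )

# A successful response follows InstanceDetailsEndpointResponse (id, name, schema, ...).
print(response.status_code, response.json())
```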
"/data-check/execution/{idx}/": { + "post": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/ExecutionDetailsEndpointResponse" + }, + "description": "" + } }, - "schema": { - "type": "string" - } - }, - "required": [ - "id", - "schema" - ], - "type": "object" + "description": "Create a data check execution", + "tags": [ + "Data checks" + ] + } }, - "Config": { - "properties": { - "gapAbs": { - "type": "number" - }, - "gapRel": { - "type": "number" - }, - "keepFiles": { - "type": "boolean" - }, - "logPath": { - "type": "string" - }, - "maxMemory": { - "type": "integer" - }, - "maxNodes": { - "type": "integer" - }, - "mip": { - "type": "boolean" - }, - "msg": { - "type": "boolean" - }, - "options": { - "items": { + "/data-check/instance/{idx}/": { + "post": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, "type": "string" - }, - "type": "array" - }, - "solver": { - "default": "PULP_CBC_CMD", - "type": "string" - }, - "threads": { - "type": "integer" - }, - "timeLimit": { - "type": "integer" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/ExecutionDetailsEndpointResponse" + }, + "description": "" + } }, - "warmStart": { - "type": "boolean" - } - }, - "type": "object" + "description": "Create a data check execution for an existing instance", + "tags": [ + "Data checks" + ] + } }, - "DeployedDAG": { - "properties": { - "description": { - "type": "string", - "x-nullable": true + "/data-check/case/{idx}/": { + "post": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/ExecutionDetailsEndpointResponse" + }, + "description": "" + } }, - "id": { - "type": "string" - } - }, - "required": [ - "id" - ], - "type": "object" + "description": "Create a data check execution for an existing case", + "tags": [ + "Data checks" + ] + } }, - "ExecutionDagPostRequest": { - "properties": { - "checks": {}, - "config": { - "$ref": "#/definitions/Config" - }, - "data": {}, - "description": { - "type": "string" - }, - "instance_id": { - "type": "string" - }, - "log_json": { - "$ref": "#/definitions/Log" - }, - "log_text": { - "type": "string" - }, - "name": { - "type": "string" - }, - "schema": { - "type": "string" - }, - "solution_schema": { - "type": "string", - "x-nullable": true + "/execution/{idx}/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/ExecutionDetailsEndpointWithIndicatorsResponse" + }, + "description": "" + } }, - "state": { - "type": "integer" - } + "description": "Get details of an execution", + "tags": [ + "Executions" + ] }, - "required": [ - "config", - "instance_id", - "name" - ], - "type": "object" - }, - "ExecutionDagRequest": { - "properties": { - "checks": {}, - "data": {}, - "log_json": { - "$ref": "#/definitions/Log" - }, - "log_text": { - "type": "string" - }, - "solution_schema": { - "type": "string", - "x-nullable": true - }, - "state": { - "type": "integer" - } + "put": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/ExecutionEditRequest" + } + }, + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": {}, + 
"description": "Edit an execution", + "tags": [ + "Executions" + ] }, - "type": "object" - }, - "ExecutionDataEndpointResponse": { - "properties": { - "checks": {}, - "config": { - "$ref": "#/definitions/Config" - }, - "created_at": { - "format": "date-time", - "type": "string" - }, - "data": {}, - "data_hash": { - "type": "string" - }, - "description": { - "type": "string" - }, - "id": { - "type": "string" - }, - "instance_id": { - "type": "string" - }, - "message": { - "type": "string" - }, - "name": { - "type": "string" - }, - "schema": { - "type": "string" - }, - "state": { - "type": "integer" - }, - "user_id": { - "type": "integer" - } + "post": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": {}, + "description": "Stop an execution", + "tags": [ + "Executions" + ] }, - "type": "object" + "delete": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": {}, + "description": "Delete an execution", + "tags": [ + "Executions" + ] + } }, - "ExecutionDetailsEndpointResponse": { - "properties": { - "config": { - "$ref": "#/definitions/Config" - }, - "created_at": { - "format": "date-time", - "type": "string" - }, - "data_hash": { - "type": "string" - }, - "description": { - "type": "string" - }, - "id": { - "type": "string" - }, - "instance_id": { - "type": "string" - }, - "message": { - "type": "string" - }, - "name": { - "type": "string" - }, - "schema": { - "type": "string" - }, - "state": { - "type": "integer" - }, - "user_id": { - "type": "integer" - } + "/execution/{idx}/status/": { + "put": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/ExecutionStatusEndpointUpdate" + } + }, + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": {}, + "description": "Change status of an execution", + "tags": [ + "Executions" + ] }, - "type": "object" - }, - "ExecutionDetailsEndpointWithIndicatorsResponse": { - "properties": { - "config": { - "$ref": "#/definitions/Config" - }, - "created_at": { - "format": "date-time", - "type": "string" - }, - "data_hash": { - "type": "string" - }, - "description": { - "type": "string" - }, - "id": { - "type": "string" - }, - "instance_id": { - "type": "string" - }, - "message": { - "type": "string" - }, - "name": { - "type": "string" - }, - "schema": { - "type": "string" - }, - "state": { - "type": "integer" - }, - "user_id": { - "type": "integer" + "get": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/ExecutionStatusEndpointResponse" + }, + "description": "" + } }, - "indicators": { - "type": "string" - } - }, - "type": "object" - }, - "ReLaunchExecutionRequest": { - "properties": { - "config": { - "$ref": "#/definitions/Config" - } - }, - "type": "object" + "description": "Get status of an execution", + "tags": [ + "Executions" + ] + } }, - "ExecutionEditRequest": { - "properties": { - "description": { - "type": "string" - }, - "name": { - "type": "string" + "/execution/{idx}/data/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/ExecutionDataEndpointResponse" + }, + "description": "" + } }, - "data": {} - }, - "type": "object" + "description": "Get 
solution data of an execution", + "tags": [ + "Executions" + ] + } }, - "ExecutionLogEndpointResponse": { - "properties": { - "config": { - "$ref": "#/definitions/Config" - }, - "created_at": { - "format": "date-time", - "type": "string" - }, - "data_hash": { - "type": "string" - }, - "description": { - "type": "string" - }, - "id": { - "type": "string" - }, - "instance_id": { - "type": "string" - }, - "log": { - "$ref": "#/definitions/Log" - }, - "message": { - "type": "string" - }, - "name": { - "type": "string" - }, - "schema": { - "type": "string" - }, - "state": { - "type": "integer" + "/execution/{idx}/log/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/ExecutionLogEndpointResponse" + }, + "description": "" + } }, - "user_id": { - "type": "integer" - } - }, - "type": "object" + "description": "Get log of an execution", + "tags": [ + "Executions" + ] + } }, - "ExecutionRequest": { - "properties": { - "config": { - "$ref": "#/definitions/Config" - }, - "description": { - "type": "string" - }, - "instance_id": { - "type": "string" - }, - "name": { - "type": "string" - }, - "schema": { - "type": "string" - }, - "data": {} - }, - "required": [ - "config", - "instance_id", - "name" - ], - "type": "object" + "/execution/{idx}/relaunch/": { + "post": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/ReLaunchExecutionRequest" + } + }, + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": {}, + "description": "Re-launch an execution", + "tags": [ + "Executions" + ] + } }, - "ExecutionStatusEndpointResponse": { - "properties": { - "data_hash": { - "readOnly": true, - "type": "string" - }, - "id": { - "type": "string" - }, - "message": { - "type": "string" + "/execution/": { + "get": { + "parameters": [ + { + "in": "query", + "name": "limit", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "offset", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "creation_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "creation_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "id", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "deletion_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "deletion_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "update_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "update_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "user_name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "last_name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "email", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "role_id", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "url_rule", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "description", + "type": "string", 
+ "required": false + }, + { + "in": "query", + "name": "schema", + "type": "string", + "required": false + } + ], + "responses": { + "default": { + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/ExecutionDetailsWithIndicatorsAndLogResponse" + } + }, + "description": "" + } }, - "state": { - "type": "integer" - } + "description": "Get all executions", + "tags": [ + "Executions" + ] }, - "type": "object" - }, - "FirstSolution": { - "properties": { - "BestInteger": { - "type": "number", - "x-nullable": true - }, - "CutsBestBound": { - "type": "number", - "x-nullable": true - }, - "Node": { - "type": "integer", - "x-nullable": true + "post": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/ExecutionRequest" + } + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/ExecutionDetailsEndpointResponse" + }, + "description": "" + } }, - "NodesLeft": { - "type": "integer", - "x-nullable": true - } - }, - "required": [ - "BestInteger", - "CutsBestBound", - "Node", - "NodesLeft" - ], - "type": "object" + "description": "Create an execution", + "tags": [ + "Executions" + ] + } }, - "HealthResponse": { - "properties": { - "airflow_status": { - "type": "string" - }, - "cornflow_status": { - "type": "string" - } + "/dag/{idx}/": { + "put": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/ExecutionDagRequest" + } + }, + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": {}, + "description": "Edit an execution", + "tags": [ + "DAGs" + ] }, - "type": "object" - }, - "InstanceCheckRequest": { - "properties": { - "checks": {} - }, - "type": "object" + "get": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": {}, + "description": "Get input data and configuration for an execution", + "tags": [ + "DAGs" + ] + } }, - "InstanceDataEndpointResponse": { - "properties": { - "checks": { - "x-nullable": true - }, - "created_at": { - "format": "date-time", - "type": "string" - }, - "data": {}, - "data_hash": { - "type": "string" - }, - "description": { - "type": "string" - }, - "id": { - "type": "string" - }, - "name": { - "type": "string" - }, - "schema": { - "type": "string" + "/dag/": { + "post": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/ExecutionDagPostRequest" + } + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/ExecutionDetailsEndpointResponse" + }, + "description": "" + } }, - "user_id": { - "type": "integer" - } - }, - "required": [ - "data" - ], - "type": "object" + "description": "Create an execution manually.", + "tags": [ + "DAGs" + ] + } }, - "InstanceDetailsEndpointResponse": { - "properties": { - "created_at": { - "format": "date-time", - "type": "string" - }, - "data_hash": { - "type": "string" - }, - "description": { - "type": "string" - }, - "executions": { - "items": { - "$ref": "#/definitions/ExecutionDetailsEndpointResponse" + "/dag/instance/{idx}/": { + "put": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/InstanceCheckRequest" + } }, - "type": "array" - }, - "id": { - "type": "string" - }, - "name": { - "type": "string" - }, - "schema": { - "type": "string" - }, - "user_id": { - "type": "integer" - } - }, - "type": "object" + 
{ + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": {}, + "description": "Endpoint to save instance checks performed on the DAG", + "tags": [ + "DAGs" + ] + } }, - "InstanceEditRequest": { - "properties": { - "description": { - "type": "string" - }, - "name": { - "type": "string" - }, - "data": {} - }, - "type": "object" + "/dag/case/{idx}/": { + "put": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/CaseCheckRequest" + } + }, + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Endpoint to save case checks performed on the DAG", + "tags": [ + "DAGs" + ] + } }, - "InstanceEndpointResponse": { - "properties": { - "created_at": { - "format": "date-time", - "type": "string" - }, - "data_hash": { - "type": "string" - }, - "description": { - "type": "string" - }, - "id": { - "type": "string" - }, - "name": { - "type": "string" - }, - "schema": { - "type": "string" + "/dag/deployed/": { + "get": { + "parameters": [], + "responses": { + "default": { + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/DeployedDAG" + } + }, + "description": "" + } }, - "user_id": { - "type": "integer" - } + "description": "Get list of deployed dags registered on the data base", + "tags": [ + "DeployedDAGs" + ] }, - "type": "object" - }, - "Log": { - "properties": { - "best_bound": { - "type": "number", - "x-nullable": true - }, - "best_solution": { - "type": "number", - "x-nullable": true - }, - "cut_info": { - "x-nullable": true - }, - "first_relaxed": { - "type": "number", - "x-nullable": true - }, - "first_solution": { - "allOf": [ - { - "$ref": "#/definitions/FirstSolution" - } - ], - "x-nullable": true - }, - "gap": { - "type": "number", - "x-nullable": true - }, - "matrix": { - "allOf": [ - { - "$ref": "#/definitions/Matrix" + "post": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/DeployedDAG" } - ], - "x-nullable": true + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/DeployedDAG" + }, + "description": "" + } }, - "matrix_post": { - "allOf": [ - { - "$ref": "#/definitions/Matrix" + "description": "Post a new deployed dag", + "tags": [ + "DeployedDAGs" + ] + } + }, + "/dag/deployed/{idx}/": { + "put": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/DeployedDAGEdit" } - ], - "x-nullable": true + }, + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": {}, + "description": "Endpoint to update the schemas of a deployed DAG", + "tags": [ + "DAGs" + ] + } + }, + "/user/": { + "get": { + "parameters": [], + "responses": { + "default": { + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/UserEndpointResponse" + } + }, + "description": "" + } }, - "nodes": { - "type": "integer", - "x-nullable": true + "description": "Get all users", + "tags": [ + "Users" + ] + } + }, + "/user/{user_id}/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "user_id", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/UserDetailsEndpointResponse" + }, + "description": "" + } }, - "presolve": { - "allOf": [ - { - "$ref": "#/definitions/Presolve" + "description": "Get a user", + 
"tags": [ + "Users" + ] + }, + "put": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/UserEditRequest" } - ], - "x-nullable": true + }, + { + "in": "path", + "name": "user_id", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Edit a user", + "tags": [ + "Users" + ] + }, + "delete": { + "parameters": [ + { + "in": "path", + "name": "user_id", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Delete a user", + "tags": [ + "Users" + ] + } + }, + "/user/{user_id}/{make_admin}/": { + "put": { + "parameters": [ + { + "in": "path", + "name": "user_id", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "in": "path", + "name": "make_admin", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/UserEndpointResponse" + }, + "description": "" + } }, - "progress": { - "$ref": "#/definitions/Progress" + "description": "Toggle user into admin", + "tags": [ + "Users" + ] + } + }, + "/token/": { + "get": { + "parameters": [], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/TokenEndpointResponse" + }, + "description": "" + } }, - "rootTime": { - "type": "number", - "x-nullable": true + "description": "Check token", + "tags": [ + "Users" + ] + } + }, + "/schema/": { + "get": { + "parameters": [], + "responses": { + "default": { + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/SchemaListApp" + } + }, + "description": "" + } + }, + "description": "Get list of available apps", + "tags": [ + "Schemas" + ] + } + }, + "/schema/{dag_name}/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "dag_name", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/SchemaOneApp" + }, + "description": "" + } + }, + "description": "Get instance, solution and config schema", + "tags": [ + "Schemas" + ] + } + }, + "/example/{dag_name}/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "dag_name", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/ExampleListData" + } + }, + "description": "" + } + }, + "description": "Get lsit of example data from DAG", + "tags": [ + "DAG" + ] + } + }, + "/example/{dag_name}/{example_name}/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "dag_name", + "required": true, + "type": "string" + }, + { + "in": "path", + "name": "example_name", + "required": true, + "type": "string" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/ExampleDetailData" + }, + "description": "" + } + }, + "description": "Get example data from DAG", + "tags": [ + "DAG" + ] + } + }, + "/health/": { + "get": { + "parameters": [], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/HealthResponse" + }, + "description": "" + } + }, + "description": "Health check", + "tags": [ + "Health" + ] + } + }, + "/case/instance/": { + "post": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/CaseFromInstanceExecution" + } + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/CaseListResponse" + }, + "description": "" + } + }, + "description": "Create a new case 
from instance and execution", + "tags": [ + "Cases" + ] + } + }, + "/case/{idx}/copy/": { + "post": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/CaseListResponse" + }, + "description": "" + } + }, + "description": "Copies a case to a new one", + "tags": [ + "Cases" + ] + } + }, + "/case/": { + "get": { + "parameters": [ + { + "in": "query", + "name": "limit", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "offset", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "creation_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "creation_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "id", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "deletion_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "deletion_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "update_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "update_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "user_name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "last_name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "email", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "role_id", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "url_rule", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "description", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "schema", + "type": "string", + "required": false + } + ], + "responses": { + "default": { + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/CaseListAllWithIndicators" + } + }, + "description": "" + } + }, + "description": "Get all cases", + "tags": [ + "Cases" + ] + }, + "post": { + "parameters": [], + "responses": {}, + "description": "Create a new case from raw data", + "tags": [ + "Cases" + ] + } + }, + "/case/{idx}/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/CaseListAllWithIndicators" + }, + "description": "" + } + }, + "description": "Get one case", + "tags": [ + "Cases" + ] + }, + "put": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/CaseEditRequest" + } + }, + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Edit a case", + "tags": [ + "Cases" + ] + }, + "delete": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Delete a case", + "tags": [ + "Cases" + ] + } + }, + "/case/{idx}/data/": { + "patch": { + "parameters": [ + { + "in": "path", + "name": "idx", + 
"required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Patches the data of a given case", + "tags": [ + "Cases" + ] + }, + "get": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/CaseBase" + }, + "description": "" + } + }, + "description": "Get data of a case", + "tags": [ + "Cases" + ] + }, + "put": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/CaseEditRequest" + } + }, + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Edit a case", + "tags": [ + "Cases" + ] + }, + "delete": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Delete a case", + "tags": [ + "Cases" + ] + } + }, + "/case/{idx}/instance/": { + "post": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/CaseToInstanceResponse" + }, + "description": "" + } + }, + "description": "Copies the information stored in a case into a new instance", + "tags": [ + "Cases" + ] + } + }, + "/case/{idx1}/{idx2}/": { + "get": { + "parameters": [ + { + "in": "query", + "name": "data", + "type": "boolean", + "required": false + }, + { + "in": "query", + "name": "solution", + "type": "boolean", + "required": false + }, + { + "in": "path", + "name": "idx2", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "in": "path", + "name": "idx1", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/CaseCompareResponse" + }, + "description": "" + } + }, + "description": "Compares the data and / or solution of two given cases", + "tags": [ + "Cases" + ] + } + }, + "/action/": { + "get": { + "parameters": [], + "responses": { + "default": { + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/ActionsResponse" + } + }, + "description": "" + } + }, + "description": "Get all the actions", + "tags": [ + "Actions" + ] + } + }, + "/permission/": { + "get": { + "parameters": [], + "responses": { + "default": { + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/PermissionViewRoleResponse" + } + }, + "description": "" + } + }, + "description": "Get all the permissions assigned to the roles", + "tags": [ + "PermissionViewRole" + ] + }, + "post": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/PermissionViewRoleRequest" + } + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/PermissionViewRoleResponse" + }, + "description": "" + } + }, + "description": "Create a new permission", + "tags": [ + "PermissionViewRole" + ] + } + }, + "/permission/{idx}/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/PermissionViewRoleResponse" + }, + "description": "" + } + }, + "description": "Get one permission", + "tags": [ + "PermissionViewRole" + ] + }, + "put": { + 
"parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/PermissionViewRoleEditRequest" + } + }, + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Edit a permission", + "tags": [ + "PermissionViewRole" + ] + }, + "delete": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Delete a permission", + "tags": [ + "PermissionViewRole" + ] + } + }, + "/roles/": { + "get": { + "parameters": [], + "responses": { + "default": { + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/RolesResponse" + } + }, + "description": "" + } + }, + "description": "Gets all the roles", + "tags": [ + "Roles" + ] + }, + "post": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/RolesRequest" + } + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/RolesResponse" + }, + "description": "" + } + }, + "description": "Creates a new role", + "tags": [ + "Roles" + ] + } + }, + "/roles/{idx}/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/RolesResponse" + }, + "description": "" + } + }, + "description": "Gets one role", + "tags": [ + "Roles" + ] + }, + "put": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/RolesResponse" + } + }, + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Modifies one role", + "tags": [ + "Roles" + ] + }, + "delete": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Deletes one role", + "tags": [ + "Roles" + ] + } + }, + "/apiview/": { + "get": { + "parameters": [], + "responses": { + "default": { + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/ViewResponse" + } + }, + "description": "" + } + }, + "description": "Get all the api views", + "tags": [ + "ApiViews" + ] + } + }, + "/user/role/": { + "get": { + "parameters": [], + "responses": { + "default": { + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/UserRoleResponse" + } + }, + "description": "" + } + }, + "description": "Gets all the user role assignments", + "tags": [ + "User roles" + ] + }, + "post": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/UserRoleRequest" + } + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/UserRoleResponse" + }, + "description": "" + } + }, + "description": "Creates a new role assignment", + "tags": [ + "User roles" + ] + } + }, + "/user/role/{user_id}/{role_id}/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "user_id", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "in": "path", + "name": "role_id", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/UserRoleResponse" + }, + "description": "" + } + }, + "description": "Gets one user role 
assignment", + "tags": [ + "User roles" + ] + }, + "delete": { + "parameters": [ + { + "in": "path", + "name": "user_id", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "in": "path", + "name": "role_id", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Deletes one user role assignment", + "tags": [ + "User roles" + ] + } + }, + "/user/recover-password/": { + "put": { + "parameters": [ + { + "in": "body", + "required": false, + "name": "body", + "schema": { + "$ref": "#/definitions/RecoverPasswordRequest" + } + } + ], + "responses": {}, + "description": "Send email to create new password", + "tags": [ + "Users" + ] + } + }, + "/licences/": { + "get": { + "parameters": [], + "responses": {}, + "description": "Get list of all the libraries and their license information", + "tags": [ + "Licenses" + ] + } + }, + "/table/{table_name}/": { + "get": { + "parameters": [ + { + "in": "query", + "name": "limit", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "offset", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "creation_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "creation_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "id", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "deletion_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "deletion_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "update_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "update_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "user_name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "last_name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "email", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "role_id", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "url_rule", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "description", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "schema", + "type": "string", + "required": false + }, + { + "in": "path", + "name": "table_name", + "required": true, + "type": "string" + } + ], + "responses": {}, + "description": "Get all rows of a table", + "tags": [ + "Tables" + ] + } + }, + "/table/{table_name}/{idx}/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "table_name", + "required": true, + "type": "string" + }, + { + "in": "path", + "name": "idx", + "required": true, + "type": "string" + } + ], + "responses": {}, + "description": "Get a row", + "tags": [ + "Tables" + ] + } + }, + "/report/{idx}/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/Report" + }, + "description": "" + } + }, + "description": "Get details of a report", + "tags": [ + "Reports" + ] + }, + "delete": { + "parameters": [ + { + "in": "path", + 
"name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Delete a report", + "tags": [ + "Reports" + ] + } + }, + "/report/{idx}/edit/": { + "put": { + "parameters": [ + { + "in": "formData", + "name": "name", + "type": "string", + "required": false + }, + { + "in": "formData", + "name": "description", + "type": "string", + "required": false + }, + { + "in": "formData", + "name": "file_url", + "type": "string", + "required": false + }, + { + "in": "formData", + "name": "state", + "type": "integer", + "required": false + }, + { + "in": "formData", + "name": "state_message", + "type": "string", + "required": false + }, + { + "in": "path", + "name": "idx", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": {}, + "description": "Edit a report", + "tags": [ + "Reports" + ] + } + }, + "/report/": { + "get": { + "parameters": [ + { + "in": "query", + "name": "limit", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "offset", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "creation_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "creation_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "id", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "deletion_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "deletion_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "update_date_gte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "update_date_lte", + "type": "string", + "format": "date-time", + "required": false + }, + { + "in": "query", + "name": "user_name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "last_name", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "email", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "role_id", + "type": "integer", + "required": false + }, + { + "in": "query", + "name": "url_rule", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "description", + "type": "string", + "required": false + }, + { + "in": "query", + "name": "execution_id", + "type": "string", + "required": false + } + ], + "responses": { + "default": { + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/Report" + } + }, + "description": "" + } + }, + "description": "Get all reports", + "tags": [ + "Reports" + ] + }, + "post": { + "parameters": [ + { + "in": "formData", + "name": "name", + "type": "string", + "required": true + }, + { + "in": "formData", + "name": "description", + "type": "string", + "required": false + }, + { + "in": "formData", + "name": "execution_id", + "type": "string", + "required": true + }, + { + "in": "formData", + "name": "state", + "type": "integer", + "required": false + }, + { + "in": "formData", + "name": "state_message", + "type": "string", + "required": false + } + ], + "responses": { + "default": { + "schema": { + "$ref": "#/definitions/Report" + }, + "description": "" + } + }, + "description": "Create a report", + "tags": [ + "Reports" + ] + } + } + }, + "info": { + "title": 
"Cornflow API docs", + "version": "v1" + }, + "swagger": "2.0.0", + "definitions": { + "InstanceEndpointResponse": { + "type": "object", + "properties": { + "id": { + "type": "string" }, - "sol_code": { - "type": "integer", - "x-nullable": true + "description": { + "type": "string" }, - "solver": { - "type": "string", - "x-nullable": true + "data_hash": { + "type": "string" }, - "status": { + "created_at": { "type": "string", - "x-nullable": true - }, - "status_code": { - "type": "integer", - "x-nullable": true + "format": "date-time" }, - "time": { - "type": "number", - "x-nullable": true + "user_id": { + "type": "integer" }, - "version": { - "type": "string", - "x-nullable": true - } - }, - "type": "object" - }, - "LoginEndpointRequest": { - "properties": { - "password": { + "schema": { "type": "string" }, - "username": { + "name": { "type": "string" } }, "required": [ - "password", - "username" - ], - "type": "object" - }, - "Matrix": { - "properties": { - "constraints": { - "type": "integer" - }, - "nonzeros": { - "type": "integer" - }, - "variables": { - "type": "integer" - } - }, - "type": "object" + "id" + ] }, - "PatchOperation": { + "InstanceEditRequest": { + "type": "object", "properties": { - "op": { + "name": { "type": "string" }, - "path": { + "description": { "type": "string" }, - "value": {} - }, - "required": [ - "op", - "path" - ], - "type": "object" + "data": {} + } }, - "PermissionViewRoleEditRequest": { + "ReportSchemaBase": { + "type": "object", "properties": { - "action": { + "id": { + "type": "integer", "readOnly": true }, - "action_id": { - "type": "integer" - }, - "api_view": { - "readOnly": true + "file_url": { + "type": "string" }, - "api_view_id": { + "state": { "type": "integer" }, - "role": { - "readOnly": true - }, - "role_id": { - "type": "integer" + "name": { + "type": "string" } }, - "type": "object" + "required": [ + "name" + ] }, - "PermissionViewRoleRequest": { + "ConfigSchemaResponse": { + "type": "object", "properties": { - "action_id": { + "maxMemory": { "type": "integer" }, - "api_view_id": { + "timeLimit": { "type": "integer" }, - "role_id": { + "gapRel": { + "type": "number" + }, + "maxNodes": { "type": "integer" - } - }, - "type": "object" - }, - "PermissionViewRoleResponse": { - "properties": { - "action": { - "readOnly": true }, - "action_id": { + "seconds": { "type": "integer" }, - "api_view": { - "readOnly": true + "solver": { + "type": "string", + "default": "PULP_CBC_CMD" }, - "api_view_id": { - "type": "integer" + "checks_only": { + "type": "boolean" }, - "id": { - "type": "integer" + "warmStart": { + "type": "boolean" }, - "role": { - "readOnly": true + "gapAbs": { + "type": "number" }, - "role_id": { - "type": "integer" - } - }, - "type": "object" - }, - "Presolve": { - "properties": { - "cols": { - "type": "integer" + "mip": { + "type": "boolean" }, - "rows": { - "type": "integer" + "msg": { + "type": "boolean" }, - "time": { - "type": "number" - } - }, - "required": [ - "cols", - "rows", - "time" - ], - "type": "object" - }, - "Progress": { - "properties": { - "BestInteger": { - "items": { - "type": "string" - }, - "type": "array" + "threads": { + "type": "integer" }, - "CutsBestBound": { - "items": { - "type": "string" - }, - "type": "array" + "report": {}, + "keepFiles": { + "type": "boolean" }, - "Depth": { + "options": { + "type": "array", "items": { "type": "string" - }, - "type": "array" + } }, - "Gap": { - "items": { - "type": "string" - }, - "type": "array" + "logPath": { + "type": "string" }, - "IInf": { - "items": { - "type": 
"string" - }, - "type": "array" + "MIPGap": { + "type": "number" + } + } + }, + "ExecutionDetailsEndpointResponse": { + "type": "object", + "properties": { + "id": { + "type": "string" }, - "ItpNode": { + "reports": { + "type": "array", "items": { - "type": "string" - }, - "type": "array" + "$ref": "#/definitions/ReportSchemaBase" + } }, - "Node": { - "items": { - "type": "string" - }, - "type": "array" + "config": { + "$ref": "#/definitions/ConfigSchemaResponse" }, - "NodesLeft": { - "items": { - "type": "string" - }, - "type": "array" + "instance_id": { + "type": "string" }, - "Objective": { - "items": { - "type": "string" - }, - "type": "array" + "description": { + "type": "string" }, - "Time": { - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, - "RecoverPasswordRequest": { - "properties": { - "email": { + "data_hash": { "type": "string" - } - }, - "required": [ - "email" - ], - "type": "object" - }, - "RolesRequest": { - "properties": { - "name": { + }, + "message": { "type": "string" - } - }, - "type": "object" - }, - "RolesResponse": { - "properties": { - "id": { + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "user_id": { "type": "integer" }, - "name": { - "type": "string" - } - }, - "type": "object" - }, - "SchemaListApp": { - "properties": { - "name": { + "schema": { "type": "string" - } - }, - "required": [ - "name" - ], - "type": "object" - }, - "SchemaOneApp": { - "properties": { - "config": {}, - "instance": {}, + }, "name": { "type": "string" }, - "solution": {} - }, - "required": [ - "config", - "instance", - "name", - "solution" - ], - "type": "object" - }, - "TokenEndpointResponse": { - "properties": { - "valid": { + "state": { "type": "integer" } }, - "type": "object" + "required": [ + "id" + ] }, - "UserDetailsEndpointResponse": { + "InstanceDetailsEndpointResponse": { + "type": "object", "properties": { - "email": { + "id": { "type": "string" }, - "first_name": { + "description": { "type": "string" }, - "id": { + "data_hash": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "user_id": { "type": "integer" }, - "last_name": { + "schema": { "type": "string" }, - "username": { + "name": { "type": "string" + }, + "executions": { + "type": "array", + "items": { + "$ref": "#/definitions/ExecutionDetailsEndpointResponse" + } } }, - "type": "object" + "required": [ + "id" + ] }, - "UserEditRequest": { + "InstanceDataEndpointResponse": { + "type": "object", "properties": { - "email": { + "id": { "type": "string" }, - "first_name": { + "description": { "type": "string" }, - "last_name": { + "data_hash": { "type": "string" }, - "password": { + "data": {}, + "created_at": { + "type": "string", + "format": "date-time" + }, + "user_id": { + "type": "integer" + }, + "schema": { "type": "string" }, - "username": { + "name": { "type": "string" + }, + "checks": { + "x-nullable": true } }, - "type": "object" + "required": [ + "data", + "id" + ] }, - "UserEndpointResponse": { + "ExecutionEditRequest": { + "type": "object", "properties": { - "created_at": { + "name": { "type": "string" }, - "email": { + "description": { "type": "string" }, - "first_name": { + "data": {} + } + }, + "ExecutionDetailsEndpointWithIndicatorsResponse": { + "type": "object", + "properties": { + "id": { "type": "string" }, - "id": { + "reports": { + "type": "array", + "items": { + "$ref": "#/definitions/ReportSchemaBase" + } + }, + "config": { + "$ref": "#/definitions/ConfigSchemaResponse" + }, + 
"indicators": { + "readOnly": true + }, + "instance_id": { + "type": "string" + }, + "description": { + "type": "string" + }, + "data_hash": { + "type": "string" + }, + "message": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "user_id": { "type": "integer" }, - "last_name": { + "schema": { "type": "string" }, - "username": { + "name": { "type": "string" + }, + "state": { + "type": "integer" } }, - "type": "object" + "required": [ + "id" + ] }, - "UserRoleRequest": { + "ExecutionStatusEndpointUpdate": { + "type": "object", "properties": { - "role_id": { - "type": "integer" + "id": { + "type": "string" }, - "user_id": { + "status": { "type": "integer" } - }, - "type": "object" + } }, - "UserRoleResponse": { + "ExecutionStatusEndpointResponse": { + "type": "object", "properties": { "id": { + "type": "string" + }, + "state": { "type": "integer" }, - "role": { + "data_hash": { + "type": "string", "readOnly": true }, - "role_id": { - "type": "integer" + "message": { + "type": "string" + } + } + }, + "BasicLog": { + "type": "object", + "properties": { + "sol_code": { + "type": "integer", + "x-nullable": true }, - "user": { - "readOnly": true + "status": { + "type": "string", + "x-nullable": true }, - "user_id": { - "type": "integer" + "status_code": { + "type": "integer", + "x-nullable": true } - }, - "type": "object" + } }, - "UserSignupRequest": { + "ExecutionDataEndpointResponse": { + "type": "object", "properties": { - "email": { - "format": "email", + "id": { "type": "string" }, - "first_name": { + "reports": { + "type": "array", + "items": { + "$ref": "#/definitions/ReportSchemaBase" + } + }, + "config": { + "$ref": "#/definitions/ConfigSchemaResponse" + }, + "instance_id": { "type": "string" }, - "last_name": { + "description": { "type": "string" }, - "password": { + "data_hash": { "type": "string" }, - "username": { + "message": { + "type": "string" + }, + "data": {}, + "created_at": { + "type": "string", + "format": "date-time" + }, + "user_id": { + "type": "integer" + }, + "log": { + "$ref": "#/definitions/BasicLog" + }, + "schema": { + "type": "string" + }, + "name": { "type": "string" + }, + "checks": {}, + "state": { + "type": "integer" } }, "required": [ - "email", - "password", - "username" - ], - "type": "object" - } - }, - "info": { - "title": "Cornflow API docs", - "version": "v1" - }, - "paths": { - "/action/": { - "get": { - "description": "Get all the actions", - "parameters": [], - "responses": { - "default": { - "description": "", - "schema": { - "items": { - "$ref": "#/definitions/ActionsResponse" - }, - "type": "array" - } - } - }, - "tags": [ - "Actions" - ] - } + "id" + ] }, - "/apiview/": { - "get": { - "description": "Get all the api views", - "parameters": [], - "responses": { - "default": { - "description": "", - "schema": { - "items": { - "$ref": "#/definitions/ApiViewResponse" - }, - "type": "array" - } + "Progress": { + "type": "object", + "properties": { + "NodesLeft": { + "type": "array", + "items": { + "type": "string" } }, - "tags": [ - "ApiViews" - ] - } - }, - "/case/": { - "get": { - "description": "Get all cases", - "parameters": [ - { - "in": "query", - "name": "limit", - "required": false, - "type": "integer" - }, - { - "in": "query", - "name": "offset", - "required": false, - "type": "integer" - }, - { - "format": "date-time", - "in": "query", - "name": "creation_date_gte", - "required": false, + "IInf": { + "type": "array", + "items": { "type": "string" - }, - { - "format": "date-time", - "in": "query", 
- "name": "creation_date_lte", - "required": false, + } + }, + "Gap": { + "type": "array", + "items": { "type": "string" - }, - { - "in": "query", - "name": "schema", - "required": false, + } + }, + "CutsBestBound": { + "type": "array", + "items": { "type": "string" } - ], - "responses": { - "default": { - "description": "", - "schema": { - "items": { - "$ref": "#/definitions/CaseListResponse" - }, - "type": "array" - } + }, + "Node": { + "type": "array", + "items": { + "type": "string" } }, - "tags": [ - "Cases" - ] - }, - "post": { - "description": "Create a new case from raw data", - "parameters": [], - "responses": {}, - "tags": [ - "Cases" - ] - } - }, - "/case/instance/": { - "post": { - "description": "Create a new case from instance and execution", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/CaseFromInstanceExecution" - } + "Depth": { + "type": "array", + "items": { + "type": "string" } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/CaseListResponse" - } + }, + "ItpNode": { + "type": "array", + "items": { + "type": "string" } }, - "tags": [ - "Cases" - ] - } - }, - "/case/{idx1}/{idx2}/": { - "get": { - "description": "Compares the data and / or solution of two given cases", - "parameters": [ - { - "in": "query", - "name": "data", - "required": false, - "type": "boolean" - }, - { - "in": "query", - "name": "solution", - "required": false, - "type": "boolean" - }, - { - "format": "int32", - "in": "path", - "name": "idx1", - "required": true, - "type": "integer" - }, - { - "format": "int32", - "in": "path", - "name": "idx2", - "required": true, - "type": "integer" + "Time": { + "type": "array", + "items": { + "type": "string" } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/CaseCompareResponse" - } + }, + "BestInteger": { + "type": "array", + "items": { + "type": "string" } }, - "tags": [ - "Cases" - ] + "Objective": { + "type": "array", + "items": { + "type": "string" + } + } } }, - "/case/{idx}/": { - "delete": { - "description": "Delete a case", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" - } - ], - "responses": {}, - "tags": [ - "Cases" - ] + "Presolve": { + "type": "object", + "properties": { + "cols": { + "type": "integer" + }, + "rows": { + "type": "integer" + }, + "time": { + "type": "number" + } }, - "get": { - "description": "Get one case", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/CaseListResponse" - } - } + "required": [ + "cols", + "rows", + "time" + ] + }, + "FirstSolution": { + "type": "object", + "properties": { + "Node": { + "type": "integer", + "x-nullable": true }, - "tags": [ - "Cases" - ] + "NodesLeft": { + "type": "integer", + "x-nullable": true + }, + "BestInteger": { + "type": "number", + "x-nullable": true + }, + "CutsBestBound": { + "type": "number", + "x-nullable": true + } }, - "put": { - "description": "Edit a case", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/CaseEditRequest" + "required": [ + "BestInteger", + "CutsBestBound", + "Node", + "NodesLeft" + ] + }, + "Matrix": { + "type": "object", + "properties": { + "nonzeros": { + "type": "integer" + }, + "constraints": { + 
"type": "integer" + }, + "variables": { + "type": "integer" + } + } + }, + "Log": { + "type": "object", + "properties": { + "progress": { + "$ref": "#/definitions/Progress" + }, + "presolve": { + "x-nullable": true, + "allOf": [ + { + "$ref": "#/definitions/Presolve" + } + ] + }, + "first_solution": { + "x-nullable": true, + "allOf": [ + { + "$ref": "#/definitions/FirstSolution" + } + ] + }, + "solver": { + "type": "string", + "x-nullable": true + }, + "cut_info": { + "x-nullable": true + }, + "sol_code": { + "type": "integer", + "x-nullable": true + }, + "status": { + "type": "string", + "x-nullable": true + }, + "version": { + "type": "string", + "x-nullable": true + }, + "best_solution": { + "type": "number", + "x-nullable": true + }, + "gap": { + "type": "number", + "x-nullable": true + }, + "time": { + "type": "number", + "x-nullable": true + }, + "matrix": { + "x-nullable": true, + "allOf": [ + { + "$ref": "#/definitions/Matrix" } - }, - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" - } - ], - "responses": {}, - "tags": [ - "Cases" - ] - } - }, - "/case/{idx}/copy/": { - "post": { - "description": "Copies a case to a new one", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/CaseListResponse" + ] + }, + "best_bound": { + "type": "number", + "x-nullable": true + }, + "matrix_post": { + "x-nullable": true, + "allOf": [ + { + "$ref": "#/definitions/Matrix" } - } + ] }, - "tags": [ - "Cases" - ] + "first_relaxed": { + "type": "number", + "x-nullable": true + }, + "rootTime": { + "type": "number", + "x-nullable": true + }, + "nodes": { + "type": "integer", + "x-nullable": true + }, + "status_code": { + "type": "integer", + "x-nullable": true + } } }, - "/case/{idx}/data/": { - "delete": { - "description": "Delete a case", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" - } - ], - "responses": {}, - "tags": [ - "Cases" - ] - }, - "get": { - "description": "Get data of a case", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/CaseBase" - } - } + "ExecutionLogEndpointResponse": { + "type": "object", + "properties": { + "id": { + "type": "string" }, - "tags": [ - "Cases" - ] - }, - "patch": { - "description": "Patches the data of a given case", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" + "reports": { + "type": "array", + "items": { + "$ref": "#/definitions/ReportSchemaBase" } - ], - "responses": {}, - "tags": [ - "Cases" - ] + }, + "config": { + "$ref": "#/definitions/ConfigSchemaResponse" + }, + "indicators": { + "readOnly": true + }, + "instance_id": { + "type": "string" + }, + "description": { + "type": "string" + }, + "log_text": { + "type": "string" + }, + "data_hash": { + "type": "string" + }, + "message": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "user_id": { + "type": "integer" + }, + "log": { + "$ref": "#/definitions/Log" + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "state": { + "type": "integer" + } }, - "put": { - "description": "Edit a case", - "parameters": [ - { - "in": 
"body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/CaseEditRequest" - } - }, - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" - } - ], - "responses": {}, - "tags": [ - "Cases" - ] - } + "required": [ + "id" + ] }, - "/case/{idx}/instance/": { - "post": { - "description": "Copies the information stored in a case into a new instance", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/CaseToInstanceResponse" - } - } + "Config": { + "type": "object", + "properties": { + "maxMemory": { + "type": "integer" }, - "tags": [ - "Cases" - ] - } - }, - "/dag/": { - "post": { - "description": "Create an execution manually.", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/ExecutionDagPostRequest" - } - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/ExecutionDetailsEndpointResponse" - } + "timeLimit": { + "type": "integer" + }, + "gapRel": { + "type": "number" + }, + "maxNodes": { + "type": "integer" + }, + "seconds": { + "type": "integer" + }, + "solver": { + "type": "string", + "default": "PULP_CBC_CMD" + }, + "warmStart": { + "type": "boolean" + }, + "gapAbs": { + "type": "number" + }, + "mip": { + "type": "boolean" + }, + "msg": { + "type": "boolean" + }, + "threads": { + "type": "integer" + }, + "report": {}, + "keepFiles": { + "type": "boolean" + }, + "options": { + "type": "array", + "items": { + "type": "string" } }, - "tags": [ - "DAGs" - ] + "logPath": { + "type": "string" + }, + "MIPGap": { + "type": "number" + } } }, - "/dag/deployed/": { - "get": { - "description": "Get list of deployed dags registered on the data base", - "parameters": [], - "responses": { - "default": { - "description": "", - "schema": { - "items": { - "$ref": "#/definitions/DeployedDAG" - }, - "type": "array" - } - } - }, - "tags": [ - "DeployedDAGs" - ] + "ReLaunchExecutionRequest": { + "type": "object", + "properties": { + "config": { + "$ref": "#/definitions/Config" + } }, - "post": { - "description": "Post a new deployed dag", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/DeployedDAG" - } - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/DeployedDAG" - } - } - }, - "tags": [ - "DeployedDAGs" - ] - } + "required": [ + "config" + ] }, - "/dag/instance/{idx}/": { - "put": { - "description": "Endpoint to save instance checks performed on the DAG", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/InstanceCheckRequest" - } - }, - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" - } - ], - "responses": {}, - "tags": [ - "DAGs" - ] - } + "ExecutionRequest": { + "type": "object", + "properties": { + "config": {}, + "instance_id": { + "type": "string" + }, + "description": { + "type": "string" + }, + "data": {}, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + } + }, + "required": [ + "config", + "instance_id", + "name" + ] }, - "/dag/{idx}/": { - "get": { - "description": "Get input data and configuration for an execution", - "parameters": [ - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" + 
"ExecutionDetailsWithIndicatorsAndLogResponse": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "reports": { + "type": "array", + "items": { + "$ref": "#/definitions/ReportSchemaBase" } - ], - "responses": {}, - "tags": [ - "DAGs" - ] + }, + "config": { + "$ref": "#/definitions/ConfigSchemaResponse" + }, + "indicators": { + "readOnly": true + }, + "instance_id": { + "type": "string" + }, + "description": { + "type": "string" + }, + "data_hash": { + "type": "string" + }, + "message": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "user_id": { + "type": "integer" + }, + "log": { + "$ref": "#/definitions/BasicLog" + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "state": { + "type": "integer" + } }, - "put": { - "description": "Edit an execution", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/ExecutionDagRequest" - } - }, - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" - } - ], - "responses": {}, - "tags": [ - "DAGs" - ] + "required": [ + "id" + ] + }, + "ExecutionDagRequest": { + "type": "object", + "properties": { + "log_text": { + "type": "string" + }, + "data": {}, + "solution_schema": { + "type": "string", + "x-nullable": true + }, + "checks": {}, + "state": { + "type": "integer" + }, + "log_json": { + "$ref": "#/definitions/Log" + } } }, - "/execution/": { - "get": { - "description": "Get all executions", - "parameters": [ - { - "in": "query", - "name": "limit", - "required": false, - "type": "integer" - }, - { - "in": "query", - "name": "offset", - "required": false, - "type": "integer" - }, - { - "format": "date-time", - "in": "query", - "name": "creation_date_gte", - "required": false, - "type": "string" - }, - { - "format": "date-time", - "in": "query", - "name": "creation_date_lte", - "required": false, - "type": "string" - }, - { - "in": "query", - "name": "schema", - "required": false, - "type": "string" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "items": { - "$ref": "#/definitions/ExecutionDetailsEndpointWithIndicatorsResponse" - }, - "type": "array" - } - } + "ExecutionDagPostRequest": { + "type": "object", + "properties": { + "config": {}, + "instance_id": { + "type": "string" + }, + "description": { + "type": "string" + }, + "log_text": { + "type": "string" + }, + "data": {}, + "solution_schema": { + "type": "string", + "x-nullable": true + }, + "schema": { + "type": "string" }, - "tags": [ - "Executions" - ] - }, - "post": { - "description": "Create an execution", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/ExecutionRequest" - } - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/ExecutionDetailsEndpointResponse" - } - } + "name": { + "type": "string" }, - "tags": [ - "Executions" - ] + "checks": {}, + "state": { + "type": "integer" + }, + "log_json": { + "$ref": "#/definitions/Log" + } + }, + "required": [ + "config", + "instance_id", + "name" + ] + }, + "InstanceCheckRequest": { + "type": "object", + "properties": { + "checks": {} } }, - "/execution/{idx}/": { - "delete": { - "description": "Delete an execution", - "parameters": [ - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" - } - ], - "responses": {}, - "tags": [ - "Executions" - ] - }, - "get": { - "description": "Get details of an 
execution", - "parameters": [ - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/ExecutionDetailsEndpointWithIndicatorsResponse" - } - } + "CaseCheckRequest": { + "type": "object", + "properties": { + "solution_checks": {}, + "checks": {} + } + }, + "DeployedDAG": { + "type": "object", + "properties": { + "id": { + "type": "string" }, - "tags": [ - "Executions" - ] + "description": { + "type": "string", + "x-nullable": true + }, + "config_schema": {}, + "instance_checks_schema": {}, + "instance_schema": {}, + "solution_schema": {}, + "solution_checks_schema": {} }, - "post": { - "description": "Stop an execution", - "parameters": [ - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" - } - ], - "responses": {}, - "tags": [ - "Executions" - ] + "required": [ + "config_schema", + "id", + "instance_checks_schema", + "instance_schema", + "solution_checks_schema", + "solution_schema" + ] + }, + "DeployedDAGEdit": { + "type": "object", + "properties": { + "description": { + "type": "string", + "x-nullable": true + }, + "config_schema": {}, + "instance_checks_schema": {}, + "instance_schema": {}, + "solution_schema": {}, + "solution_checks_schema": {} }, - "put": { - "description": "Edit an execution", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/ExecutionEditRequest" - } - }, - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" - } - ], - "responses": {}, - "tags": [ - "Executions" - ] - } + "required": [ + "instance_checks_schema", + "solution_checks_schema" + ] }, - "/execution/{idx}/data/": { - "get": { - "description": "Get solution data of an execution", - "parameters": [ - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/ExecutionDataEndpointResponse" - } - } + "UserEndpointResponse": { + "type": "object", + "properties": { + "id": { + "type": "integer" }, - "tags": [ - "Executions" - ] - } - }, - "/execution/{idx}/log/": { - "get": { - "description": "Get log of an execution", - "parameters": [ - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/ExecutionLogEndpointResponse" - } - } + "username": { + "type": "string" }, - "tags": [ - "Executions" - ] + "last_name": { + "type": "string" + }, + "pwd_last_change": { + "type": "string" + }, + "created_at": { + "type": "string" + }, + "email": { + "type": "string" + }, + "first_name": { + "type": "string" + } } }, - "/execution/{idx}/status/": { - "get": { - "description": "Get status of an execution", - "parameters": [ - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/ExecutionStatusEndpointResponse" - } - } + "UserEditRequest": { + "type": "object", + "properties": { + "username": { + "type": "string" }, - "tags": [ - "Executions" - ] + "last_name": { + "type": "string" + }, + "pwd_last_change": { + "type": "string", + "format": "date-time" + }, + "password": { + "type": "string" + }, + "email": { + "type": "string" + }, + "first_name": { + "type": "string" + } } }, - "/health/": { - "get": { - "description": "Health check", - 
"parameters": [], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/HealthResponse" - } - } + "UserDetailsEndpointResponse": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "username": { + "type": "string" + }, + "last_name": { + "type": "string" }, - "tags": [ - "Health" - ] + "pwd_last_change": { + "type": "string" + }, + "email": { + "type": "string" + }, + "first_name": { + "type": "string" + } } }, - "/instance/": { - "get": { - "description": "Get all instances", - "parameters": [ - { - "in": "query", - "name": "limit", - "required": false, - "type": "integer" - }, - { - "in": "query", - "name": "offset", - "required": false, - "type": "integer" - }, - { - "format": "date-time", - "in": "query", - "name": "creation_date_gte", - "required": false, - "type": "string" - }, - { - "format": "date-time", - "in": "query", - "name": "creation_date_lte", - "required": false, - "type": "string" - }, - { - "in": "query", - "name": "schema", - "required": false, - "type": "string" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "items": { - "$ref": "#/definitions/InstanceEndpointResponse" - }, - "type": "array" - } - } + "TokenEndpointResponse": { + "type": "object", + "properties": { + "valid": { + "type": "integer" + } + } + }, + "SchemaListApp": { + "type": "object", + "properties": { + "name": { + "type": "string" + } + }, + "required": [ + "name" + ] + }, + "SchemaOneApp": { + "type": "object", + "properties": { + "config": {}, + "solution_checks": {}, + "solution": {}, + "instance_checks": {}, + "name": { + "type": "string" }, - "tags": [ - "Instances" - ] + "instance": {} }, - "post": { - "description": "Create an instance", - "parameters": [], - "responses": {}, - "tags": [ - "Instances" - ] - } + "required": [ + "config", + "instance", + "instance_checks", + "name", + "solution", + "solution_checks" + ] }, - "/instance/{idx}/": { - "delete": { - "description": "Delete an instance", - "parameters": [ - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" - } - ], - "responses": {}, - "tags": [ - "Instances" - ] + "ExampleListData": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + } }, - "get": { - "description": "Get one instance", - "parameters": [ - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/InstanceDetailsEndpointResponse" - } - } + "required": [ + "name" + ] + }, + "ExampleDetailData": { + "type": "object", + "properties": { + "name": { + "type": "string" }, - "tags": [ - "Instances" - ] + "instance": {}, + "solution": {}, + "description": { + "type": "string" + } }, - "put": { - "description": "Edit an instance", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/InstanceEditRequest" - } - }, - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" - } - ], - "responses": {}, - "tags": [ - "Instances" - ] - } + "required": [ + "instance", + "name" + ] }, - "/instance/{idx}/data/": { - "get": { - "description": "Get input data of an instance", - "parameters": [ - { - "in": "path", - "name": "idx", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/InstanceDataEndpointResponse" - } - } + 
"HealthResponse": { + "type": "object", + "properties": { + "cornflow_status": { + "type": "string" }, - "tags": [ - "Instances" - ] + "airflow_status": { + "type": "string" + } } }, - "/instancefile/": { - "post": { - "description": "Create an instance from an mps file", - "parameters": [ - { - "in": "formData", - "name": "name", - "required": true, - "type": "string" - }, - { - "in": "formData", - "name": "description", - "required": false, - "type": "string" - }, - { - "in": "formData", - "name": "minimize", - "required": false, - "type": "boolean" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/InstanceDetailsEndpointResponse" - } + "CaseFromInstanceExecution": { + "type": "object", + "properties": { + "instance_id": { + "type": "string" + }, + "description": { + "type": "string" + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "execution_id": { + "type": "string" + }, + "parent_id": { + "type": "integer", + "x-nullable": true + } + }, + "required": [ + "name", + "schema" + ] + }, + "CaseListResponse": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "solution_hash": { + "type": "string" + }, + "description": { + "type": "string" + }, + "path": { + "type": "string" + }, + "dependents": { + "type": "array", + "items": { + "type": "integer" } }, - "tags": [ - "Instances" - ] + "updated_at": { + "type": "string", + "format": "date-time" + }, + "data_hash": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "user_id": { + "type": "integer" + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "is_dir": {} } }, - "/login/": { - "post": { - "description": "Log in", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/LoginEndpointRequest" - } + "CaseListAllWithIndicators": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "solution_hash": { + "type": "string" + }, + "indicators": { + "readOnly": true + }, + "description": { + "type": "string" + }, + "path": { + "type": "string" + }, + "dependents": { + "type": "array", + "items": { + "type": "integer" } - ], - "responses": {}, - "tags": [ - "Users" - ] + }, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "data_hash": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "user_id": { + "type": "integer" + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "is_dir": {} } }, - "/permission/": { - "get": { - "description": "Get all the permissions assigned to the roles", - "parameters": [], - "responses": { - "default": { - "description": "", - "schema": { - "items": { - "$ref": "#/definitions/PermissionViewRoleResponse" - }, - "type": "array" - } - } + "CaseEditRequest": { + "type": "object", + "properties": { + "name": { + "type": "string" }, - "tags": [ - "PermissionViewRole" - ] - }, - "post": { - "description": "Create a new permission", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/PermissionViewRoleRequest" - } - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/PermissionViewRoleResponse" - } - } + "description": { + "type": "string" }, - "tags": [ - "PermissionViewRole" - ] + "parent_id": { + "type": "integer", + "x-nullable": true + } } }, - 
"/permission/{idx}/": { - "delete": { - "description": "Delete a permission", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" - } - ], - "responses": {}, - "tags": [ - "PermissionViewRole" - ] - }, - "get": { - "description": "Get one permission", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/PermissionViewRoleResponse" - } - } + "CaseBase": { + "type": "object", + "properties": { + "id": { + "type": "integer" }, - "tags": [ - "PermissionViewRole" - ] - }, - "put": { - "description": "Edit a permission", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/PermissionViewRoleEditRequest" - } - }, - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, + "solution_hash": { + "type": "string" + }, + "indicators": { + "readOnly": true + }, + "description": { + "type": "string" + }, + "path": { + "type": "string" + }, + "dependents": { + "type": "array", + "items": { "type": "integer" } - ], - "responses": {}, - "tags": [ - "PermissionViewRole" - ] - } - }, - "/roles/": { - "get": { - "description": "Gets all the roles", - "parameters": [], - "responses": { - "default": { - "description": "", - "schema": { - "items": { - "$ref": "#/definitions/RolesResponse" - }, - "type": "array" - } - } }, - "tags": [ - "Roles" - ] - }, - "post": { - "description": "Creates a new role", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/RolesRequest" - } - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/RolesResponse" - } - } + "solution_checks": {}, + "updated_at": { + "type": "string", + "format": "date-time" + }, + "solution": {}, + "data_hash": { + "type": "string" + }, + "data": {}, + "created_at": { + "type": "string", + "format": "date-time" + }, + "user_id": { + "type": "integer" + }, + "schema": { + "type": "string" }, - "tags": [ - "Roles" - ] + "name": { + "type": "string" + }, + "checks": {}, + "is_dir": {} } }, - "/roles/{idx}/": { - "delete": { - "description": "Deletes one role", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" - } - ], - "responses": {}, - "tags": [ - "Roles" - ] + "CaseToInstanceResponse": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "schema": { + "type": "string" + } }, - "get": { - "description": "Gets one role", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/RolesResponse" - } - } + "required": [ + "id", + "schema" + ] + }, + "BasePatchOperation": { + "type": "object", + "properties": { + "value": {}, + "op": { + "type": "string" }, - "tags": [ - "Roles" - ] + "path": { + "type": "string" + } }, - "put": { - "description": "Modifies one role", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/RolesResponse" - } - }, - { - "format": "int32", - "in": "path", - "name": "idx", - "required": true, - "type": "integer" - } - ], - "responses": {}, - "tags": [ - "Roles" - ] - } + "required": [ + "op", + "path" + ] }, - 
"/schema/": { - "get": { - "description": "Get list of available apps", - "parameters": [], - "responses": { - "default": { - "description": "", - "schema": { - "items": { - "$ref": "#/definitions/SchemaListApp" - }, - "type": "array" - } + "CaseCompareResponse": { + "type": "object", + "properties": { + "solution_patch": { + "type": "array", + "items": { + "$ref": "#/definitions/BasePatchOperation" } }, - "tags": [ - "Schemas" - ] + "data_patch": { + "type": "array", + "items": { + "$ref": "#/definitions/BasePatchOperation" + } + } } }, - "/schema/{dag_name}/": { - "get": { - "description": "Get instance, solution and config schema", - "parameters": [ - { - "in": "path", - "name": "dag_name", - "required": true, - "type": "string" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/SchemaOneApp" - } - } + "ActionsResponse": { + "type": "object", + "properties": { + "id": { + "type": "integer" }, - "tags": [ - "Schemas" - ] + "name": { + "readOnly": true + } } }, - "/signup/": { - "post": { - "description": "Sign up", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/UserSignupRequest" - } - } - ], - "responses": {}, - "tags": [ - "Users" - ] + "PermissionViewRoleRequest": { + "type": "object", + "properties": { + "action_id": { + "type": "integer" + }, + "api_view_id": { + "type": "integer" + }, + "role_id": { + "type": "integer" + } } }, - "/token/": { - "get": { - "description": "Check token", - "parameters": [], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/TokenEndpointResponse" - } - } + "PermissionViewRoleResponse": { + "type": "object", + "properties": { + "id": { + "type": "integer" }, - "tags": [ - "Users" - ] + "api_view_id": { + "type": "integer" + }, + "action_id": { + "type": "integer" + }, + "api_view": { + "readOnly": true + }, + "action": { + "readOnly": true + }, + "role": { + "readOnly": true + }, + "role_id": { + "type": "integer" + } } }, - "/user/": { - "get": { - "description": "Get all users", - "parameters": [], - "responses": { - "default": { - "description": "", - "schema": { - "items": { - "$ref": "#/definitions/UserEndpointResponse" - }, - "type": "array" - } - } + "PermissionViewRoleEditRequest": { + "type": "object", + "properties": { + "api_view_id": { + "type": "integer" }, - "tags": [ - "Users" - ] + "action_id": { + "type": "integer" + }, + "api_view": { + "readOnly": true + }, + "action": { + "readOnly": true + }, + "role": { + "readOnly": true + }, + "role_id": { + "type": "integer" + } } }, - "/user/recover-password/": { - "put": { - "description": "Send email to create new password", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/RecoverPasswordRequest" - } - } - ], - "responses": {}, - "tags": [ - "Users" - ] + "RolesRequest": { + "type": "object", + "properties": { + "name": { + "type": "string" + } + } + }, + "RolesResponse": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + } } }, - "/user/role/": { - "get": { - "description": "Gets all the user role assignments", - "parameters": [], - "responses": { - "default": { - "description": "", - "schema": { - "items": { - "$ref": "#/definitions/UserRoleResponse" - }, - "type": "array" - } - } + "ViewResponse": { + "type": "object", + "properties": { + "id": { + "type": "integer" }, - "tags": [ - "User roles" - ] - }, - 
"post": { - "description": "Creates a new role assignment", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/UserRoleRequest" - } - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/UserRoleResponse" - } - } + "name": { + "readOnly": true }, - "tags": [ - "User roles" - ] + "description": { + "type": "string" + }, + "url_rule": { + "type": "string" + } } }, - "/user/role/{user_id}/{role_id}/": { - "delete": { - "description": "Deletes one user role assignment", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "role_id", - "required": true, - "type": "integer" - }, - { - "format": "int32", - "in": "path", - "name": "user_id", - "required": true, - "type": "integer" - } - ], - "responses": {}, - "tags": [ - "User roles" - ] - }, - "get": { - "description": "Gets one user role assignment", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "role_id", - "required": true, - "type": "integer" - }, - { - "format": "int32", - "in": "path", - "name": "user_id", - "required": true, - "type": "integer" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/UserRoleResponse" - } - } + "UserRoleRequest": { + "type": "object", + "properties": { + "user_id": { + "type": "integer" }, - "tags": [ - "User roles" - ] + "role_id": { + "type": "integer" + } } }, - "/user/{user_id}/": { - "delete": { - "description": "Delete a user", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "user_id", - "required": true, - "type": "integer" - } - ], - "responses": {}, - "tags": [ - "Users" - ] - }, - "get": { - "description": "Get a user", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "user_id", - "required": true, - "type": "integer" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/UserDetailsEndpointResponse" - } - } + "UserRoleResponse": { + "type": "object", + "properties": { + "id": { + "type": "integer" }, - "tags": [ - "Users" - ] - }, - "put": { - "description": "Edit a user", - "parameters": [ - { - "in": "body", - "name": "body", - "required": false, - "schema": { - "$ref": "#/definitions/UserEditRequest" - } - }, - { - "format": "int32", - "in": "path", - "name": "user_id", - "required": true, - "type": "integer" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/UserDetailsEndpointResponse" - } - } + "user_id": { + "type": "integer" }, - "tags": [ - "Users" - ] + "user": { + "readOnly": true + }, + "role": { + "readOnly": true + }, + "role_id": { + "type": "integer" + } } }, - "/user/{user_id}/{make_admin}/": { - "put": { - "description": "Toggle user into admin", - "parameters": [ - { - "format": "int32", - "in": "path", - "name": "make_admin", - "required": true, - "type": "integer" - }, - { - "format": "int32", - "in": "path", - "name": "user_id", - "required": true, - "type": "integer" - } - ], - "responses": { - "default": { - "description": "", - "schema": { - "$ref": "#/definitions/UserEndpointResponse" - } - } - }, - "tags": [ - "Users" - ] - } + "RecoverPasswordRequest": { + "type": "object", + "properties": { + "email": { + "type": "string" + } + }, + "required": [ + "email" + ] + }, + "Report": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "readOnly": true + }, + "description": { + "type": "string" + }, + "updated_at": { + "type": 
"string", + "format": "date-time", + "readOnly": true + }, + "file_url": { + "type": "string" + }, + "deleted_at": { + "type": "string", + "format": "date-time", + "readOnly": true + }, + "user_id": { + "type": "integer" + }, + "created_at": { + "type": "string", + "format": "date-time", + "readOnly": true + }, + "name": { + "type": "string" + }, + "state_message": { + "type": "string" + }, + "state": { + "type": "integer" + }, + "execution_id": { + "type": "string" + } + }, + "required": [ + "execution_id", + "name" + ] } - }, - "swagger": "2.0.0" -} + } +} \ No newline at end of file From 37cf3a28cc45a9980f674277f41a33052fffc483 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 31 Jul 2024 11:17:53 +0200 Subject: [PATCH 62/84] graph coloring report --- cornflow-dags/DAG/graph_coloring/__init__.py | 16 +- .../DAG/graph_coloring/core/experiment.py | 9 +- .../DAG/graph_coloring/core/instance.py | 31 +++- .../DAG/graph_coloring/report/.gitignore | 1 + .../DAG/graph_coloring/report/report.qmd | 143 ++++++++++++++++++ .../DAG/graph_coloring/solvers/cp_model.py | 8 +- cornflow-dags/requirements-dev.txt | 1 - cornflow-dags/requirements.txt | 24 ++- cornflow-dags/tests/test_dags.py | 11 ++ 9 files changed, 214 insertions(+), 30 deletions(-) create mode 100644 cornflow-dags/DAG/graph_coloring/report/.gitignore create mode 100644 cornflow-dags/DAG/graph_coloring/report/report.qmd diff --git a/cornflow-dags/DAG/graph_coloring/__init__.py b/cornflow-dags/DAG/graph_coloring/__init__.py index c0de3aa39..46e7cc76d 100644 --- a/cornflow-dags/DAG/graph_coloring/__init__.py +++ b/cornflow-dags/DAG/graph_coloring/__init__.py @@ -21,28 +21,18 @@ class GraphColoring(ApplicationCore): @property def test_cases(self) -> List[Dict]: - def read_file(filePath): - with open(filePath, "r") as f: - contents = f.read().splitlines() - - pairs = ( - pt.TupList(contents[1:]) - .vapply(lambda v: v.split(" ")) - .vapply(lambda v: dict(n1=int(v[0]), n2=int(v[1]))) - ) - return dict(pairs=pairs) file_dir = os.path.join(os.path.dirname(__file__), "data") - + get_file = lambda name: os.path.join(file_dir, name) return [ { "name": "gc_4_1", - "instance": read_file(os.path.join(file_dir, "gc_4_1")), + "instance": Instance.from_txt_file(get_file("gc_4_1")).to_dict(), "description": "Example data with 4 pairs", }, { "name": "gc_50_1", - "instance": read_file(os.path.join(file_dir, "gc_50_1")), + "instance": Instance.from_txt_file(get_file("gc_50_1")).to_dict(), "description": "Example data with 50 pairs", }, ] diff --git a/cornflow-dags/DAG/graph_coloring/core/experiment.py b/cornflow-dags/DAG/graph_coloring/core/experiment.py index 9a661fe83..fee26cc00 100644 --- a/cornflow-dags/DAG/graph_coloring/core/experiment.py +++ b/cornflow-dags/DAG/graph_coloring/core/experiment.py @@ -1,5 +1,6 @@ from cornflow_client import ExperimentCore from cornflow_client.core.tools import load_json +from pytups import TupList from .instance import Instance from .solution import Solution import os @@ -29,7 +30,11 @@ def check_solution(self, *args, **kwargs) -> dict: # if a pair of nodes have the same colors: that's a problem colors = self.solution.get_assignments() pairs = self.instance.get_pairs() + nodes = self.instance.get_nodes() + missing_colors = TupList(set(nodes) - colors.keys()) errors = [ - {"n1": n1, "n2": n2} for (n1, n2) in pairs if colors[n1] == colors[n2] + {"n1": n1, "n2": n2} + for (n1, n2) in pairs + if n1 in colors and n2 in colors and colors[n1] == colors[n2] ] - return dict(pairs=errors) + return dict(pairs=errors, 
missing=missing_colors) diff --git a/cornflow-dags/DAG/graph_coloring/core/instance.py b/cornflow-dags/DAG/graph_coloring/core/instance.py index 6e40a680c..6dba31120 100644 --- a/cornflow-dags/DAG/graph_coloring/core/instance.py +++ b/cornflow-dags/DAG/graph_coloring/core/instance.py @@ -2,6 +2,7 @@ from cornflow_client import InstanceCore, get_empty_schema from cornflow_client.core.tools import load_json import pytups as pt +import networkx as nx class Instance(InstanceCore): @@ -9,4 +10,32 @@ class Instance(InstanceCore): schema_checks = get_empty_schema() def get_pairs(self): - return pt.TupList((el["n1"], el["n2"]) for el in self.data["pairs"]) + return pt.TupList(self.data["pairs"]).take(["n1", "n2"]) + + def get_nodes(self): + pairs = self.data["pairs"] + n1s = pt.TupList(pairs).vapply(lambda v: v["n1"]) + n2s = pt.TupList(pairs).vapply(lambda v: v["n2"]) + return (n1s + n2s).unique2() + + @classmethod + def from_txt_file(cls, filePath): + with open(filePath, "r") as f: + contents = f.read().splitlines() + + pairs = ( + pt.TupList(contents[1:]) + .vapply(lambda v: v.split(" ")) + .vapply(lambda v: dict(n1=int(v[0]), n2=int(v[1]))) + ) + return Instance.from_dict(dict(pairs=pairs)) + + def get_graph(self): + nodes = self.get_nodes() + arcs = self.get_pairs() + G = nx.Graph() + for node in nodes: + G.add_node(node) + for n1, n2 in arcs: + G.add_edge(n1, n2) + return G diff --git a/cornflow-dags/DAG/graph_coloring/report/.gitignore b/cornflow-dags/DAG/graph_coloring/report/.gitignore new file mode 100644 index 000000000..075b2542a --- /dev/null +++ b/cornflow-dags/DAG/graph_coloring/report/.gitignore @@ -0,0 +1 @@ +/.quarto/ diff --git a/cornflow-dags/DAG/graph_coloring/report/report.qmd b/cornflow-dags/DAG/graph_coloring/report/report.qmd new file mode 100644 index 000000000..0014ad559 --- /dev/null +++ b/cornflow-dags/DAG/graph_coloring/report/report.qmd @@ -0,0 +1,143 @@ +--- +title: "Graph Coloring report" +execute: + echo: false + warning: false +format: + html: + embed-resources: true +editor_options: + chunk_output_type: console +--- + +```{python} +#| tags: [parameters] + +file_name = "../data/gc_50_1" +``` + +## Graph Coloring + +From [wikipedia](https://en.wikipedia.org/wiki/Graph_coloring): + +> In graph theory, graph coloring is a special case of graph labeling; it is an assignment of labels traditionally called "colors" to elements of a graph subject to certain constraints. In its simplest form, it is a way of coloring the vertices of a graph such that no two adjacent vertices are of the same color; this is called a vertex coloring. 
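The `cp_model.py` changes later in this patch build exactly this kind of model from the instance's `pairs` table. As a standalone illustration of the constraint quoted above, here is a minimal OR-Tools CP-SAT sketch; the four-node cycle used as data is purely illustrative and is not taken from this DAG or its datasets:

```python
from ortools.sat.python import cp_model

# illustrative data: the edges of a 4-node cycle, not taken from the repository
pairs = [(0, 1), (1, 2), (2, 3), (3, 0)]
nodes = sorted({n for edge in pairs for n in edge})

model = cp_model.CpModel()
max_color = len(nodes) - 1
# one integer "color" variable per node
color = {n: model.NewIntVar(0, max_color, f"color_{n}") for n in nodes}
# vertex coloring: adjacent nodes must take different colors
for n1, n2 in pairs:
    model.Add(color[n1] != color[n2])
# minimize the highest color index used (number of colors minus one)
highest = model.NewIntVar(0, max_color, "highest")
model.AddMaxEquality(highest, list(color.values()))
model.Minimize(highest)

solver = cp_model.CpSolver()
status = solver.Solve(model)
if status in (cp_model.OPTIMAL, cp_model.FEASIBLE):
    print({n: solver.Value(v) for n, v in color.items()})  # e.g. alternating 0/1 on the cycle
```

The snippet only restates the feasibility condition in isolation; the DAG's own solver additionally reads the instance through `Instance.get_nodes()` and `get_pairs()` and wraps the result in a `Solution` object.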
+ +```{python} +#| echo: false +import os +import sys +module_path = os.path.abspath(os.path.join('../..')) +if module_path not in sys.path: + sys.path.append(module_path) + +from graph_coloring import GraphColoring +import networkx as nx +import numpy as np +import pytups as pt +import matplotlib.pyplot as plt +from matplotlib import colormaps + +extension = os.path.splitext(file_name)[1] +if extension=='': + # it's an instance, so we should solve it, I guess + my_instance = GraphColoring.instance.from_txt_file(file_name) + my_experiment = GraphColoring.solvers['default'](instance=my_instance, solution=None) + status = my_experiment.solve({'timeLimit': 5}) +elif extension=='.json': + my_experiment = GraphColoring.solvers['default'].from_json(file_name) + my_instance = my_experiment.instance +else: + raise ValueError("Unknown extension: {}".format(extension)) + + +``` + + +## Instance statistics + +The problem has `{python} len(my_instance.get_nodes())` nodes and `{python} len(my_instance.get_pairs())` arcs. + + +## The network + +See @fig-network for a representation of the network of nodes to color. + + +```{python} +#| echo: false +#| label: fig-network +#| fig-cap: "Network with forbidden pairs." +G = my_instance.get_graph() +pos = nx.kamada_kawai_layout(G) +__nodes = nx.draw_networkx_nodes(G, pos=pos) +__edges = nx.draw_networkx_edges(G, pos=pos) + +__labels = nx.draw_networkx_labels(G, pos=pos, font_color='white') + +ax = plt.gca() +ax.margins(0.05) +plt.axis("off") +plt.tight_layout() +plt.show() + +``` + +## Solution statistics + +```{python} +objective = my_experiment.get_objective() +checks = my_experiment.check_solution() +feasible = len(checks['missing']) == 0 and len(checks['pairs']) == 0 + +``` + +```{python} +#| output: asis + +# The following code shows (1) a box with feasibility + objective function OR (2) root cause of infeasibility. + +if feasible: + print("::: {{.callout-tip}}\n\n## Solution is feasible\n\nThe least number of colors is {}.\n\n:::".format(objective)) +else: + my_text = "" + if checks['missing']: + my_text += 'The solution is missing the following nodes: {}\n\n'.format(checks['missing']) + if checks['pairs']: + my_text += 'The solution violates the following arcs: {}\n\n'.format(checks['pairs']) + + print("::: {{.callout-important}}\n\n## Solution is infeasible\n\n{}\n\n:::".format(my_text)) + +``` + +## Solution + + +See @fig-colors for a representation of the solution. Colors are kept from the network representation of the instance. 
+ +```{python} +#| label: fig-colors +#| fig-cap: "Color assignment to nodes" + +def get_colors_from_cmap(cmap_name, num_colors): + cmap = plt.cm.get_cmap(cmap_name) + colors = [cmap(i / num_colors) for i in range(num_colors)] + return colors + +# Example usage +color_id = my_experiment.solution.get_assignments() +num_colors = len(color_id.values_tl().unique2()) +cmap_name = 'viridis' # Name of the colormap +colors = get_colors_from_cmap(cmap_name, num_colors) + +my_colors = pt.TupList(G.nodes).vapply(lambda v: colors[color_id[v]]) + +__nodes = nx.draw_networkx_nodes(G, pos=pos, node_color=my_colors) +__edges = nx.draw_networkx_edges(G, pos=pos) + +__labels = nx.draw_networkx_labels(G, pos=pos, font_color='white') + +plt.axis("off") +plt.tight_layout() +plt.show() + +``` diff --git a/cornflow-dags/DAG/graph_coloring/solvers/cp_model.py b/cornflow-dags/DAG/graph_coloring/solvers/cp_model.py index 764223fc6..7731e5120 100644 --- a/cornflow-dags/DAG/graph_coloring/solvers/cp_model.py +++ b/cornflow-dags/DAG/graph_coloring/solvers/cp_model.py @@ -12,10 +12,8 @@ class OrToolsCP(Experiment): def solve(self, options: dict): model = cp_model.CpModel() input_data = pt.SuperDict.from_dict(self.instance.data) + nodes = self.instance.get_nodes() pairs = input_data["pairs"] - n1s = pt.TupList(pairs).vapply(lambda v: v["n1"]) - n2s = pt.TupList(pairs).vapply(lambda v: v["n2"]) - nodes = (n1s + n2s).unique2() max_colors = len(nodes) - 1 # variable declaration: @@ -38,7 +36,7 @@ def solve(self, options: dict): if termination_condition not in [cp_model.OPTIMAL, cp_model.FEASIBLE]: return dict( status=ORTOOLS_STATUS_MAPPING.get(termination_condition), - status_sol=SOLUTION_STATUS_INFEASIBLE + status_sol=SOLUTION_STATUS_INFEASIBLE, ) color_sol = color.vapply(solver.Value) @@ -47,5 +45,5 @@ def solve(self, options: dict): return dict( status=ORTOOLS_STATUS_MAPPING.get(termination_condition), - status_sol=SOLUTION_STATUS_FEASIBLE + status_sol=SOLUTION_STATUS_FEASIBLE, ) diff --git a/cornflow-dags/requirements-dev.txt b/cornflow-dags/requirements-dev.txt index 54d6b0ac7..378dc4b43 100644 --- a/cornflow-dags/requirements-dev.txt +++ b/cornflow-dags/requirements-dev.txt @@ -1,4 +1,3 @@ -r requirements.txt coverage numpyencoder -matplotlib diff --git a/cornflow-dags/requirements.txt b/cornflow-dags/requirements.txt index bb358b1a9..02f9bad86 100644 --- a/cornflow-dags/requirements.txt +++ b/cornflow-dags/requirements.txt @@ -1,18 +1,26 @@ +# cornflow cornflow-client>=1.1.0 -orloge<=0.17.2 -PuLP<=2.7 + +# data pandas<=2.1.1 -hackathonbaobab2020[solvers] -pytups<=0.86.2 -ortools<=9.7.2996 +pytups xmltodict<=0.13.0 openpyxl<=3.1.2 -pyomo<=6.6.2 -tsplib95<=0.7.1 -networkx scipy numpy<2.0.0 +# optimization: +orloge +PuLP +ortools +pyomo +tsplib95<=0.7.1 +hackathonbaobab2020[solvers] +networkx +--index-url https://pip.hexaly.com +hexaly + +# quarto and reports: matplotlib seaborn jupyter diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index 9239b96f8..883a1c317 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -158,6 +158,17 @@ def setUp(self): self.app = GraphColoring() self.config = dict(msg=False) + def test_incomplete_solution(self): + tests = self.app.test_cases + solution_data = dict(assignment=[dict(node=1, color=1), dict(node=3, color=1)]) + my_experim = self.app.solvers["default"]( + self.app.instance.from_dict(tests[0]["instance"]), + self.app.solution.from_dict(solution_data), + ) + checks = my_experim.check_solution() + 
self.assertEqual(len(checks["missing"]), 2) + self.assertEqual(len(checks["pairs"]), 1) + class Tsp(BaseDAGTests.SolvingTests): def setUp(self): From a89c67f2bb8329d5e9f2dccd6d9713fa967f8a08 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 31 Jul 2024 14:38:29 +0200 Subject: [PATCH 63/84] fixed all tests --- cornflow-dags/DAG/graph_coloring/__init__.py | 4 + .../DAG/graph_coloring/core/experiment.py | 17 +++ cornflow-dags/DAG/tsp/__init__.py | 4 + cornflow-dags/DAG/tsp/core/experiment.py | 35 ++---- cornflow-dags/DAG/tsp/schemas/config.json | 6 + cornflow-dags/tests/test_dags.py | 113 +++++++++++------- .../client/cornflow_client/core/experiment.py | 28 ++++- 7 files changed, 135 insertions(+), 72 deletions(-) diff --git a/cornflow-dags/DAG/graph_coloring/__init__.py b/cornflow-dags/DAG/graph_coloring/__init__.py index 46e7cc76d..0c3fd3c9d 100644 --- a/cornflow-dags/DAG/graph_coloring/__init__.py +++ b/cornflow-dags/DAG/graph_coloring/__init__.py @@ -18,6 +18,10 @@ class GraphColoring(ApplicationCore): schema = get_empty_schema( properties=dict(timeLimit=dict(type="number")), solvers=list(solvers.keys()) ) + reports = ["report"] + schema["properties"]["report"] = dict( + type="string", default=reports[0], enum=reports + ) @property def test_cases(self) -> List[Dict]: diff --git a/cornflow-dags/DAG/graph_coloring/core/experiment.py b/cornflow-dags/DAG/graph_coloring/core/experiment.py index fee26cc00..dcd4426d8 100644 --- a/cornflow-dags/DAG/graph_coloring/core/experiment.py +++ b/cornflow-dags/DAG/graph_coloring/core/experiment.py @@ -4,6 +4,7 @@ from .instance import Instance from .solution import Solution import os +import quarto class Experiment(ExperimentCore): @@ -15,6 +16,12 @@ class Experiment(ExperimentCore): def instance(self) -> Instance: return super().instance + @classmethod + def from_dict(cls, data: dict): + return cls( + Instance.from_dict(data["instance"]), Solution.from_dict(data["solution"]) + ) + @property def solution(self) -> Solution: return super().solution @@ -38,3 +45,13 @@ def check_solution(self, *args, **kwargs) -> dict: if n1 in colors and n2 in colors and colors[n1] == colors[n2] ] return dict(pairs=errors, missing=missing_colors) + + def generate_report(self, report_path: str, report_name="report") -> None: + if not os.path.isabs(report_name): + report_name = os.path.join( + os.path.dirname(__file__), "../report/", report_name + ) + + return self.generate_report_quarto( + quarto, report_path=report_path, report_name=report_name + ) diff --git a/cornflow-dags/DAG/tsp/__init__.py b/cornflow-dags/DAG/tsp/__init__.py index c4befe6e6..6fccba914 100644 --- a/cornflow-dags/DAG/tsp/__init__.py +++ b/cornflow-dags/DAG/tsp/__init__.py @@ -15,6 +15,10 @@ class TspApp(ApplicationCore): solution = Solution solvers = dict(naive=TSPNaive, cpsat=OrToolsCP) schema = load_json(os.path.join(os.path.dirname(__file__), "schemas/config.json")) + schema["properties"]["solver"]["enum"] = solvers.keys() + reports = ["report"] + schema["properties"]["report"]["enum"] = reports + schema["properties"]["report"]["default"] = reports[0] @property def test_cases(self) -> List[Dict]: diff --git a/cornflow-dags/DAG/tsp/core/experiment.py b/cornflow-dags/DAG/tsp/core/experiment.py index 277d6300a..5d8a52c49 100644 --- a/cornflow-dags/DAG/tsp/core/experiment.py +++ b/cornflow-dags/DAG/tsp/core/experiment.py @@ -6,7 +6,7 @@ from .instance import Instance from .solution import Solution -import json, tempfile +import json import quarto @@ -77,31 +77,14 @@ def check_solution(self, *args, **kwargs) 
-> SuperDict: ) def generate_report(self, report_path: str, report_name="report") -> None: - # a user may give the full "report.qmd" name. - # We want to take out the extension - path_without_ext = os.path.splitext(report_name)[0] - # if someone gives the absolute path: we use that. # otherwise we assume it's a file on the report/ directory: - if not os.path.isabs(path_without_ext): - path_without_ext = os.path.join( - os.path.dirname(__file__), "../report/", path_without_ext + + if not os.path.isabs(report_name): + report_name = os.path.join( + os.path.dirname(__file__), "../report/", report_name ) - path_to_qmd = path_without_ext + ".qmd" - if not os.path.exists(path_to_qmd): - raise FileNotFoundError(f"Report with path {path_to_qmd} does not exist.") - path_to_output = path_without_ext + ".html" - try: - quarto.quarto.find_quarto() - except FileNotFoundError: - raise ModuleNotFoundError("Quarto is not installed.") - with tempfile.TemporaryDirectory() as tmp: - path = os.path.join(tmp, "experiment.json") - # write a json with instance and solution to temp file - self.to_json(path) - # pass the path to the report to render - # it generates a report with path = path_to_output - quarto.render(input=path_to_qmd, execute_params=dict(file_name=path)) - # quarto always writes the report in the .qmd directory. - # thus, we need to move it where we want to: - os.replace(path_to_output, report_path) + + return self.generate_report_quarto( + quarto, report_path=report_path, report_name=report_name + ) diff --git a/cornflow-dags/DAG/tsp/schemas/config.json b/cornflow-dags/DAG/tsp/schemas/config.json index a62dcb567..4f4cc45df 100644 --- a/cornflow-dags/DAG/tsp/schemas/config.json +++ b/cornflow-dags/DAG/tsp/schemas/config.json @@ -6,6 +6,12 @@ "threads": {"type": "integer"}, "seed": {"type": "integer"}, "gap": {"type": "number"}, + "report": { + "type": "object", + "properties": { + "name": {"type": "string"} + } + }, "solver": { "type": "string", "enum": ["naive", "cpsat"], diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index 883a1c317..0d6924aa0 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -25,7 +25,7 @@ from jsonschema import Draft7Validator from pytups import SuperDict -from typing import Dict, List, Tuple +from typing import Dict, List, Tuple, Optional class BaseDAGTests: @@ -55,6 +55,36 @@ def test_config_requirements(self): self.assertIn("enum", props["solver"]) self.assertGreater(len(props["solver"]["enum"]), 0) + def load_experiment_from_dataset(self, dataset): + instance_data = dataset.get("instance") + solution_data = dataset.get("solution", None) + instance = self.app.instance.from_dict(instance_data) + solution = self.app.solution.from_dict(solution_data) + s = self.app.get_default_solver_name() + return self.app.get_solver(s)(instance, solution) + + def generate_check_report( + self, + my_experim, + things_to_look, + verbose=False, + report_path="./my_report.html", + ): + + my_experim.generate_report(report_path=report_path) + # check the file is created. 
+ self.assertTrue(os.path.exists(report_path)) + + parser = HTMLCheckTags(things_to_look, verbose) + with open(report_path, "r") as f: + content = f.read() + + try: + os.remove(report_path) + except FileNotFoundError: + pass + self.assertRaises(StopIteration, parser.feed, content) + def test_try_solving_testcase(self, config=None): config = config or self.config tests = self.app.test_cases @@ -169,6 +199,21 @@ def test_incomplete_solution(self): self.assertEqual(len(checks["missing"]), 2) self.assertEqual(len(checks["pairs"]), 1) + def test_report(self): + tests = self.app.test_cases + my_experim = self.load_experiment_from_dataset(tests[0]) + my_experim.solve(dict()) + things_to_look = dict( + section=[ + ("id", "solution"), + ("id", "instance-statistics"), + ("id", "graph-coloring"), + ] + ) + self.generate_check_report( + my_experim, things_to_look=things_to_look, verbose=False + ) + class Tsp(BaseDAGTests.SolvingTests): def setUp(self): @@ -184,10 +229,6 @@ def test_report(self): tests = self.app.test_cases my_experim = self.app.solvers["cpsat"](self.app.instance(tests[0]["instance"])) my_experim.solve(dict()) - report_path = "./my_report.html" - my_experim.generate_report(report_path=report_path) - # check the file is created. - self.assertTrue(os.path.exists(report_path)) # let's just check for an element inside the html that we know should exist # in this case a few 'section' tags with an attribute with a specific id @@ -198,15 +239,7 @@ def test_report(self): ("id", "tsp"), ] ) - parser = HTMLCheckTags(things_to_look) - with open(report_path, "r") as f: - content = f.read() - - try: - os.remove(report_path) - except FileNotFoundError: - pass - self.assertRaises(StopIteration, parser.feed, content) + self.generate_check_report(my_experim, things_to_look) def test_report_error(self): tests = self.app.test_cases @@ -438,46 +471,36 @@ def setUp(self): def test_report(self): my_experim = self.app.solvers["default"](self.app.instance({})) - my_experim.solve(dict(timeLimit=0)) - report_path = "./my_report.html" - my_experim.generate_report(report_path=report_path) - # check the file is created. 
- self.assertTrue(os.path.exists(report_path)) - - # let's just check for an element inside the html that we know should exist - # a 'div' tag with a 'foo' attribute - - # class MyHTMLParser(HTMLParser): - # - # def handle_starttag(self, tag, attrs): - # print("Start tag:", tag) - # for attr in attrs: - # print(" attr:", attr) - - parser = HTMLCheckTags(dict(div=[("class", "foo")], span=[("class", "bar")])) - with open(report_path, "r") as f: - content = f.read() - # parser.feed(content) - try: - os.remove(report_path) - except FileNotFoundError: - pass - self.assertRaises(StopIteration, parser.feed, content) + things_to_look = dict(div=[("class", "foo")], span=[("class", "bar")]) + self.generate_check_report( + my_experim, things_to_look=things_to_look, verbose=False + ) class HTMLCheckTags(HTMLParser): - things_to_check: Dict[str, List[Tuple[str, str]]] + things_to_check: Optional[Dict[str, List[Tuple[str, str]]]] - def __init__(self, things_to_check: Dict[str, List[Tuple[str, str]]]): + def __init__(self, things_to_check: Dict[str, List[Tuple[str, str]]], verbose): HTMLParser.__init__(self) - self.things_to_check = SuperDict(things_to_check).copy_deep() + self.verbose = verbose + if things_to_check is None: + self.things_to_check = None + else: + self.things_to_check = SuperDict(things_to_check).copy_deep() def handle_starttag(self, tag: str, attrs: List[Tuple[str, str]]): - # print("Start tag:", tag) - if tag not in self.things_to_check: + # when things_to_check is None, we traverse everything + # when verbose=True, we print what we traverse + if self.verbose: + print("Start tag:", tag) + if self.things_to_check is not None and tag not in self.things_to_check: return for attr in attrs: - # print(" attr:", attr) + if self.verbose: + print(" attr:", attr) + # is we're not looking for keys, we just continue + if self.things_to_check is None: + continue try: # we find the element in the list and remove it index = self.things_to_check[tag].index(attr) diff --git a/libs/client/cornflow_client/core/experiment.py b/libs/client/cornflow_client/core/experiment.py index cb893791a..9edb23309 100644 --- a/libs/client/cornflow_client/core/experiment.py +++ b/libs/client/cornflow_client/core/experiment.py @@ -4,7 +4,8 @@ from abc import ABC, abstractmethod from typing import Union, Dict -import json +import json, tempfile +import os from cornflow_client.constants import ( PARAMETER_SOLVER_TRANSLATING_MAPPING, @@ -181,3 +182,28 @@ def to_json(self, path: str) -> None: data = self.to_dict() with open(path, "w") as f: json.dump(data, f, indent=4, sort_keys=True) + + def generate_report_quarto(self, quarto, report_path: str, report_name="report"): + + # a user may give the full "report.qmd" name. + # We want to take out the extension + path_without_ext = os.path.splitext(report_name)[0] + + path_to_qmd = path_without_ext + ".qmd" + if not os.path.exists(path_to_qmd): + raise FileNotFoundError(f"Report with path {path_to_qmd} does not exist.") + path_to_output = path_without_ext + ".html" + try: + quarto.quarto.find_quarto() + except FileNotFoundError: + raise ModuleNotFoundError("Quarto is not installed.") + with tempfile.TemporaryDirectory() as tmp: + path = os.path.join(tmp, "experiment.json") + # write a json with instance and solution to temp file + self.to_json(path) + # pass the path to the report to render + # it generates a report with path = path_to_output + quarto.render(input=path_to_qmd, execute_params=dict(file_name=path)) + # quarto always writes the report in the .qmd directory. 
+ # thus, we need to move it where we want to: + os.replace(path_to_output, report_path) From 8fbd57077c9086aa00a2d9667e225403cbe949ef Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 31 Jul 2024 15:59:24 +0200 Subject: [PATCH 64/84] take out hexaly from requirements --- cornflow-dags/requirements.txt | 2 -- 1 file changed, 2 deletions(-) diff --git a/cornflow-dags/requirements.txt b/cornflow-dags/requirements.txt index 02f9bad86..042ac2572 100644 --- a/cornflow-dags/requirements.txt +++ b/cornflow-dags/requirements.txt @@ -17,8 +17,6 @@ pyomo tsplib95<=0.7.1 hackathonbaobab2020[solvers] networkx ---index-url https://pip.hexaly.com -hexaly # quarto and reports: matplotlib From 6c982160e4e206ca30d13a4c42ba8efd227c56fb Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Fri, 2 Aug 2024 13:15:00 +0200 Subject: [PATCH 65/84] Fixed errors that caused test to fail --- cornflow-dags/DAG/tsp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cornflow-dags/DAG/tsp/__init__.py b/cornflow-dags/DAG/tsp/__init__.py index 6fccba914..6c1852139 100644 --- a/cornflow-dags/DAG/tsp/__init__.py +++ b/cornflow-dags/DAG/tsp/__init__.py @@ -15,7 +15,7 @@ class TspApp(ApplicationCore): solution = Solution solvers = dict(naive=TSPNaive, cpsat=OrToolsCP) schema = load_json(os.path.join(os.path.dirname(__file__), "schemas/config.json")) - schema["properties"]["solver"]["enum"] = solvers.keys() + schema["properties"]["solver"]["enum"] = list(solvers.keys()) reports = ["report"] schema["properties"]["report"]["enum"] = reports schema["properties"]["report"]["default"] = reports[0] From ad5752510357a383f2d8771f2473c396555c454a Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 2 Aug 2024 14:37:02 +0200 Subject: [PATCH 66/84] fix the report schema errors --- cornflow-dags/DAG/graph_coloring/__init__.py | 11 ++-------- cornflow-dags/DAG/tsp/__init__.py | 5 ++--- .../tests/integration/test_cornflowclient.py | 22 ------------------- libs/client/cornflow_client/__init__.py | 6 ++++- libs/client/cornflow_client/schema/tools.py | 12 ++++++++++ 5 files changed, 21 insertions(+), 35 deletions(-) diff --git a/cornflow-dags/DAG/graph_coloring/__init__.py b/cornflow-dags/DAG/graph_coloring/__init__.py index 0c3fd3c9d..f21c881b0 100644 --- a/cornflow-dags/DAG/graph_coloring/__init__.py +++ b/cornflow-dags/DAG/graph_coloring/__init__.py @@ -1,9 +1,5 @@ -from cornflow_client import ( - get_empty_schema, - ApplicationCore, -) +from cornflow_client import get_empty_schema, ApplicationCore, add_reports_to_schema from typing import List, Dict -import pytups as pt import os from .solvers import OrToolsCP @@ -18,10 +14,7 @@ class GraphColoring(ApplicationCore): schema = get_empty_schema( properties=dict(timeLimit=dict(type="number")), solvers=list(solvers.keys()) ) - reports = ["report"] - schema["properties"]["report"] = dict( - type="string", default=reports[0], enum=reports - ) + schema = add_reports_to_schema(schema, ["report"]) @property def test_cases(self) -> List[Dict]: diff --git a/cornflow-dags/DAG/tsp/__init__.py b/cornflow-dags/DAG/tsp/__init__.py index 6c1852139..f71ff15a0 100644 --- a/cornflow-dags/DAG/tsp/__init__.py +++ b/cornflow-dags/DAG/tsp/__init__.py @@ -2,6 +2,7 @@ ApplicationCore, ) from cornflow_client.core.tools import load_json +from cornflow_client.schema.tools import add_reports_to_schema from typing import List, Dict import os @@ -16,9 +17,7 @@ class TspApp(ApplicationCore): solvers = dict(naive=TSPNaive, cpsat=OrToolsCP) schema = 
load_json(os.path.join(os.path.dirname(__file__), "schemas/config.json")) schema["properties"]["solver"]["enum"] = list(solvers.keys()) - reports = ["report"] - schema["properties"]["report"]["enum"] = reports - schema["properties"]["report"]["default"] = reports[0] + schema = add_reports_to_schema(schema, ["report"]) @property def test_cases(self) -> List[Dict]: diff --git a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py index be297d2a1..c7bd85b5f 100644 --- a/cornflow-server/cornflow/tests/integration/test_cornflowclient.py +++ b/cornflow-server/cornflow/tests/integration/test_cornflowclient.py @@ -290,28 +290,6 @@ def test_new_execution_with_timer_report_wait(self): except OSError: pass - def test_new_execution_with_tsp_report_error(self): - payload = dict(report_name="wrong_name") - execution = self.create_instance_and_execution_report(**payload) - func = self.wait_until_report_finishes( - self.client, execution["id"], REPORT_STATE.ERROR - ) - reports_info = self.try_until_condition(func, lambda v: v is not None, 20, 5) - self.assertEqual(REPORT_STATE.ERROR, reports_info["state"]) - id_report = reports_info["id"] - my_name = "./my_report.html" - try: - os.remove(my_name) - except: - pass - self.client.raw.get_one_report(id_report, "./", my_name) - # if we did not write a file, we should not have it: - self.assertFalse(os.path.exists(my_name)) - try: - os.remove(my_name) - except OSError: - pass - def test_delete_execution(self): execution = self.test_new_execution() response = self.client.raw.get_api_for_id("execution/", execution["id"]) diff --git a/libs/client/cornflow_client/__init__.py b/libs/client/cornflow_client/__init__.py index 43b590fa3..45ce50b6a 100644 --- a/libs/client/cornflow_client/__init__.py +++ b/libs/client/cornflow_client/__init__.py @@ -8,4 +8,8 @@ SolutionCore, ExperimentCore, ) -from cornflow_client.schema.tools import get_empty_schema, get_pulp_jsonschema +from cornflow_client.schema.tools import ( + get_empty_schema, + get_pulp_jsonschema, + add_reports_to_schema, +) diff --git a/libs/client/cornflow_client/schema/tools.py b/libs/client/cornflow_client/schema/tools.py index 8e6025d57..cfa64a1c2 100644 --- a/libs/client/cornflow_client/schema/tools.py +++ b/libs/client/cornflow_client/schema/tools.py @@ -33,6 +33,18 @@ def get_empty_schema(properties=None, solvers=None): return schema +def add_reports_to_schema(schema, reports): + """ + assumes the first report is the default. 
+ """ + schema = dict(schema) + schema["report"] = dict( + type="object", + properties=dict(name=dict(type="string", enum=reports, default=reports[0])), + ) + return schema + + def clean_none(dic): """ Remove empty values from a dict From a5aea1a8be81b25ed2292c1e2d7aabc7309269d9 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Fri, 2 Aug 2024 17:11:37 +0200 Subject: [PATCH 67/84] Bump version for new client alpha --- cornflow-dags/requirements.txt | 2 +- cornflow-server/requirements.txt | 2 +- libs/client/setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/cornflow-dags/requirements.txt b/cornflow-dags/requirements.txt index 042ac2572..6af60cc28 100644 --- a/cornflow-dags/requirements.txt +++ b/cornflow-dags/requirements.txt @@ -1,5 +1,5 @@ # cornflow -cornflow-client>=1.1.0 +cornflow-client==1.1.1a1 # data pandas<=2.1.1 diff --git a/cornflow-server/requirements.txt b/cornflow-server/requirements.txt index ff1cdca59..77c8dd799 100644 --- a/cornflow-server/requirements.txt +++ b/cornflow-server/requirements.txt @@ -1,7 +1,7 @@ alembic==1.9.2 apispec<=6.2.0 click<=8.1.3 -cornflow-client>=1.1.0 +cornflow-client==1.1.1a1 cryptography<=42.0.5 disposable-email-domains>=0.0.86 Flask==2.3.2 diff --git a/libs/client/setup.py b/libs/client/setup.py index bd3b8f896..43550b6cc 100644 --- a/libs/client/setup.py +++ b/libs/client/setup.py @@ -12,7 +12,7 @@ setuptools.setup( name="cornflow-client", - version="1.1.0", + version="1.1.1a1", author="baobab soluciones", author_email="sistemas@baobabsoluciones.es", description="Client to connect to a cornflow server", From c1d88037a0c76a858792ec8f4c7c969727a99b1b Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Fri, 2 Aug 2024 17:18:16 +0200 Subject: [PATCH 68/84] Updated workflows for publishing --- .github/workflows/cornflow-client-publish-to-pypi.yml | 6 +++--- .github/workflows/cornflow-publish-to-pypi.yml | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/cornflow-client-publish-to-pypi.yml b/.github/workflows/cornflow-client-publish-to-pypi.yml index 7a1439d86..05c43d1f8 100644 --- a/.github/workflows/cornflow-client-publish-to-pypi.yml +++ b/.github/workflows/cornflow-client-publish-to-pypi.yml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@master - name: Set up Python 3.8 - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: 3.8 - name: Install wheel @@ -27,13 +27,13 @@ jobs: - name: Build a binary wheel and a source tarball run: python setup.py sdist bdist_wheel - name: Publish distribution 📦 to Test PyPI - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@release/v1 with: password: ${{ secrets.test_pypi_password }} repository_url: https://test.pypi.org/legacy/ packages_dir: libs/client/dist/ - name: Publish distribution 📦 to PyPI - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@release/v1 with: password: ${{ secrets.pypi_password }} packages_dir: libs/client/dist/ diff --git a/.github/workflows/cornflow-publish-to-pypi.yml b/.github/workflows/cornflow-publish-to-pypi.yml index 1d1ae8c9c..b945fb705 100644 --- a/.github/workflows/cornflow-publish-to-pypi.yml +++ b/.github/workflows/cornflow-publish-to-pypi.yml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@master - name: Set up Python 3.8 - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: 3.8 - name: Install wheel @@ -27,13 +27,13 @@ jobs: - name: Build a binary 
wheel and a source tarball run: python setup.py sdist bdist_wheel - name: Publish distribution 📦 to Test PyPI - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@release/v1 with: password: ${{ secrets.CORNFLOW_TEST_PYPI_TOKEN }} repository_url: https://test.pypi.org/legacy/ packages_dir: cornflow-server/dist/ - name: Publish distribution 📦 to PyPI - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@release/v1 with: password: ${{ secrets.CORNFLOW_PYPI_TOKEN }} packages_dir: cornflow-server/dist/ From c464839217df16c5a74ded73b56b52ef06d5790d Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Mon, 5 Aug 2024 12:45:03 +0200 Subject: [PATCH 69/84] Bump cornflow and airflow versions for new image building --- cornflow-server/Dockerfile | 2 +- cornflow-server/airflow_config/Dockerfile | 2 +- cornflow-server/setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/cornflow-server/Dockerfile b/cornflow-server/Dockerfile index 7db8eae21..c0bb17818 100644 --- a/cornflow-server/Dockerfile +++ b/cornflow-server/Dockerfile @@ -9,7 +9,7 @@ ENV DEBIAN_FRONTEND noninteractive ENV TERM linux # CORNFLOW vars -ARG CORNFLOW_VERSION=1.1.0 +ARG CORNFLOW_VERSION=1.1.1a1 # install linux pkg RUN apt update -y && apt-get install -y --no-install-recommends \ diff --git a/cornflow-server/airflow_config/Dockerfile b/cornflow-server/airflow_config/Dockerfile index 3a749c59b..c81e6903a 100644 --- a/cornflow-server/airflow_config/Dockerfile +++ b/cornflow-server/airflow_config/Dockerfile @@ -11,7 +11,7 @@ ENV DEBIAN_FRONTEND noninteractive ENV TERM linux # Airflow vars -ARG AIRFLOW_VERSION=2.9.1 +ARG AIRFLOW_VERSION=2.9.3 ARG AIRFLOW_USER_HOME=/usr/local/airflow ARG CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-3.10.txt" ARG AIRFLOW__CORE__LOAD_EXAMPLES=False diff --git a/cornflow-server/setup.py b/cornflow-server/setup.py index c64649c0f..d2e32dcef 100644 --- a/cornflow-server/setup.py +++ b/cornflow-server/setup.py @@ -9,7 +9,7 @@ setuptools.setup( name="cornflow", - version="1.1.0", + version="1.1.1a1", author="baobab soluciones", author_email="cornflow@baobabsoluciones.es", description="Cornflow is an open source multi-solver optimization server with a REST API built using flask.", From d1beb92c749ec7204244c314ce70aa8dfab25e3a Mon Sep 17 00:00:00 2001 From: pchtsp Date: Mon, 5 Aug 2024 17:48:01 +0200 Subject: [PATCH 70/84] change in where to get the get_cmap function from recent versions of matplotlib --- cornflow-dags/DAG/graph_coloring/report/report.qmd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cornflow-dags/DAG/graph_coloring/report/report.qmd b/cornflow-dags/DAG/graph_coloring/report/report.qmd index 0014ad559..a2f6e4024 100644 --- a/cornflow-dags/DAG/graph_coloring/report/report.qmd +++ b/cornflow-dags/DAG/graph_coloring/report/report.qmd @@ -119,7 +119,7 @@ See @fig-colors for a representation of the solution. 
Colors are kept from the n #| fig-cap: "Color assignment to nodes" def get_colors_from_cmap(cmap_name, num_colors): - cmap = plt.cm.get_cmap(cmap_name) + cmap = colormaps.get_cmap(cmap_name) colors = [cmap(i / num_colors) for i in range(num_colors)] return colors From 74077ba411c862ee2ddee6e2274cc8741d67e10d Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 7 Aug 2024 15:26:42 +0200 Subject: [PATCH 71/84] fixed schema modification for reports --- libs/client/cornflow_client/schema/tools.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/libs/client/cornflow_client/schema/tools.py b/libs/client/cornflow_client/schema/tools.py index cfa64a1c2..c8e022050 100644 --- a/libs/client/cornflow_client/schema/tools.py +++ b/libs/client/cornflow_client/schema/tools.py @@ -37,8 +37,7 @@ def add_reports_to_schema(schema, reports): """ assumes the first report is the default. """ - schema = dict(schema) - schema["report"] = dict( + schema["properties"]["report"] = dict( type="object", properties=dict(name=dict(type="string", enum=reports, default=reports[0])), ) From 31a2f542b1d8f147d34a668433caf49ab34ade93 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Wed, 7 Aug 2024 20:18:04 +0200 Subject: [PATCH 72/84] fix: now we do not move the report, we just return the path to it. --- cornflow-dags/DAG/dag_timer.py | 7 +++++-- .../DAG/graph_coloring/core/experiment.py | 6 ++---- cornflow-dags/DAG/tsp/core/experiment.py | 6 ++---- cornflow-dags/tests/test_dags.py | 17 ++++------------- .../cornflow_client/airflow/dag_utilities.py | 11 ++--------- libs/client/cornflow_client/core/experiment.py | 16 +++++++++++----- 6 files changed, 26 insertions(+), 37 deletions(-) diff --git a/cornflow-dags/DAG/dag_timer.py b/cornflow-dags/DAG/dag_timer.py index 82c4e986e..35a6a8967 100644 --- a/cornflow-dags/DAG/dag_timer.py +++ b/cornflow-dags/DAG/dag_timer.py @@ -1,3 +1,5 @@ +import os.path + import time from cornflow_client import get_empty_schema from cornflow_client import ApplicationCore, InstanceCore, SolutionCore, ExperimentCore @@ -35,7 +37,8 @@ def get_objective(self) -> float: def check_solution(self, *args, **kwargs): return dict() - def generate_report(self, report_path: str, report_name="report") -> None: + def generate_report(self, report_name="report") -> str: + report_path = os.path.abspath("./report.html") html = ET.Element("html") body = ET.Element("body") html.append(body) @@ -45,7 +48,7 @@ def generate_report(self, report_path: str, report_name="report") -> None: div.append(span) with open(report_path, "w") as f: ET.ElementTree(html).write(f, encoding="unicode", method="html") - return + return report_path class Timer(ApplicationCore): diff --git a/cornflow-dags/DAG/graph_coloring/core/experiment.py b/cornflow-dags/DAG/graph_coloring/core/experiment.py index dcd4426d8..6f7dce03c 100644 --- a/cornflow-dags/DAG/graph_coloring/core/experiment.py +++ b/cornflow-dags/DAG/graph_coloring/core/experiment.py @@ -46,12 +46,10 @@ def check_solution(self, *args, **kwargs) -> dict: ] return dict(pairs=errors, missing=missing_colors) - def generate_report(self, report_path: str, report_name="report") -> None: + def generate_report(self, report_name="report") -> str: if not os.path.isabs(report_name): report_name = os.path.join( os.path.dirname(__file__), "../report/", report_name ) - return self.generate_report_quarto( - quarto, report_path=report_path, report_name=report_name - ) + return self.generate_report_quarto(quarto, report_name=report_name) diff --git a/cornflow-dags/DAG/tsp/core/experiment.py 
b/cornflow-dags/DAG/tsp/core/experiment.py index 5d8a52c49..ffe27f518 100644 --- a/cornflow-dags/DAG/tsp/core/experiment.py +++ b/cornflow-dags/DAG/tsp/core/experiment.py @@ -76,7 +76,7 @@ def check_solution(self, *args, **kwargs) -> SuperDict: missing_positions=self.check_missing_positions(), ) - def generate_report(self, report_path: str, report_name="report") -> None: + def generate_report(self, report_name="report") -> str: # if someone gives the absolute path: we use that. # otherwise we assume it's a file on the report/ directory: @@ -85,6 +85,4 @@ def generate_report(self, report_path: str, report_name="report") -> None: os.path.dirname(__file__), "../report/", report_name ) - return self.generate_report_quarto( - quarto, report_path=report_path, report_name=report_name - ) + return self.generate_report_quarto(quarto, report_name=report_name) diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index 0d6924aa0..8adc6ed8e 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -63,15 +63,9 @@ def load_experiment_from_dataset(self, dataset): s = self.app.get_default_solver_name() return self.app.get_solver(s)(instance, solution) - def generate_check_report( - self, - my_experim, - things_to_look, - verbose=False, - report_path="./my_report.html", - ): - - my_experim.generate_report(report_path=report_path) + def generate_check_report(self, my_experim, things_to_look, verbose=False): + + report_path = my_experim.generate_report() # check the file is created. self.assertTrue(os.path.exists(report_path)) @@ -245,10 +239,7 @@ def test_report_error(self): tests = self.app.test_cases my_experim = self.app.solvers["cpsat"](self.app.instance(tests[0]["instance"])) my_experim.solve(dict()) - report_path = "./my_report.html" - my_fun = lambda: my_experim.generate_report( - report_path=report_path, report_name="wrong_name" - ) + my_fun = lambda: my_experim.generate_report(report_name="wrong_name") self.assertRaises(FileNotFoundError, my_fun) def test_export(self): diff --git a/libs/client/cornflow_client/airflow/dag_utilities.py b/libs/client/cornflow_client/airflow/dag_utilities.py index 5c11ea0a5..dea74b071 100644 --- a/libs/client/cornflow_client/airflow/dag_utilities.py +++ b/libs/client/cornflow_client/airflow/dag_utilities.py @@ -389,17 +389,10 @@ def my_try_to_save(state): my_experiment = experiment( app.instance(input_data), app.solution(solution_data) ) - report_path = os.path.abspath("./my_report.html") - if os.path.exists(report_path): - try: - os.remove(report_path) - except: - pass + print(f"Preparing to write the report: {report_name}") try: - my_experiment.generate_report( - report_path=report_path, report_name=report_name - ) + report_path = my_experiment.generate_report(report_name=report_name) except ModuleNotFoundError as e: my_try_to_save(-10) raise AirflowDagException("The generation of the report failed") diff --git a/libs/client/cornflow_client/core/experiment.py b/libs/client/cornflow_client/core/experiment.py index 9edb23309..a93a73db2 100644 --- a/libs/client/cornflow_client/core/experiment.py +++ b/libs/client/cornflow_client/core/experiment.py @@ -153,9 +153,10 @@ def get_solver_config( return conf - def generate_report(self, report_path: str, report_name="report") -> None: + def generate_report(self, report_name="report") -> str: """ - this method should write a report file into report_path, using the template in report_name. + this method should write a report file, using the template in report_name. 
+ It returns the path to the file :param report_path: the path of the report to export :param report_name: the name of the template for the report @@ -183,7 +184,8 @@ def to_json(self, path: str) -> None: with open(path, "w") as f: json.dump(data, f, indent=4, sort_keys=True) - def generate_report_quarto(self, quarto, report_path: str, report_name="report"): + def generate_report_quarto(self, quarto, report_name: str = "report") -> str: + # it returns the path to the file being written # a user may give the full "report.qmd" name. # We want to take out the extension @@ -193,6 +195,10 @@ def generate_report_quarto(self, quarto, report_path: str, report_name="report") if not os.path.exists(path_to_qmd): raise FileNotFoundError(f"Report with path {path_to_qmd} does not exist.") path_to_output = path_without_ext + ".html" + try: + os.remove(path_to_output) + except FileNotFoundError: + pass try: quarto.quarto.find_quarto() except FileNotFoundError: @@ -205,5 +211,5 @@ def generate_report_quarto(self, quarto, report_path: str, report_name="report") # it generates a report with path = path_to_output quarto.render(input=path_to_qmd, execute_params=dict(file_name=path)) # quarto always writes the report in the .qmd directory. - # thus, we need to move it where we want to: - os.replace(path_to_output, report_path) + # thus, we need to return it so the user can move it if needed + return path_to_output From d05517008abe627fb85cf896c5e7cb8d2795b8db Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Thu, 8 Aug 2024 10:35:19 +0200 Subject: [PATCH 73/84] Bump cornflow-client version --- cornflow-server/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cornflow-server/setup.py b/cornflow-server/setup.py index d2e32dcef..a419b1d99 100644 --- a/cornflow-server/setup.py +++ b/cornflow-server/setup.py @@ -9,7 +9,7 @@ setuptools.setup( name="cornflow", - version="1.1.1a1", + version="1.1.1a2", author="baobab soluciones", author_email="cornflow@baobabsoluciones.es", description="Cornflow is an open source multi-solver optimization server with a REST API built using flask.", From 18871c65a6812469752b8fce85b916953c19a6b5 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Thu, 8 Aug 2024 10:39:15 +0200 Subject: [PATCH 74/84] Bump version for new client alpha --- cornflow-server/setup.py | 2 +- libs/client/setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/cornflow-server/setup.py b/cornflow-server/setup.py index a419b1d99..d2e32dcef 100644 --- a/cornflow-server/setup.py +++ b/cornflow-server/setup.py @@ -9,7 +9,7 @@ setuptools.setup( name="cornflow", - version="1.1.1a2", + version="1.1.1a1", author="baobab soluciones", author_email="cornflow@baobabsoluciones.es", description="Cornflow is an open source multi-solver optimization server with a REST API built using flask.", diff --git a/libs/client/setup.py b/libs/client/setup.py index 43550b6cc..eebc5c065 100644 --- a/libs/client/setup.py +++ b/libs/client/setup.py @@ -12,7 +12,7 @@ setuptools.setup( name="cornflow-client", - version="1.1.1a1", + version="1.1.1a2", author="baobab soluciones", author_email="sistemas@baobabsoluciones.es", description="Client to connect to a cornflow server", From 495537e3659c32815f51c6231c800ef471775345 Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Thu, 8 Aug 2024 10:48:40 +0200 Subject: [PATCH 75/84] Bump cornflow-client version on requirements file --- cornflow-dags/requirements.txt | 2 +- cornflow-server/requirements.txt | 2 +- 2 
files changed, 2 insertions(+), 2 deletions(-) diff --git a/cornflow-dags/requirements.txt b/cornflow-dags/requirements.txt index 6af60cc28..7751dc7f0 100644 --- a/cornflow-dags/requirements.txt +++ b/cornflow-dags/requirements.txt @@ -1,5 +1,5 @@ # cornflow -cornflow-client==1.1.1a1 +cornflow-client==1.1.1a # data pandas<=2.1.1 diff --git a/cornflow-server/requirements.txt b/cornflow-server/requirements.txt index 77c8dd799..1547c445d 100644 --- a/cornflow-server/requirements.txt +++ b/cornflow-server/requirements.txt @@ -1,7 +1,7 @@ alembic==1.9.2 apispec<=6.2.0 click<=8.1.3 -cornflow-client==1.1.1a1 +cornflow-client==1.1.1a2 cryptography<=42.0.5 disposable-email-domains>=0.0.86 Flask==2.3.2 From 6845f5325432befdc58cab2fc45b1d52d4147bea Mon Sep 17 00:00:00 2001 From: Guillermo Gonzalez-Santander Date: Thu, 8 Aug 2024 10:55:19 +0200 Subject: [PATCH 76/84] Fixed error on requirements file --- cornflow-dags/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cornflow-dags/requirements.txt b/cornflow-dags/requirements.txt index 7751dc7f0..d30e34a30 100644 --- a/cornflow-dags/requirements.txt +++ b/cornflow-dags/requirements.txt @@ -1,5 +1,5 @@ # cornflow -cornflow-client==1.1.1a +cornflow-client==1.1.1a2 # data pandas<=2.1.1 From bae1109ca8fa36631f23889cd06158309ffe7e3e Mon Sep 17 00:00:00 2001 From: pchtsp Date: Mon, 12 Aug 2024 15:26:51 +0200 Subject: [PATCH 77/84] complete example of sudoku --- .../DAG/graph_coloring/report/report.qmd | 2 +- cornflow-dags/DAG/sudoku/README.rst | 33 +++++ cornflow-dags/DAG/sudoku/__init__.py | 49 ++++++++ cornflow-dags/DAG/sudoku/core/__init__.py | 3 + cornflow-dags/DAG/sudoku/core/experiment.py | 70 +++++++++++ cornflow-dags/DAG/sudoku/core/instance.py | 117 ++++++++++++++++++ cornflow-dags/DAG/sudoku/core/solution.py | 19 +++ cornflow-dags/DAG/sudoku/core/tools.py | 31 +++++ cornflow-dags/DAG/sudoku/data/easy.txt | 50 ++++++++ cornflow-dags/DAG/sudoku/data/example_1 | 1 + cornflow-dags/DAG/sudoku/data/example_2 | 1 + cornflow-dags/DAG/sudoku/data/hardest.txt | 11 ++ cornflow-dags/DAG/sudoku/report/.gitignore | 1 + cornflow-dags/DAG/sudoku/report/report.qmd | 105 ++++++++++++++++ cornflow-dags/DAG/sudoku/schemas/input.json | 36 ++++++ cornflow-dags/DAG/sudoku/schemas/output.json | 26 ++++ cornflow-dags/DAG/sudoku/solvers/__init__.py | 1 + cornflow-dags/DAG/sudoku/solvers/cp_model.py | 71 +++++++++++ cornflow-dags/tests/test_dags.py | 24 ++++ 19 files changed, 650 insertions(+), 1 deletion(-) create mode 100644 cornflow-dags/DAG/sudoku/README.rst create mode 100644 cornflow-dags/DAG/sudoku/__init__.py create mode 100644 cornflow-dags/DAG/sudoku/core/__init__.py create mode 100644 cornflow-dags/DAG/sudoku/core/experiment.py create mode 100644 cornflow-dags/DAG/sudoku/core/instance.py create mode 100644 cornflow-dags/DAG/sudoku/core/solution.py create mode 100644 cornflow-dags/DAG/sudoku/core/tools.py create mode 100644 cornflow-dags/DAG/sudoku/data/easy.txt create mode 100644 cornflow-dags/DAG/sudoku/data/example_1 create mode 100644 cornflow-dags/DAG/sudoku/data/example_2 create mode 100644 cornflow-dags/DAG/sudoku/data/hardest.txt create mode 100644 cornflow-dags/DAG/sudoku/report/.gitignore create mode 100644 cornflow-dags/DAG/sudoku/report/report.qmd create mode 100644 cornflow-dags/DAG/sudoku/schemas/input.json create mode 100644 cornflow-dags/DAG/sudoku/schemas/output.json create mode 100644 cornflow-dags/DAG/sudoku/solvers/__init__.py create mode 100644 cornflow-dags/DAG/sudoku/solvers/cp_model.py diff --git 
a/cornflow-dags/DAG/graph_coloring/report/report.qmd b/cornflow-dags/DAG/graph_coloring/report/report.qmd
index a2f6e4024..0926bd480 100644
--- a/cornflow-dags/DAG/graph_coloring/report/report.qmd
+++ b/cornflow-dags/DAG/graph_coloring/report/report.qmd
@@ -112,7 +112,7 @@ else:
 
 ## Solution
 
-See @fig-colors for a representation of the solution. Colors are kept from the network representation of the instance.
+See @fig-colors for a representation of the solution. Positions are kept from the network representation of the instance.
 
 ```{python}
 #| label: fig-colors
diff --git a/cornflow-dags/DAG/sudoku/README.rst b/cornflow-dags/DAG/sudoku/README.rst
new file mode 100644
index 000000000..404da6cff
--- /dev/null
+++ b/cornflow-dags/DAG/sudoku/README.rst
@@ -0,0 +1,33 @@
+Sudoku
+=======================
+
+The problem consists of finding a set of values that:
+(1) comply with the "sudoku rules": all values unique within each (a) row, (b) column and (c) square (for a 9x9 sudoku), and
+(2) comply with the initial values given at specific places in the grid.
+
+There is usually no objective function: this is a feasibility problem.
+
+**Name of dag**: sudoku
+
+**Available solution methods**:
+
+- **cpsat:** CP model built in ortools with CP-SAT as solver.
+- **mip:** MIP model built in pulp and solved with cbc.
+
+
+Decision
+------------
+
+For each row i and column j, which value p to place.
+
+Parameters
+------------
+
+- Initial values per row and column.
+- Parameters: size of sudoku (9 by default).
+
+Thanks
+--------
+
+To Peter Norvig for the datasets: https://github.com/norvig/pytudes/blob/main/py
+To Alain T. for the print visualization: https://stackoverflow.com/a/56581709/6508131
\ No newline at end of file
diff --git a/cornflow-dags/DAG/sudoku/__init__.py b/cornflow-dags/DAG/sudoku/__init__.py
new file mode 100644
index 000000000..e710ed82e
--- /dev/null
+++ b/cornflow-dags/DAG/sudoku/__init__.py
@@ -0,0 +1,49 @@
+from cornflow_client import get_empty_schema, ApplicationCore, add_reports_to_schema
+from typing import List, Dict
+import os
+
+from .solvers import OrToolsCP
+from .core import Instance, Solution
+
+
+class Sudoku(ApplicationCore):
+    name = "sudoku"
+    instance = Instance
+    solution = Solution
+    solvers = dict(cpsat=OrToolsCP)
+    schema = get_empty_schema(
+        properties=dict(timeLimit=dict(type="number")), solvers=list(solvers.keys())
+    )
+    schema = add_reports_to_schema(schema, ["report"])
+
+    @property
+    def test_cases(self) -> List[Dict]:
+
+        file_dir = os.path.join(os.path.dirname(__file__), "data")
+        get_file = lambda name: os.path.join(file_dir, name)
+
+        def get_dataset(number, prefix):
+            return dict(
+                name=f"{prefix}_{number}",
+                instance=Instance.from_txt_file(
+                    get_file(f"{prefix}.txt"), number
+                ).to_dict(),
+                description=f"{prefix} example # {number}",
+            )
+
+        return (
+            [get_dataset(n, "easy") for n in range(50)]
+            + [get_dataset(n, "hardest") for n in range(11)]
+            + [
+                dict(
+                    name="example_1",
+                    instance=Instance.from_txt_file(get_file("example_1")).to_dict(),
+                    description="Example 1",
+                ),
+                dict(
+                    name="example_2",
+                    instance=Instance.from_txt_file(get_file("example_2")).to_dict(),
+                    description="Example 2",
+                ),
+            ]
+        )
diff --git a/cornflow-dags/DAG/sudoku/core/__init__.py b/cornflow-dags/DAG/sudoku/core/__init__.py
new file mode 100644
index 000000000..69bad8c50
--- /dev/null
+++ b/cornflow-dags/DAG/sudoku/core/__init__.py
@@ -0,0 +1,3 @@
+from .solution import Solution
+from .instance import Instance
+from .experiment import Experiment
diff --git
a/cornflow-dags/DAG/sudoku/core/experiment.py b/cornflow-dags/DAG/sudoku/core/experiment.py
new file mode 100644
index 000000000..f603fdb59
--- /dev/null
+++ b/cornflow-dags/DAG/sudoku/core/experiment.py
@@ -0,0 +1,70 @@
+from cornflow_client import ExperimentCore
+from cornflow_client.core.tools import load_json
+import pytups as pt
+from .instance import Instance
+from .solution import Solution
+import os
+import quarto
+
+
+class Experiment(ExperimentCore):
+    schema_checks = load_json(
+        os.path.join(os.path.dirname(__file__), "../schemas/solution_checks.json")
+    )
+
+    @property
+    def instance(self) -> Instance:
+        return super().instance
+
+    @classmethod
+    def from_dict(cls, data: dict):
+        return cls(
+            Instance.from_dict(data["instance"]), Solution.from_dict(data["solution"])
+        )
+
+    @property
+    def solution(self) -> Solution:
+        return super().solution
+
+    @solution.setter
+    def solution(self, value):
+        self._solution = value
+
+    def get_objective(self) -> float:
+        return 0
+
+    def get_complete_solution(self):
+        initial_values = self.instance.get_initial_values()
+        solution_values = self.solution.get_assignments(self.instance.get_size())
+        return solution_values + initial_values
+
+    def check_solution(self, *args, **kwargs) -> dict:
+
+        # if we check that we have all expected values at least once in each group, it should be enough
+        all_values = self.get_complete_solution()
+        size = self.instance.get_size()
+
+        expected_values = set(range(1, size + 1))
+        groups = ["row", "col", "square"]
+        err_all_dif = pt.SuperDict()
+        for group in groups:
+            # index by the current group (row, col or square) so each one is checked
+            err_all_dif[group] = all_values.to_dict("value", indices=group).vapply(
+                lambda v: v.set_diff(expected_values)
+            )
+        # this returns for each group and slot, the list of missing values
+        err_all_dif = err_all_dif.to_dictup().vfilter(lambda v: len(v))
+
+        return pt.SuperDict(missing_values=err_all_dif).vfilter(lambda v: len(v))
+
+    def generate_report(self, report_name="report") -> str:
+        if not os.path.isabs(report_name):
+            report_name = os.path.join(
+                os.path.dirname(__file__), "../report/", report_name
+            )
+
+        return self.generate_report_quarto(quarto, report_name=report_name)
+
+    def print(self):
+        values = self.get_complete_solution()
+        board = self.instance.values_to_matrix(values)
+        return self.instance.generate_board(board)
diff --git a/cornflow-dags/DAG/sudoku/core/instance.py b/cornflow-dags/DAG/sudoku/core/instance.py
new file mode 100644
index 000000000..a2afe81b3
--- /dev/null
+++ b/cornflow-dags/DAG/sudoku/core/instance.py
@@ -0,0 +1,117 @@
+import os
+from cornflow_client import InstanceCore, get_empty_schema
+from cornflow_client.core.tools import load_json
+import pytups as pt
+import math
+from .tools import pos_to_row_col, add_pos_square, row_col_to_pos, row_col_to_square
+
+
+class Instance(InstanceCore):
+    schema = load_json(os.path.join(os.path.dirname(__file__), "../schemas/input.json"))
+    schema_checks = get_empty_schema()
+    data: pt.SuperDict
+
+    def __init__(self, data: dict):
+        data = pt.SuperDict(data)
+        data["initial_values"] = pt.TupList(data["initial_values"])
+        super().__init__(data)
+
+    @classmethod
+    def from_txt_file(cls, filePath, line_number: int = 0):
+        with open(filePath, "r") as f:
+            contents = f.read().splitlines()
+        empty_chars = {".", "0"}
+        my_chars = [
+            el if el not in empty_chars else None for el in contents[line_number]
+        ]
+
+        size = int(math.sqrt(len(my_chars)))
+        value_by_position = (
+            pt.TupList(my_chars)
+            .kvapply(lambda k, v: dict(pos=k, value=v))
+            .vfilter(lambda v: v["value"] is not None)
+ ) + + def get_element(el): + row, col = pos_to_row_col(el["pos"], size) + return pt.SuperDict(row=row, col=col, value=int(el["value"])) + + data = pt.SuperDict( + initial_values=value_by_position.vapply(get_element), + parameters=dict(size=size), + ) + + return Instance.from_dict(data) + + def get_initial_values(self) -> pt.TupList[dict]: + my_size = self.data["parameters"]["size"] + return add_pos_square(self.data["initial_values"], my_size) + + def get_parameter(self, key=None) -> pt.SuperDict | int | str | None: + if key is None: + return self.data["parameters"] + return self.data["parameters"][key] + + def get_size(self): + return self.get_parameter("size") + + @classmethod + def from_dict(self, data: dict) -> "Instance": + return Instance(data) + + def to_dict(self) -> pt.SuperDict: + my_data = self.data.copy_deep() + return my_data + + def generate_all_positions(self): + size = self.get_size() + all_positions = ( + pt.SuperDict( + row=row, + col=col, + pos=row_col_to_pos(row, col, size), + square=row_col_to_square(row, col, int(math.sqrt(size))), + ) + for row in range(size) + for col in range(size) + ) + all_positions = pt.TupList(all_positions).to_dict( + None, indices="pos", is_list=False + ) + return all_positions + + def values_to_matrix(self, values): + size = self.get_size() + my_matrix = [[0 for _ in range(size)] for _ in range(size)] + for el in values: + my_matrix[el["row"]][el["col"]] = el["value"] + return my_matrix + + def print(self): + values = self.get_initial_values() + board = self.values_to_matrix(values) + return self.generate_board(board) + + def generate_board(self, board): + # Taken from response of Alain. T in https://stackoverflow.com/a/56581709/6508131 + side = self.get_size() + base = int(math.sqrt(side)) + + def expandLine(line): + return ( + line[0] + line[5:9].join([line[1:5] * (base - 1)] * base) + line[9:13] + ) + + line0 = expandLine("╔═══╤═══╦═══╗") + line1 = expandLine("║ . │ . ║ . 
║") + line2 = expandLine("╟───┼───╫───╢") + line3 = expandLine("╠═══╪═══╬═══╣") + line4 = expandLine("╚═══╧═══╩═══╝") + + symbol = " 1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ" + + nums = [[""] + [symbol[n] for n in row] for row in board] + print(line0) + for r in range(1, side + 1): + print("".join(n + s for n, s in zip(nums[r - 1], line1.split(".")))) + print([line2, line3, line4][(r % side == 0) + (r % base == 0)]) diff --git a/cornflow-dags/DAG/sudoku/core/solution.py b/cornflow-dags/DAG/sudoku/core/solution.py new file mode 100644 index 000000000..ecd0a70ff --- /dev/null +++ b/cornflow-dags/DAG/sudoku/core/solution.py @@ -0,0 +1,19 @@ +import os +from cornflow_client import SolutionCore +from cornflow_client.core.tools import load_json +from ..core.tools import add_pos_square +import pytups as pt + + +class Solution(SolutionCore): + schema = load_json( + os.path.join(os.path.dirname(__file__), "../schemas/output.json") + ) + + def __init__(self, data: dict): + data = pt.SuperDict(data) + data["assignment"] = pt.TupList(data["assignment"]) + super().__init__(data) + + def get_assignments(self, size): + return add_pos_square(self.data["assignment"], size) diff --git a/cornflow-dags/DAG/sudoku/core/tools.py b/cornflow-dags/DAG/sudoku/core/tools.py new file mode 100644 index 000000000..cb03fc99e --- /dev/null +++ b/cornflow-dags/DAG/sudoku/core/tools.py @@ -0,0 +1,31 @@ +import math +import pytups as pt +from typing import Tuple + + +def row_col_to_pos(row: int, col: int, size: int) -> int: + return row * size + col + + +def row_col_to_square(row: int, col: int, len_square: int) -> int: + return math.floor(row / len_square) * len_square + math.floor(col / len_square) + + +def pos_to_row_col(pos: int, size: int) -> Tuple[int, int]: + row = math.floor(pos / size) + return row, pos - row * size + + +def add_pos_square(values: pt.TupList[pt.SuperDict], size: int): + # I make a copy of the dictionaries inside the list: + values = values.vapply(lambda v: pt.SuperDict(v)) + + # we expand the contents of the initial values with two columns: + # pos: the position of the element. 
+ values.vapply_col("pos", lambda v: row_col_to_pos(v["row"], v["col"], size)) + # square: the square number of the element + len_square = math.floor(math.sqrt(size)) + values.vapply_col( + "square", lambda v: row_col_to_square(v["row"], v["col"], len_square) + ) + return values diff --git a/cornflow-dags/DAG/sudoku/data/easy.txt b/cornflow-dags/DAG/sudoku/data/easy.txt new file mode 100644 index 000000000..f7320fbe6 --- /dev/null +++ b/cornflow-dags/DAG/sudoku/data/easy.txt @@ -0,0 +1,50 @@ +003020600900305001001806400008102900700000008006708200002609500800203009005010300 +200080300060070084030500209000105408000000000402706000301007040720040060004010003 +000000907000420180000705026100904000050000040000507009920108000034059000507000000 +030050040008010500460000012070502080000603000040109030250000098001020600080060020 +020810740700003100090002805009040087400208003160030200302700060005600008076051090 +100920000524010000000000070050008102000000000402700090060000000000030945000071006 +043080250600000000000001094900004070000608000010200003820500000000000005034090710 +480006902002008001900370060840010200003704100001060049020085007700900600609200018 +000900002050123400030000160908000000070000090000000205091000050007439020400007000 +001900003900700160030005007050000009004302600200000070600100030042007006500006800 +000125400008400000420800000030000095060902010510000060000003049000007200001298000 +062340750100005600570000040000094800400000006005830000030000091006400007059083260 +300000000005009000200504000020000700160000058704310600000890100000067080000005437 +630000000000500008005674000000020000003401020000000345000007004080300902947100080 +000020040008035000000070602031046970200000000000501203049000730000000010800004000 +361025900080960010400000057008000471000603000259000800740000005020018060005470329 +050807020600010090702540006070020301504000908103080070900076205060090003080103040 +080005000000003457000070809060400903007010500408007020901020000842300000000100080 +003502900000040000106000305900251008070408030800763001308000104000020000005104800 +000000000009805100051907420290401065000000000140508093026709580005103600000000000 +020030090000907000900208005004806500607000208003102900800605007000309000030020050 +005000006070009020000500107804150000000803000000092805907006000030400010200000600 +040000050001943600009000300600050002103000506800020007005000200002436700030000040 +004000000000030002390700080400009001209801307600200008010008053900040000000000800 +360020089000361000000000000803000602400603007607000108000000000000418000970030014 +500400060009000800640020000000001008208000501700500000000090084003000600060003002 +007256400400000005010030060000508000008060200000107000030070090200000004006312700 +000000000079050180800000007007306800450708096003502700700000005016030420000000000 +030000080009000500007509200700105008020090030900402001004207100002000800070000090 +200170603050000100000006079000040700000801000009050000310400000005000060906037002 +000000080800701040040020030374000900000030000005000321010060050050802006080000000 +000000085000210009960080100500800016000000000890006007009070052300054000480000000 +608070502050608070002000300500090006040302050800050003005000200010704090409060701 +050010040107000602000905000208030501040070020901080406000401000304000709020060010 +053000790009753400100000002090080010000907000080030070500000003007641200061000940 +006080300049070250000405000600317004007000800100826009000702000075040190003090600 
+005080700700204005320000084060105040008000500070803010450000091600508007003010600 +000900800128006400070800060800430007500000009600079008090004010003600284001007000 +000080000270000054095000810009806400020403060006905100017000620460000038000090000 +000602000400050001085010620038206710000000000019407350026040530900020007000809000 +000900002050123400030000160908000000070000090000000205091000050007439020400007000 +380000000000400785009020300060090000800302009000040070001070500495006000000000092 +000158000002060800030000040027030510000000000046080790050000080004070100000325000 +010500200900001000002008030500030007008000500600080004040100700000700006003004050 +080000040000469000400000007005904600070608030008502100900000005000781000060000010 +904200007010000000000706500000800090020904060040002000001607000000000030300005702 +000700800006000031040002000024070000010030080000060290000800070860000500002006000 +001007090590080001030000080000005800050060020004100000080000030100020079020700400 +000003017015009008060000000100007000009000200000500004000000020500600340340200000 +300200000000107000706030500070009080900020004010800050009040301000702000000008006 \ No newline at end of file diff --git a/cornflow-dags/DAG/sudoku/data/example_1 b/cornflow-dags/DAG/sudoku/data/example_1 new file mode 100644 index 000000000..0a9c1419e --- /dev/null +++ b/cornflow-dags/DAG/sudoku/data/example_1 @@ -0,0 +1 @@ +4.....8.5.3..........7......2.....6.....8.4......1.......6.3.7.5..2.....1.4...... diff --git a/cornflow-dags/DAG/sudoku/data/example_2 b/cornflow-dags/DAG/sudoku/data/example_2 new file mode 100644 index 000000000..dce68207a --- /dev/null +++ b/cornflow-dags/DAG/sudoku/data/example_2 @@ -0,0 +1 @@ +.....6....59.....82....8....45........3........6..3.54...325..6.................. \ No newline at end of file diff --git a/cornflow-dags/DAG/sudoku/data/hardest.txt b/cornflow-dags/DAG/sudoku/data/hardest.txt new file mode 100644 index 000000000..83f63c2fa --- /dev/null +++ b/cornflow-dags/DAG/sudoku/data/hardest.txt @@ -0,0 +1,11 @@ +85...24..72......9..4.........1.7..23.5...9...4...........8..7..17..........36.4. +..53.....8......2..7..1.5..4....53...1..7...6..32...8..6.5....9..4....3......97.. +12..4......5.69.1...9...5.........7.7...52.9..3......2.9.6...5.4..9..8.1..3...9.4 +...57..3.1......2.7...234......8...4..7..4...49....6.5.42...3.....7..9....18..... +7..1523........92....3.....1....47.8.......6............9...5.6.4.9.7...8....6.1. +1....7.9..3..2...8..96..5....53..9...1..8...26....4...3......1..4......7..7...3.. +1...34.8....8..5....4.6..21.18......3..1.2..6......81.52..7.9....6..9....9.64...2 +...92......68.3...19..7...623..4.1....1...7....8.3..297...8..91...5.72......64... +.6.5.4.3.1...9...8.........9...5...6.4.6.2.7.7...4...5.........4...8...1.5.2.3.4. +7.....4...2..7..8...3..8.799..5..3...6..2..9...1.97..6...3..9...3..4..6...9..1.35 +....7..2.8.......6.1.2.5...9.54....8.........3....85.1...3.2.8.4.......9.7..6.... 
\ No newline at end of file diff --git a/cornflow-dags/DAG/sudoku/report/.gitignore b/cornflow-dags/DAG/sudoku/report/.gitignore new file mode 100644 index 000000000..075b2542a --- /dev/null +++ b/cornflow-dags/DAG/sudoku/report/.gitignore @@ -0,0 +1 @@ +/.quarto/ diff --git a/cornflow-dags/DAG/sudoku/report/report.qmd b/cornflow-dags/DAG/sudoku/report/report.qmd new file mode 100644 index 000000000..40f88f28e --- /dev/null +++ b/cornflow-dags/DAG/sudoku/report/report.qmd @@ -0,0 +1,105 @@ +--- +title: "Sudoku" +execute: + echo: false + warning: false +format: + html: + embed-resources: true +editor_options: + chunk_output_type: console +--- + +```{python} +#| tags: [parameters] + +file_name = "../data/example_2" +``` + +## Sudoku + +From [wikipedia](https://en.wikipedia.org/wiki/Sudoku): + +> Sudoku (/suːˈdoʊkuː, -ˈdɒk-, sə-/; Japanese: 数独, romanized: sūdoku, lit. 'digit-single'; originally called Number Place)[1] is a logic-based,[2][3] combinatorial[4] number-placement puzzle. In classic Sudoku, the objective is to fill a 9 × 9 grid with digits so that each column, each row, and each of the nine 3 × 3 subgrids that compose the grid (also called "boxes", "blocks", or "regions") contains all of the digits from 1 to 9. The puzzle setter provides a partially completed grid, which for a well-posed puzzle has a single solution. + +```{python} +#| echo: false +import os +import sys +module_path = os.path.abspath(os.path.join('../..')) +if module_path not in sys.path: + sys.path.append(module_path) + +from sudoku import Sudoku +import pytups as pt +import matplotlib.pyplot as plt +from matplotlib import colormaps + +extension = os.path.splitext(file_name)[1] +if extension=='.txt' or extension=='': + # it's an instance, so we should solve it, I guess + my_instance = Sudoku.instance.from_txt_file(file_name) + my_experiment = Sudoku.solvers['cpsat'](instance=my_instance, solution=None) + status = my_experiment.solve({'timeLimit': 5}) +elif extension=='.json': + my_experiment = Sudoku.solvers['cpsat'].from_json(file_name) + my_instance = my_experiment.instance +else: + raise ValueError("Unknown extension: {}".format(extension)) + + +``` + + +## Instance + +The problem has `{python} my_instance.get_size()` rows, cols and squares. + +See @fig-instance for a representation of the unsolved Sudoku. + +```{python} +#| label: fig-instance +#| fig-cap: "Initial values" + +my_instance.print() + +``` + + + +## Solution statistics + +```{python} +objective = my_experiment.get_objective() +checks = my_experiment.check_solution() +feasible = len(checks) == 0 + +``` + +```{python} +#| output: asis + +# The following code shows (1) a box with feasibility + objective function OR (2) root cause of infeasibility. + +if feasible: + print("::: {{.callout-tip}}\n\n## Solution is feasible.\n\n:::".format(objective)) +else: + my_text = "" + if checks['missing_values']: + my_text += 'The solution is missing the following values: {}\n\n'.format(checks['missing_values']) + + print("::: {{.callout-important}}\n\n## Solution is infeasible\n\n{}\n\n:::".format(my_text)) + +``` + +## Solution + +See @fig-solution for a representation of the final Sudoku. 
+ +```{python} +#| label: fig-solution +#| fig-cap: "Sudoku" + +my_experiment.print() + +``` diff --git a/cornflow-dags/DAG/sudoku/schemas/input.json b/cornflow-dags/DAG/sudoku/schemas/input.json new file mode 100644 index 000000000..aadc40fab --- /dev/null +++ b/cornflow-dags/DAG/sudoku/schemas/input.json @@ -0,0 +1,36 @@ +{ + "$schema": "http://json-schema.org/schema#", + "type": "object", + "properties": { + "parameters": { + "description": "Main parameters", + "type": "object", + "properties": { + "size": { + "type": "integer" + } + }, + "required": ["size"] + }, + "initial_values": { + "description": "Values for each row and column", + "type": "array", + "items": { + "type": "object", + "properties": { + "row": { + "type": "integer" + }, + "col": { + "type": "integer" + }, + "value": { + "type": "integer" + } + }, + "required": ["row", "col", "value"] + } + } + }, + "required": ["initial_values", "parameters"] +} \ No newline at end of file diff --git a/cornflow-dags/DAG/sudoku/schemas/output.json b/cornflow-dags/DAG/sudoku/schemas/output.json new file mode 100644 index 000000000..1116d3f8d --- /dev/null +++ b/cornflow-dags/DAG/sudoku/schemas/output.json @@ -0,0 +1,26 @@ +{ + "$schema": "http://json-schema.org/schema#", + "type": "object", + "properties": { + "assignment": { + "description": "Missing values for each row and column", + "type": "array", + "items": { + "type": "object", + "properties": { + "row": { + "type": "integer" + }, + "col": { + "type": "integer" + }, + "value": { + "type": "integer" + } + }, + "required": ["row", "col", "value"] + } + } + }, + "required": ["assignment"] +} \ No newline at end of file diff --git a/cornflow-dags/DAG/sudoku/solvers/__init__.py b/cornflow-dags/DAG/sudoku/solvers/__init__.py new file mode 100644 index 000000000..4fe892115 --- /dev/null +++ b/cornflow-dags/DAG/sudoku/solvers/__init__.py @@ -0,0 +1 @@ +from .cp_model import OrToolsCP diff --git a/cornflow-dags/DAG/sudoku/solvers/cp_model.py b/cornflow-dags/DAG/sudoku/solvers/cp_model.py new file mode 100644 index 000000000..08400ae40 --- /dev/null +++ b/cornflow-dags/DAG/sudoku/solvers/cp_model.py @@ -0,0 +1,71 @@ +import math + +from ortools.sat.python import cp_model +from cornflow_client.constants import ( + ORTOOLS_STATUS_MAPPING, + SOLUTION_STATUS_FEASIBLE, + SOLUTION_STATUS_INFEASIBLE, +) +import pytups as pt +from ..core import Solution, Experiment +from ..core.tools import pos_to_row_col + + +class OrToolsCP(Experiment): + def solve(self, options: dict): + model = cp_model.CpModel() + initial_values = self.instance.get_initial_values() + size = self.instance.get_size() + value_per_pos = initial_values.to_dict("value", indices="pos", is_list=False) + all_positions = self.instance.generate_all_positions() + + def get_var_or_number(v): + + pos = v["pos"] + row = v["row"] + col = v["col"] + + if pos in value_per_pos: + return value_per_pos[v["pos"]] + # assignment values start at 1 (e.g., 1 -> 9 in classic sudoku) + return model.NewIntVar(lb=1, ub=size, name=f"assign_{row}_{col}") + + my_elements = all_positions.vapply(get_var_or_number) + + # unique over squares, rows, cols + my_groups = ["square", "row", "col"] + for group in my_groups: + positions_per_group = all_positions.values_tl().to_dict( + "pos", indices=group + ) + for values in positions_per_group.values(): + model.AddAllDifferent(my_elements.filter(values).values()) + + solver = cp_model.CpSolver() + solver.parameters.max_time_in_seconds = options.get("timeLimit", 10) + if options.get("msg"): + 
solver.parameters.log_search_progress = True + termination_condition = solver.Solve(model) + if termination_condition not in [cp_model.OPTIMAL, cp_model.FEASIBLE]: + return dict( + status=ORTOOLS_STATUS_MAPPING.get(termination_condition), + status_sol=SOLUTION_STATUS_INFEASIBLE, + ) + + assignment_values = my_elements.vapply(solver.Value) + + def solution_enc(pos, value): + row, col = pos_to_row_col(pos, size) + return pt.SuperDict(row=row, col=col, value=value) + + solution_data = ( + assignment_values.kfilter(lambda k: k not in value_per_pos) + .kvapply(solution_enc) + .values_tl() + ) + self.solution = Solution(dict(assignment=solution_data)) + + return dict( + status=ORTOOLS_STATUS_MAPPING.get(termination_condition), + status_sol=SOLUTION_STATUS_FEASIBLE, + ) diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index 8adc6ed8e..9e9a1e67f 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -468,6 +468,30 @@ def test_report(self): ) +class Sudoku(BaseDAGTests.SolvingTests): + def setUp(self): + super().setUp() + from DAG.sudoku import Sudoku + + self.app = Sudoku() + + def test_report(self): + tests = self.app.test_cases + my_experim = self.app.solvers["cpsat"](self.app.instance(tests[0]["instance"])) + my_experim.solve(dict()) + + # let's just check for an element inside the html that we know should exist + # in this case a few 'section' tags with an attribute with a specific id + things_to_look = dict( + section=[ + ("id", "solution"), + ("id", "instance"), + ("id", "sudoku"), + ] + ) + self.generate_check_report(my_experim, things_to_look) + + class HTMLCheckTags(HTMLParser): things_to_check: Optional[Dict[str, List[Tuple[str, str]]]] From 9b044dbf67eca39818d2c28722cec6fbb3761047 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Thu, 15 Aug 2024 15:53:19 +0200 Subject: [PATCH 78/84] changes while testing the windproblem --- cornflow-dags/DAG/knapsack/schemas/instance.json | 2 +- cornflow-dags/DAG/wind_problem.py | 1 + cornflow-dags/requirements.txt | 1 + cornflow-server/cornflow/endpoints/reports.py | 2 -- cornflow-server/cornflow/endpoints/schemas.py | 2 +- 5 files changed, 4 insertions(+), 4 deletions(-) create mode 100644 cornflow-dags/DAG/wind_problem.py diff --git a/cornflow-dags/DAG/knapsack/schemas/instance.json b/cornflow-dags/DAG/knapsack/schemas/instance.json index 6bad5ef86..5b8b94078 100644 --- a/cornflow-dags/DAG/knapsack/schemas/instance.json +++ b/cornflow-dags/DAG/knapsack/schemas/instance.json @@ -17,7 +17,7 @@ "type": "integer" } }, - "required": ["weight", "value", "id"] + "required": ["id", "weight", "value"] } }, "parameters": { diff --git a/cornflow-dags/DAG/wind_problem.py b/cornflow-dags/DAG/wind_problem.py new file mode 100644 index 000000000..15511cd41 --- /dev/null +++ b/cornflow-dags/DAG/wind_problem.py @@ -0,0 +1 @@ +from windenergybattery import WindEnergyBattery diff --git a/cornflow-dags/requirements.txt b/cornflow-dags/requirements.txt index 6af60cc28..7592678d1 100644 --- a/cornflow-dags/requirements.txt +++ b/cornflow-dags/requirements.txt @@ -16,6 +16,7 @@ ortools pyomo tsplib95<=0.7.1 hackathonbaobab2020[solvers] +git+ssh://git@github.com/pchtsp/windenergybattery.git@article_case_validation#egg=windenergybattery networkx # quarto and reports: diff --git a/cornflow-server/cornflow/endpoints/reports.py b/cornflow-server/cornflow/endpoints/reports.py index 58d202366..84068af58 100644 --- a/cornflow-server/cornflow/endpoints/reports.py +++ b/cornflow-server/cornflow/endpoints/reports.py @@ 
-129,7 +129,6 @@ def put(self, idx, **data): a message) and an integer with the HTTP status code. :rtype: Tuple(dict, integer) """ - # TODO: forbid non-service users from running put current_app.logger.info(f"User {self.get_user()} edits report {idx}") report = self.get_detail(idx=idx) @@ -189,7 +188,6 @@ def get(self, idx): # TODO: are we able to download the name in the database and not as part of the file? current_app.logger.info(f"User {self.get_user()} gets details of report {idx}") report = self.get_detail(user=self.get_user(), idx=idx) - if report is None: raise ObjectDoesNotExist diff --git a/cornflow-server/cornflow/endpoints/schemas.py b/cornflow-server/cornflow/endpoints/schemas.py index 8301e8987..fc7778ed2 100644 --- a/cornflow-server/cornflow/endpoints/schemas.py +++ b/cornflow-server/cornflow/endpoints/schemas.py @@ -71,7 +71,7 @@ def get(self, dag_name): "instance_checks": deployed_dag.instance_checks_schema, "solution_checks": deployed_dag.solution_checks_schema, "config": deployed_dag.config_schema, - "name": dag_name + "name": dag_name, }, 200 else: err = "User does not have permission to access this dag" From 31c5c06400e577c96b71a985e605eb9c79dfb255 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 16 Aug 2024 08:11:49 +0200 Subject: [PATCH 79/84] fixes to cornflowclient + tests for WindProblem --- .../DAG/sudoku/schemas/solution_checks.json | 7 ++++ cornflow-dags/requirements.txt | 2 +- cornflow-dags/tests/test_dags.py | 32 +++++++++++++++++++ .../cornflow_client/airflow/dag_utilities.py | 4 +-- 4 files changed, 42 insertions(+), 3 deletions(-) create mode 100644 cornflow-dags/DAG/sudoku/schemas/solution_checks.json diff --git a/cornflow-dags/DAG/sudoku/schemas/solution_checks.json b/cornflow-dags/DAG/sudoku/schemas/solution_checks.json new file mode 100644 index 000000000..5a340fa69 --- /dev/null +++ b/cornflow-dags/DAG/sudoku/schemas/solution_checks.json @@ -0,0 +1,7 @@ +{ + "$schema": "http://json-schema.org/schema#", + "type": "object", + "properties": { + }, + "required": [] +} \ No newline at end of file diff --git a/cornflow-dags/requirements.txt b/cornflow-dags/requirements.txt index ad2c1f31e..49d25c2ec 100644 --- a/cornflow-dags/requirements.txt +++ b/cornflow-dags/requirements.txt @@ -16,7 +16,7 @@ ortools pyomo tsplib95<=0.7.1 hackathonbaobab2020[solvers] -git+ssh://git@github.com/pchtsp/windenergybattery.git@article_case_validation#egg=windenergybattery +git+ssh://git@github.com/baobabsoluciones/windenergybattery.git@main#egg=windenergybattery networkx # quarto and reports: diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index 9e9a1e67f..73507f578 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -209,6 +209,38 @@ def test_report(self): ) +class WindProblem(BaseDAGTests.SolvingTests): + def setUp(self): + super().setUp() + from DAG.wind_problem import WindEnergyBattery + + self.app = WindEnergyBattery() + + @patch("cornflow_client.airflow.dag_utilities.connect_to_cornflow") + def test_complete_report(self, connectCornflow, config=None): + config = config or self.config + config = dict(**config, report=dict(name="report")) + tests = self.app.test_cases + for test_case in tests: + instance_data = test_case.get("instance") + solution_data = test_case.get("solution", None) + if solution_data is None: + solution_data = dict(solution_node_values=[]) + + mock = Mock() + mock.get_data.return_value = dict( + data=instance_data, solution_data=solution_data + ) + mock.get_results.return_value = 
dict(config=config, state=1) + mock.create_report.return_value = dict(id=1) + connectCornflow.return_value = mock + dag_run = Mock() + dag_run.conf = dict(exec_id="exec_id") + cf_report(app=self.app, secrets="", dag_run=dag_run) + mock.create_report.assert_called_once() + mock.put_one_report.assert_called_once() + + class Tsp(BaseDAGTests.SolvingTests): def setUp(self): super().setUp() diff --git a/libs/client/cornflow_client/airflow/dag_utilities.py b/libs/client/cornflow_client/airflow/dag_utilities.py index dea74b071..0f5b5b5c3 100644 --- a/libs/client/cornflow_client/airflow/dag_utilities.py +++ b/libs/client/cornflow_client/airflow/dag_utilities.py @@ -386,8 +386,8 @@ def my_try_to_save(state): # maybe all of this should be abstracted inside the app? # maybe the app should return an Experiment? experiment = app.get_solver(app.get_default_solver_name()) - my_experiment = experiment( - app.instance(input_data), app.solution(solution_data) + my_experiment = experiment.from_dict( + dict(instance=input_data, solution=solution_data) ) print(f"Preparing to write the report: {report_name}") From 022886851275cde1e34427ed44ed84042fd3183b Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 16 Aug 2024 08:42:26 +0200 Subject: [PATCH 80/84] take out github repo link + bump version of cornflowclient --- .../cornflow-client-publish-to-pypi.yml | 6 +- .../workflows/cornflow-publish-to-pypi.yml | 6 +- cornflow-dags/requirements.txt | 1 - cornflow-dags/tests/test_dags.py | 59 ++++++++++--------- libs/client/setup.py | 2 +- 5 files changed, 39 insertions(+), 35 deletions(-) diff --git a/.github/workflows/cornflow-client-publish-to-pypi.yml b/.github/workflows/cornflow-client-publish-to-pypi.yml index 05c43d1f8..98a79916f 100644 --- a/.github/workflows/cornflow-client-publish-to-pypi.yml +++ b/.github/workflows/cornflow-client-publish-to-pypi.yml @@ -30,13 +30,13 @@ jobs: uses: pypa/gh-action-pypi-publish@release/v1 with: password: ${{ secrets.test_pypi_password }} - repository_url: https://test.pypi.org/legacy/ - packages_dir: libs/client/dist/ + repository-url: https://test.pypi.org/legacy/ + packages-dir: libs/client/dist/ - name: Publish distribution 📦 to PyPI uses: pypa/gh-action-pypi-publish@release/v1 with: password: ${{ secrets.pypi_password }} - packages_dir: libs/client/dist/ + packages-dir: libs/client/dist/ - name: Get version number uses: winterjung/split@v2 id: split diff --git a/.github/workflows/cornflow-publish-to-pypi.yml b/.github/workflows/cornflow-publish-to-pypi.yml index b945fb705..561fe7d55 100644 --- a/.github/workflows/cornflow-publish-to-pypi.yml +++ b/.github/workflows/cornflow-publish-to-pypi.yml @@ -30,13 +30,13 @@ jobs: uses: pypa/gh-action-pypi-publish@release/v1 with: password: ${{ secrets.CORNFLOW_TEST_PYPI_TOKEN }} - repository_url: https://test.pypi.org/legacy/ - packages_dir: cornflow-server/dist/ + repository-url: https://test.pypi.org/legacy/ + packages-dir: cornflow-server/dist/ - name: Publish distribution 📦 to PyPI uses: pypa/gh-action-pypi-publish@release/v1 with: password: ${{ secrets.CORNFLOW_PYPI_TOKEN }} - packages_dir: cornflow-server/dist/ + packages-dir: cornflow-server/dist/ - name: Get version number uses: winterjung/split@v2 id: split diff --git a/cornflow-dags/requirements.txt b/cornflow-dags/requirements.txt index 49d25c2ec..d30e34a30 100644 --- a/cornflow-dags/requirements.txt +++ b/cornflow-dags/requirements.txt @@ -16,7 +16,6 @@ ortools pyomo tsplib95<=0.7.1 hackathonbaobab2020[solvers] 
-git+ssh://git@github.com/baobabsoluciones/windenergybattery.git@main#egg=windenergybattery networkx # quarto and reports: diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index 73507f578..ab2f6726a 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -209,36 +209,41 @@ def test_report(self): ) -class WindProblem(BaseDAGTests.SolvingTests): - def setUp(self): - super().setUp() - from DAG.wind_problem import WindEnergyBattery +try: + from DAG.wind_problem import WindEnergyBattery - self.app = WindEnergyBattery() + class WindProblem(BaseDAGTests.SolvingTests): + def setUp(self): + super().setUp() - @patch("cornflow_client.airflow.dag_utilities.connect_to_cornflow") - def test_complete_report(self, connectCornflow, config=None): - config = config or self.config - config = dict(**config, report=dict(name="report")) - tests = self.app.test_cases - for test_case in tests: - instance_data = test_case.get("instance") - solution_data = test_case.get("solution", None) - if solution_data is None: - solution_data = dict(solution_node_values=[]) + self.app = WindEnergyBattery() - mock = Mock() - mock.get_data.return_value = dict( - data=instance_data, solution_data=solution_data - ) - mock.get_results.return_value = dict(config=config, state=1) - mock.create_report.return_value = dict(id=1) - connectCornflow.return_value = mock - dag_run = Mock() - dag_run.conf = dict(exec_id="exec_id") - cf_report(app=self.app, secrets="", dag_run=dag_run) - mock.create_report.assert_called_once() - mock.put_one_report.assert_called_once() + @patch("cornflow_client.airflow.dag_utilities.connect_to_cornflow") + def test_complete_report(self, connectCornflow, config=None): + config = config or self.config + config = dict(**config, report=dict(name="report")) + tests = self.app.test_cases + for test_case in tests: + instance_data = test_case.get("instance") + solution_data = test_case.get("solution", None) + if solution_data is None: + solution_data = dict(solution_node_values=[]) + + mock = Mock() + mock.get_data.return_value = dict( + data=instance_data, solution_data=solution_data + ) + mock.get_results.return_value = dict(config=config, state=1) + mock.create_report.return_value = dict(id=1) + connectCornflow.return_value = mock + dag_run = Mock() + dag_run.conf = dict(exec_id="exec_id") + cf_report(app=self.app, secrets="", dag_run=dag_run) + mock.create_report.assert_called_once() + mock.put_one_report.assert_called_once() + +except ImportError: + pass class Tsp(BaseDAGTests.SolvingTests): diff --git a/libs/client/setup.py b/libs/client/setup.py index eebc5c065..3f9bc6595 100644 --- a/libs/client/setup.py +++ b/libs/client/setup.py @@ -12,7 +12,7 @@ setuptools.setup( name="cornflow-client", - version="1.1.1a2", + version="1.1.1a4", author="baobab soluciones", author_email="sistemas@baobabsoluciones.es", description="Client to connect to a cornflow server", From 5767e21259dda4ef8c625a5e23645148f310a977 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 16 Aug 2024 08:54:39 +0200 Subject: [PATCH 81/84] added already deployed of cornflowclient and github link to installation for wind problem --- cornflow-dags/requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cornflow-dags/requirements.txt b/cornflow-dags/requirements.txt index d30e34a30..bf7762d72 100644 --- a/cornflow-dags/requirements.txt +++ b/cornflow-dags/requirements.txt @@ -1,5 +1,5 @@ # cornflow -cornflow-client==1.1.1a2 +cornflow-client==1.1.1a4 # data 
pandas<=2.1.1 @@ -16,6 +16,7 @@ ortools pyomo tsplib95<=0.7.1 hackathonbaobab2020[solvers] +git+ssh://git@github.com/baobabsoluciones/windenergybattery.git@main#egg=windenergybattery networkx # quarto and reports: From ec4f332b193f920af72591ae8f57cd9ba0571b22 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 20 Sep 2024 09:26:52 +0200 Subject: [PATCH 82/84] sudoku changes --- cornflow-dags/DAG/sudoku/__init__.py | 4 +- cornflow-dags/DAG/sudoku/core/experiment.py | 41 +++- cornflow-dags/DAG/sudoku/core/instance.py | 15 +- cornflow-dags/DAG/sudoku/core/solution.py | 21 +- cornflow-dags/DAG/sudoku/report/report.qmd | 27 ++- cornflow-dags/DAG/sudoku/schemas/output.json | 33 +++ cornflow-dags/DAG/sudoku/solvers/__init__.py | 1 + cornflow-dags/DAG/sudoku/solvers/cp_model.py | 51 ++++- cornflow-dags/DAG/sudoku/solvers/norvig.py | 214 +++++++++++++++++++ cornflow-dags/tests/test_dags.py | 93 ++++++++ cornflow-server/requirements.txt | 2 +- 11 files changed, 478 insertions(+), 24 deletions(-) create mode 100644 cornflow-dags/DAG/sudoku/solvers/norvig.py diff --git a/cornflow-dags/DAG/sudoku/__init__.py b/cornflow-dags/DAG/sudoku/__init__.py index e710ed82e..a9949827c 100644 --- a/cornflow-dags/DAG/sudoku/__init__.py +++ b/cornflow-dags/DAG/sudoku/__init__.py @@ -2,7 +2,7 @@ from typing import List, Dict import os -from .solvers import OrToolsCP +from .solvers import OrToolsCP, Norvig from .core import Instance, Solution @@ -10,7 +10,7 @@ class Sudoku(ApplicationCore): name = "sudoku" instance = Instance solution = Solution - solvers = dict(cpsat=OrToolsCP) + solvers = dict(cpsat=OrToolsCP, norvig=Norvig) schema = get_empty_schema( properties=dict(timeLimit=dict(type="number")), solvers=list(solvers.keys()) ) diff --git a/cornflow-dags/DAG/sudoku/core/experiment.py b/cornflow-dags/DAG/sudoku/core/experiment.py index f603fdb59..c243cf74d 100644 --- a/cornflow-dags/DAG/sudoku/core/experiment.py +++ b/cornflow-dags/DAG/sudoku/core/experiment.py @@ -5,6 +5,8 @@ from .solution import Solution import os import quarto +import plotnine as pn +import pandas as pd class Experiment(ExperimentCore): @@ -33,9 +35,15 @@ def solution(self, value): def get_objective(self) -> float: return 0 - def get_complete_solution(self): - initial_values = self.instance.get_initial_values() - solution_values = self.solution.get_assignments(self.instance.get_size()) + def get_complete_solution(self, id=None): + initial_values = self.instance.get_initial_values().vapply(pt.SuperDict) + initial_values.vapply_col("initial", lambda v: True) + if id is None: + solution_values = self.solution.get_assignments(self.instance.get_size()) + else: + solution_values = self.solution.get_others(self.instance.get_size(), id=id) + solution_values = solution_values.vapply(pt.SuperDict) + solution_values.vapply_col("initial", lambda v: False) return solution_values + initial_values def check_solution(self, *args, **kwargs) -> dict: @@ -64,7 +72,30 @@ def generate_report(self, report_name="report") -> str: return self.generate_report_quarto(quarto, report_name=report_name) - def print(self): - values = self.get_complete_solution() + def print(self, id=None): + values = self.get_complete_solution(id=id) board = self.instance.values_to_matrix(values) return self.instance.generate_board(board) + + def plot(self, id=None): + + my_solution = self.get_complete_solution(id=id) + my_table = pd.DataFrame(my_solution) + a = pt.TupList(range(10)).vapply(lambda v: v - 0.5) + return ( + pn.ggplot(my_table, pn.aes(x="row", y="col", fill="initial")) + + 
pn.geom_tile(pn.aes(width=1, height=1)) + + pn.geom_text(pn.aes(label="value"), size=10) + + pn.theme_void() + # + pn.labs(fill='') + + pn.scale_fill_manual(values=["white", "lightgreen"], guide=None) + + pn.geom_vline(xintercept=a, color="black", size=0.5, linetype="dashed") + + pn.geom_hline(yintercept=a, color="black", size=0.5, linetype="dashed") + + pn.geom_vline(xintercept=[-0.5, 2.5, 5.5, 8.5], color="black", size=3) + + pn.geom_hline(yintercept=[-0.5, 2.5, 5.5, 8.5], color="black", size=3) + + pn.xlim(-0.5, 8.5) + + pn.ylim(-0.5, 8.5) + ) + + def get_others(self): + return self.solution.get_others(self.instance.get_size()) diff --git a/cornflow-dags/DAG/sudoku/core/instance.py b/cornflow-dags/DAG/sudoku/core/instance.py index a2afe81b3..606cd1be6 100644 --- a/cornflow-dags/DAG/sudoku/core/instance.py +++ b/cornflow-dags/DAG/sudoku/core/instance.py @@ -5,6 +5,8 @@ import math from .tools import pos_to_row_col, add_pos_square, row_col_to_pos, row_col_to_square +from typing import Optional + class Instance(InstanceCore): schema = load_json(os.path.join(os.path.dirname(__file__), "../schemas/input.json")) @@ -17,9 +19,16 @@ def __init__(self, data: dict): super().__init__(data) @classmethod - def from_txt_file(cls, filePath, line_number: int = 0): - with open(filePath, "r") as f: - contents = f.read().splitlines() + def from_txt_file( + cls, filePath, line_number: int = 0, contents: Optional[str] = None + ): + # if content is given, filePath is ignored: + if contents is None: + with open(filePath, "r") as f: + contents = f.read().splitlines() + else: + contents = [contents] + line_number = 0 empty_chars = {".", "0"} my_chars = [ el if el not in empty_chars else None for el in contents[line_number] diff --git a/cornflow-dags/DAG/sudoku/core/solution.py b/cornflow-dags/DAG/sudoku/core/solution.py index ecd0a70ff..37d7d757a 100644 --- a/cornflow-dags/DAG/sudoku/core/solution.py +++ b/cornflow-dags/DAG/sudoku/core/solution.py @@ -12,8 +12,27 @@ class Solution(SolutionCore): def __init__(self, data: dict): data = pt.SuperDict(data) - data["assignment"] = pt.TupList(data["assignment"]) + properties = self.schema["properties"] + for key in data: + if properties[key]["type"] == "array": + data[key] = pt.TupList(data[key]) + else: + # if properties[key]['type'] == 'object': + data[key] = pt.SuperDict(data[key]) super().__init__(data) def get_assignments(self, size): return add_pos_square(self.data["assignment"], size) + + def get_others(self, size, id=None): + if "alternatives" in self.data: + if id is None: + return add_pos_square(self.data["alternatives"], size) + else: + alternative = self.data["alternatives"].vfilter(lambda v: v["id"] == id) + return add_pos_square(alternative, size) + else: + return pt.TupList() + + def get_indicators(self): + return self.data.get("indicators", pt.SuperDict()) diff --git a/cornflow-dags/DAG/sudoku/report/report.qmd b/cornflow-dags/DAG/sudoku/report/report.qmd index 40f88f28e..c76999b64 100644 --- a/cornflow-dags/DAG/sudoku/report/report.qmd +++ b/cornflow-dags/DAG/sudoku/report/report.qmd @@ -65,14 +65,13 @@ my_instance.print() ``` - - ## Solution statistics ```{python} objective = my_experiment.get_objective() checks = my_experiment.check_solution() feasible = len(checks) == 0 +indicators = my_experiment.solution.get_indicators() ``` @@ -82,7 +81,11 @@ feasible = len(checks) == 0 # The following code shows (1) a box with feasibility + objective function OR (2) root cause of infeasibility. 
if feasible: - print("::: {{.callout-tip}}\n\n## Solution is feasible.\n\n:::".format(objective)) + my_text = "Solution is feasible" + if "num_fails" in indicators: + my_text += ' and difficulty is {}'.format(indicators['num_fails']) + print("::: {{.callout-tip}}\n\n## {}.\n\n:::".format(my_text)) + else: my_text = "" if checks['missing_values']: @@ -100,6 +103,22 @@ See @fig-solution for a representation of the final Sudoku. #| label: fig-solution #| fig-cap: "Sudoku" -my_experiment.print() +my_experiment.plot() ``` + +## Alternative solutions + + +In case there are alternative solutions to the problem, they are shown here: + +```{python} +#| label: fig-solution-alternatives +#| fig-cap: "Alternative solutions" + +my_ids = my_experiment.get_others().take('id').unique() +for _id in my_ids: + display(my_experiment.plot(_id)) + plt.show() +``` + diff --git a/cornflow-dags/DAG/sudoku/schemas/output.json b/cornflow-dags/DAG/sudoku/schemas/output.json index 1116d3f8d..8e1be8f69 100644 --- a/cornflow-dags/DAG/sudoku/schemas/output.json +++ b/cornflow-dags/DAG/sudoku/schemas/output.json @@ -20,6 +20,39 @@ }, "required": ["row", "col", "value"] } + }, + "alternatives": { + "description": "alternative sudokus (in case of multiple solutions)", + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "row": { + "type": "integer" + }, + "col": { + "type": "integer" + }, + "value": { + "type": "integer" + } + }, + "required": ["id", "row", "col", "value"] + } + }, + "indicators": { + "description": "Main indicators", + "type": "object", + "properties": { + "num_fails": { + "description": "Number of fail searches (the more, the harder the problem)", + "type": "integer" + } + }, + "required": [] } }, "required": ["assignment"] diff --git a/cornflow-dags/DAG/sudoku/solvers/__init__.py b/cornflow-dags/DAG/sudoku/solvers/__init__.py index 4fe892115..571619123 100644 --- a/cornflow-dags/DAG/sudoku/solvers/__init__.py +++ b/cornflow-dags/DAG/sudoku/solvers/__init__.py @@ -1 +1,2 @@ from .cp_model import OrToolsCP +from .norvig import Norvig diff --git a/cornflow-dags/DAG/sudoku/solvers/cp_model.py b/cornflow-dags/DAG/sudoku/solvers/cp_model.py index 08400ae40..a3f7893b2 100644 --- a/cornflow-dags/DAG/sudoku/solvers/cp_model.py +++ b/cornflow-dags/DAG/sudoku/solvers/cp_model.py @@ -45,25 +45,60 @@ def get_var_or_number(v): solver.parameters.max_time_in_seconds = options.get("timeLimit", 10) if options.get("msg"): solver.parameters.log_search_progress = True - termination_condition = solver.Solve(model) + # we want to find all the potential solutions for the sudoku + solver.parameters.enumerate_all_solutions = True + + # we want all sudokus, in case of more than one solution + class VarArraySolutionCollector(cp_model.CpSolverSolutionCallback): + + def __init__(self, variables): + cp_model.CpSolverSolutionCallback.__init__(self) + self.__variables = variables + self.solution_list = [] + + def on_solution_callback(self): + # my_elements + self.solution_list.append(self.__variables.vapply(self.Value)) + + solution_collector = VarArraySolutionCollector(my_elements) + termination_condition = solver.Solve(model, solution_collector) + all_solutions = solution_collector.solution_list + other_solutions = [] + if len(all_solutions) >= 1: + assignment_values = solution_collector.solution_list[0] + if len(all_solutions) > 1: + other_solutions = solution_collector.solution_list[1:] if termination_condition not in [cp_model.OPTIMAL, cp_model.FEASIBLE]: return dict( 
status=ORTOOLS_STATUS_MAPPING.get(termination_condition), status_sol=SOLUTION_STATUS_INFEASIBLE, ) - assignment_values = my_elements.vapply(solver.Value) - def solution_enc(pos, value): row, col = pos_to_row_col(pos, size) return pt.SuperDict(row=row, col=col, value=value) - solution_data = ( - assignment_values.kfilter(lambda k: k not in value_per_pos) - .kvapply(solution_enc) - .values_tl() + def assignment_to_solution(values: pt.SuperDict) -> pt.TupList[pt.SuperDict]: + return ( + values.kfilter(lambda k: k not in value_per_pos) + .kvapply(solution_enc) + .values_tl() + ) + + def treat_others(others): + result = pt.TupList() + for position, solution in enumerate(others): + values = assignment_to_solution(solution) + values.vapply_col("id", lambda v: position) + result.extend(values) + return result + + self.solution = Solution.from_dict( + dict( + assignment=assignment_to_solution(assignment_values), + alternatives=treat_others(other_solutions), + ) ) - self.solution = Solution(dict(assignment=solution_data)) return dict( status=ORTOOLS_STATUS_MAPPING.get(termination_condition), diff --git a/cornflow-dags/DAG/sudoku/solvers/norvig.py b/cornflow-dags/DAG/sudoku/solvers/norvig.py new file mode 100644 index 000000000..3767bfc24 --- /dev/null +++ b/cornflow-dags/DAG/sudoku/solvers/norvig.py @@ -0,0 +1,214 @@ +# from ortools.sat.python import cp_model +from cornflow_client.constants import ( + STATUS_OPTIMAL, + STATUS_UNDEFINED, + SOLUTION_STATUS_FEASIBLE, + SOLUTION_STATUS_INFEASIBLE, +) +import pytups as pt +from ..core import Solution, Experiment +from ..core.tools import pos_to_row_col + +# this method implements Peter Norvig's method as explained here: +# credit goes to Peter Norvig +# https://github.com/norvig/pytudes/blob/main/ipynb/Sudoku.ipynb +import re +from typing import Dict, Optional, List + + +def cross(A, B) -> tuple: + "Cross product of strings in A and strings in B." + return tuple(a + b for a in A for b in B) + + +Digit = str # e.g. '1' +digits = "123456789" +DigitSet = str # e.g. '123' +rows = "ABCDEFGHI" +cols = digits +Square = str # e.g. 'A9' +squares = cross(rows, cols) +Grid = Dict[Square, DigitSet] # E.g. {'A9': '123', ...} +all_boxes = [ + cross(rs, cs) for rs in ("ABC", "DEF", "GHI") for cs in ("123", "456", "789") +] +all_units = [cross(rows, c) for c in cols] + [cross(r, cols) for r in rows] + all_boxes +units = {s: tuple(u for u in all_units if s in u) for s in squares} +peers = {s: set().union(*units[s]) - {s} for s in squares} +Picture = str + + +def is_solution(solution: Grid, puzzle: Grid) -> bool: + "Is this proposed solution to the puzzle actually valid?" + return ( + solution is not None + and all(solution[s] in puzzle[s] for s in squares) + and all({solution[s] for s in unit} == set(digits) for unit in all_units) + ) + + +def constrain(grid) -> Grid: + "Propagate constraints on a copy of grid to yield a new constrained Grid." 
+ result: Grid = {s: digits for s in squares} + for s in grid: + if len(grid[s]) == 1: + fill(result, s, grid[s]) + return result + + +def fill(grid: Grid, s: Square, d: Digit) -> Optional[Grid]: + """Eliminate all the digits except d from grid[s].""" + if grid[s] == d or all(eliminate(grid, s, d2) for d2 in grid[s] if d2 != d): + return grid + else: + return None + + +def eliminate(grid: Grid, s: Square, d: Digit) -> Optional[Grid]: + """Eliminate d from grid[s]; implement the two constraint propagation strategies.""" + if d not in grid[s]: + return grid ## Already eliminated + grid[s] = grid[s].replace(d, "") + if not grid[s]: + return None ## None: no legal digit left + elif len(grid[s]) == 1: + # 1. If a square has only one possible digit, then eliminate that digit as a possibility for each of the square's peers. + d2 = grid[s] + if not all(eliminate(grid, s2, d2) for s2 in peers[s]): + return None ## None: can't eliminate d2 from some square + for u in units[s]: + dplaces = [s for s in u if d in grid[s]] + # 2. If a unit has only one possible square that can hold a digit, then fill the square with the digit. + if not dplaces or (len(dplaces) == 1 and not fill(grid, dplaces[0], d)): + return None ## None: no place in u for d + return grid + + +def parse(picture) -> Grid: + """Convert a Picture to a Grid.""" + vals = re.findall(r"[.1-9]|[{][1-9]+[}]", picture) + assert len(vals) == 81 + return { + s: digits if v == "." else re.sub(r"[{}]", "", v) for s, v in zip(squares, vals) + } + + +def picture(grid) -> Picture: + """Convert a Grid to a Picture string, one line at a time.""" + if grid is None: + return "None" + + def val(d: DigitSet) -> str: + return "." if d == digits else d if len(d) == 1 else "{" + d + "}" + + maxwidth = max(len(val(grid[s])) for s in grid) + dash1 = "-" * (maxwidth * 3 + 2) + dash3 = "\n" + "+".join(3 * [dash1]) + + def cell(r, c): + return val(grid[r + c]).center(maxwidth) + ("|" if c in "36" else " ") + + def line(r): + return "".join(cell(r, c) for c in cols) + (dash3 if r in "CF" else "") + + return "\n".join(map(line, rows)) + + +def search(grid) -> Optional[Grid]: + "Depth-first search with constraint propagation to find a solution." + + if grid is None: + return None + s = min( + (s for s in squares if len(grid[s]) > 1), + default=None, + key=lambda s: len(grid[s]), + ) + if s is None: # No squares with multiple possibilities; the search has succeeded + return grid + for d in grid[s]: + solution = search(fill(grid.copy(), s, d)) + if solution: + return solution + return None + + +def solve_puzzles(puzzles: List[Grid], verbose=True) -> int: + "Solve and verify each puzzle, and if `verbose`, print puzzle and solution." + for puzzle in puzzles: + solution = search(constrain(puzzle)) + assert is_solution(solution, puzzle) + if verbose: + print_side_by_side( + "\nPuzzle:\n" + picture(puzzle), "\nSolution:\n" + picture(solution) + ) + return len(puzzles) + + +def print_side_by_side(left, right, width=20): + """Print two strings side-by-side, line-by-line, each side `width` wide.""" + for L, R in zip(left.splitlines(), right.splitlines()): + print(L.ljust(width), R.ljust(width)) + + +class Norvig(Experiment): + fail_counter: int + + def search(self, grid) -> Optional[Grid]: + """ + "Depth-first search with constraint propagation to find a solution." + Note: we have modified the original stateless function so we can count the number of failed searches. 
+ """ + + if grid is None: + self.fail_counter += 1 + return None + s = min( + (s for s in squares if len(grid[s]) > 1), + default=None, + key=lambda s: len(grid[s]), + ) + if ( + s is None + ): # No squares with multiple possibilities; the search has succeeded + return grid + for d in grid[s]: + solution = self.search(fill(grid.copy(), s, d)) + if solution: + return solution + self.fail_counter += 1 + return None + + def solve(self, options: dict): + # we need to convert the instance into the right format. + initial_values = self.instance.get_initial_values() + size = self.instance.get_size() + if size != 9: + raise ValueError("This method only solves size-9 sudokus") + + # we initialize the grid and then fill the initial values: + grid = {s: digits for s in squares} + for value in initial_values: + grid[squares[value["pos"]]] = str(value["value"]) + # we initialize the fail counter + self.fail_counter = 0 + # we run the "constrain + search" procedure + solution = self.search(constrain(grid)) + if not is_solution(solution, grid): + return dict( + status=STATUS_UNDEFINED, + status_sol=SOLUTION_STATUS_INFEASIBLE, + ) + # we put the solution in the right format + solution_data = pt.TupList() + for pos, s in enumerate(squares): + value = int(solution[s]) + row, col = pos_to_row_col(pos, size) + solution_data.append(pt.SuperDict(row=row, col=col, value=value)) + self.solution = Solution.from_dict( + pt.SuperDict( + assignment=solution_data, + indicators=dict(num_fails=self.fail_counter), + ) + ) + return dict(status=STATUS_OPTIMAL, status_sol=SOLUTION_STATUS_FEASIBLE) diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index ab2f6726a..cf6feb866 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -528,6 +528,99 @@ def test_report(self): ) self.generate_check_report(my_experim, things_to_look) + def test_two_solutions(self): + my_instance = self.app.instance.from_txt_file( + filePath=None, + contents="..........12.34567.345.6182..1.582.6..86....1.2...7.5...37.5.28.8..6.7..2.7..3615", + ) + my_experim = self.app.solvers["cpsat"](my_instance) + my_experim.solve(dict()) + my_experim.solution.check_schema() + indicators = my_experim.solution.get_indicators() + self.assertFalse("num_fails" in indicators) + others = my_experim.solution.get_others(my_experim.instance.get_size()) + required_keys = ["pos", "square", "id", "col", "row", "value"] + self.assertTrue( + len(others[0].keys_tl().intersect(required_keys)) == len(required_keys) + ) + + def test_easy_norvig(self): + dataset = [t for t in self.app.test_cases if t["name"].startswith("hardest")][0] + # we try solving in the standard way: + self.app.solve(dataset["instance"], dict(solver="norvig")) + + # we solve it in more detail + my_experim = self.app.solvers["norvig"]( + self.app.instance.from_dict(dataset["instance"]) + ) + my_experim.solve(dict()) + my_experim.solution.check_schema() + indicators = my_experim.solution.get_indicators() + self.assertTrue("num_fails" in indicators) + others = my_experim.solution.get_others(my_experim.instance.get_size()) + self.assertTrue(len(others) == 0) + + def test_print(self): + my_instance = self.app.instance.from_txt_file( + filePath=None, + contents="..........12.34567.345.6182..1.582.6..86....1.2...7.5...37.5.28.8..6.7..2.7..3615", + ) + my_experim = self.app.solvers["cpsat"](my_instance) + my_experim.solve(dict()) + my_ids = my_experim.get_others().take("id").unique() + for _id in my_ids: + my_experim.print(_id) + + def test_plot(self): + 
my_instance = self.app.instance.from_txt_file( + filePath=None, + contents="..........12.34567.345.6182..1.582.6..86....1.2...7.5...37.5.28.8..6.7..2.7..3615", + ) + my_experim = self.app.solvers["cpsat"](my_instance) + my_experim.solve(dict()) + my_experim.plot() + + def test_report2(self): + my_instance = self.app.instance.from_txt_file( + filePath=None, + contents="..........12.34567.345.6182..1.582.6..86....1.2...7.5...37.5.28.8..6.7..2.7..3615", + ) + my_experim = self.app.solvers["cpsat"](my_instance) + my_experim.solve(dict()) + + # let's just check for an element inside the html that we know should exist + # in this case a few 'section' tags with an attribute with a specific id + things_to_look = dict( + section=[ + ("id", "solution"), + ("id", "instance"), + ("id", "sudoku"), + ] + ) + self.generate_check_report(my_experim, things_to_look) + + def test_report3(self): + dataset = [t for t in self.app.test_cases if t["name"].startswith("hardest")][0] + # we try solving in the standard way: + self.app.solve(dataset["instance"], dict(solver="norvig")) + + # we solve it in more detail + my_experim = self.app.solvers["norvig"]( + self.app.instance.from_dict(dataset["instance"]) + ) + my_experim.solve(dict()) + + # let's just check for an element inside the html that we know should exist + # in this case a few 'section' tags with an attribute with a specific id + things_to_look = dict( + section=[ + ("id", "solution"), + ("id", "instance"), + ("id", "sudoku"), + ] + ) + self.generate_check_report(my_experim, things_to_look) + class HTMLCheckTags(HTMLParser): things_to_check: Optional[Dict[str, List[Tuple[str, str]]]] diff --git a/cornflow-server/requirements.txt b/cornflow-server/requirements.txt index 1547c445d..b1c37f6ce 100644 --- a/cornflow-server/requirements.txt +++ b/cornflow-server/requirements.txt @@ -1,7 +1,7 @@ alembic==1.9.2 apispec<=6.2.0 click<=8.1.3 -cornflow-client==1.1.1a2 +cornflow-client==1.1.1a4 cryptography<=42.0.5 disposable-email-domains>=0.0.86 Flask==2.3.2 From c4753aa1a8c7b8f76e87bd6195804003e2deb869 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 20 Sep 2024 09:27:40 +0200 Subject: [PATCH 83/84] improve README + filter tests with flag --- cornflow-dags/README.rst | 50 ++++++++++++++++--- cornflow-dags/tests/test_dags.py | 32 ++++++------ .../cornflow_client/core/application.py | 20 +++++++- 3 files changed, 81 insertions(+), 21 deletions(-) diff --git a/cornflow-dags/README.rst b/cornflow-dags/README.rst index 066c62862..9c78d6928 100644 --- a/cornflow-dags/README.rst +++ b/cornflow-dags/README.rst @@ -4,11 +4,8 @@ Cornflow-dags Public DAGs for cornflow server -Uploading a new app / solver -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - Setting the environment ------------------------- +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This project requires python 3.7 or above:: @@ -16,6 +13,46 @@ This project requires python 3.7 or above:: venv/Scripts/activate pip install -r requirements.txt +Optionally, to generate reports, it is required to install quarto: https://quarto.org/docs/download/. 
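A quick way to check for that optional dependency from Python before trying to render a report (purely illustrative; it only looks for the `quarto` executable on the PATH)::

    import shutil

    if shutil.which("quarto") is None:
        print("quarto was not found on the PATH; reports cannot be rendered")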
+ +Testing +~~~~~~~~~~~~~~~~~~~~~ + +To run all tests you may want to do the following: + + python -m unittest tests.test_dags + +To run the specific tests for one of the apps, just choose the name of the DAG (example: Tsp): + + python -m unittest tests.test_dags.Tsp + +Running an app +~~~~~~~~~~~~~~~~~~~~~ + +from python (example with GraphColoring):: + + from DAG.graph_coloring import GraphColoring + + app = GraphColoring() + # we load an example dataset: + tests = app.get_unittest_cases() + instance_data = tests[0].get("instance") + # we instantiate the instance + instance = app.instance.from_dict(instance_data) + # we get the default solver (solvers available in app.solvers) + s = app.get_default_solver_name() + my_experim = app.get_solver(s)(instance, None) + # we solve the problem + my_experim.solve(dict()) + # the solution is stored in solution: + my_experim.solution.to_dict() + # some apps allow generating an html report (Quarto required) + path_to_report = my_experim.generate_report() + + +Uploading a new app / solver +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Introduction ------------- @@ -307,7 +344,7 @@ The reports -------------- The generation of reports needs to have the `quarto` app installed in the system. -To downlodad and install quarto, check here: https://quarto.org/docs/download/. +To download and install quarto, check here: https://quarto.org/docs/download/. A report is a static/ self-contained view of an Experiment (solved or not). @@ -324,4 +361,5 @@ Developing reports Quarto reports are easier to create using VS-code with the following extensions: `Python`, `Quarto`, `Jupyter`, `black (Microsoft)`. -VS-code offers an interactive window to execute cells, and automatic re-run of the report by watching for changes. \ No newline at end of file +VS-code offers an interactive window to execute cells, and automatic re-run of the report by watching for changes. 
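The sketch below, which uses the Sudoku app and its `cpsat` solver for illustration, solves one of the fast unittest cases and then renders the report by name. As the test `test_report_error` checks, `generate_report` raises `FileNotFoundError` when no `.qmd` template with the given name exists. The import path and the template name "report" are assumptions taken from the examples and test configuration in this patch and may differ for other apps::

    from DAG.sudoku import Sudoku

    app = Sudoku()
    # take the first fast test case shipped with the app
    instance_data = app.get_unittest_cases()[0]["instance"]
    my_experim = app.solvers["cpsat"](app.instance.from_dict(instance_data))
    my_experim.solve(dict())

    try:
        # the sudoku report template is assumed to be called "report"
        path_to_report = my_experim.generate_report(report_name="report")
    except FileNotFoundError:
        # raised when no .qmd template with that name exists for the app
        path_to_report = None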
+ diff --git a/cornflow-dags/tests/test_dags.py b/cornflow-dags/tests/test_dags.py index cf6feb866..6eded1831 100644 --- a/cornflow-dags/tests/test_dags.py +++ b/cornflow-dags/tests/test_dags.py @@ -81,7 +81,7 @@ def generate_check_report(self, my_experim, things_to_look, verbose=False): def test_try_solving_testcase(self, config=None): config = config or self.config - tests = self.app.test_cases + tests = self.app.get_unittest_cases() for test_case in tests: instance_data = test_case.get("instance") @@ -144,7 +144,7 @@ def test_try_solving_testcase(self, config=None): @patch("cornflow_client.airflow.dag_utilities.connect_to_cornflow") def test_complete_solve(self, connectCornflow, config=None): config = config or self.config - tests = self.app.test_cases + tests = self.app.get_unittest_cases() for test_case in tests: instance_data = test_case.get("instance") solution_data = test_case.get("solution", None) @@ -183,7 +183,7 @@ def setUp(self): self.config = dict(msg=False) def test_incomplete_solution(self): - tests = self.app.test_cases + tests = self.app.get_unittest_cases() solution_data = dict(assignment=[dict(node=1, color=1), dict(node=3, color=1)]) my_experim = self.app.solvers["default"]( self.app.instance.from_dict(tests[0]["instance"]), @@ -194,7 +194,7 @@ def test_incomplete_solution(self): self.assertEqual(len(checks["pairs"]), 1) def test_report(self): - tests = self.app.test_cases + tests = self.app.get_unittest_cases() my_experim = self.load_experiment_from_dataset(tests[0]) my_experim.solve(dict()) things_to_look = dict( @@ -222,7 +222,7 @@ def setUp(self): def test_complete_report(self, connectCornflow, config=None): config = config or self.config config = dict(**config, report=dict(name="report")) - tests = self.app.test_cases + tests = self.app.get_unittest_cases() for test_case in tests: instance_data = test_case.get("instance") solution_data = test_case.get("solution", None) @@ -257,7 +257,7 @@ def test_solve_cpsat(self): return self.test_try_solving_testcase(dict(solver="cpsat", **self.config)) def test_report(self): - tests = self.app.test_cases + tests = self.app.get_unittest_cases() my_experim = self.app.solvers["cpsat"](self.app.instance(tests[0]["instance"])) my_experim.solve(dict()) @@ -273,14 +273,14 @@ def test_report(self): self.generate_check_report(my_experim, things_to_look) def test_report_error(self): - tests = self.app.test_cases + tests = self.app.get_unittest_cases() my_experim = self.app.solvers["cpsat"](self.app.instance(tests[0]["instance"])) my_experim.solve(dict()) my_fun = lambda: my_experim.generate_report(report_name="wrong_name") self.assertRaises(FileNotFoundError, my_fun) def test_export(self): - tests = self.app.test_cases + tests = self.app.get_unittest_cases() my_file_path = "export.json" self.app.instance(tests[0]["instance"]).to_json(my_file_path) self.assertTrue(os.path.exists(my_file_path)) @@ -293,7 +293,7 @@ def test_export(self): def test_complete_report(self, connectCornflow, config=None): config = config or self.config config = dict(**config, report=dict(name="report")) - tests = self.app.test_cases + tests = self.app.get_unittest_cases() for test_case in tests: instance_data = test_case.get("instance") solution_data = test_case.get("solution", None) @@ -317,7 +317,7 @@ def test_complete_report(self, connectCornflow, config=None): def test_complete_report_wrong_data(self, connectCornflow, config=None): config = config or self.config config = dict(**config, report=dict(name="report")) - tests = self.app.test_cases + tests = 
self.app.get_unittest_cases() for test_case in tests: instance_data = test_case.get("instance") solution_data = None @@ -343,7 +343,7 @@ def test_complete_report_wrong_data(self, connectCornflow, config=None): def test_complete_report_no_quarto(self, connectCornflow, render, config=None): config = config or self.config config = dict(**config, report=dict(name="report")) - tests = self.app.test_cases + tests = self.app.get_unittest_cases() render.side_effect = ModuleNotFoundError() render.return_value = dict(a=1) for test_case in tests: @@ -513,7 +513,7 @@ def setUp(self): self.app = Sudoku() def test_report(self): - tests = self.app.test_cases + tests = self.app.get_unittest_cases() my_experim = self.app.solvers["cpsat"](self.app.instance(tests[0]["instance"])) my_experim.solve(dict()) @@ -545,7 +545,9 @@ def test_two_solutions(self): ) def test_easy_norvig(self): - dataset = [t for t in self.app.test_cases if t["name"].startswith("hardest")][0] + dataset = [ + t for t in self.app.get_unittest_cases() if t["name"].startswith("hardest") + ][0] # we try solving in the standard way: self.app.solve(dataset["instance"], dict(solver="norvig")) @@ -600,7 +602,9 @@ def test_report2(self): self.generate_check_report(my_experim, things_to_look) def test_report3(self): - dataset = [t for t in self.app.test_cases if t["name"].startswith("hardest")][0] + dataset = [ + t for t in self.app.get_unittest_cases() if t["name"].startswith("hardest") + ][0] # we try solving in the standard way: self.app.solve(dataset["instance"], dict(solver="norvig")) diff --git a/libs/client/cornflow_client/core/application.py b/libs/client/cornflow_client/core/application.py index a12c791fd..b6300abb3 100644 --- a/libs/client/cornflow_client/core/application.py +++ b/libs/client/cornflow_client/core/application.py @@ -1,11 +1,12 @@ """ """ + # Partial imports from abc import ABC, abstractmethod from timeit import default_timer as timer from typing import Type, Dict, List, Tuple, Union - +import warnings from jsonschema import Draft7Validator from pytups import SuperDict @@ -127,10 +128,27 @@ def test_cases( * **description** optional field with a description of the test case. * **instance**: the instance data. * **solution**: the solution data (optional) + * **unittest**: flag indicating whether the test case is a unittest """ # TODO: Phase out older list implementation raise NotImplementedError() + def get_unittest_cases(self): + # we raise old deprecation warnings + # and we filter to only get tests than run fast. 
+ my_cases = self.test_cases + if len(my_cases) == 0: + return [] + if isinstance(my_cases[0], tuple): + warnings.warn( + "tuple format is deprecated, use a dict(name, description, instance, solution)" + ) + if "instance" not in my_cases[0]: + warnings.warn( + "old-dict format is deprecated, use a dict(name, description, instance, solution)" + ) + return [t for t in my_cases if t.get("unittest", True)] + @property @abstractmethod def solvers(self) -> Dict[str, Type[ExperimentCore]]: From ca1a24d96655eb63f897378e56b5cc5e1795af25 Mon Sep 17 00:00:00 2001 From: pchtsp Date: Fri, 20 Sep 2024 09:50:47 +0200 Subject: [PATCH 84/84] minor fixes --- cornflow-dags/README.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/cornflow-dags/README.rst b/cornflow-dags/README.rst index 9c78d6928..28b7cf978 100644 --- a/cornflow-dags/README.rst +++ b/cornflow-dags/README.rst @@ -12,17 +12,18 @@ This project requires python 3.7 or above:: python -m venv venv venv/Scripts/activate pip install -r requirements.txt + pip install -U ../libs/client Optionally, to generate reports, it is required to install quarto: https://quarto.org/docs/download/. Testing ~~~~~~~~~~~~~~~~~~~~~ -To run all tests you may want to do the following: +To run all tests you may want to do the following:: python -m unittest tests.test_dags -To run the specific tests for one of the apps, just choose the name of the DAG (example: Tsp): +To run the specific tests for one of the apps, just choose the name of the DAG (example: Tsp):: python -m unittest tests.test_dags.Tsp
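The new `get_unittest_cases` helper keeps only the cases whose optional `unittest` flag is not `False` (it filters with `t.get("unittest", True)`). Below is a minimal, self-contained sketch of how an app could tag a slow benchmark case so that the unit tests skip it; the case names and instance data are invented for illustration::

    from typing import Dict, List

    def example_test_cases() -> List[Dict]:
        small = dict(arcs=[dict(n1=1, n2=2, w=10), dict(n1=2, n2=1, w=10)])
        huge = dict(arcs=[])  # placeholder for a large benchmark instance
        return [
            dict(name="small", description="fast case", instance=small),
            # unittest=False keeps this case out of get_unittest_cases(),
            # so it does not slow down the unit-test suite
            dict(name="huge", description="slow benchmark", instance=huge, unittest=False),
        ]

    # same filter as Application.get_unittest_cases
    kept = [t for t in example_test_cases() if t.get("unittest", True)]
    assert [t["name"] for t in kept] == ["small"]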