generated from amosproj/amos202Xss0Y-projname
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #122 from amosproj/feature/apache_airflow_input_ou…
…tput Feature/apache airflow input output
- Loading branch information
Showing
14 changed files
with
300 additions
and
53 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,94 @@ | ||
from flask import request, jsonify, Blueprint | ||
|
||
from database.models.dp_run import DatapipelineRun | ||
from database.mongo_repo import datapipelineRunDB | ||
from services.auth_service import secure | ||
from services.dp_run import run | ||
|
||
dp_run = Blueprint("dp_run", __name__, template_folder="templates") | ||
|
||
|
||
|
||
|
||
@dp_run.route("/dp_run", methods=["GET"])
@secure
def get_all_dp_runs():
    """Return every stored datapipeline run as a JSON list.

    Each entry carries executionId, datapipelineId, fileId, result and state.
    """
    # Renamed local (was `dp_run`): the original shadowed the Blueprint name.
    runs = datapipelineRunDB.find()

    all_data = [
        {
            "executionId": d["executionId"],
            "datapipelineId": d["datapipelineId"],
            "fileId": d["fileId"],
            "result": d["result"],
            "state": d["state"],
        }
        for d in runs
    ]
    # 200 OK is correct for a read-only GET; the original 201 (Created) was wrong.
    return jsonify(all_data), 200
|
||
|
||
@dp_run.route("/dp_run/new", methods=["POST"])
@secure
def create_dp_run():
    """Create and persist a new datapipeline run.

    Expects a JSON body with 'datapipelineId' and 'fileId'.
    Returns 400 when either key is missing, otherwise 201 with the stored object.
    """
    data = request.json

    if "datapipelineId" not in data or "fileId" not in data:
        # Message now names the key actually checked ("fileId"); the original
        # said "s3BucketFileId", which this endpoint never looks at.
        return (
            jsonify({"error": "Missing datapipelineId or fileId in request."}),
            400,
        )

    created_dp_run = DatapipelineRun(
        data["datapipelineId"],
        data["fileId"],
    )

    datapipelineRunDB.insert_one(created_dp_run.to_json())

    # 201 Created is appropriate: a new resource was stored.
    return jsonify({"message": "Datapipeline dp_run is stored successfully",
                    "object": created_dp_run.to_json()}), 201
|
||
@dp_run.route("/dp_run/<executionId>/run", methods=["GET"])
@secure
def run_by_id(executionId):
    """Trigger execution of the run identified by executionId.

    Delegates to services.dp_run.run and reports whether the start succeeded.
    """
    started = run(executionId)
    message = "Successfully started" if started else "Failed to start"
    return jsonify({"message": message})
|
||
|
||
@dp_run.route("/dp_run/<id>", methods=["DELETE"])
@secure
def delete_dp_run(id):
    """Delete the datapipeline run whose executionId equals the URL id.

    Returns 200 on success, 404 when no matching run exists.
    """
    result = datapipelineRunDB.delete_one({"executionId": id})

    if result.deleted_count > 0:
        # 200 OK for a successful delete (201 Created was misleading);
        # also fixes the "Sucessfully" typo in the response message.
        return jsonify({'message': 'Successfully deleted'}), 200
    else:
        # 404 is the correct status for a missing entity (was 400).
        return jsonify({'error': 'Entity not found'}), 404
|
||
@dp_run.route('/inputData', methods=['POST'])
# @public
def input_endpoint():
    """Receive a pipeline result callback and update the stored run.

    Expects JSON with 'executionId' and 'result'; the presence of an
    'error' key marks the run FAILED instead of storing the result.
    """
    data = request.json

    error_flag = 'error' in data
    if 'executionId' not in data or 'result' not in data:
        return jsonify({'error': 'Missing id or result in request'}), 400

    d = datapipelineRunDB.find_one({"executionId": data['executionId']})
    if not d:
        return jsonify({'error': 'Entity not found'}), 400

    if error_flag:
        datapipelineRunDB.update_one({"executionId": data['executionId']},
                                     {'$set': {'state': "FAILED"}})
    else:
        # TODO add to result not overwrite
        # NOTE(review): "SUCCESSFULL" disagrees with the model's documented
        # states (PENDING/QUEUED/FINISHED/FAILED) — confirm intended value.
        datapipelineRunDB.update_one({"executionId": data['executionId']},
                                     {'$set': {'state': "SUCCESSFULL",
                                               'result': data['result']}})

    # Re-fetch so the response reflects the post-update state/result;
    # the original returned the stale document fetched before update_one.
    d = datapipelineRunDB.find_one({"executionId": data['executionId']})
    return jsonify({'executionId': d['executionId'],
                    'result': d['result'],
                    'fileId': d['fileId'],
                    'datapipelineId': d['datapipelineId']}), 201
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
import uuid | ||
from datetime import datetime | ||
|
||
# Datapipeline State | ||
# PENDING | ||
# QUEUED | ||
# FINISHED | ||
# FAILED | ||
|
||
|
||
class DatapipelineRun:
    """One execution of a datapipeline against a single input file.

    State progresses through PENDING -> QUEUED -> FINISHED / FAILED
    (see the state list documented at the top of this module).
    """

    def __init__(
        self,
        datapipelineId,
        fileId,
    ):
        # Fresh random id so every run is individually addressable.
        self.executionId = str(uuid.uuid4())
        self.datapipelineId = datapipelineId
        self.fileId = fileId
        # Results are accumulated later by the pipeline callback.
        self.result = []
        # NOTE(review): naive local time — presumably fine for display,
        # but confirm whether UTC is expected downstream.
        self.create_date = datetime.now()
        self.state = "PENDING"

    def to_json(self):
        """Return a plain dict of all fields, suitable for MongoDB storage."""
        return {
            "executionId": self.executionId,
            "datapipelineId": self.datapipelineId,
            "fileId": self.fileId,
            "result": self.result,
            "create_date": self.create_date,
            "state": self.state,
        }
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,33 @@ | ||
from flask import jsonify | ||
|
||
from api.airflow_api import dagsExecuteById, airflow_post | ||
from database.mongo_repo import datapipelineRunDB | ||
from services.upload_to_s3 import download_file | ||
|
||
|
||
def run(executionId):
    """Start the Airflow DAG for a stored run.

    Looks the run up by executionId, triggers Airflow, and records the
    outcome: QUEUED on HTTP 200, FAILED otherwise. Returns True only
    when the run exists and Airflow accepted the trigger.
    """
    dp_run = datapipelineRunDB.find_one({"executionId": executionId})
    if not dp_run:
        return False

    response = run_airflow(executionId, dp_run['datapipelineId'], dp_run['fileId'])

    accepted = response.status_code == 200
    new_state = "QUEUED" if accepted else "FAILED"
    datapipelineRunDB.update_one({"executionId": executionId},
                                 {'$set': {'state': new_state}})
    return accepted
|
||
|
||
def run_airflow(executionId, datapipelineId, fileId):
    """POST a new dagRun for the given pipeline via the Airflow REST API.

    The run metadata (presigned download URL plus ids) is passed to the
    DAG through the dagRun 'conf' payload. Returns the raw HTTP response.
    """
    conf = {
        "download_url": download_file(fileId).get("download_url"),
        'executionId': executionId,
        'datapipelineId': datapipelineId,
        'fileId': fileId,
    }

    return airflow_post('dags/' + datapipelineId + '/dagRuns', {'conf': conf})
|
Oops, something went wrong.