Merge pull request #115 from amosproj/feature/apache_airflow_input_output

Feature/apache airflow input output
Showing 24 changed files with 697 additions and 47 deletions.
@@ -0,0 +1,134 @@
from flask import request, jsonify, Blueprint

from database.mongo_repo import metadataDB
from database.models.metadata_details import MetadataDetails
from services.auth_service import secure
from services.store_s3metadata import (
    insert_all_s3files_metadata,
    insert_one_s3file_metadata,
    remove_s3metadata,
)


metadata = Blueprint("metadata", __name__, template_folder="templates")


@metadata.route("/datapipeline_metadata", methods=["GET"])
@secure
def get_all_datapipeline_metadatas():
    # Use a local name that does not shadow the `metadata` blueprint.
    documents = metadataDB.find()

    allData = []
    for data in documents:
        # Only documents carrying a data-pipeline result are of interest here.
        if "datapipelineId" not in data:
            continue
        allData.append(
            {
                "datapipelineId": data["datapipelineId"],
                "s3bucketfileId": data["s3bucketfileId"],
                "result": data["result"],
                "create_date": data["create_date"],
                "state": data["state"],
                "file_type": data["file_type"],
                "file_size": data["file_size"],
            }
        )
    # A plain read returns 200; 201 is reserved for resource creation.
    return jsonify(allData), 200


@metadata.route("/s3files_metadata", methods=["GET"])
@secure
def get_all_s3files_metadatas():
    documents = metadataDB.find()
    allData = []

    for data in documents:
        # Only documents taken from the S3 bucket listing carry a "Key".
        if "Key" not in data:
            continue
        allData.append(
            {
                "key": data["Key"],
                "last_modified": data["LastModified"],
                "size": data["Size"],
                "etag": data["ETag"],
                "storage_class": data["StorageClass"],
            }
        )
    return jsonify(allData), 200


@metadata.route("/metadata/datapipline_result", methods=["POST"])
@secure
def insert_file_metadata():
    data = request.json

    if "datapipelineId" not in data or "s3bucketfileId" not in data:
        return (
            jsonify({"error": "Missing datapipelineId or s3bucketfileId in request."}),
            400,
        )
    if "create_date" not in data or "state" not in data:
        return jsonify({"error": "Missing create_date or state in request."}), 400
    if "file_type" not in data or "file_size" not in data:
        return jsonify({"error": "Missing file_type or file_size in request."}), 400
    if "result" not in data:
        # This check previously returned no status code; 400 matches the others.
        return jsonify({"error": "Missing result in request."}), 400

    store_metadata = MetadataDetails(
        data["datapipelineId"],
        data["s3bucketfileId"],
        data["result"],
        data["create_date"],
        data["state"],
        data["file_type"],
        data["file_size"],
    )

    metadataDB.insert_one(store_metadata.to_json())
    print(store_metadata.to_json())  # debug output of the stored document

    return jsonify({"message": "Datapipeline metadata is stored successfully"}), 201


@metadata.route("/metadata/store_all_s3metadata", methods=["POST"])
@secure
def store_all_s3files_metadata():
    insert_all_s3files_metadata(metadataDB)
    return jsonify(
        {"message": "The metadata of all files in the S3 bucket was stored successfully!"}
    )


@metadata.route("/metadata/store_single_s3metadata", methods=["POST"])
@secure
def store_single_s3metadata():
    data = request.json
    response = insert_one_s3file_metadata(metadataDB, data["file_name"])
    if response is not None:
        return jsonify(
            {"message": "The metadata of the uploaded file was stored successfully!"}
        )
    return jsonify({"message": "There is no such file in the S3 bucket!"}), 404


@metadata.route("/metadata/delete_all_metadata", methods=["DELETE"])
@secure
def delete_all_metadata():
    metadataDB.delete_many({})

    return jsonify({"message": "All metadata was deleted successfully"})


@metadata.route("/metadata/delete_single_s3file_metadata", methods=["DELETE"])
@secure
def delete_single_s3file_metadata():
    data = request.json

    response = remove_s3metadata(metadataDB, data["file_name"])
    if response is None:
        return jsonify({"message": "Metadata for this file does not exist"}), 404
    return jsonify({"message": "Metadata of the file was deleted successfully"})
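
For quick manual testing, a minimal client sketch follows. It assumes the service runs locally on port 5000 and that the @secure decorator accepts a bearer token in the Authorization header; both the base URL and the auth scheme are assumptions, not part of this diff. The route paths (including the "datapipline_result" spelling) are taken verbatim from the code above.

import requests  # third-party HTTP client, assumed available

BASE_URL = "http://localhost:5000"  # assumed host and port
HEADERS = {"Authorization": "Bearer <token>"}  # assumed scheme for @secure

# Store one data-pipeline result (all seven fields are required by the route).
payload = {
    "datapipelineId": "dp-123",
    "s3bucketfileId": "file-456",
    "result": "ok",
    "create_date": "2023-06-01",
    "state": "finished",
    "file_type": "csv",
    "file_size": 1024,
}
resp = requests.post(
    f"{BASE_URL}/metadata/datapipline_result", json=payload, headers=HEADERS
)
print(resp.status_code, resp.json())

# Read back everything that was stored.
resp = requests.get(f"{BASE_URL}/datapipeline_metadata", headers=HEADERS)
print(resp.json())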
@@ -0,0 +1,29 @@
class MetadataDetails:
    """Container for one data-pipeline result document."""

    def __init__(
        self,
        datapipelineId,
        s3bucketfileId,
        result,
        create_date,
        state,
        file_type,
        file_size,
    ):
        self.datapipelineId = datapipelineId
        self.s3bucketfileId = s3bucketfileId
        self.result = result
        self.create_date = create_date
        # Coerce state to str once here, so to_json can use it directly.
        self.state = str(state)
        self.file_type = file_type
        self.file_size = file_size

    def to_json(self):
        return {
            "datapipelineId": self.datapipelineId,
            "s3bucketfileId": self.s3bucketfileId,
            "result": self.result,
            "create_date": self.create_date,
            "state": self.state,
            "file_type": self.file_type,
            "file_size": self.file_size,
        }
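
A short round-trip sketch of the model, with illustrative values only:

# Build a details object and convert it to the document stored in MongoDB.
details = MetadataDetails(
    "dp-123", "file-456", "ok", "2023-06-01", "finished", "csv", 1024
)
doc = details.to_json()
assert doc["state"] == "finished"  # state is always stored as a string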
@@ -0,0 +1,53 @@
import os

import boto3
from dotenv import load_dotenv

load_dotenv()

AWS_ACCESS_KEY = os.getenv("AWS_ACCESS_KEY")
AWS_SECRET_KEY = os.getenv("AWS_SECRET_KEY")
REGION = os.getenv("REGION")
BUCKET_NAME = os.getenv("BUCKET_NAME")

s3_client = boto3.client(
    "s3",
    aws_access_key_id=AWS_ACCESS_KEY,
    aws_secret_access_key=AWS_SECRET_KEY,
    region_name=REGION,
)


def insert_all_s3files_metadata(collection):
    # List the bucket and store the metadata of every object in one call.
    s3file_metadata = s3_client.list_objects_v2(Bucket=BUCKET_NAME)
    s3file_metadata_contents = s3file_metadata.get("Contents", [])
    if not s3file_metadata_contents:
        # insert_many raises on an empty list, so treat an empty bucket as a no-op.
        return None
    return collection.insert_many(s3file_metadata_contents)


def insert_one_s3file_metadata(collection, s3_key):
    # Store the metadata of a single object, or return None if the key
    # does not appear in the bucket listing.
    s3file_metadata = s3_client.list_objects_v2(Bucket=BUCKET_NAME)
    for entry in s3file_metadata.get("Contents", []):
        if entry["Key"] == s3_key:
            return collection.insert_one(entry)
    return None


def remove_s3metadata(collection, key):
    # Delete the stored metadata for one key, or return None if none exists.
    metadata = collection.find_one({"Key": key})
    if metadata is None:
        return None
    return collection.delete_one({"Key": key})
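
For completeness, a sketch of the environment variables this service reads and of one way the blueprint could be registered on the app. The variable names come from the code above; the placeholder values, the module path routes.metadata, and the app setup are assumptions for illustration.

# .env (placeholder values)
# AWS_ACCESS_KEY=AKIA...
# AWS_SECRET_KEY=...
# REGION=eu-central-1
# BUCKET_NAME=my-bucket

from flask import Flask

from routes.metadata import metadata  # assumed module path for the blueprint

app = Flask(__name__)
app.register_blueprint(metadata)

if __name__ == "__main__":
    app.run(debug=True)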