Skip to content

Commit

Permalink
Merge pull request #136 from amosproj/mid-project-release
Browse files Browse the repository at this point in the history
Some bugs fixed
  • Loading branch information
bhanuPrakashMa authored Jan 24, 2024
2 parents 76947c3 + 3ffd990 commit 32ea504
Show file tree
Hide file tree
Showing 12 changed files with 265 additions and 120 deletions.
8 changes: 8 additions & 0 deletions src/backend/api/dp_run.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import json

from flask import request, jsonify, Blueprint

from database.models.dp_run import DatapipelineRun
Expand Down Expand Up @@ -73,6 +75,12 @@ def input_endpoint():

data = request.json

#TODO this is bad
data = data.replace("\'", "\"")

data = json.loads(data)


error_flag = False
if 'error' in data:
error_flag = True
Expand Down
69 changes: 37 additions & 32 deletions src/backend/api/file.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
from database.mongo_repo import fileDetailsDB
from services.auth_service import secure
from services.file_storage import (
download_file,
list_file,
get_file_upload_url,
delete_file,
Expand All @@ -15,7 +14,7 @@
file = Blueprint("file", __name__, template_folder="templates")


@file.route("/s3file", methods=['GET'])
@file.route("/s3file", methods=["GET"])
@secure
def get_all_s3_files():
# List objects in the bucket
Expand All @@ -30,45 +29,50 @@ def get_all_s3_files():
except Exception as e:
return jsonify({f"Error: {e}"})

@file.route("/file", methods=['GET'])

@file.route("/file", methods=["GET"])
@secure
def get_all_files():
data = fileDetailsDB.find()

allData = []
for d in data:
allData.append({
'uuid': d['uuid'],
'name': d['name'],
'mime_type': d['mime_type'],
'size': d['size'],
's3_uuid': d['s3_uuid'],
'content_type': d['content_type'],
'storage_class': d['storage_class'],
'last_modified': d['last_modified'],
'created_at': d['created_at']})

allData.append(
{
"uuid": d["uuid"],
"name": d["name"],
"mime_type": d["mime_type"],
"size": d["size"],
"s3_uuid": d["s3_uuid"],
"content_type": d["content_type"],
"storage_class": d["storage_class"],
"last_modified": d["last_modified"],
"created_at": d["created_at"],
"user": d["user"],
}
)

return jsonify(allData), 201

@file.route("/file/new", methods=["POST"])
@secure
def create_file():
    """Persist details for a file already uploaded to S3.

    Expects a JSON body with ``fileName``, ``s3_uuid`` and ``mime_type``.
    Returns 400 with an error message when any required key is missing
    or empty (implements the old ``todo add s3_uuid check``), instead of
    raising ``KeyError`` / returning ``None``.
    """
    data = request.json
    # Validate ALL required keys up front; the original only checked
    # "fileName" and crashed with KeyError on the other two.
    for required in ("fileName", "s3_uuid", "mime_type"):
        if not data or not data.get(required):
            return jsonify({"error": f"Missing {required} in request"}), 400

    file_name = data["fileName"]
    s3_uuid = data["s3_uuid"]
    mime_type = data["mime_type"]

    insert_file_details(file_name, s3_uuid, mime_type)

    # file_name is known non-empty here, so we can answer unconditionally
    # (the original fell off the end and returned None for falsy names).
    return jsonify({"message": "Saved successfully"})


@file.route("/file/<id>", methods=['GET'])
@file.route("/file/<id>", methods=["GET"])
@secure
def get_file(id):
try:
Expand All @@ -84,40 +88,41 @@ def get_file(id):
@secure
def delete_file(id):
    """Delete one file: the S3 object first, then its metadata record.

    Looks the record up by its ``uuid``; returns 404 when it does not
    exist (previously ``None['s3_uuid']`` raised a TypeError that was
    swallowed by the broad except). Other failures are reported as a
    JSON error string, matching the style of the sibling endpoints.
    """
    try:
        file_details = fileDetailsDB.find_one({"uuid": id})
        if file_details is None:
            return jsonify({"error": f"No file with uuid {id}"}), 404
        s3_uuid = file_details["s3_uuid"]

        # Remove the object from the S3 bucket.
        s3_delete_file(s3_uuid)

        # Remove the metadata document.
        fileDetailsDB.delete_one({"uuid": id})

        # Typo fixed: was "Sucessfull deleted".
        return jsonify("Successfully deleted")
    except Exception as e:
        return jsonify(f"Error: {e}")


@file.route("/file/upload", methods=["POST"])
@secure
def upload_file_with_url():
    """Return a presigned S3 upload URL for the requested file name.

    Expects JSON with ``fileName``; responds 400 when it is missing.
    The response body is whatever ``get_file_upload_url`` produces
    (presigned URL, file name and generated s3_uuid).
    """
    data = request.json
    if "fileName" not in data:
        return jsonify({"error": "Missing fileName in request"}), 400

    # Defect fixed: removed a leftover debug print(data["fileName"])
    # and the stale pre-diff copies of the checks above.
    return jsonify(get_file_upload_url(data["fileName"]))


@file.route("/file/<id>/download", methods=['GET'])
@file.route("/file/<id>/download", methods=["GET"])
@secure
def download_file(id):
try:
# Download the object from S3
file_details = fileDetailsDB.find_one({"uuid": id})
s3_uuid = file_details['s3_uuid']
s3_uuid = file_details["s3_uuid"]
file_name = file_details["name"]

download_url = s3_get_download_url(s3_uuid)
download_url = s3_get_download_url(s3_uuid, file_name)
return jsonify({"download_url": download_url})

# Send the file for download
Expand Down
32 changes: 22 additions & 10 deletions src/backend/database/models/file_details.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,17 @@


class FileDetails:
def __init__(self, name, mime_type, size, s3_uuid, content_type, storage_class, last_modified):
def __init__(
self,
name,
mime_type,
size,
s3_uuid,
content_type,
storage_class,
last_modified,
user_name,
):
self.uuid = str(uuid.uuid4())
self.name = name
self.mime_type = mime_type
Expand All @@ -13,16 +23,18 @@ def __init__(self, name, mime_type, size, s3_uuid, content_type, storage_class,
self.storage_class = storage_class
self.last_modified = last_modified
self.created_at = datetime.now()
self.user_name = user_name

def to_json(self):
    """Serialize this record into a JSON-compatible dict.

    Keys mirror what the /file endpoints return; note the attribute
    stored as ``user_name`` is exposed under the key ``"user"``.
    NOTE(review): ``created_at`` is a datetime -- presumably serialized
    downstream by Flask's JSON provider; confirm against callers.
    """
    # Defect fixed: the span contained a stale pre-diff copy of this
    # dict (single-quoted keys, no "user" entry) before the current one.
    return {
        "uuid": self.uuid,
        "name": self.name,
        "mime_type": self.mime_type,
        "size": self.size,
        "s3_uuid": self.s3_uuid,
        "content_type": self.content_type,
        "storage_class": self.storage_class,
        "last_modified": self.last_modified,
        "created_at": self.created_at,
        "user": self.user_name,
    }
29 changes: 21 additions & 8 deletions src/backend/services/file_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,24 @@
from database.models.file_details import FileDetails
import humanfriendly

from services.s3_storage import s3_generate_presigned_url, s3_get_head_object, s3_get_download_url, s3_list_objects, \
s3_delete_file
from services.s3_storage import (
s3_generate_presigned_url,
s3_get_head_object,
s3_get_download_url,
s3_list_objects,
s3_delete_file,
)


def generated_key_check(file_name):
    """Ensure *file_name* carries an acceptable random key prefix.

    While ``file_name_check`` flags the name, the part before the first
    underscore is replaced with a fresh 10-character alphanumeric key
    and the result is re-checked recursively; the final accepted name
    is returned.
    """
    if file_name_check(file_name):
        name_parts = file_name.split("_")
        key = "".join(
            random.choice(
                string.ascii_uppercase + string.ascii_lowercase + string.digits
            )
            for _ in range(10)
        )
        file_name = str(key) + "_" + name_parts[1]
        # Bug fix: the recursive re-check's result was previously
        # discarded, so a name rejected again was still returned.
        file_name = generated_key_check(file_name)
    return file_name
Expand All @@ -26,7 +36,7 @@ def get_file_upload_url(file_name):
try:
s3_uuid = str(uuid.uuid4())
url = s3_generate_presigned_url(s3_uuid)
response_data = {'presignedUrl': url, 'fileName': file_name, 's3_uuid': s3_uuid}
response_data = {"presignedUrl": url, "fileName": file_name, "s3_uuid": s3_uuid}

return response_data
except Exception as e:
Expand All @@ -38,7 +48,6 @@ def get_file_upload_url(file_name):
def get_file_details(file_name, s3_uuid, mime_type):
# Get details of a specific file
try:

response = s3_get_head_object(s3_uuid)

new_file_details = FileDetails(
Expand All @@ -47,9 +56,10 @@ def get_file_details(file_name, s3_uuid, mime_type):
mime_type=mime_type,
s3_uuid=s3_uuid,
last_modified=response["LastModified"],
size=response["ContentLength"],
size=humanfriendly.format_size(response["ContentLength"]),
content_type=response["ContentType"],
storage_class="dummy storage class"
storage_class="dummy storage class",
user_name="user1",
)
return new_file_details.to_json()

Expand Down Expand Up @@ -105,6 +115,9 @@ def delete_file(file_name):
except Exception as e:
return [{f"Error: {e}"}]


ALLOWED_EXTENSIONS = {"csv"}


def allowed_file(filename):
return "." in filename and filename.rsplit(".", 1)[1].lower() in ALLOWED_EXTENSIONS
return "." in filename and filename.rsplit(".", 1)[1].lower() in ALLOWED_EXTENSIONS
32 changes: 24 additions & 8 deletions src/backend/services/s3_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,43 +13,58 @@
REGION = os.getenv("REGION")
BUCKET_NAME = os.getenv("BUCKET_NAME")


def get_s3_client():
    """Build a boto3 S3 client from the AWS_*/REGION environment config."""
    # Defect fixed: a stale pre-diff boto3.client(...) assignment sat
    # above the current call; only one client is constructed now.
    return boto3.client(
        "s3",
        aws_access_key_id=AWS_ACCESS_KEY,
        aws_secret_access_key=AWS_SECRET_KEY,
        region_name=REGION,
    )


def s3_generate_presigned_url(file_name):
    """Return a presigned PUT URL (1 hour expiry) for *file_name* in the bucket."""
    # Defect fixed: a stale pre-diff generate_presigned_url(...) call was
    # duplicated above the current one; only one URL is generated now.
    s3 = get_s3_client()
    return s3.generate_presigned_url(
        "put_object", Params={"Bucket": BUCKET_NAME, "Key": file_name}, ExpiresIn=3600
    )


def s3_get_head_object(file_name):
    """Fetch S3 object metadata (HEAD request) for *file_name*.

    Returns the raw boto3 response dict.
    TODO error handling -- a missing key currently propagates the
    boto3 exception to the caller.
    """
    client = get_s3_client()
    return client.head_object(Bucket=BUCKET_NAME, Key=file_name)

def s3_get_download_url(file_name, file_name_original):
    """Return a presigned GET URL (1 hour expiry) for *file_name*.

    *file_name* is the S3 key (an s3_uuid); ``ResponseContentDisposition``
    makes browsers save the downloaded object under *file_name_original*.
    """
    # Defect fixed: a stale one-argument "def s3_get_download_url" header
    # from before the diff sat above this one with an empty body (a
    # syntax error); only the current two-argument definition is kept.
    s3 = get_s3_client()

    url = s3.generate_presigned_url(
        "get_object",
        Params={
            "Bucket": BUCKET_NAME,
            "Key": file_name,
            "ResponseContentDisposition": f"attachment; filename = {file_name_original}",
        },
        ExpiresIn=3600,
    )
    return url


def s3_list_objects():
    """List the objects in the configured bucket (raw boto3 response)."""
    return get_s3_client().list_objects(Bucket=BUCKET_NAME)


def s3_delete_file(file_name):
    """Delete the object stored under key *file_name* from the bucket."""
    client = get_s3_client()
    client.delete_object(Bucket=BUCKET_NAME, Key=file_name)



# Function to upload a file to AWS S3
def upload_to_s3(path, s3_key):
try:
Expand All @@ -61,12 +76,13 @@ def upload_to_s3(path, s3_key):
except NoCredentialsError:
return False


# Route to dynamically search for files based on a partial keyword
def search_files(key):
partial_keyword = request.args.get('partial_keyword', '')
partial_keyword = request.args.get("partial_keyword", "")

# Search for files in MongoDB based on the partial keyword
results = ""#s3filename.find({"filename": {"$regex": partial_keyword, "$options": "i"}}).limit(10)
results = "" # s3filename.find({"filename": {"$regex": partial_keyword, "$options": "i"}}).limit(10)

# Convert MongoDB Cursor to list of dictionaries
files = list(results)
Expand Down
Loading

0 comments on commit 32ea504

Please sign in to comment.