upload_service: add upload service

main
Anaz 2025-01-21 09:23:34 +04:00
parent bac9b4ac82
commit 00474e3ef2
1 changed file with 19 additions and 12 deletions

@@ -40,23 +40,30 @@ class UploadService:
# Generate unique file name
new_filename = f"{uuid.uuid4()}.{extension}"
file_type = EXTENSIONS_AUTHORIZED[extension]
storage_path = f"public/{FILE_PATH_S3[file_type]}{account_id}/{new_filename}"
folder_path = f"public/{FILE_PATH_S3[file_type]}{account_id}/"
full_path = f"{folder_path}{new_filename}"
# Initialize S3 client
try:
s3_client = boto3.client(
"s3",
endpoint_url=S3_CONFIG["endpoint_url"],
region_name=S3_CONFIG["region_name"],
aws_access_key_id=S3_CONFIG["aws_access_key_id"],
aws_secret_access_key=S3_CONFIG["aws_secret_access_key"],
)
s3_client = boto3.client(
"s3",
endpoint_url=S3_CONFIG["endpoint_url"],
region_name=S3_CONFIG["region_name"],
aws_access_key_id=S3_CONFIG["aws_access_key_id"],
aws_secret_access_key=S3_CONFIG["aws_secret_access_key"],
)
# Upload file
try:
# Check if the folder exists (by checking a dummy object in the folder)
result = s3_client.list_objects_v2(Bucket=S3_CONFIG["bucket"], Prefix=folder_path)
if result.get("KeyCount", 0) == 0:
# Create a dummy object to ensure the folder exists
s3_client.put_object(Bucket=S3_CONFIG["bucket"], Key=f"{folder_path}")
# Upload the file
s3_client.upload_fileobj(
file.file,
S3_CONFIG["bucket"],
storage_path,
full_path,
ExtraArgs={"ACL": "public-read"},
)
except (BotoCoreError, ClientError) as e:
@@ -64,7 +71,7 @@ class UploadService:
return {
"success": True,
"path_with_name": storage_path.replace("public", ""),
"path_with_name": full_path.replace("public", ""),
"filename": new_filename,
"original_filename": file.filename,
"filetype": extension,