from flask import (Flask, request, Blueprint, jsonify, redirect, url_for, flash,
                   render_template, session, json, Response, send_file, make_response)
from random import randint
from core.model.Aws import Aws
from datetime import timedelta, date, datetime
from .. import Cryptography, Auth, Helper, RouteGroup
from core.library.email import EMAIL
from os import listdir, path
from os.path import join, dirname, realpath
from werkzeug.utils import secure_filename
import os
import io
import urllib.request
import math
import random
import base64
from urllib.parse import unquote
import boto3
import botocore
from botocore.exceptions import ClientError
import logging
import requests
import flask
import zipfile
import pdfkit
import shutil
from wsgiref.util import FileWrapper
app = Blueprint('aws', __name__)
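
# The routes below build boto3 clients with the access key and secret repeated inline.
# A minimal sketch of a shared client factory (an assumption, not wired into the routes
# below) that reads the standard AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY environment
# variables instead of hard-coding credentials:
def get_s3_client():
    """Build an S3 client from environment credentials, falling back to boto3's default chain."""
    return boto3.client(
        "s3",
        aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID"),
        aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY"),
    )
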
@app.route('/test', methods=['GET', 'POST'])
def Test():
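    # Debug/scratch endpoint: paginates the delegate_attachment/ prefix of the
    # delegateapp bucket and returns the accumulated Contents and CommonPrefixes as JSON.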
bucket = "delegateapp"
folder = "delegate_attachment/"
maxitems = 2000
pagesize = 1000
page_no = 2
session = boto3.Session( aws_access_key_id='AKIAILLDAQ7NFUMVOYLQ', aws_secret_access_key='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')
s3 = session.client("s3")
paginator = s3.get_paginator("list_objects_v2")
result = paginator.paginate(Bucket=bucket, Prefix=folder, Delimiter='/', PaginationConfig={
'MaxItems': maxitems,
'PageSize': pagesize,
})
    list_data = []
    CommonPrefixes = []
    Contents = []
    for page in result:
        list_data.append(page)
        if 'CommonPrefixes' in page:
            CommonPrefixes = CommonPrefixes + page['CommonPrefixes']
        if 'Contents' in page:
            Contents = Contents + page['Contents']
    json_data = {'Contents': Contents, 'CommonPrefixes': CommonPrefixes}
    return json_data
# # print(len(list_data))
# # print(list_data)
# if len(list_data) >= page_no:
# # # # print(list_data[page_no-1])
# return list_data[page_no-1] if list_data else None
# else:
# return None
@app.route('/test1', methods=['GET', 'POST'])
def Test1():
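    # Debug/scratch endpoint: single list_objects_v2 call on the same prefix,
    # returning the raw Contents list.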
bucket = "delegateapp"
folder = "delegate_attachment/"
session = boto3.Session( aws_access_key_id='AKIAILLDAQ7NFUMVOYLQ', aws_secret_access_key='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')
s3 = session.client("s3")
result = s3.list_objects_v2(Bucket=bucket, Prefix=folder)
all_files=result['Contents']
return all_files
@app.route('/aws_upload', methods=['GET', 'POST'])
def AwsUpload():
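    # Renders the file-manager uploader page for a user with an active "Awssession";
    # otherwise redirects to the AWS login route.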
    if session.get("Awssession"):
        userData = session.get("Awssession")
        aws_user_id = userData['aws_user_id']
        name = userData['name']
        return render_template('aws_files/aws_uploader.html', aws_user_id=aws_user_id, name=name)
    else:
        flash("Invalid login", "errorMsg")
        return redirect(url_for('user.AWSLogin'))
@app.route('/get_list', methods = ["GET","POST"])
def GetList():
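    # Returns the rendered listing fragment for the requested bucket/prefix as JSON,
    # along with upload/delete logs keyed by file URL.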
bucket = request.values.get('bucket')
    folder = request.values.get('folder') or ''
prev_folder = prev_folder_list(folder)
try:
userData = Aws().get_user_logs()
aws_logs = {}
if userData:
for i in userData:
aws_logs[i["url"]]=i["aws_user_name"]
maxitems = 2000
pagesize = 1000
page_no = 1
result = Helper.s3_pagination_v2(bucket,folder,maxitems,pagesize)
commonPrefixes = result.get("CommonPrefixes",[]) if result else []
contents = result.get("Contents",[]) if result else []
html_data = render_template('aws_files/listing_file.html',commonPrefixes=commonPrefixes,contents=contents,bucketname=bucket,folder=folder,prev_folder=prev_folder,aws_logs=aws_logs)
data = {"html_data" : html_data ,"status" : 1 , "msg" : "success"}
return jsonify(data)
except Exception as e:
data = {"error_data" : str(e) ,"status" : 2 , "msg" : "error"}
return jsonify(data)
def prev_folder_list(folder):
    """Return the parent prefix of `folder` (e.g. 'a/b/' -> 'a/'), or '' at the root."""
    prev_folder = ''
    if folder:
        parts = [part for part in folder.split('/') if part]
        if parts:
            parts = parts[:-1]
            prev_folder = "/".join(parts)
            prev_folder = prev_folder + "/" if prev_folder else ""
    return prev_folder
@app.route('/post_new_upload', methods=["GET","POST"])
def PostNewUpload():
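    # Uploads the posted file(s) to the given bucket/prefix with a public-read ACL,
    # logs each upload, then returns a refreshed listing fragment as JSON.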
now = datetime.now()
current_dt = now.strftime("%Y-%m-%d %H:%M:%S")
bucket=request.values.get('bucket')
folder=request.values.get('folder')
aws_user_id = request.values.get('aws_user_id')
name = request.values.get('name')
files = request.files.getlist("file")
    for file in files:
        if file.filename:
            filename = secure_filename(file.filename)
            acl = "public-read"
            s3 = boto3.client("s3", aws_access_key_id='AKIAILLDAQ7NFUMVOYLQ', aws_secret_access_key='EEudWCsWeCIt/J9/z5m5htmUdGFJNzWFPvNQBIp/')
            if " " in filename:
                filename = filename.replace(" ", "")
            try:
                foldername = folder if folder else ''
                s3.upload_fileobj(file, bucket, foldername + filename, ExtraArgs={"ACL": acl, "ContentType": file.content_type})
                file_path = "https://" + bucket + ".s3.ap-southeast-1.amazonaws.com/" + foldername + filename
                data_1 = {
                    'url': file_path,
                    'aws_user_id': aws_user_id,
                    'aws_user_name': name,
                    'action': 'Uploaded',
                    'log_date': current_dt
                }
                Aws().insert_user_logs(data_1)
            except ClientError as e:
                logging.error(e)
                data = {'msg': 'Image upload failed.', 'status': 0, 'data': ''}
                return jsonify(data)
    # Rebuild the listing only after every posted file has been handled.
    userData = Aws().get_user_logs()
    aws_logs = {}
    if userData:
        for i in userData:
            aws_logs[i["url"]] = i["aws_user_name"]
    maxitems = 2000
    pagesize = 1000
    result = Helper.s3_pagination_v2(bucket, folder, maxitems, pagesize)
    commonPrefixes = result.get("CommonPrefixes", []) if result else []
    contents = result.get("Contents", []) if result else []
    html_data = render_template('aws_files/listing_file.html', commonPrefixes=commonPrefixes, contents=contents, bucketname=bucket, folder=folder, aws_logs=aws_logs)
    data = {"html_data": html_data, "status": 1, "msg": "Uploaded successfully"}
    return jsonify(data)
@app.route('/delete_file', methods=['GET', 'POST'])
def Deletefile():
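    # Deletes a single object from the bucket, logs the deletion, and returns a
    # refreshed listing fragment as JSON.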
now = datetime.now()
current_dt = now.strftime("%Y-%m-%d %H:%M:%S")
path= request.values.get('path')
bucket=request.values.get('bucket')
    folder = request.values.get('folder') or ''
name = request.values.get('name')
aws_user_id = request.values.get('aws_user_id')
s3= boto3.resource("s3",aws_access_key_id='AKIAILLDAQ7NFUMVOYLQ',aws_secret_access_key='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')
response = s3.Object(bucket,path).delete()
    file_path = "https://" + bucket + ".s3.ap-southeast-1.amazonaws.com/" + path
data_1 = {
'url' : file_path,
'aws_user_id' : aws_user_id,
'aws_user_name' : name,
'action' : 'Deleted',
'log_date' : current_dt
}
output = Aws().insert_user_logs(data_1)
try:
userData = Aws().get_user_logs()
aws_logs = {}
if userData:
for i in userData:
aws_logs[i["url"]]=i["aws_user_name"]
maxitems = 2000
pagesize = 1000
page_no = 1
result = Helper.s3_pagination_v2(bucket,folder,maxitems,pagesize)
commonPrefixes = result.get("CommonPrefixes",[]) if result else []
contents = result.get("Contents",[]) if result else []
html_data = render_template('aws_files/listing_file.html',commonPrefixes=commonPrefixes,contents=contents,bucketname=bucket,folder=folder,aws_logs=aws_logs)
data = {"html_data" : html_data ,"status" : 1 , "error_data" : "Deleted successfully"}
return jsonify(data)
except Exception as e:
data = {"error_data" : str(e) ,"no_data":no_data ,"status" : 2 , "msg" : "error"}
return jsonify(data)
@app.route('/delete_multi_file', methods=['GET', 'POST'])
def DeleteMultifile():
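    # Multi-select delete handler: deletes the object key passed in 'delet_path',
    # logs the deletion, and returns a refreshed listing fragment as JSON.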
now = datetime.now()
current_dt = now.strftime("%Y-%m-%d %H:%M:%S")
path = request.values.get('delet_path')
bucket = request.values.get('bucket')
    folder = request.values.get('folder') or ''
name = request.values.get('name')
aws_user_id = request.values.get('aws_user_id')
s3= boto3.resource("s3",aws_access_key_id='AKIAILLDAQ7NFUMVOYLQ',aws_secret_access_key='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')
response = s3.Object(bucket,path).delete()
    file_path = "https://" + bucket + ".s3.ap-southeast-1.amazonaws.com/" + path
data_1 = {
'url' : file_path,
'aws_user_id' : aws_user_id,
'aws_user_name' : name,
'action' : 'Deleted',
'log_date' : current_dt
}
output = Aws().insert_user_logs(data_1)
try:
userData = Aws().get_user_logs()
aws_logs = {}
if userData:
for i in userData:
aws_logs[i["url"]]=i["aws_user_name"]
maxitems = 2000
pagesize = 1000
page_no = 1
result = Helper.s3_pagination_v2(bucket,folder,maxitems,pagesize)
commonPrefixes = result.get("CommonPrefixes",[]) if result else []
contents = result.get("Contents",[]) if result else []
html_data = render_template('aws_files/listing_file.html',commonPrefixes=commonPrefixes,contents=contents,bucketname=bucket,folder=folder,aws_logs=aws_logs)
data = {"html_data" : html_data ,"status" : 1 , "error_data" : "Deleted successfully"}
return jsonify(data)
except Exception as e:
data = {"error_data" : str(e) ,"no_data":no_data ,"status" : 2 , "msg" : "error"}
return jsonify(data)
#---------------------
@app.route('/download_single_files', methods=["GET", "POST"])
def DownloadSingleFiles():
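    # Downloads one object (identified by its public URL in 'download_path') to a
    # temporary local path and streams it back to the browser as an attachment.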
downloaded_files = []
bucket_name = request.values.get('bucket')
folder_prefix = request.values.get('download_path')
filename = os.path.basename(folder_prefix)
    separator = "amazonaws.com/"
    subdirname = folder_prefix.split(separator, 1)[1]
    subdirname = subdirname.split(filename, 1)[0]
    file_name_1 = filename
    folder_prefix = subdirname
local_directory='core/static/images/downloads/'
    os.makedirs('core/static/images/downloads', exist_ok=True)
try:
s3 = boto3.client("s3",aws_access_key_id='AKIAILLDAQ7NFUMVOYLQ',aws_secret_access_key='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')
response = s3.list_objects_v2(Bucket=bucket_name, Prefix=folder_prefix)
        if 'Contents' in response:
            for obj in response['Contents']:
                object_key = obj['Key']
                file_name = object_key.split('/')[-1]  # last part of the object key is the file name
                if file_name_1 == file_name:
                    local_file_path = os.path.join(local_directory, file_name)
                    s3.download_file(bucket_name, object_key, local_file_path)
                    downloaded_files.append(local_file_path)
                    file_wrapper = FileWrapper(open(local_file_path, 'rb'))
                    response = make_response(file_wrapper)
                    response.headers['Content-Type'] = 'application/octet-stream'
                    response.headers['Content-Disposition'] = f'attachment; filename={file_name}'

                    # Best-effort cleanup of the temporary local copy once the response has been sent.
                    @flask.after_this_request
                    def delete_file(response):
                        try:
                            if os.path.exists(local_file_path):
                                os.remove(local_file_path)
                        except OSError:
                            pass
                        return response
                    return response
            return "No matching file found in the specified folder."
        else:
            return "No objects found in the specified folder."
except Exception as e:
flash("Invalid file type","errorMsg")
return redirect (url_for('aws.AwsUpload'))
@app.route('/download_multi_files_from_folder', methods = ["GET","POST"])
def DownloadMultiFilesFromFolder():
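    # Downloads each selected object (comma-separated URLs in 'bull_download_path')
    # to a local folder, zips that folder, and sends the archive to the browser.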
downloaded_files = []
now = datetime.now()
current_dt = now.strftime("%Y-%m-%d %H:%M:%S")
bucket_name = request.values.get('bucket')
folder_prefix = request.values.get('bull_download_path')
folder_prefix = folder_prefix.split(',')
    os.makedirs('core/static/images/downloads', exist_ok=True)
for i in folder_prefix:
filename = os.path.basename(i)
separator = "amazonaws.com/"
subdirname = ''+i.split(separator, 1)[1]
subdirname = subdirname.split(filename, 1)[0]
file_name_1 = filename
folder_prefix = subdirname
local_directory='core/static/images/downloads/'
s3 = boto3.client("s3",aws_access_key_id='AKIAILLDAQ7NFUMVOYLQ',aws_secret_access_key='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')
try:
response = s3.list_objects_v2(Bucket=bucket_name, Prefix=folder_prefix)
# print(response)
if 'Contents' in response:
for obj in response['Contents']:
object_key = obj['Key']
file_name = object_key.split('/')[-1]
if file_name_1 == file_name:
local_file_path = os.path.join(local_directory,file_name)
s3.download_file(bucket_name, object_key, local_file_path)
downloaded_files.append(local_file_path)
else:
print("No objects found in the specified folder.")
except Exception as e:
flash("Invalid file type","errorMsg")
return redirect (url_for('aws.AwsUpload'))
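    # At this point folder_prefix holds the key prefix of the last downloaded file;
    # strip the slashes and use it as the name of the zip archive.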
if folder_prefix:
folder_prefix_cleaned = folder_prefix.replace('/', '')
else:
folder_prefix_cleaned = "test"
shutil.make_archive("core/static/"+folder_prefix_cleaned, 'zip', 'core/static/images/downloads/')
try:
shutil.rmtree('core/static/images/downloads/')
except OSError as e:
print("Error: %s : %s" % ('core/static/images/downloads/', e.strerror))
return send_file('static/'+folder_prefix_cleaned+'.zip',as_attachment=True)
@app.route('/view_download_abs_from_aws', methods = ["GET","POST"])
def ViewDownloadAbsFromAws():
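    # Renders the form used to trigger DownloadAbsFromAws.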
return render_template('aws_files/abs_download.html')
@app.route('/download_abs_from_aws', methods = ["GET","POST"])
def DownloadAbsFromAws():
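    # Downloads the abstract files for the requested ids from S3, renames each file
    # after its abstract type, zips the batch, and sends the archive to the browser.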
downloaded_files = []
conf_id = request.values.get('conf_id')
bucket_name = request.values.get('bucket_list')
folder = request.values.get('folder')
    # Assumption: 'absid' arrives as a comma-separated list of abstract ids, following the
    # comma-separated convention used by the bulk download route above.
    absid = [a for a in (request.values.get('absid') or '').split(',') if a]
print(bucket_name)
print(conf_id)
print(folder)
print(absid)
    os.makedirs('core/static/images/downloads', exist_ok=True)
for i in absid:
folder_prefix = folder + i
local_directory='core/static/images/downloads/'
s3 = boto3.client("s3",aws_access_key_id='AKIAILLDAQ7NFUMVOYLQ',aws_secret_access_key='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')
try:
response = s3.list_objects_v2(Bucket=bucket_name, Prefix=folder_prefix)
# print(response)
if 'Contents' in response:
for obj in response['Contents']:
object_key = obj['Key']
result = object_key.split('/')
abs_id = result[3]
file = result[4]
if file:
split_tup = os.path.splitext(file)
file_name = split_tup[0]
file_extension = split_tup[1]
abs_type= Aws().get_common_abs_typ(abs_id,conf_id)
if file_extension:
abs_type=abs_type['abs_type'] + file_extension
else:
abs_type=abs_type['abs_type'] + ".pptx"
print(abs_type)
local_file_path = os.path.join(local_directory,abs_type)
s3.download_file(bucket_name, object_key, local_file_path)
downloaded_files.append(local_file_path)
else:
print("No objects found in the specified folder.")
except Exception as e:
print(f"Error downloading files: {e}")
return downloaded_files
shutil.make_archive("core/static/test", 'zip', 'core/static/images/downloads/')
try:
shutil.rmtree('core/static/images/downloads/')
except OSError as e:
print("Error: %s : %s" % ('core/static/images/downloads/', e.strerror))
return send_file('static/test.zip',as_attachment=True)
@app.route('/download_all_files_from_folder', methods = ["GET","POST"])
def DownloadAllFilesFromFolder():
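    # One-off bulk export: downloads the abstract files for a hardcoded list of ids
    # to a local Windows directory (D:\AIOS2024_Full_text).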
downloaded_files = []
conf_id=4
bucket_name='aios-app'
absid = [2053,2132,2140,2160,2170,2175,2178,2182,2190,2191,2213,2233,2243,2259,2260,2261,2302,2316,2329,2334,2339,2351,2352,2360,2363,2369,2377,2379,2382,2385,2386,2387,2389,2393,2405,2406,2411,2412,2423,2430,2434,2443,2445,2453,2465,2472,2479,2491,2493,2506,2507,2515,2527,2532,2534,2539,2543,2552,2553,2560,2569,2570,2573,2590,2594,2596,2597,2612,2617,2619,2625,2632,2634,2647,2656,2658,2674,2695,2702,2704,2706,2712,2721,2736,2739,2751,2778,2779,2784,2785,2824,2826,2838,2845,2846,2857,2862,2908,2909,2910,2912,2916,2941,2943,2949,2979,3000,3003,3006,3018,3031,3033,3034,3038,3040,3043,3060,3066,3068,3069,3083,3097,3100,3101,3110,3111,3123,3124,3126,3129,3136,3142,3147,3155,3183,3185,3204,3216,3225,3237,3249,3251,3254,3258,3261,3263,3272,3280,3289,3298,3307,3308,3312,3315,3322,3336,3337,3392,3397,3402,3426,3430,3436,3461,3468,3480,3499,3505,3518,3523,3525,3526,3535,3537,3553,3562,3592,3593,3609,3611,3614,3621,3629,3636,3642,3655,3671,3676,3688,3698,3699,3701,3703,3709,3729,3742,3743,3767,3772,3783,3788,3791,3796,3797,3798,3821,3824,3828,3830,3831,3835,3852,3871,3913,3927,3930,3937,3942,3944,3945,3946,3951,3968,3974,3994,3995,4011,4014,4026,4029,4035,4054,4055,4059,4068,4070,4089,4092,4098,4100,4109,4117,4119,4124,4125,4139,4150,4156,4158,4170,4176,4183,4188,4190,4219,4221,4229,4247,4252,4265,4285,4333,4359,4380,4387,4389,4409,4411,4425,4426,4430,4437,4445,4462,4465,4483,4485,4488,4497,4510,4519,4521,4537,4544,4555,4558,4565,4568,4586,4598,4605,4611,4622,4627,4633,4641,4648,4671,4675,4687,4697,4718,4728,4755,4786,4787,4803,4816,4834,4846,4863,4885,4908,4923,4924,5027,5029,5068,5082,5164,5166,5185,5186,5201,5216,5217,5223,5261,5276,5283,5286,5309,5313,5325,5332,5372,5378,5393,5402,5403,5405,5462,5466,5507,5530,5553,5558,5560,5579,5602,5611,5662,5675,5676,5730,5764,5789,5794,5804,5818,5824,5831,5865,5869,5900,5939,6030,6060,6099,6117,6127,6149,6164,6173,6196,6213,6217,6222,6240,6244,6268,6280,6285,6289,6309,6339,6346,6349,6364,6369,6371,6397,6400,6410,6468,6472,6489,6606,6616,6617,6636,6657,6661,6665,6670,6717,6760,6852,6884,6889,6899,6913,6924,6983,6999,7079,7103,7117,7149,7154,7160,7171,7262,7266,7277,7283,7325,7340,7397,7406,7431,7435,7439,7457,7466,7471,7488,7557,7559,7587,7595,7637,7649,7654,7670,7704,7717,7803,7810,7811,7832,7854,7862,7887,7945,7972,7988,8043,8045,8532]
for i in absid:
folder_prefix = 'abs_uploads/aioc24/FPFT/'+str(i)+''
local_directory='D:\\AIOS2024_Full_text'
s3 = boto3.client("s3",aws_access_key_id='AKIAILLDAQ7NFUMVOYLQ',aws_secret_access_key='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')
try:
response = s3.list_objects_v2(Bucket=bucket_name, Prefix=folder_prefix)
# print(response)
if 'Contents' in response:
for obj in response['Contents']:
object_key = obj['Key']
result = object_key.split('/')
abs_id = result[3]
print(abs_id)
file = result[4]
print(file)
if file:
split_tup = os.path.splitext(file)
file_name = split_tup[0]
file_extension = split_tup[1]
abs_type= Aws().get_common_abs_typ(abs_id,conf_id)
if file_extension:
abs_type=abs_type['abs_type'] + file_extension
else:
abs_type=abs_type['abs_type'] + ".pptx"
print(abs_type)
local_file_path = os.path.join(local_directory,abs_type)
s3.download_file(bucket_name, object_key, local_file_path)
downloaded_files.append(local_file_path)
else:
print("No objects found in the specified folder.")
except Exception as e:
print(f"Error downloading files: {e}")
return downloaded_files