from sqlalchemy import create_engine, MetaData, Table, insert, null, select, update, delete, text
from sqlalchemy.sql import and_, or_
from sqlalchemy import asc, desc
from core import app
import json
from datetime import timedelta, date, datetime, time
#engine = create_engine(app.config['DATABASE_URI'])
# engine = create_engine(app.config['DATABASE_URI'],future=True)
# engine_fk = create_engine(app.config['DATABASE_URI_FK'],future=True)
# engine_conf_live = create_engine(app.config['DATABASE_URI_CONF'],future=True)
engine = create_engine(app.config['DATABASE_URI'], pool_pre_ping=True, pool_recycle=3600, future=True)
engine_fk = create_engine(app.config['DATABASE_URI_FK'], pool_pre_ping=True, pool_recycle=3600, future=True)
engine_conf_live = create_engine(app.config['DATABASE_URI_CONF'], pool_pre_ping=True, pool_recycle=3600, future=True)
class SyncModel:
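    """Data-access helpers for syncing hall scan tables and abs_sessions state
    between the local database and the live conference database."""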
def __init__(self):
try:
self.meta = MetaData()
except Exception as e:
print(e)
def get_active_session_live(self,conf_id,hall_id):
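        """Return the currently active session (asession_id, updated_at) for a conference hall, or None."""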
with engine.connect() as conn:
stmt = text(f"select asession_id,updated_at from abs_sessions where conf_id = {conf_id} and hall_id = {hall_id} and is_active = 1;")
result = conn.execute(stmt).first()
result = dict(result._mapping) if result else None
return result
def get_last_update_session_live(self,conf_id,hall_id):
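        """Return the most recently updated session for a conference hall, or None if there is none."""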
with engine.connect() as conn:
stmt = text(f"select asession_id,updated_at,is_active from abs_sessions where conf_id = {conf_id} and hall_id = {hall_id} order by updated_at desc limit 1")
result = conn.execute(stmt).first()
result = dict(result._mapping) if result else None
return result
def update_active_session(self,active_session_id,is_active,conf_id,hall_id,session_updated_at_local):
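        """Deactivate all sessions for the hall, then mark active_session_id as active with the given local timestamp."""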
with engine.connect() as conn:
try:
stmt_bulk = text(f"update abs_sessions set is_active = Null where conf_id = {conf_id} and hall_id = {hall_id};")
                result_1 = conn.execute(stmt_bulk)
if active_session_id:
stmt_single = text(f"update abs_sessions set is_active = {is_active},updated_at='{session_updated_at_local}' where asession_id={active_session_id} and conf_id = {conf_id} and hall_id = {hall_id};")
                    result_2 = conn.execute(stmt_single)
conn.commit()
return "success"
except Exception as e:
print("Error: ",e)
return str(e)
# Used
def get_scan_all_data(self,scan_table_name):
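        """Return every row of the given scan table as a list of dicts, or None if the table is empty."""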
with engine.connect() as conn:
stmt = text(f"select * from {scan_table_name} ;")
# stmt = text("select * from "+scan_table_name+" where entry_at is not null and exist_at is null;")
result = conn.execute(stmt).all()
results = [dict(r._mapping) for r in result] if result else None
return results
# Used
def get_scan_data_from_last_sync_at_local(self,scan_table_name,last_sync_at_local):
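        """Return scan rows created or updated after last_sync_at_local, or None if nothing changed."""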
with engine.connect() as conn:
stmt = text(f"select * from {scan_table_name} where (new_sync_at > '{last_sync_at_local}' or updated_sync_at > '{last_sync_at_local}' );")
print(stmt)
# stmt = text("select * from "+scan_table_name+" where entry_at is not null and exist_at is null;")
result = conn.execute(stmt).all()
results = [dict(r._mapping) for r in result] if result else None
return results
# Used
def insert_update_table_data_to_live(self, conf_id, conf_key, hall_id, scan_data,scan_table_name,curr_dt,conf_schema):
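        """Insert not-yet-synced scan rows (those without new_sync_at) into the live conference schema."""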
try:
            with engine.connect() as conn:
                if not scan_data:
                    return "No data to insert"
                columns_to_insert = scan_data[0].keys()
                insert_data = []
                # Only rows that have never been synced (no new_sync_at yet) are pushed to the live table.
                for data in scan_data:
                    if data.get('new_sync_at') is None:
                        data['new_sync_at'] = curr_dt
                        data['scan_id'] = None  # let the live table assign its own id
                        insert_data.append(data)
                if insert_data:
                    placeholders = ', '.join([f':{col}' for col in columns_to_insert])
                    stmt_1 = f"INSERT INTO {conf_schema}.{scan_table_name} ({', '.join(columns_to_insert)}) VALUES ({placeholders})"
                    print("stmt_1", stmt_1)
                    result = conn.execute(text(stmt_1), insert_data)
                    conn.commit()
return "success"
except Exception as e:
print("Error : ",e)
return str(e)
def insert_table_data_to_live(self, conf_id, conf_key, hall_id, scan_data,scan_table_name,curr_dt):
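        """Insert all given scan rows into the live scan table, stamping both sync timestamps with curr_dt."""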
try:
with engine.connect() as conn:
                if not scan_data:
                    return "No data to insert"
                columns_to_insert = scan_data[0].keys()
for data in scan_data:
data['new_sync_at'] = curr_dt
data['updated_sync_at'] = curr_dt
# data['sync_scan_id'] = data['scan_id']
# dalegate_no = data['delegate_no']
data['scan_id'] = None
placeholders = ', '.join([':%s' % col for col in columns_to_insert])
stmt_1 = f"INSERT INTO {scan_table_name} ({', '.join(columns_to_insert)}) VALUES ({placeholders})"
result = conn.execute(text(stmt_1), scan_data)
conn.commit()
# stmt = text("select MAX(new_sync_at) as last_new_sync_at from "+scan_table_name+" where conf_id ="+str(conf_id)+" and new_sync_at is not null and updated_sync_at is null limit 1; ")
# result = conn.execute(stmt)
# results = result.one_or_none()
# return dict(results._mapping) if results else None
return "success"
except Exception as e:
return str(e)
def usp_generate_session_table_scan_logs_by_day(self,conf_id):
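        """Call the usp_conf_generate_session_table_scan_logs_by_day stored procedure and return its result rows, if any."""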
connection = engine.raw_connection()
cursor = connection.cursor()
cursor.callproc("usp_conf_generate_session_table_scan_logs_by_day",[conf_id])
        if cursor.description:
            columns = [column[0] for column in cursor.description]
            results = []
            for row in cursor.fetchall():
                results.append(dict(zip(columns, row)))
            cursor.close()
            connection.commit()
            connection.close()
            if results:
                return results
            else:
                return None
        else:
            cursor.close()
            connection.commit()
            connection.close()
            return None
def update_table_data_to_live(self, conf_id, conf_key, hall_id, update_scan_data,scan_table_name):
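        """Push exit-time updates to already-synced rows and return the latest updated_sync_at from the live table."""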
try:
# now = datetime.now()
# days = now.strftime('%d')
# scan_table_name = f"{conf_key}_{days}_{hall_id}_hall_scan"
with engine.connect() as conn:
if not update_scan_data:
return "No data to update"
                print(len(update_scan_data))
                for data in update_scan_data:
                    # print(data)
                    data['updated_sync_at'] = data['exist_at']  # stamp updated_sync_at with the exit time
                    delegate_no = data["delegate_no"]
                    # Build "col = 'value'" pairs, emitting unquoted NULL for empty values.
                    set_values = ', '.join([f"{key} = '{value or 'NULL'}'".replace("'NULL'", "NULL") for key, value in data.items() if key != 'scan_id'])
                    stmt_update = f"UPDATE {scan_table_name} SET {set_values} WHERE conf_id = {conf_id} AND new_sync_at IS NOT NULL AND delegate_no = {delegate_no};"
                    print("update query- ", stmt_update)
                    result = conn.execute(text(stmt_update))
                conn.commit()
stmt = text("select MAX(updated_sync_at) as last_update_sync_at from "+scan_table_name+" where updated_sync_at is not null and exist_at is not null ;")
result = conn.execute(stmt)
results = result.one_or_none()
return dict(results._mapping) if results else None
# return "Update successful"
except Exception as e:
return str(e)