def visit_path(self):  # Deprecated?
    """Walk ``self.path`` and mirror its tree into the remote folder.

    DEPRECATED: kept for reference only.

    For every directory visited, its files are uploaded and its
    sub-folders are pre-created remotely under the path that mirrors
    the local layout.
    """
    log.critical("RUNNING, DEPRECATED")
    remote_root = settings.get_config('remote', 'folder')
    for root, subfolders, files in os.walk(self.path):
        # Derive the remote folder from the walked path instead of
        # accumulating one name per iteration: the previous
        # level-counter approach produced wrong remote paths as soon
        # as the tree contained sibling directories below the root.
        rel = os.path.relpath(root, self.path)
        if rel == os.curdir:
            actual_remote_folder = remote_root
        else:
            actual_remote_folder = '%s/%s' % (
                remote_root, rel.replace(os.sep, '/'))
        # Upload every regular file found in this directory.
        for fil in files:
            file_path = os.path.join(root, fil)
            rem_desc = self.uploader.upload(actual_remote_folder,
                                            file_path)
        # Pre-create each sub-folder so later uploads into it succeed.
        for subfolder in subfolders:
            folder = os.path.join(actual_remote_folder, subfolder)
            rem_desc = self.uploader.mkdir(folder)
def visit_path(self):  # Deprecated?
    """Visit path, uploading files and creating remote folders (deprecated)."""
    log.critical("RUNNING, DEPRECATED")
    first_pass = True
    for root, subfolders, files in os.walk(self.path):
        # On the first directory use the configured remote root;
        # afterwards append the current directory's basename.
        if first_pass:
            actual_remote_folder = settings.get_config('remote', 'folder')
        else:
            actual_remote_folder = '%s/%s' % (actual_remote_folder,
                                              root.split('/')[-1])
        # Send every file in this directory to the remote side.
        for entry in files:
            rem_desc = self.uploader.upload(actual_remote_folder,
                                            os.path.join(root, entry))
        # Create each child folder remotely before descending.
        for child in subfolders:
            rem_desc = self.uploader.mkdir(
                os.path.join(actual_remote_folder, child))
        first_pass = False
def run(self, options=None):
    """Main entry point of this class; dispatches on the detected mode.

    Pre:
        - Previous execution of detect_mode() method.

    Args:
        options: Unused; kept for interface compatibility.

    Returns:
        None
    """
    if self.initial_backup_mode:
        # Nothing usable remotely yet: push the whole local tree.
        log.info("INITIAL BACKUP MODE")
        log.debug("0 - READY BACKUP")
        self.prepare_to_init_backup()
        log.debug("2 - GENERATE ACTUAL FS")
        self.actual_filesystem.generate()
        log.debug("5.5 - UPLOAD ALL LOCAL FS")
        self.upload_all()
        log.debug("6 - UPDATE REMOTE FS")
        self.upload_actual_fs_struct()
    elif self.remote_home_mode:
        # Local folder is empty: materialize the remote copy locally.
        log.info("REMOTE_HOME MODE")
        log.debug("1 - LOAD REMOTE FS")
        self.get_remote_fs_struct()
        log.debug("2 - SYNC REMOTE HOME")
        self.sync_remote_home()
    elif self.resync_mode:
        # Reprocess: diff remote snapshot against current local state.
        log.info("RESYNC")
        log.debug("1 - LOAD REMOTE FS")
        self.get_remote_fs_struct()
        log.debug("2 - GENERATE ACTUAL FS")
        self.actual_filesystem.generate()
        log.debug("3,4 - CALCULATE CHANGES")
        changes = filesystem.compare_fs(actual_fs=self.actual_filesystem,
                                        old_fs=self.remote_filesystem)
        log.debug("5 - APPLY DIFERENCES (DELETE/DOWNLOAD AND UPLOAD)")
        self.process_changes_in_remote(changes)
        log.debug("6 - UPDATE REMOTE FS")
        self.upload_actual_fs_struct()
    else:
        # Typo fix: "existing" -> "exiting".
        log.critical("UNKNOWN MODE, exiting...")
def __init__(self):
    """Create the Mega client and log in with the configured credentials.

    Exits the process with status 1 when the login cannot be completed,
    since nothing else can work without a session.
    """
    self.mega = mega_library.Mega(
        {'verbose': settings.get_config('global', 'mega_verbose')})
    try:
        self.mega.login(
            email=settings.get_config('local', 'login_mail'),
            password=settings.get_config('local', 'login_password'))
    except Exception as why:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed, and the actual failure reason is logged.
        log.critical("Impossible connect to Mega server. Reason %s" % why)
        print("Impossible connect to Mega server")
        # sys.exit() with no argument exits with status 0 — wrong for a
        # failure path; report a non-zero exit status instead.
        sys.exit(1)
def run(self, options=None):
    """Execute the backup according to the mode chosen by detect_mode().

    Pre: detect_mode() has already run and set exactly one mode flag.
    Returns None.
    """
    if self.initial_backup_mode:
        log.info("INITIAL BACKUP MODE")
        log.debug("0 - READY BACKUP")
        self.prepare_to_init_backup()
        log.debug("2 - GENERATE ACTUAL FS")
        self.actual_filesystem.generate()
        log.debug("5.5 - UPLOAD ALL LOCAL FS")
        self.upload_all()
        log.debug("6 - UPDATE REMOTE FS")
        self.upload_actual_fs_struct()
        return
    if self.remote_home_mode:
        log.info("REMOTE_HOME MODE")
        log.debug("1 - LOAD REMOTE FS")
        self.get_remote_fs_struct()
        log.debug("2 - SYNC REMOTE HOME")
        self.sync_remote_home()
        return
    if self.resync_mode:
        # Reprocess: compare the remote snapshot with the local tree.
        log.info("RESYNC")
        log.debug("1 - LOAD REMOTE FS")
        self.get_remote_fs_struct()
        log.debug("2 - GENERATE ACTUAL FS")
        self.actual_filesystem.generate()
        log.debug("3,4 - CALCULATE CHANGES")
        diff = filesystem.compare_fs(actual_fs=self.actual_filesystem,
                                     old_fs=self.remote_filesystem)
        log.debug("5 - APPLY DIFERENCES (DELETE/DOWNLOAD AND UPLOAD)")
        self.process_changes_in_remote(diff)
        log.debug("6 - UPDATE REMOTE FS")
        self.upload_actual_fs_struct()
        return
    log.critical("UNKNOWN MODE, existing...")
def create_file(path, name, content):
    """Write ``content`` to ``path``/``name`` in binary mode.

    The directory is created first when missing; any write failure is
    logged as critical rather than propagated (best-effort behaviour,
    matching the original contract).
    """
    try:
        os.makedirs(path)  # First, the directory
    except OSError:
        # Narrowed from a bare `except:`: only filesystem errors
        # ("already exists" being the expected one) belong here.
        log.debug("Already exists, do nothing with dir %s" % path)
    try:
        # `with` guarantees the descriptor is closed even when write()
        # raises — the original leaked the handle on error.
        with open(os.path.join(path, name), 'wb') as desc:
            desc.write(content)
    except Exception as why:
        log.critical("Error saving file %s. Reason %s" % (name, why))
def detect_mode(self):
    """Decide the backup mode from the remote repository and local folder.

    Modes:
      - Initial backup: nothing usable exists in Mega yet.
      - Resync: Mega has content and the local home does too.
      - Remote-home: Mega has content and the local folder is empty
        or does not exist.

    Sets exactly one of the ``*_mode`` flags on ``self``.
    """
    remote = self.uploader.find_folder(
        settings.get_config('remote', 'folder'))
    summary = self.uploader.get_file(
        filename=settings.get_config('remote', 'summary_file'),
        path=settings.get_config('remote', 'folder'))
    empty_dir = filesystem.os_empty_dir(self.backup_path)
    # The (xyz) tags are a truth table where each bit is the NEGATION
    # of (remote, summary, empty_dir) respectively.
    if remote and summary and empty_dir:  # (000)
        log.debug("REMOTE HOME 1")
        self.remote_home_mode = True
    elif remote and summary and not empty_dir:  # (001)
        log.debug("RESYNC 1")
        self.resync_mode = True
    elif remote and not summary and empty_dir:  # (010)
        log.debug("UNKNOWN MODE 1")
        self.unknown_mode = True
    elif remote and not summary and not empty_dir:  # (011)
        log.debug("INITIAL BACKUP 1")
        self.initial_backup_mode = True
    elif not remote and summary and empty_dir:  # (100) Impossible
        log.debug("UNKNOWN MODE 2")
        self.unknown_mode = True
    elif not remote and summary and not empty_dir:  # (101) Impossible
        log.debug("UNKNOWN MODE 3")
        self.unknown_mode = True
    elif not remote and not summary and empty_dir:  # (110)
        log.critical("Local directory doesn't exist and remote neither")
        # Typo fix: "existing" -> "exiting".
        print("Local directory doesn't exist & remote neither, exiting...")
        log.debug("UNKNOWN MODE 4")
        self.unknown_mode = True
    elif not remote and not summary and not empty_dir:  # (111)
        log.debug("INITIAL BACKUP 2")
        self.initial_backup_mode = True
def detect_mode(self):
    """Choose the backup mode by inspecting Mega and the local folder.

    Initial backup when nothing exists in Mega; resync when both Mega
    and the local home have content; remote-home when Mega has content
    but the local folder is empty or missing.  Sets one mode flag.
    """
    remote = self.uploader.find_folder(
        settings.get_config('remote', 'folder'))
    summary = self.uploader.get_file(
        filename=settings.get_config('remote', 'summary_file'),
        path=settings.get_config('remote', 'folder'))
    empty_dir = filesystem.os_empty_dir(self.backup_path)

    # Truth table over (remote, summary, empty_dir): each state maps to
    # a debug message and the mode flag to raise.
    outcomes = {
        (True, True, True): ("REMOTE HOME 1", 'remote_home_mode'),
        (True, True, False): ("RESYNC 1", 'resync_mode'),
        (True, False, True): ("UNKNOWN MODE 1", 'unknown_mode'),
        (True, False, False): ("INITIAL BACKUP 1", 'initial_backup_mode'),
        (False, True, True): ("UNKNOWN MODE 2", 'unknown_mode'),
        (False, True, False): ("UNKNOWN MODE 3", 'unknown_mode'),
        (False, False, True): ("UNKNOWN MODE 4", 'unknown_mode'),
        (False, False, False): ("INITIAL BACKUP 2", 'initial_backup_mode'),
    }
    key = (bool(remote), bool(summary), bool(empty_dir))
    if key == (False, False, True):
        # Neither side has anything to work with: warn before flagging.
        log.critical("Local directory doesn't exist and remote neither")
        print("Local directory doesn't exist & remote neither, existing...")
    message, flag = outcomes[key]
    log.debug(message)
    setattr(self, flag, True)
async def _login(client, token=None):
    """Attempt to start the Discord client session.

    When ``token`` is falsy, the token-recovery path runs (the actual
    read is commented out — see NOTE below); otherwise the client is
    started directly with the given token.
    """
    if not token:
        try:
            log.info(f'{trace.cyan}> Attempting Login.')
            log.info(
                f'{trace.cyan}> Running on {trace.white}Discord{trace.green.s}Py '
                f'{trace.cyan}v{trace.cyan.s}{discord.__version__}{trace.cyan}.'
            )
            # Check for newer versions of the runtime dependencies.
            version.Discord.latest()
            version.YouTubeDL.latest()
            # token = json.json.reader('token')
            # NOTE(review): with the reader above commented out, `token`
            # is still falsy here, so the comparison below never matches
            # the enum sentinels and client.start() receives the original
            # falsy token — confirm this is intended.
            if token == enums.ReturnType.fail or token == enums.ReturnType.none:
                raise discord.errors.LoginFailure('No token')
            else:
                # `crypt` presumably decrypts the stored token — verify.
                await client.start(crypt(token))
                # client.run(crypt(token))
                return
        except discord.errors.LoginFailure as e:
            # Bad token: drop the stored credentials file (best effort)
            # and report, so the next run starts clean.
            if json.external.exists(json.default):
                try:
                    os.remove(json.default)
                except OSError:
                    pass
            log.critical(
                f'{type(e)} has occurred. Please check your login token')
            log.critical('SESSION HAS BEEN TERMINATED')
            log.critical(f'{e}')
        except Exception as err:
            # This should never occur.
            log.error(f'> {short_traceback()}')
            log.error(f'> {traceback.format_exc()}')
    else:
        # A token was supplied by the caller: use it as-is.
        await client.start(token)
        # client.run(token)
        return
import asyncio
import session
import sys

if __name__ == '__main__':
    # Startup sequence: database -> API -> cogs -> uptime marker.
    # NOTE(review): `log`, `trace`, `Time`, `data`, `append_cog`,
    # `short_traceback`, `traceback` and `datetime` are not imported in
    # this chunk — presumably provided by an import not visible here;
    # confirm against the full file.
    log.info(f'>{trace.cyan} Starting at {Time.readable.at()}.')
    # Initialize database
    log.info(f'{trace.cyan}> Initializing {trace.black.s}dataset{trace.cyan} Database.')
    try:
        data()
        log.info(f'{trace.cyan}> Initialized {trace.black.s}dataset{trace.cyan}.')
    except Exception as err:
        # Database startup failure is logged but does not abort startup.
        log.warning(f'> Failed to load {trace.black.s}dataset{trace.warn}. Please restart!')
        log.error(f'> {short_traceback()}')
        log.critical(f'> {traceback.format_exc()}')
    # Start API
    import api
    api.Main()
    # Initialize extensions
    # Append cCogs
    # append_cog('session.py')  # Load session
    append_cog('debug.py')  # Load debug things
    append_cog('main.py')  # Load essentials
    # Login
    # Record process start time so uptime can be reported later.
    from core.bot import time
    time.uptime = datetime.datetime.utcnow()
def compare_fs(actual_fs, old_fs):
    """Diff two filesystem snapshots and classify every entry.

    Mutates the ``status`` attribute of entries in BOTH snapshots as a
    side effect, then returns a dict of lists keyed by:
    'removed_files', 'removed_folders', 'new_files', 'new_folders',
    'to_upload', 'to_download'.

    NOTE(review): the loop variable `file` shadows the builtin of the
    same name — harmless here but worth renaming in a later pass.
    """
    to_ret = dict()
    to_ret['removed_files'] = list()
    to_ret['removed_folders'] = list()
    to_ret['new_files'] = list()
    to_ret['new_folders'] = list()
    to_ret['to_upload'] = list()
    to_ret['to_download'] = list()
    # Pass 1: walk the OLD snapshot, matching each entry against the
    # actual (current) snapshot.
    for file in old_fs.files:
        if file.type == FOLDER:
            # Folders are matched by path+name only.
            res = actual_fs.find_by_path_name(path=file.relative_path,
                                              name=file.name,
                                              filetype=FOLDER)
            if res:
                for folder in res:
                    if file == folder:
                        file.status = THE_SAME
                        folder.status = THE_SAME
            else:
                # No folder at that path anymore: it was removed.
                file.status = REMOVED
                to_ret['removed_folders'].append(file)
        elif file.type == FILE:
            # Files are matched first by content hash, then by path.
            res_hash = actual_fs.find_by_hash(file.hash)
            found = False
            # Same hash AND same path means the file is unchanged.
            for file2 in res_hash:
                if file2.relative_path == file.relative_path:
                    file.status = THE_SAME
                    file2.status = THE_SAME
                    found = True
                    # break
            if not found:
                # Hash differs (or moved): look the path up directly to
                # decide between "changed" and "deleted".
                res_path = actual_fs.find_by_path_name(
                    path=file.relative_path, name=file.name,
                    filetype=FILE)
                if res_path:
                    if len(res_path) > 1:
                        log.critical(
                            "More than one file with the same path/name")
                    # File changed: direction depends on which side is
                    # newer (`>`/`<` presumably compare timestamps —
                    # confirm in the entry class).
                    found = False
                    for file2 in res_path:
                        if file.name == file2.name:
                            if file > file2:
                                # Remote copy is newer: fetch it.
                                file.status = NEWEST
                                file2.status = OLDEST
                                to_ret['to_download'].append(file)
                            elif file < file2:
                                # Local copy is newer: push it.
                                file.status = OLDEST
                                file2.status = NEWEST
                                to_ret['to_upload'].append(file2)
                            # NOTE(review): when neither comparison is
                            # true (equal rank but different hash) the
                            # entry falls through as REMOVED below —
                            # verify that edge case is intended.
                            found = True
                            # break
                if not found:
                    # No counterpart found at all: the file was removed.
                    file.status = REMOVED
                    to_ret['removed_files'].append(file)
    # Pass 2: anything in the actual snapshot not touched above is new.
    for file in actual_fs.files:
        if not hasattr(file, 'status'):
            file.status = NEW
            if file.type == FOLDER:
                to_ret['new_folders'].append(file)
            elif file.type == FILE:
                to_ret['new_files'].append(file)
    return to_ret