Example #1
def get_videofile():
    """Downlaod zipped video, extract it to the download folder
    return -- path to video file"""

    if os.path.exists(config.downloaddir +
                      os.path.basename(config.videourl).replace('exe', 'wmv')):
        log.info("Media file already downloaded and extracted")
        return

    log.info("Downloading media file")
    log.debug(config.videourl)
    videoexe, _ = urllib.urlretrieve(config.videourl)

    log.info("Extracting video from ZIP (sic!)")
    log.debug(videoexe)
    archfile = zipfile.ZipFile(videoexe)
    for f in archfile.namelist():
        mediafile = open(f, "wb")
        mediafile.write(archfile.read(f))
        mediafile.close()
        path = config.downloaddir + mediafile.name
        os.rename(mediafile.name, path)
    archfile.close()

    return path
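
A minimal sketch of the configuration this example assumes; the attribute names downloaddir and videourl come from the code above, while the values shown here are hypothetical:

# config.py -- illustrative values only (hypothetical)
downloaddir = "/tmp/downloads/"   # keep the trailing slash: the code above concatenates strings, not os.path.join
videourl = "http://example.com/media/clip.exe"   # a ZIP archive served under an .exe name, as the (sic!) comment hints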
Example #2
def os_mkdir(path):
    log.debug("MKDIR: %s" % path)
    try:
        os.makedirs(path)
    except OSError:
        log.debug("Directory already exists, do nothing")
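
On Python 3.2+ the same effect is available without a try/except; a minimal equivalent, for comparison:

import os

def os_mkdir(path):
    # exist_ok=True suppresses the error when the directory is already there,
    # while real failures (e.g. permissions) still raise.
    os.makedirs(path, exist_ok=True)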
Example #3
 async def on_member_update(self, before, member):
     if not member.bot:
         global last_live
         for x in member.activities:
             if x.type == discord.ActivityType.streaming:
                 streaming = False
                 for z in before.activities:
                     if z.type == discord.ActivityType.streaming:
                         streaming = True
                         break
                 if not streaming:  # If you just went live, this runs
                     if member.id != last_live:
                         last_live = member.id
                         _id = data.base['ctv_users'].find_one(
                             discorduid=member.id)
                         try:
                             _id = _id['userid']
                         except Exception:
                             pass
                         if not _id:
                             _id = await name_to_id(x.twitch_name)
                             if _id is not False:
                                 data.base['ctv_users'].upsert(
                                     dict(userid=_id, discorduid=member.id),
                                     ['discorduid'])
                         if test:
                             log.debug(
                                 f'{trace.alert}CTV | {member.name}: {x.twitch_name} / {_id}, {x.url}'
                             )
                         break
Example #4
    def generate(self):
        #Using relative path
        for root, subfolders, files in os.walk(self.path):
            #Calculate level
            level = self.calculate_level(root)
            # For each file
            for file_name in files:
                file_path = os.path.join(root, file_name) 
                file_obj = FileObject(
                        relative_path=self._extract_relative_path(root), 
                        path=file_path, 
                        level=level)
                self.files.append(file_obj)
            
            #For each subfolder
            for subfolder in subfolders:
                file_path = os.path.join(root,subfolder)
                #print "SUBFOLDER %s" % subfolder
                file_obj = FileObject(
                        relative_path=self._extract_relative_path(root), 
                        path=file_path, 
                        level=level)
                self.files.append(file_obj)

        log.debug("Tree generated")
Example #5
def _get_vba_parser(data):
    """Get an olevba VBA_Parser object for reading an Office file. This
    handles regular Office files and HTA files with VBScript script
    elements.

    @param data (str) The file contents for which to generate a
    VBA_Parser.

    @return (VBA_Parser object) On success, the olevba VBA_Parser
    object for the given file contents. On error, None.

    """
    
    # First just try the most common case where olevba can directly get the VBA.
    vba = None
    try:
        vba = VBA_Parser('', data, relaxed=True)
    except Exception as e:

        if (log.getEffectiveLevel() == logging.DEBUG):
            log.debug("Creating VBA_PArser() Failed. Trying as HTA. " + safe_str_convert(e))
        
        # If that did not work see if we can pull HTA wrapped VB from the data.
        extracted_data = get_vb_contents_from_hta(data)

        # If this throws an exception it will get passed up.
        vba = VBA_Parser('', extracted_data, relaxed=True)

    # Return the vba parser.
    return vba
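
A minimal usage sketch for the helper above, assuming a hypothetical sample.docm on disk; detect_vba_macros() and extract_macros() are standard olevba VBA_Parser methods (they also appear in Example #27):

# Hypothetical caller: read the raw bytes and hand them to _get_vba_parser().
with open("sample.docm", "rb") as f:   # sample.docm is a made-up filename
    data = f.read()

vba = _get_vba_parser(data)
if vba is not None and vba.detect_vba_macros():
    for subfilename, stream_path, vba_filename, vba_code in vba.extract_macros():
        log.debug("Found macro module: %s" % vba_filename)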
Example #6
 async def on_connect(self):
     if not self.bot.is_ready():
         log.info(f'{trace.cyan}> Logging in at {time.readable.at()}.')
     else:
         log.debug(
             f'{trace.cyan}> Connection with {trace.white}Discord{trace.cyan} achieved.'
         )
Example #7
def convert_with_ffmpeg(input_song, output_song, folder):
    """ Convert the audio file using FFmpeg. """
    ffmpeg_pre = 'ffmpeg -y '

    if not log.level == 10:
        ffmpeg_pre += '-hide_banner -nostats -v panic '

    input_ext = input_song.split('.')[-1]
    output_ext = output_song.split('.')[-1]
    ffmpeg_params = ''  # default so an unexpected format pair cannot raise UnboundLocalError below

    if input_ext == 'm4a':
        if output_ext == 'mp3':
            ffmpeg_params = '-codec:v copy -codec:a libmp3lame -q:a 2 '
        elif output_ext == 'webm':
            ffmpeg_params = '-c:a libopus -vbr on -b:a 192k -vn '

    elif input_ext == 'webm':
        if output_ext == 'mp3':
            ffmpeg_params = '-ab 192k -ar 44100 -vn '  # no leading space: the command string is split on ' '
        elif output_ext == 'm4a':
            ffmpeg_params = '-cutoff 20000 -c:a libfdk_aac -b:a 192k -vn '

    command = '{0}-i {1} {2}{3}'.format(ffmpeg_pre,
                                        os.path.join(folder, input_song),
                                        ffmpeg_params,
                                        os.path.join(folder,
                                                     output_song)).split(' ')

    log.debug(command)

    return subprocess.call(command)
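
A minimal usage sketch, assuming FFmpeg is on the PATH and using made-up file names; note that because the command string above is split on single spaces, the folder and file names must not contain spaces:

ret = convert_with_ffmpeg('track.m4a', 'track.mp3', '/tmp/music')   # hypothetical paths
if ret != 0:
    log.debug('FFmpeg exited with code %s' % ret)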
Example #8
 async def commands(self, ctx):
     com = []
     for x in self.bot.walk_commands():
         com.append(x)
     pos = len(com)
     com = remove_duplicates(com)
     for x in com:
         log.debug(x)
         # log(type(x))
     log.info(f'{len(com)} commands (including sub-commands)')
     log.info(f'{pos} possible command combinations (including aliases)')
Example #9
def startflask():
    """Start flask process, if none is running"""
    for pid in psutil.get_pid_list():  # legacy psutil API; see the modern equivalent sketched after this example
        if "flask" in " ".join(psutil.Process(pid).cmdline):
            log.debug("Flask is already running")
            return

    log.debug("Flask is not running - starting flask...")
    devnull = open(os.devnull, 'wb')  # stdout & stderr are redirected here
    subprocess.Popen(['python', 'core/flaskserver.py', os.getcwd() + '/core'],
                        stdout=devnull, stderr=devnull, close_fds=True)
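
The snippet above uses the legacy psutil API (get_pid_list() and cmdline as an attribute). A hedged sketch of the same check against current psutil releases, where these became psutil.pids() and a cmdline() method:

import psutil

def flask_already_running():
    # Modern psutil: pids() replaces get_pid_list(), and cmdline is now a method.
    for pid in psutil.pids():
        try:
            if 'flask' in ' '.join(psutil.Process(pid).cmdline()):
                return True
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            # The process may have exited, or we may not be allowed to inspect it.
            continue
    return False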
Example #10
 async def async_get(self):
     try:
         async with httpx.AsyncClient() as client:
             r = await client.get(url=self.url)
         return r
     except httpx._exceptions.ConnectTimeout:
         log.debug(f'{self.url} Connect Timeout')
     except httpx._exceptions.ReadTimeout:
         log.debug(f'{self.url} Read Timeout')
     except Exception as exc:
         log.exception(exc)
     return None
Example #11
def init():
    """Prepare test environment"""
    log.debug("Initializing test environment")

    if not os.path.exists(config.downloaddir):
        log.debug("Creating download folder for downloaded files")
        os.makedirs(config.downloaddir)

    if not os.path.exists(".install_log.txt"):
        log.warning("install.sh script wasn't executed!")

    startflask()
Example #12
async def is_online():
    host = json.json.orm['api']
    try:
        async with requests.AsyncClient() as client:
            r = await client.get(
                url=f"http://{host['host']}:{host['port']}/ctv/online")
        return r.json()
    except requests._exceptions.ConnectTimeout:
        log.debug('Connect Timeout')
    except requests._exceptions.ReadTimeout:
        log.debug('Read Timeout')
    except Exception as exc:
        log.exception(exc)
    return None
Example #13
def name_to_id(name):
    url = "https://api.twitch.tv/kraken/users?login=" + name
    try:
        r = requests.get(url=url, headers=header)
        r_json = r.json()
        if r_json['users']:
            return r_json['users'][0]['_id']
    except requests._exceptions.ConnectTimeout:
        log.debug('Connect Timeout')
    except requests._exceptions.ReadTimeout:
        log.debug('Read Timeout')
    except Exception as exc:
        log.exception(exc)
    return None
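
A minimal usage sketch; the header dict is not shown in the example and is assumed to carry the credentials the (now retired) Twitch Kraken v5 API expected, so the values below are placeholders:

header = {
    'Client-ID': 'xxxxxxxxxxxx',                   # placeholder credential
    'Accept': 'application/vnd.twitchtv.v5+json',  # Kraken v5 media type
}
twitch_id = name_to_id('some_channel')   # 'some_channel' is a made-up login name
log.debug(twitch_id)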
Example #14
    def handle_request(self, json_data):
        data = json_data
        self.__reg_consumer(data)

        text = self.extract_text(data)

        if not is_command(text):
            return {'ok': True}

        cmd_id = parse_command_id(text)
        params = parse_command_params(text)
        log.debug(params)

        text = ''
        if cmd_id == '/start':
            text = '\n'.join(BOT_START_INFO)

        if cmd_id == '/haiku':
            text = jp.find_classic_haiku(params)

        if cmd_id == '/tag':
            text = jp.find_tagged_haiku(params)

        if cmd_id == '/author':
            found = jp.find_authors(params)
            names = [author['author_name'] for author in found]
            text = '\n'.join(names)

        if cmd_id == '/get100':
            text = jp.find_100verses()

        if cmd_id == '/tanka':
            text = jp.find_tanka()

        if cmd_id == '/saigyo':
            text = jp.find_saigyo()

        if cmd_id == '/calendar':
            text = get_calendar2016(params)

        if cmd_id == '/android':
            text = 'https://play.google.com/store/apps/details?id=edu.wbar.jpcalendar2016'

        chat_id = self.extract_chat_id(data)

        not_found_text = 'Не слышал о таком 👻'  # Russian: "Never heard of that one"
        status_code = send_message(self.__token, chat_id,
                                   text if text else not_found_text)
        log.debug(status_code)
        return {'ok': status_code == 200}
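
The parsing helpers is_command(), parse_command_id() and parse_command_params() are not part of this example; a hedged sketch of what they might look like, inferred only from how they are called above:

def is_command(text):
    # Assumption: a command is any non-empty message that starts with '/'.
    return bool(text) and text.startswith('/')

def parse_command_id(text):
    # Assumption: the command id is the first whitespace-separated token, e.g. '/haiku'.
    return text.split()[0]

def parse_command_params(text):
    # Assumption: everything after the first token is the parameter string.
    parts = text.split(None, 1)
    return parts[1] if len(parts) > 1 else ''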
Example #15
async def name_to_id(_name):
    if _name is not None:
        host = json.json.orm['api']
        try:
            async with requests.AsyncClient() as client:
                r = await client.get(
                    url=f"http://{host['host']}:{host['port']}/ctv/id/{_name}")
            return r.json()
        except requests._exceptions.ConnectTimeout:
            log.debug('Connect Timeout')
        except requests._exceptions.ReadTimeout:
            log.debug('Read Timeout')
        except Exception as exc:
            log.exception(exc)
    return False
Example #16
def create_file(path, name, content):
    try:
        os.makedirs(path)  # First, make sure the directory exists
    except OSError:
        log.debug("Already exists, do nothing with dir %s" % path)

    try:
        #print "CREANDO %s" % name
        #print "PATH %s NAME %s" % (path, name)
        desc = open(os.path.join(path,name), 'wb')
        desc.write(content)
        desc.close()
    except Exception, why:
        log.critical("Error saving file %s. Reason %s" % (name, why))
        
Example #17
def convert_with_avconv(input_song, output_song, folder):
    """ Convert the audio file using avconv. """
    if log.level == 10:
        level = 'debug'
    else:
        level = '0'

    command = [
        'avconv', '-loglevel', level, '-i',
        os.path.join(folder, input_song), '-ab', '192k',
        os.path.join(folder, output_song)
    ]

    log.debug(command)

    return subprocess.call(command)
Example #18
def get_vlc(version):
    """Download VLC sources from Internet and extract them
    version -- version of VLC to work with e.g. 2.0.4
    return -- path to VLC binary"""

    if os.path.exists(config.downloaddir + version):
        log.info("VLC {} already downloaded and extracted".format(version))
        return

    log.info("Downloading VLC version {} ".format(version))
    url = config.vlan['url'].format(version[4:], version)
    log.debug(url)
    archvlc, _ = urllib.urlretrieve(url)

    log.info("Extracting VLC sources {} ".format(archvlc))
    subprocess.check_call(['tar', '-C', config.downloaddir, '-Jxf', archvlc])

    return config.downloaddir + version + "/vlc"
Example #19
def is_online():
    _id = []
    for x in data.base['ctv_users']:
        _id.append(x['userid'])
    _id = ','.join([str(elem) for elem in _id])
    url = "https://api.twitch.tv/kraken/streams/?limit=100&channel=" + _id

    try:
        r = requests.get(url=url, headers=header)
        r_json = r.json()
        if r_json['streams']:
            return r_json
    except requests._exceptions.ConnectTimeout:
        log.debug('Connect Timeout')
    except requests._exceptions.ReadTimeout:
        log.debug('Read Timeout')
    except Exception as exc:
        log.exception(exc)
    return None
Example #20
def read_excel_sheets(fname):
    """Read all the sheets of a given Excel file as CSV and return them
    as a ExcelBook object.

    @param fname (str) The name of the Excel file to read.

    @return (core.excel.ExcelBook object) On success return the Excel
    sheets as an ExcelBook object. Returns None on error.

    """

    # Read the sheets.
    try:
        with open(fname, 'rb') as f:
            data = f.read()
        return excel.load_excel_libreoffice(data)
    except Exception as e:
        if (log.getEffectiveLevel() == logging.DEBUG):
            log.debug("Reading Excel sheets failed. " + safe_str_convert(e))
        return None
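
A minimal usage sketch, with a made-up workbook name; the structure of the returned ExcelBook object is defined in core.excel and is not reproduced here:

book = read_excel_sheets('report.xlsx')   # 'report.xlsx' is a hypothetical file
if book is None:
    log.debug('Could not load the workbook')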
Example #21
 async def loop(cls, self, ctx):
     if not Player.is_playing(ctx) and not Player.is_paused(ctx) and len(queue[ctx.guild.id]['q']) > 0:
         while Player.has_queue(ctx):
             if not Player.is_playing(ctx) and not Player.is_paused(ctx):
                 try:
                     try:
                         url = queue[ctx.guild.id]['q'][0]['webpage_url']
                     except Exception:
                         url = queue[ctx.guild.id]['q'][0]['id']
                     stream = await YTDLSource.create_source(url=url, loop=self.bot.loop)
                     Player.play(ctx, stream)
                     new = queue[ctx.guild.id]['q'].pop(0)
                     if 'ie_key' in new:  # if Playlist object
                         if new['ie_key'].lower() == 'youtube':
                             added_by = new['discord_mention']
                             data_trunk, new = await Player.info(new['id'], loop=self.bot.loop, ctx=ctx)
                             # new = new[0]  # Format into usable form
                             new.update({'discord_mention': added_by})
                     extractor = Player.Extractor.fetch(new)
                     new = await Player.process_picture(new, extractor[0])
                     # log.debug(new)
                     queue[ctx.guild.id]['playing'].insert(0, new)
                     queue[ctx.guild.id]['player'] = stream
                     embed = tls.Embed(ctx, title=new['title'], url=new['webpage_url'], description='is now playing.')
                     # log.debug(new['pfp'])
                     try:
                         embed.set_author(name=new['uploader'], url=new['uploader_url'], icon_url=new['pfp'])
                     except KeyError as err:
                         embed.set_author(name=new['uploader'], icon_url=new['pfp'])
                     # embed.set_image(url=new['thumbnail'])
                     try:
                         await ctx.send(embed=embed)
                     except Exception as e:
                         log.warn(f'> {e}')
                 except Exception as err:
                     log.debug(f'> {err}')
                     pass
             await asyncio.sleep(4)
Example #22
async def live_loop(self):
    await reset(self)
    log.debug('CTV Loop Started')
    # if test:
    #     log.debug('CTV Loop Started')
    from cogs.core.system import lockdown
    global looping
    while not lockdown and do_loop:
        if test_debug:
            log.debug(f'{self.bot.user.name} is looping')
        from cogs.core.system import lockdown
        if lockdown:
            break
        looping = True
        try:
            info = await is_online()
            if info:
                # print(info)
                global past
                past = now.copy()
                now.clear()
                for x in info['streams']:  # Add userID to memory
                    now.append(x['channel']['_id'])
                # if past:  # If has past history data, continue
                if True:
                    # print(info)
                    for x in now:  # Compare. If not in memory previously, it's new
                        if x not in past:
                            await on_live(self, x)
                    for x in past:  # Compare. If not in memory anymore, went offline
                        if x not in now:
                            await on_offline(self, x)
        except Exception as err:
            log.exception(err)
        await asyncio.sleep(10)
    looping = False
    log.error('CTV Loop Stopped')
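
The loop above detects go-live and go-offline events by diffing the channel IDs seen on the previous poll (past) against the current poll (now); a standalone sketch of that comparison with hypothetical IDs:

past = [101, 202]   # online on the previous poll (hypothetical IDs)
now = [202, 303]    # online on the current poll (hypothetical IDs)

went_live = [x for x in now if x not in past]       # -> [303], triggers on_live()
went_offline = [x for x in past if x not in now]    # -> [101], triggers on_offline()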
Example #23
    async def info(cls, inurl, **kwargs):  # VIDEO INFO
        loop = kwargs.get('loop', asyncio.get_event_loop())
        do_log = kwargs.get('log', False)
        ctx = kwargs.get('ctx', None)
        from core.bot.funcs import respond
        url = inurl
        # if inurl.casefold() in MusicTests.__members__.keys():
        #     url = MusicTests[inurl.casefold()].value
        try:
            data = await loop.run_in_executor(None, lambda: ytdl.extract_info(url, download=False, process=False))
            # log.debug(data)
        except Exception as err:
            if ctx is not None:
                await respond(ctx, err, url)
            data = {}

        if do_log:
            log.debug(data)
        if 'extractor_key' in data:
            if data['extractor_key'] == 'YoutubeSearch':
                return None, None
            else:
                if 'entries' in data:
                    count = 0
                    entries = []
                    for x in data['entries']:  # skip deleted/private videos, don't add them to the output
                        if x['title'] not in ('[Deleted video]', '[Private video]'):
                            entries.append(x)
                        if count >= 1000:
                            break
                        count += 1
                    return entries, data
                else:
                    # log.debug(data)
                    return [data], data
        else:
            return None, None
Example #24
 def post(self, json):
     try:
         with httpx.Client() as client:
             client.post(url=self.url, json=dict(json))
     except httpx._exceptions.WriteError:
         log.debug(f'{self.url} Write Error')
     except httpx._exceptions.ConnectTimeout:
         log.debug(f'{self.url} Connect Timeout')
     except httpx._exceptions.ReadTimeout:
         log.debug(f'{self.url} Read Timeout')
     except Exception as exc:
         log.exception(exc)
Example #25
    def run(self, options=None):
        """
        This method is the main function in this class.
        Pre:
            - Previous execution of detect_mode() method.
        Return:
            None
        """
        if self.initial_backup_mode:
            log.info("INITIAL BACKUP MODE")

            log.debug("0 - READY BACKUP")
            self.prepare_to_init_backup()

            log.debug("2 - GENERATE ACTUAL FS")
            self.actual_filesystem.generate()

            log.debug("5.5 - UPLOAD ALL LOCAL FS")
            self.upload_all()

            log.debug("6 - UPDATE REMOTE FS")
            self.upload_actual_fs_struct()

        elif self.remote_home_mode:
            log.info("REMOTE_HOME MODE")

            log.debug("1 - LOAD REMOTE FS")
            self.get_remote_fs_struct()
            log.debug("2 - SYNC REMOTE HOME")
            self.sync_remote_home()
        elif self.resync_mode: # Reprocess
            log.info("RESYNC")

            log.debug("1 - LOAD REMOTE FS")
            self.get_remote_fs_struct()

            log.debug("2 - GENERATE ACTUAL FS")
            self.actual_filesystem.generate()

            log.debug("3,4 - CALCULATE CHANGES")
            changes = filesystem.compare_fs(actual_fs=self.actual_filesystem,
                                            old_fs=self.remote_filesystem)
     
            log.debug("5 - APPLY DIFERENCES (DELETE/DOWNLOAD AND UPLOAD)")
            self.process_changes_in_remote(changes)
        
            log.debug("6 - UPDATE REMOTE FS")
            self.upload_actual_fs_struct()
        else:
            log.critical("UNKNOWN MODE, existing...")
Example #26
async def ban_attempt(self, guild, snowflake):
    ban = await ban_list()
    if str(snowflake) in ban:
        if bDebug:
            log.debug(f'{snowflake} is flagged as globally banned')
        try:  # Try ban
            if bDebug:
                log.debug(f'Attempting ban on {snowflake}')

            await guild.ban(tls.Snowflake(snowflake), delete_message_days=0, reason=ban[str(snowflake)]['reason'])

            if bDebug:  # can't make this one line
                log.debug(f'Ban success on {snowflake}')

            return
        except Exception:  # Missing permissions/unable
            if bDebug:
                log.debug(f'Unable to ban {snowflake}')

        try:  # Try kick
            if bDebug:
                log.debug(f'Attempting kick on {snowflake}')

            await guild.kick(tls.Snowflake(snowflake), reason=ban[str(snowflake)]['reason'])

            if bDebug:
                log.debug(f'Kick success on {snowflake}')

            return
        except Exception:  # Missing permissions/unable
            if bDebug:
                log.debug(f'Unable to kick {snowflake}')

        if bDebug:
            log.debug(f'All attempts failed on {snowflake}')
Example #27
def process_file_scanexpr(container, filename, data):
    """Process a single file.

    @param container (str) Path and filename of container if the file is within
    a zip archive, None otherwise.

    @param filename (str) path and filename of file on disk, or within
    the container.

    @param data (bytes) Content of the file if it is in a container,
    None if it is a file on disk.

    """
    #TODO: replace print by writing to a provided output file (sys.stdout by default)
    if container:
        display_filename = '%s in %s' % (filename, container)
    else:
        display_filename = filename
    safe_print('='*79)
    safe_print('FILE: ' + safe_str_convert(display_filename))
    all_code = ''
    try:
        #TODO: handle olefile errors, when an OLE file is malformed
        import oletools
        oletools.olevba.enable_logging()
        if (log.getEffectiveLevel() == logging.DEBUG):
            log.debug('opening %r' % filename)
        vba = VBA_Parser(filename, data, relaxed=True)
        if vba.detect_vba_macros():

            # Read in document metadata.
            vm = core.ViperMonkey(filename, data)
            ole = olefile.OleFileIO(filename)
            try:
                vm.set_metadata(ole.get_metadata())
            except Exception as e:
                log.warning("Reading in metadata failed. Trying fallback. " + safe_str_convert(e))
                vm.set_metadata(get_metadata_exif(filename))
            
            #print 'Contains VBA Macros:'
            for (subfilename, stream_path, vba_filename, vba_code) in vba.extract_macros():
                # hide attribute lines:
                #TODO: option to disable attribute filtering
                vba_code = filter_vba(vba_code)
                safe_print('-'*79)
                safe_print('VBA MACRO %s ' % vba_filename)
                safe_print('in file: %s - OLE stream: %s' % (subfilename, repr(stream_path)))
                safe_print('- '*39)
                # detect empty macros:
                if vba_code.strip() == '':
                    safe_print('(empty macro)')
                else:
                    # TODO: option to display code
                    safe_print(vba_code)
                    vba_code = core.vba_collapse_long_lines(vba_code)
                    all_code += '\n' + vba_code
            safe_print('-'*79)
            safe_print('EVALUATED VBA EXPRESSIONS:')
            t = prettytable.PrettyTable(('Obfuscated expression', 'Evaluated value'))
            t.align = 'l'
            t.max_width['Obfuscated expression'] = 36
            t.max_width['Evaluated value'] = 36
            for expression, expr_eval in core.scan_expressions(all_code):
                t.add_row((repr(expression), repr(expr_eval)))
                safe_print(t)

        else:
            safe_print('No VBA macros found.')
    except Exception as e:
        log.error("Caught exception. " + safe_str_convert(e))
        if (log.getEffectiveLevel() == logging.DEBUG):
            traceback.print_exc()

    safe_print('')
Example #28
    def process_changes_in_remote(self, changes):
        """
        This method is used to apply the computed changes in Mega (synchronize).
        """

        log.debug("Processing changes in remote")

        remove_files = changes['removed_files']
        for file in remove_files:
            log.debug("Removing file %s" % file)
            status = self.uploader.remove(
                path='%s/%s' %
                (settings.get_config('remote', 'folder'), file.relative_path),
                filename=file.name)

            if not status:
                log.error("ERROR DELETING REMOTE FILE %s" % file)

        remove_folders = changes['removed_folders']
        for folder in remove_folders:
            log.debug("Removing folder %s" % folder)
            status = self.uploader.remove(path='%s/%s' % (settings.get_config(
                'remote', 'folder'), folder.relative_path),
                                          filename=folder.name)
            if not status:
                log.error("Folder not deleted correctly in remote %s" % folder)

        new_folders = changes['new_folders']
        for folder in new_folders:
            log.debug("Creating remote folder %s" % folder)
            remote_folder = '%s/%s/%s' % (settings.get_config(
                'remote', 'folder'), folder.relative_path, folder.name)
            rem_desc = self.uploader.mkdir(remote_folder)

        new_files = changes['new_files']
        for file in new_files:
            log.debug("New file %s" % file)
            remote_folder = '%s/%s' % (settings.get_config(
                'remote', 'folder'), file.relative_path)
            rem_desc = self.uploader.upload(remote_folder, file.path)

        to_download = changes['to_download']
        for file in to_download:
            log.debug("Download modified %s" % file)
            path = '%s/%s' % (settings.get_config(
                'remote', 'folder'), file.relative_path)
            content = self.uploader.get_content_by_path(path=path,
                                                        filename=file.name)
            filesystem.create_file(path=os.path.join(self.backup_path,
                                                     file.relative_path),
                                   name=file.name,
                                   content=content)

        new_files = changes['to_upload']
        for file in new_files:
            log.debug("Uploading file %s" % file)
            remote_folder = '%s/%s' % (settings.get_config(
                'remote', 'folder'), file.relative_path)
            rem_desc = self.uploader.upload(remote_folder, file.path)
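
process_changes_in_remote() consumes the changes argument by key; a sketch of its expected shape, inferred only from the keys read above (the empty lists are placeholders for FileObject-like entries):

changes = {
    'removed_files':   [],   # deleted locally -> remove from Mega
    'removed_folders': [],   # deleted locally -> remove from Mega
    'new_folders':     [],   # created locally -> mkdir in Mega
    'new_files':       [],   # created locally -> upload
    'to_download':     [],   # changed remotely -> download and recreate locally
    'to_upload':       [],   # changed locally -> upload again
}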
Example #29
 def detect_mode(self):
     """
     Depending on the remote repository and the local folder, this method
     decides the backup mode.
     """
     # Initial backup: nothing exists in Mega yet.
     # Resync: Mega has content and so does the local home.
     # Remote-home: Mega has content and the local folder is empty
     # or doesn't exist.
     remote = self.uploader.find_folder(
                                     settings.get_config('remote', 'folder'))
     summary = self.uploader.get_file(
                     filename=settings.get_config('remote','summary_file'),
                     path=settings.get_config('remote', 'folder'))
     empty_dir = filesystem.os_empty_dir(self.backup_path)
     
     if remote and summary and empty_dir: #(000)
         log.debug("REMOTE HOME 1")
         self.remote_home_mode = True
     elif remote and summary and not empty_dir: #(001)
         log.debug("RESYNC 1")
         self.resync_mode = True
     elif remote and not summary and empty_dir: #(010)
         log.debug("UNKNOWN MODE 1")
         self.unknown_mode = True
     elif remote and not summary and not empty_dir: #(011)
         log.debug("INITIAL BACKUP 1")
         self.initial_backup_mode = True
     elif not remote and summary and empty_dir: #(100)
         #Impossible
         log.debug("UNKNOWN MODE 2")
         self.unknown_mode = True
     elif not remote and summary and not empty_dir: #(101)
         #Impossible
         log.debug("UNKNOWN MODE 3")
         self.unknown_mode = True
     elif not remote and not summary and empty_dir: #(110)
         log.critical("Local directory doesn't exist and remote neither")
         print "Local directory doesn't exist & remote neither, existing..."
         log.debug("UNKNOWN MODE 4")
         self.unknown_mode = True
     elif not remote and not summary and not empty_dir: #(111)
         log.debug("INITIAL BACKUP 2")
         self.initial_backup_mode = True
Example #30
 async def looping(self, ctx):
     log.debug(looping)
Example #31
    def run(self, options=None):
        """
        This method is the main function in this class.
        Pre:
            - Previous execution of detect_mode() method.
        Return:
            None
        """
        if self.initial_backup_mode:
            log.info("INITIAL BACKUP MODE")

            log.debug("0 - READY BACKUP")
            self.prepare_to_init_backup()

            log.debug("2 - GENERATE ACTUAL FS")
            self.actual_filesystem.generate()

            log.debug("5.5 - UPLOAD ALL LOCAL FS")
            self.upload_all()

            log.debug("6 - UPDATE REMOTE FS")
            self.upload_actual_fs_struct()

        elif self.remote_home_mode:
            log.info("REMOTE_HOME MODE")

            log.debug("1 - LOAD REMOTE FS")
            self.get_remote_fs_struct()
            log.debug("2 - SYNC REMOTE HOME")
            self.sync_remote_home()
        elif self.resync_mode:  # Reprocess
            log.info("RESYNC")

            log.debug("1 - LOAD REMOTE FS")
            self.get_remote_fs_struct()

            log.debug("2 - GENERATE ACTUAL FS")
            self.actual_filesystem.generate()

            log.debug("3,4 - CALCULATE CHANGES")
            changes = filesystem.compare_fs(actual_fs=self.actual_filesystem,
                                            old_fs=self.remote_filesystem)

            log.debug("5 - APPLY DIFERENCES (DELETE/DOWNLOAD AND UPLOAD)")
            self.process_changes_in_remote(changes)

            log.debug("6 - UPDATE REMOTE FS")
            self.upload_actual_fs_struct()
        else:
            log.critical("UNKNOWN MODE, existing...")
Example #32
 async def backend(self, ctx):
     try:
         log.debug(queue[ctx.guild.id]['playing'][0])
     except Exception:
         log.debug(None)
     log.debug(queue[ctx.guild.id]['q'])
Example #33
 async def on_disconnect(self):
     if test:
         log.debug(
             f'{trace.cyan}> Connection with {trace.white}Discord{trace.cyan} lost.'
         )
Example #34
 async def on_resumed(self):
     if test:
         log.debug(
             f'{trace.cyan}> Connection with {trace.white}Discord{trace.cyan} resumed.'
         )
Example #35
    def detect_mode(self):
        """
        Depending on the remote repository and the local folder, this method
        decides the backup mode.
        """
        # Initial backup: nothing exists in Mega yet.
        # Resync: Mega has content and so does the local home.
        # Remote-home: Mega has content and the local folder is empty
        # or doesn't exist.
        remote = self.uploader.find_folder(
            settings.get_config('remote', 'folder'))
        summary = self.uploader.get_file(
            filename=settings.get_config('remote', 'summary_file'),
            path=settings.get_config('remote', 'folder'))
        empty_dir = filesystem.os_empty_dir(self.backup_path)

        if remote and summary and empty_dir:  #(000)
            log.debug("REMOTE HOME 1")
            self.remote_home_mode = True
        elif remote and summary and not empty_dir:  #(001)
            log.debug("RESYNC 1")
            self.resync_mode = True
        elif remote and not summary and empty_dir:  #(010)
            log.debug("UNKNOWN MODE 1")
            self.unknown_mode = True
        elif remote and not summary and not empty_dir:  #(011)
            log.debug("INITIAL BACKUP 1")
            self.initial_backup_mode = True
        elif not remote and summary and empty_dir:  #(100)
            #Impossible
            log.debug("UNKNOWN MODE 2")
            self.unknown_mode = True
        elif not remote and summary and not empty_dir:  #(101)
            #Impossible
            log.debug("UNKNOWN MODE 3")
            self.unknown_mode = True
        elif not remote and not summary and empty_dir:  #(110)
            log.critical("Local directory doesn't exist and remote neither")
            print "Local directory doesn't exist & remote neither, existing..."
            log.debug("UNKNOWN MODE 4")
            self.unknown_mode = True
        elif not remote and not summary and not empty_dir:  #(111)
            log.debug("INITIAL BACKUP 2")
            self.initial_backup_mode = True
Example #36
 def testLibrary(self):
     lib = api.API.library
     log.debug(lib.get_song_struct(0))
Example #37
    def process_changes_in_remote(self, changes):
        """
        This method is used to apply the computed changes in Mega (synchronize).
        """

        log.debug("Processing changes in remote")
        
        remove_files = changes['removed_files']
        for file in remove_files:
            log.debug("Removing file %s" % file)
            status = self.uploader.remove(
                path='%s/%s' % (settings.get_config('remote', 'folder'),
                                file.relative_path),
                filename=file.name)

            if not status:
                log.error("ERROR DELETING REMOTE FILE %s" % file)


        remove_folders = changes['removed_folders']
        for folder in remove_folders:
            log.debug("Removing folder %s" % folder)
            status = self.uploader.remove(
                path='%s/%s' % (settings.get_config('remote', 'folder'),
                                folder.relative_path),
                filename=folder.name)
            if not status:
                log.error("Folder not deleted correctly in remote %s" % folder)

        new_folders = changes['new_folders']
        for folder in new_folders:
            log.debug("Creating remote folder %s" % folder)
            remote_folder = '%s/%s/%s' % (
                                        settings.get_config('remote', 'folder'), 
                                        folder.relative_path,
                                        folder.name)
            rem_desc = self.uploader.mkdir(remote_folder)
        
        new_files = changes['new_files']
        for file in new_files:
            log.debug("New file %s" % file)
            remote_folder = '%s/%s' % (settings.get_config('remote', 'folder'), 
                                       file.relative_path)
            rem_desc = self.uploader.upload(remote_folder, file.path)


        to_download = changes['to_download']
        for file in to_download:
            log.debug("Download modified %s" % file)
            path = '%s/%s' % (settings.get_config('remote', 'folder'),
                                                        file.relative_path)
            content = self.uploader.get_content_by_path(path=path,
                                                        filename=file.name)
            filesystem.create_file(
                    path=os.path.join(
                           self.backup_path,
                           file.relative_path),
                    name=file.name, 
                    content=content)
        
        new_files = changes['to_upload']
        for file in new_files:
            log.debug("Uploading file %s" % file)
            remote_folder = '%s/%s' % (settings.get_config('remote', 'folder'),
                                file.relative_path)
            rem_desc = self.uploader.upload(remote_folder, file.path)