Example #1
async def _login(client, token=None):
    if not token:
        try:
            log.info(f'{trace.cyan}> Attempting Login.')
            log.info(
                f'{trace.cyan}> Running on {trace.white}Discord{trace.green.s}Py '
                f'{trace.cyan}v{trace.cyan.s}{discord.__version__}{trace.cyan}.'
            )
            version.Discord.latest()
            version.YouTubeDL.latest()
            token = json.json.reader('token')
            if token == enums.ReturnType.fail or token == enums.ReturnType.none:
                raise discord.errors.LoginFailure('No token')
            else:
                await client.start(crypt(token))
                # client.run(crypt(token))
                return
        except discord.errors.LoginFailure as e:
            if json.external.exists(json.default):
                try:
                    os.remove(json.default)
                except OSError:
                    pass
            log.critical(
                f'{type(e)} has occurred. Please check your login token')
            log.critical('SESSION HAS BEEN TERMINATED')
            log.critical(f'{e}')
        except Exception as err:  # This should never occur.
            log.error(f'> {short_traceback()}')
            log.error(f'> {traceback.format_exc()}')
    else:
        await client.start(token)
        # client.run(token)
        return
Example #2
    def __init__(self, config):
        self.order = None
        self.subOrder = None
        self.orderItem = None
        self.data = None
        self.financeOrder = None
        self.test = None
        self.n_to_array = None
        self.data2 = None
        self.data3 = None
        self.full_outer_join = None
        self.testtest = None

        self.paymentFee = 0.02
        self.marketFee = 0.01
        self.platformFee = 0.05

        self.config = config

        self.spark = SparkSession.builder.master(self.config.master).appName(self.config.app_name).getOrCreate()
            # .config(f'spark.mongodb.input.uri={self.config.input_mongodb_uri}') \
            # .config(f'spark.mongodb.output.uri={self.config.output_mongodb_uri}') \
            # .config('spark.driver.extraClassPath', self.config.jars_dir) \
            # .getOrCreate()
        # self.spark.sparkContext.setLogLevel('INFO')

        log.info("=== Spark Info ===")
Example #3
 async def users(self, ctx):  # LIST ALL USERS
     if not ctx.invoked_subcommand:
         log.info(
             f'Number of Users that this bot can see: {len(self.bot.users)}'
         )
         for x in self.bot.users:
             log.info(f'{x.id}: {x}')
Example #4
 async def on_connect(self):
     if not self.bot.is_ready():
         log.info(f'{trace.cyan}> Logging in at {time.readable.at()}.')
     else:
         log.debug(
             f'{trace.cyan}> Connection with {trace.white}Discord{trace.cyan} achieved.'
         )
Example #5
    def run(self, silent=False):
        if custom_help:
            self.client.remove_command('help')
        cog_count = 0
        warnings = []
        cogs = extensions()
        for extension in cogs:
            try:
                self.client.load_extension(extension)
                cog_count += 1
            except Exception as e:
                if not silent:
                    warnings.append(f'Failed to load extension {extension}\n{e}')

        if not silent:
            if not cogs:
                log.warning('No extensions were found.')
            else:
                for x in warnings:
                    y = x.split('\n')
                    log.warning(f'> {y[0]}')
                    log.error(f'> {y[1]}')
                if len(warnings) > 0:
                    # if saved() < enums.LogLevel.error.value:
                    if len(warnings) == 1:
                        log.error(f'> Failed to load {trace.yellow.s}{len(warnings)}{trace.cyan} extension.')
                    else:
                        log.error(f'> Failed to load {trace.yellow.s}{len(warnings)}{trace.cyan} extensions.')
                log.info(f'{trace.cyan}> Loaded {trace.yellow.s}{cog_count}{trace.cyan} extensions!')
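
A side note on the warning bookkeeping above: packing the extension name and the error into a single '\n'-joined string, only to split it back apart for logging, breaks if an exception message itself contains a newline. A minimal sketch of the same load-and-collect pattern that keeps the pair structured (assuming a discord.py-style client; the function name is illustrative):

import logging

log = logging.getLogger(__name__)

def load_extensions(client, extensions):
    """Load each extension; return (loaded_count, [(name, error), ...])."""
    loaded, failures = 0, []
    for name in extensions:
        try:
            client.load_extension(name)
            loaded += 1
        except Exception as e:  # broad catch, mirroring the example above
            failures.append((name, e))
    return loaded, failures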
Example #6
def _save_embedded_files(out_dir, vm):
    """Save any extracted embedded files from the sample in the artifact
    directory.

    @param vm (ViperMonkey object) The ViperMonkey emulation engine
    object that did the emulation.

    @param out_dir (str) The artifact directory.
    """

    # Make the output directory if needed.
    out_dir = safe_str_convert(out_dir)
    if (not os.path.exists(out_dir)):
        log.info("Making dropped sample directory ...")
        os.mkdir(out_dir)
        
    # Save each file.
    for file_info in vm.embedded_files:
        short_name = safe_str_convert(file_info[0])
        long_name = safe_str_convert(file_info[1])
        contents = safe_str_convert(file_info[2])
        log.info("Saving embedded file " + long_name + " ...")
        try:
            f = open(out_dir + "/" + short_name, "w")
            f.write(contents)
            f.close()
        except IOError as e:
            log.error("Saving embedded file " + long_name + " failed. " + str(e))
Example #7
def pull_embedded_pe_files(data, out_dir):
    """Directly pull out any PE files embedded in the given data. The PE
    files will be saved in a directory and will be named things like
    embedded*.exe.

    @param data (str) The contents of the file being analyzed.

    @param out_dir (str) The directory in which to save extracted PE
    files.

    """

    # Is this an Office 2007 (zip) file?
    if core.filetype.is_office2007_file(data, is_data=True):

        # convert data to a BytesIO buffer so that we can use zipfile in memory
        # without writing a temp file on disk:
        data_io = io.BytesIO(data)
        # Pull embedded PE files from each file in the zip.
        with zipfile.ZipFile(data_io, "r") as f:
            for name in f.namelist():
                curr_data = f.read(name)
                pull_embedded_pe_files(curr_data, out_dir)
        return
    
    # Is a PE file in the data at all?
    pe_pat = r"MZ.{70,80}This program (?:(?:cannot be run in DOS mode\.)|(?:must be run under Win32))"
    if (re.search(pe_pat, data) is None):
        return

    # There is an embedded PE. Break them out.
    
    # Get where each PE file starts.
    pe_starts = []
    for match in re.finditer(pe_pat, data):
        pe_starts.append(match.span()[0])
    pe_starts.append(len(data))
    
    # Make the 2nd stage output directory if needed.
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
    
    # Break out each PE file. Note that we probably will get extra data,
    # but due to the PE file format the file will be a valid PE (with an overlay).
    pos = 0
    out_index = 0
    while (pos < len(pe_starts) - 1):
        curr_data = data[pe_starts[pos]:pe_starts[pos+1]]
        curr_name = out_dir + "/embedded_pe" + safe_str_convert(out_index) + ".bin"
        # Make sure name is unique.
        while os.path.isfile(curr_name):
            out_index += 1
            curr_name = out_dir + "/embedded_pe" + safe_str_convert(out_index) + ".bin"
        with open(curr_name, "wb") as f:
            f.write(curr_data)
        pos += 1
        out_index += 1
        log.info("Wrote embedded PE file to " + curr_name)
Example #8
 async def restart(self, ctx, *, message=None):
     if message is None:
         await ctx.send(f'{self.bot.user.name} is restarting.')
     else:
         await ctx.send(f'{self.bot.user.name} is restarting {message}.')
     log.info(
         f'{trace.red.s}> Manual Restart: {trace.yellow.s}{self.bot.user.name}, {trace.cyan.s}{self.bot.user.id}, {trace.magenta.s}Restarting.'
     )
     await tls.Voice(ctx).disconnect()
     await self.bot.close()
Example #9
    def __init__(self, env):
        self.env = env
        self.file_dir = os.path.dirname(os.path.realpath(__file__))  # the module's directory
        self.file_path = '/config/%s-conf.json' % self.env
        self.full_path = self.file_dir + self.file_path

        log.info(f'Reading config from: {self.full_path}')

        with open(self.full_path) as config_file:
            self.data = json.load(config_file)
Example #10
 async def commands(self, ctx):
     com = []
     for x in self.bot.walk_commands():
         com.append(x)
     pos = len(com)
     com = remove_duplicates(com)
     for x in com:
         log.debug(x)
         # log(type(x))
     log.info(f'{len(com)} commands (including sub-commands)')
     log.info(f'{pos} possible command combinations (including aliases)')
Example #11
def song(input_song, output_song, folder, avconv=False):
    """ Do the audio format conversion. """
    if input_song != output_song:
        log.info('Converting {0} to {1}'.format(input_song,
                                                output_song.split('.')[-1]))
        if avconv:
            exit_code = convert_with_avconv(input_song, output_song, folder)
        else:
            exit_code = convert_with_ffmpeg(input_song, output_song, folder)
        return exit_code
    return 0
Example #12
 async def quit(self, ctx):
     for x in self.bot.commands:
         if x.name not in exceptions:
             x.enabled = False
     await tls.Voice(ctx).disconnect()
     global lockdown
     lockdown = True
     log.info(
         f'{trace.red.s}> Lockdown: {trace.yellow.s}{self.bot.user.name}, {trace.cyan.s}{self.bot.user.id}, {trace.magenta.s}Halted.'
     )
     await self.bot.change_presence(status=discord.Status.do_not_disturb)
     await ctx.send(f'{self.bot.user.name} is now in lockdown.')
Example #13
    def __reg_consumer(self, update):
        msg = update.get('message')
        from_user = msg.get('from') if msg else None
        id = from_user.get('id') if from_user else None
        if not id:
            return

        f_name = from_user.get('first_name', '')
        l_name = from_user.get('last_name', '')
        u_name = from_user.get('username', '')

        log.info('[x] USER: {0} {1} {2} aka {3}'.format(
            id, f_name, l_name, u_name))
Example #14
def input_link(links):
    """ Let the user input a choice. """
    while True:
        try:
            log.info('Choose your number:')
            the_chosen_one = int(input('> '))
            if 1 <= the_chosen_one <= len(links):
                return links[the_chosen_one - 1]
            elif the_chosen_one == 0:
                return None
            else:
                log.warning('Choose a valid number!')
        except ValueError:
            log.warning('Choose a valid number!')
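
The same prompt loop generalizes to any 1..N menu; a sketch (0 meaning "skip", as above; names are illustrative):

import logging

log = logging.getLogger(__name__)

def choose_index(n_options, prompt='> '):
    """Prompt until the user enters 0 (returns None) or 1..n_options (0-based index)."""
    while True:
        try:
            choice = int(input(prompt))
        except ValueError:
            log.warning('Choose a valid number!')
            continue
        if choice == 0:
            return None
        if 1 <= choice <= n_options:
            return choice - 1
        log.warning('Choose a valid number!')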
Example #15
def embed(music_file, meta_tags):
    """ Embed metadata. """
    if meta_tags is None:
        log.warning('Could not find metadata')
        return None
    elif music_file.endswith('.m4a'):
        log.info('Applying metadata')
        return embed_m4a(music_file, meta_tags)
    elif music_file.endswith('.mp3'):
        log.info('Applying metadata')
        return embed_mp3(music_file, meta_tags)
    else:
        log.warning('Cannot embed metadata into given output extension')
        return False
Example #16
 async def invite(self, ctx, guild_id):  # GET GUILD INVITE
     guild = await self.bot.fetch_guild(guild_id)
     log.info(f'Getting invite for guild: {guild.id}: {guild}')
     channels = await guild.fetch_channels()
     invite = None
     for x in channels:
         if x.position == 0:
             if type(x).__name__ == 'TextChannel':
                 invite = await x.create_invite(
                     reason='Requested by bot owner.',
                     unique=False,
                     temporary=False,
                     max_age=3600,
                     max_uses=1)
                 break
     log.info(f'Invite for guild {guild.id}: {invite}')
Example #17
def getgcov(version):
    """Executes gcov script and returns its output
    version -- version of VLC to get gcov statistics
    return -- gcov output for version"""

    log.info("Collecting gcov data for {}".format(version))
    # stdout & stderr are redirected to /dev/null
    with open(os.devnull, 'wb') as devnull:
        proc = subprocess.Popen(['./core/gcov.sh', config.downloaddir + version],
                                stdout=devnull, stderr=devnull)
        proc.wait()

    with open("." + version + ".txt", 'r') as fp:
        output = fp.read()

    return output
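
On Python 3, the same redirect can skip the devnull file handle entirely; a sketch:

import subprocess

def run_quiet(cmd):
    """Run cmd, discarding stdout/stderr; raises CalledProcessError on failure."""
    subprocess.check_call(cmd, stdout=subprocess.DEVNULL,
                          stderr=subprocess.DEVNULL)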
Example #18
def build_vlc(version):
    """Build the vlc from sources (if needed)"""
    stampfile = config.downloaddir + "." + version
    vlcdir = os.getcwd() + "/" + config.downloaddir + version + "/"

    if os.path.exists(stampfile):
        log.info("No need to build {}".format(version))
        return

    subprocess.check_call(["./configure", "--prefix=/usr", "--enable-coverage",
        "--disable-dbus", "--disable-lua", "--disable-mad",
        "--disable-postproc", "--disable-a52", "--disable-fribidi",
        "--enable-pulse", "--enable-alsa"], cwd=vlcdir)

    subprocess.check_call(["make"], cwd=vlcdir)

    open(stampfile, 'w').close()
Example #19
 def login(self):
     log.info(f'{trace.cyan}> Attempting Login.')
     log.info(
         f'{trace.cyan}> Running on {trace.white}Discord{trace.green.s}Py '
         f'{trace.cyan}v{trace.cyan.s}{discord.__version__}{trace.cyan}.')
     version.Discord.latest()
     version.YouTubeDL.latest()
     tokens = json.json.orm['tokens']['discord']
     threads = [
         threading.Thread(target=login_threads,
                          args=(
                              self.prefix,
                              crypt(x),
                          ),
                          daemon=True) for x in tokens
     ]
     # threads = [threading.Thread(target=login_threads, args=(self.prefix, crypt(x),)) for x in tokens]
     return threads
Example #20
 async def user(self, ctx, *, arg):  # VIEW USER INFO
     user = await commands.UserConverter().convert(ctx=ctx, argument=arg)
     log.info(f'Viewing user: {user.id}: {user}')
      log.info(f'Account created: {user.created_at}')
     guilds = []
     for x in self.bot.guilds:
         if user in x.members:
             guilds.append(x)
     log.info(f'Number of Guilds that {user.id} is in: {len(guilds)}')
     for x in guilds:
         log.info(f'{x.id}: {x.name}')
Example #21
    def form_valid(self, form, data):
        # Days already present in the database
        days_exists = list(
            Day.objects.filter(
                date__year=form.cleaned_data['year'], ).values_list('date',
                                                                    flat=True))

        # Days to be added
        days_raw = filter(
            lambda x: x['date'] not in days_exists,
            data,
        )

        log.info("Starting import CSV...")
        errors = False

        for day_raw in days_raw:
            # Add the days via the form
            day_form = DayForm(day_raw)
            if day_form.is_valid():
                day_form.save()
            else:
                errors = True
                log.error("{}: {} ".format(
                    model_to_dict(day_form.instance),
                    day_form.errors.as_data(),
                ))

        if errors:
            log.error("CSV import finished with errors")
            messages.error(
                self.request,
                _("CSV import finished with errors (see more in logs)"),
            )
        else:
            log.info("CSV import finished without errors")
            messages.success(
                self.request,
                _("CSV import finished without errors"),
            )

        return super().form_valid(form)
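
One performance note on the snippet above: days_exists is a list, so each x['date'] not in days_exists test inside the filter is a linear scan. Converting it to a set keeps the whole import pass linear; a tiny illustration with hypothetical stand-in values:

# Stand-ins for the queryset results above.
existing_dates = {'2023-01-01', '2023-01-02'}   # set: O(1) membership tests
rows = [{'date': '2023-01-01'}, {'date': '2023-01-03'}]

days_to_add = [row for row in rows if row['date'] not in existing_dates]
print(days_to_add)                               # [{'date': '2023-01-03'}]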
Example #22
def get_videofile():
    """Downlaod zipped video, extract it to the download folder
    return -- path to video file"""

    if os.path.exists(config.downloaddir +
                      os.path.basename(config.videourl).replace('exe', 'wmv')):
        log.info("Media file already downloaded and extracted")
        return

    log.info("Downloading media file")
    log.debug(config.videourl)
    videoexe, _ = urllib.urlretrieve(config.videourl)

    log.info("Extracting video from ZIP (sic!)")
    log.debug(videoexe)
    archfile = zipfile.ZipFile(videoexe)
    for f in archfile.namelist():
        mediafile = open(f, "wb")
        mediafile.write(archfile.read(f))
        mediafile.close()
        path = config.downloaddir + mediafile.name
        os.rename(mediafile.name, path)
    archfile.close()

    return path
Example #23
 async def dev(self, ctx):
     if not ctx.invoked_subcommand:
         com = []
         for x in self.bot.walk_commands():
             com.append(x)
         pos = len(com)
         com = remove_duplicates(com)
         log.info(f'{len(com)} commands (including sub-commands)')
         log.info(
             f'{pos} possible command combinations (including aliases)')
         t = tls.search(ctx.command.name, com)
         if len(t) > 0:
             log.info(f'Sub-commands under this command ({len(t)}):')
         for x in t:
             log.info(x)
Example #24
def _remove_duplicate_iocs(iocs):
    """Remove IOC strings that are substrings of other IOC strings.

    @param iocs (list) List of IOCs (str).

    @return (set) The original IOC list with duplicate-ish IOC strings
    stripped out.

    """

    # Track whether to keep an IOC string.
    r = set()
    skip = set()
    log.info("Found " + safe_str_convert(len(iocs)) + " possible IOCs. Stripping duplicates...")
    for ioc1 in iocs:

        # Does this IOC look like straight up garbage?
        if (read_ole_fields.is_garbage_vba(ioc1, test_all=True, bad_pct=.25)):
            skip.add(ioc1)
            continue

        # Looks somewhat sensible. See if it is a duplicate.
        keep_curr = True
        for ioc2 in iocs:
            if (ioc2 in skip):
                continue
            if ((ioc1 != ioc2) and (ioc1 in ioc2)):
                keep_curr = False
                break
            if ((ioc1 != ioc2) and (ioc2 in ioc1)):
                skip.add(ioc2)
        if (keep_curr):
            r.add(ioc1)

    # Return stripped IOC set.
    return r
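
Stripped of the garbage-VBA filter and the skip-set short-circuit, the dedup step above is "keep a string only if no other string strictly contains it"; a minimal sketch:

def remove_substring_duplicates(strings):
    """Drop any string that is a substring of another string in the input."""
    items = set(strings)
    return {s for s in items
            if not any((s != other) and (s in other) for other in items)}

print(remove_substring_duplicates({'http://a.example/x', 'a.example'}))
# -> {'http://a.example/x'}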
Example #25
    def run(self, options=None):
        """
        This method is the main function in this class.
        Pre:
            - Previous execution of detect_mode() method.
        Return:
            None
        """
        if self.initial_backup_mode:
            log.info("INITIAL BACKUP MODE")

            log.debug("0 - READY BACKUP")
            self.prepare_to_init_backup()

            log.debug("2 - GENERATE ACTUAL FS")
            self.actual_filesystem.generate()

            log.debug("5.5 - UPLOAD ALL LOCAL FS")
            self.upload_all()

            log.debug("6 - UPDATE REMOTE FS")
            self.upload_actual_fs_struct()

        elif self.remote_home_mode:
            log.info("REMOTE_HOME MODE")

            log.debug("1 - LOAD REMOTE FS")
            self.get_remote_fs_struct()
            log.debug("2 - SYNC REMOTE HOME")
            self.sync_remote_home()
        elif self.resync_mode:  # Reprocess
            log.info("RESYNC")

            log.debug("1 - LOAD REMOTE FS")
            self.get_remote_fs_struct()

            log.debug("2 - GENERATE ACTUAL FS")
            self.actual_filesystem.generate()

            log.debug("3,4 - CALCULATE CHANGES")
            changes = filesystem.compare_fs(actual_fs=self.actual_filesystem,
                                            old_fs=self.remote_filesystem)

            log.debug("5 - APPLY DIFERENCES (DELETE/DOWNLOAD AND UPLOAD)")
            self.process_changes_in_remote(changes)

            log.debug("6 - UPDATE REMOTE FS")
            self.upload_actual_fs_struct()
        else:
            log.critical("UNKNOWN MODE, existing...")
Example #27
def get_vlc(version):
    """Download VLC sources from Internet and extract them
    version -- version of VLC to work with e.g. 2.0.4
    return -- path to VLC binary"""

    if os.path.exists(config.downloaddir + version):
        log.info("VLC {} already downloaded and extracted".format(version))
        return

    log.info("Downloading VLC version {} ".format(version))
    url = config.vlan['url'].format(version[4:], version)
    log.debug(url)
    archvlc, _ = urllib.urlretrieve(url)

    log.info("Extracting VLC sources {} ".format(archvlc))
    subprocess.check_call(['tar', '-C', config.downloaddir, '-Jxf', archvlc])

    return config.downloaddir + version + "/vlc"
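
The download-then-untar step ports directly to Python 3; a sketch (urlretrieve lives in urllib.request there; names are illustrative):

import subprocess
import urllib.request

def fetch_and_extract(url, dest_dir):
    """Download a .tar.xz archive to a temp file and unpack it into dest_dir."""
    archive, _ = urllib.request.urlretrieve(url)
    subprocess.check_call(['tar', '-C', dest_dir, '-Jxf', archive])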
Example #28
 async def guild(self, ctx, guild_id):  # LIST GUILD INFO
     guild = await self.bot.fetch_guild(guild_id)
     log.info(f'Viewing guild: {guild.id}: {guild}')
     owner = await commands.UserConverter().convert(ctx=ctx,
                                                    argument=str(
                                                        guild.owner_id))
     log.info(f'Owner: {guild.owner_id}: {owner}')
     # log(f'Members: {len(guild.members)} members')
     # log(f'Categories: {len(guild.categories)} categories')
     # log(f'Channels: {len(guild.channels)} channels')
     log.info(f'Roles: {len(guild.roles)} roles')
     log.info(f'Emojis: {len(guild.emojis)} emojis')
     log.info(f'Verification Level: {guild.verification_level}')
     log.info(f'Content Filter: {guild.explicit_content_filter}')
     log.info(f'Notifications: {guild.default_notifications}')
     log.info(f'Voice Region: {guild.region}')
     if len(guild.features) > 0:
         log.info(f'Features: {len(guild.features)}')
         for x in guild.features:
             log.info(f'Feature: {x}')
     log.info(f'Created at: {guild.created_at}')
Example #29
def init():
    init_dependencies()
    log.info("=== Singularity ===")
    args = init_args_parser()
    log.info(f'Current environment: {args.env}')
    config = Configuration(args.env)
    log.info("=== Spark project is running... ===")
    log.info("== Configurations ==")

    log.info(f'app_name: {config.app_name}')
    log.info(f'input_mongodb_uri: {config.input_mongodb_uri}')
    log.info(f'output_mongodb_uri: {config.output_mongodb_uri}')
    log.info(f'jars_dir: {config.jars_dir}')
    log.info(f'master: {config.master}')

    sp = SparkManager(config)
    sp.run()
Example #30
def init_dependencies():
    log.info("=== Initial libs and modules ===")

    if os.path.exists('config.zip'):
        log.info("=== Found config.zip ===")
        sys.path.insert(0, 'config.zip')
        log.info("=== Added config.zip to path ===")
    else:
        sys.path.insert(0, './config')
        log.info("=== Added ./config to path ===")

    if os.path.exists('core.zip'):
        log.info("=== Found core.zip ===")
        sys.path.insert(0, 'core.zip')
        log.info("=== Added core.zip to path ===")
    else:
        sys.path.insert(0, './core')
        log.info("=== Added ./core to path ===")

    if os.path.exists('jars.zip'):
        log.info("=== Found jars.zip ===")
        sys.path.insert(0, 'jars.zip')
        log.info("=== Added jars.zip to path ===")
    else:
        sys.path.insert(0, './jars')
        log.info("=== Added ./jars to path ===")

    if os.path.exists('jobs.zip'):
        log.info("=== Found jobs.zip ===")
        sys.path.insert(0, 'jobs.zip')
        log.info("=== Added jobs.zip to path ===")
    else:
        sys.path.insert(0, './jobs')
        log.info("=== Added ./jobs to path ===")

    if os.path.exists('libs.zip'):
        log.info("=== Found libs.zip ===")
        sys.path.insert(0, 'libs.zip')
        log.info("=== Added libs.zip to path ===")
    else:
        sys.path.insert(0, './libs')
        log.info("=== Added ./libs to path ===")

    if os.path.exists('log.zip'):
        log.info("=== Found log.zip ===")
        sys.path.insert(0, 'log.zip')
        log.info("=== Added log.zip to path ===")
    else:
        sys.path.insert(0, './log')
        log.info("=== Added ./log to path ===")
Example #31
 async def guilds(self, ctx):  # LIST ALL GUILDS
     log.info(
         f'Number of Guilds that this bot is in: {len(self.bot.guilds)}')
     for x in self.bot.guilds:
         log.info(f'{x.id}: {x.name}')
Example #32
def _process_file (filename,
                   data,
                   strip_useless=False,
                   entry_points=None,
                   time_limit=None,
                   display_int_iocs=False,
                   artifact_dir=None,
                   out_file_name=None,
                   do_jit=False):
    """Process a single file.

    @param filename (str) path and filename of file on disk, or within
    the container.

    @param data (bytes) content of the file if it is in a container,
    None if it is a file on disk.

    @param strip_useless (boolean) Flag turning on/off modification of
    VB code prior to parsing.

    @param entry_points (list) A list of the names (str) of the VB functions
    from which to start emulation.

    @param time_limit (int) The emulation time limit, in minutes. If
    None there is no time limit.

    @param display_int_iocs (boolean) Flag turning on/off the
    reporting of intermediate IOCs (base64 strings and URLs) found
    during the emulation process.

    @param artifact_dir (str) The directory in which to save artifacts
    dropped by the sample under analysis. If None the artifact
    directory is derived from the directory of the file under analysis.

    @param out_file_name (str) The name of the file in which to store
    the ViperMonkey analysis results as JSON. If None no JSON results
    will be saved.

    @param do_jit (boolean) A flag turning on/off doing VB -> Python
    transpiling of loops to speed up loop emulation.

    @return (tuple) A 4-tuple (actions, external functions, intermediate
    IOCs, shellcode bytes) if the analysis ran, or None if there was an
    error.

    """

    # Increase Python call depth.
    sys.setrecursionlimit(13000)

    # Set the emulation time limit.
    if (time_limit is not None):
        core.vba_object.max_emulation_time = datetime.now() + timedelta(minutes=time_limit)

    # Create the emulator.
    log.info("Starting emulation...")
    vm = core.ViperMonkey(filename, data, do_jit=do_jit)
    orig_filename = filename
    if (entry_points is not None):
        for entry_point in entry_points:
            vm.user_entry_points.append(entry_point)
    try:
        #TODO: handle olefile errors, when an OLE file is malformed
        if (isinstance(data, Exception)):
            data = None
        vba = None
        try:
            vba = _get_vba_parser(data)
        except FileOpenError as e:

            # Is this an unrecognized format?
            if ("Failed to open file  is not a supported file type, cannot extract VBA Macros." not in safe_str_convert(e)):

                # No, it is some other problem. Pass on the exception.
                raise e

            # This may be VBScript with some null characters. Remove those and try again.
            data = data.replace("\x00", "")
            vba = _get_vba_parser(data)

        # Do we have analyzable VBA/VBScript? Do the analysis even
        # without VBA/VBScript if we are scraping for intermediate
        # IOCs.
        if (vba.detect_vba_macros() or display_int_iocs):

            # Read in document metadata.
            try:
                log.info("Reading document metadata...")
                ole = olefile.OleFileIO(data)
                vm.set_metadata(ole.get_metadata())
            except Exception as e:
                log.warning("Reading in metadata failed. Trying fallback. " + safe_str_convert(e))
                vm.set_metadata(get_metadata_exif(orig_filename))

            # If this is an Excel spreadsheet, read it in.
            vm.loaded_excel = excel.load_excel(data)

            # Set where to store directly dropped files if needed.
            if (artifact_dir is None):
                artifact_dir = "./"
                if ((filename is not None) and ("/" in filename)):
                    artifact_dir = filename[:filename.rindex("/")]
            only_filename = filename
            if ((filename is not None) and ("/" in filename)):
                only_filename = filename[filename.rindex("/")+1:]
            
            # Set the output directory in which to put dumped files generated by
            # the macros.
            out_dir = None
            if (only_filename is not None):
                out_dir = artifact_dir + "/" + only_filename + "_artifacts/"
                if os.path.exists(out_dir):
                    shutil.rmtree(out_dir)
            else:
                out_dir = "/tmp/tmp_file_" + safe_str_convert(random.randrange(0, 10000000000))
            log.info("Saving dropped analysis artifacts in " + out_dir)
            core.vba_context.out_dir = out_dir
            del filename # We already have this in memory, we don't need to read it again.
                
            # Parse the VBA streams.
            log.info("Parsing VB...")
            comp_modules = parse_streams(vba, strip_useless)
            if (comp_modules is None):
                return None
            got_code = False
            for module_info in comp_modules:
                m = module_info[0]
                stream = module_info[1]
                if (m != "empty"):
                    vm.add_compiled_module(m, stream)
                    got_code = True
            if ((not got_code) and (not display_int_iocs)):
                log.info("No VBA or VBScript found. Exiting.")
                return ([], [], [], [])

            # Get the VBA code.
            vba_code = ""
            for (_, _, _, macro_code) in vba.extract_macros():
                if (macro_code is not None):
                    vba_code += macro_code

            # Do not analyze the file if the VBA looks like garbage.
            if (read_ole_fields.is_garbage_vba(vba_code)):
                raise ValueError("VBA looks corrupted. Not analyzing.")

            # Read in text values from all of the various places in
            # Office 97/2000+ that text values can be hidden. So many
            # places.
            read_ole_fields.read_payload_hiding_places(data, orig_filename, vm, vba_code, vba)
            
            # Do Emulation.
            safe_print("")
            safe_print('-'*79)
            safe_print('TRACING VBA CODE (entrypoint = Auto*):')
            if (entry_points is not None):
                log.info("Starting emulation from function(s) " + safe_str_convert(entry_points))
            pyparsing.ParserElement.resetCache()
            vm.vba = vba
            vm.trace()

            # Done with emulation.

            # Report the results.
            str_actions, tmp_iocs, shellcode_bytes = _report_analysis_results(vm, data, display_int_iocs, orig_filename, out_file_name)

            # Save any embedded files as artifacts.
            _save_embedded_files(out_dir, vm)
            
            # Return the results.
            return (str_actions, vm.external_funcs, tmp_iocs, shellcode_bytes)

        # No VBA/VBScript found?
        else:
            safe_print('Finished analyzing ' + safe_str_convert(orig_filename) + " .\n")
            safe_print('No VBA macros found.')
            safe_print('')
            return ([], [], [], [])

    # Handle uncaught exceptions triggered during analysis.
    except Exception as e:

        # Print error info.
        if (("SystemExit" not in safe_str_convert(e)) and (". Aborting analysis." not in safe_str_convert(e))):
            traceback.print_exc()
        log.error(safe_str_convert(e))

        # If this is an out of memory error terminate the process with an
        # error code indicating that there are memory problems. This is so
    # that higher level systems using ViperMonkey can see that there is a
        # memory issue and handle it accordingly.
        if isinstance(e, MemoryError):
            log.error("Exiting ViperMonkey with error code 137 (out of memory)")
            sys.exit(137)

        # Done. Analysis failed.
        return None
Example #33
 async def find(self, ctx, *, user):
     matches = tls.search(user, self.bot.users)
     log.info(f'Found {len(matches)} users that match "{user}"')
     for x in matches:
         log.info(f'{x.id}: {x.name}#{x.discriminator}')
Example #34
    def run(self):
        log.info("=== Spark exec now ===")
        self.order = self.spark.read \
            .format("com.mongodb.spark.sql.DefaultSource") \
            .option("collection", "orders") \
            .load()
        self.order = self.order.alias('order')
        # order.limit(1).toPandas()
        self.order.show()
        log.info(Util.getShowString(self.order))

        self.subOrder = self.spark.read \
            .format("com.mongodb.spark.sql.DefaultSource") \
            .option("collection", "orders.suborders") \
            .load()
        self.subOrder = self.subOrder.alias('subOrder')
        # subOrder.select("*").toPandas()
        self.subOrder.show()
        log.info(Util.getShowString(self.subOrder))

        self.orderItem = self.spark.read \
            .format("com.mongodb.spark.sql.DefaultSource") \
            .option("collection", "orders.items") \
            .load()
        # orderItem = orderItem.alias('orderItem')
        self.orderItem.limit(1).toPandas()
        log.info(self.orderItem.limit(1).toPandas())
        # orderItem.show()

        self.data = self.order.join(self.subOrder, self.order._id == self.subOrder.orderId) \
            .join(self.orderItem, self.subOrder._id == self.orderItem.subOrderId) \
            .filter(self.order._id == "hKhl3HlwNMQ")

        self.data.select(["subOrderNo", "productId", "productName", "productPrice", "totalQuantity"]).toPandas()

        self.data = self.subOrder.join(self.orderItem, self.subOrder._id == self.orderItem.subOrderId) \
            .filter(self.subOrder.merchantId == "MEV747qhJ2f")

        self.data.select(["merchantId", "productName", "productPrice", "totalQuantity"]).toPandas()

        self.financeOrder = self.spark.read \
            .format("com.mongodb.spark.sql.DefaultSource") \
            .option("collection", "order.finance") \
            .load()
        # financeOrder = subOrder.alias('subOrder')
        # financeOrder.select("*").toPandas()

        self.financeOrder = self.financeOrder.select(["code", "financeType", "action"])
        self.financeOrder.show()
        log.info(Util.getShowString(self.financeOrder))
        # financeOrder.printSchema()
        # financeOrder = financeOrder.groupBy('action').agg(collect_list('code').alias('code'))
        # testfinanceOrder = financeOrder.select("code")
        # print(testfinanceOrder)
        # testfinanceOrder.show()

        self.test = self.data.select(["merchantName", "totalQuantity", "productPrice"])
        self.test = self.test.withColumn("productItem", lit(1))
        self.test = self.test.withColumn("action", lit("order"))
        self.test.show()
        log.info(Util.getShowString(self.test))

        self.n_to_array = udf(lambda n: [n] * n, ArrayType(IntegerType()))
        self.data2 = self.test.withColumn('totalQuantity', self.n_to_array('totalQuantity'))
        # data2.show()
        self.data3 = self.data2.withColumn('totalQuantity', explode('totalQuantity'))
        # # data3.printSchema()
        # data3 = data3.select("*")
        self.data3.show()
        log.info(Util.getShowString(self.data3))
        # ppp = financeOrder.crossJoin(data3)
        # # ppp = data3.crossJoin(financeOrder)
        # ppp.orderBy('merchantName', ascending=False).show()
        # df = df1.join(df2, on=['key'], how='inner')

        # df.show()
        # daaa = data3.join(financeOrder, data3.action == financeOrder.action).select("*")
        # # daaa.printSchema()
        # daaa.show(1)

        self.full_outer_join = self.data3.join(self.financeOrder, self.data3.action == self.financeOrder.action,
                                     how='full')  # Could also use 'full_outer'
        self.full_outer_join.select(["merchantName", "productPrice", "productItem", "code", "financeType"]).show()

        self.testtest = self.full_outer_join.withColumn('Amount', when((col("financeType") == "paymentFee"),
                                                             round((col('productPrice') * self.paymentFee), 2))
                                              .when((col("financeType") == "marketFee"),
                                                    round((col('productPrice') * self.marketFee), 2))
                                              .when((col("financeType") == "platformFee"),
                                                    round((col('productPrice') * self.platformFee), 2))
                                              .otherwise(col('productPrice'))
                                              )
        self.testtest.show()
        log.info(Util.getShowString(self.testtest))
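
A caveat that explains the Util.getShowString calls above: DataFrame.show() prints to stdout and returns None, so logging its return value records only the literal string "None". A sketch of such a helper built on the JVM-backed showString (a private PySpark API, so subject to change across versions):

def get_show_string(df, n=20, truncate=20, vertical=False):
    """Render the table df.show() would print, but as a str for logging."""
    return df._jdf.showString(n, int(truncate), vertical)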
Example #35
def main():
    """Main function, called when vipermonkey is run from the command
    line.

    """

    # Increase recursion stack depth.
    sys.setrecursionlimit(13000)
    
    # print banner with version
    # Generated with http://www.patorjk.com/software/taag/#p=display&f=Slant&t=ViperMonkey
    safe_print(''' _    ___                 __  ___            __             
| |  / (_)___  ___  _____/  |/  /___  ____  / /_____  __  __
| | / / / __ \/ _ \/ ___/ /|_/ / __ \/ __ \/ //_/ _ \/ / / /
| |/ / / /_/ /  __/ /  / /  / / /_/ / / / / ,< /  __/ /_/ / 
|___/_/ .___/\___/_/  /_/  /_/\____/_/ /_/_/|_|\___/\__, /  
     /_/                                           /____/   ''')
    safe_print('vmonkey %s - https://github.com/decalage2/ViperMonkey' % __version__)
    safe_print('THIS IS WORK IN PROGRESS - Check updates regularly!')
    safe_print('Please report any issue at https://github.com/decalage2/ViperMonkey/issues')
    safe_print('')

    DEFAULT_LOG_LEVEL = "info" # Default log level
    LOG_LEVELS = {
        'debug':    logging.DEBUG,
        'info':     logging.INFO,
        'warning':  logging.WARNING,
        'error':    logging.ERROR,
        'critical': logging.CRITICAL
        }

    usage = 'usage: %prog [options] <filename> [filename2 ...]'
    parser = optparse.OptionParser(usage=usage)
    parser.add_option("-r", action="store_true", dest="recursive",
                      help='find files recursively in subdirectories.')
    parser.add_option("-z", "--zip", dest='zip_password', type='str', default=None,
                      help='if the file is a zip archive, open first file from it, using the '
                           'provided password (requires Python 2.6+)')
    parser.add_option("-f", "--zipfname", dest='zip_fname', type='str', default='*',
                      help='if the file is a zip archive, file(s) to be opened within the zip. '
                           'Wildcards * and ? are supported. (default:*)')
    parser.add_option("-e", action="store_true", dest="scan_expressions",
                      help='Extract and evaluate/deobfuscate constant expressions')
    parser.add_option('-l', '--loglevel', dest="loglevel", action="store", default=DEFAULT_LOG_LEVEL,
                      help="logging level debug/info/warning/error/critical (default=%default)")
    parser.add_option("-s", '--strip', action="store_true", dest="strip_useless_code",
                      help='Strip useless VB code from macros prior to parsing.')
    parser.add_option("-j", '--jit', action="store_true", dest="do_jit",
                      help='Speed up emulation by JIT compilation of VB loops to Python.')
    parser.add_option('-i', '--init', dest="entry_points", action="store", default=None,
                      help="Emulate starting at the given function name(s). Use comma seperated "
                           "list for multiple entries.")
    parser.add_option('-t', '--time-limit', dest="time_limit", action="store", default=None,
                      type='int', help="Time limit (in minutes) for emulation.")
    parser.add_option("-c", '--iocs', action="store_true", dest="display_int_iocs",
                      help='Display potential IOCs stored in intermediate VBA variables '
                           'assigned during emulation (URLs and base64).')
    parser.add_option("-v", '--version', action="store_true", dest="print_version",
                      help='Print version information of packages used by ViperMonkey.')
    parser.add_option("-o", "--out-file", action="store", default=None, type="str",
                      help="JSON output file containing resulting IOCs, builtins, and actions")
    parser.add_option("-p", "--tee-log", action="store_true", default=False,
                      help="output also to a file in addition to standard out")
    parser.add_option("-b", "--tee-bytes", action="store", default=0, type="int",
                      help="number of bytes to limit the tee'd log to")

    (options, args) = parser.parse_args()

    # Print version information and exit?
    if (options.print_version):
        print_version()
        sys.exit(0)
    
    # Print help if no arguments are passed
    if len(args) == 0:
        safe_print(__doc__)
        parser.print_help()
        sys.exit(0)

    # setup logging to the console
    # logging.basicConfig(level=LOG_LEVELS[options.loglevel], format='%(levelname)-8s %(message)s')
    colorlog.basicConfig(level=LOG_LEVELS[options.loglevel], format='%(log_color)s%(levelname)-8s %(message)s')

    json_results = []

    for container, filename, data in xglob.iter_files(args,
                                                      recursive=options.recursive,
                                                      zip_password=options.zip_password,
                                                      zip_fname=options.zip_fname):

        # ignore directory names stored in zip files:
        if container and filename.endswith('/'):
            continue
        if options.scan_expressions:
            process_file_scanexpr(container, filename, data)
        else:
            entry_points = None
            if (options.entry_points is not None):
                entry_points = options.entry_points.split(",")
            process_file(container,
                         filename,
                         data,
                         strip_useless=options.strip_useless_code,
                         entry_points=entry_points,
                         time_limit=options.time_limit,
                         display_int_iocs=options.display_int_iocs,
                         tee_log=options.tee_log,
                         tee_bytes=options.tee_bytes,
                         out_file_name=options.out_file,
                         do_jit=options.do_jit)

            # add json results to list
            if (options.out_file):
                with open(options.out_file, 'r') as json_file:
                    try:
                        json_results.append(json.loads(json_file.read()))
                    except ValueError:
                        pass

    if (options.out_file and json_results):
        with open(options.out_file, 'w') as json_file:
            if (len(json_results) > 1):
                json_file.write(json.dumps(json_results, indent=2))
            else:
                json_file.write(json.dumps(json_results[0], indent=2))

        log.info("Saved results JSON to output file " + options.out_file)