Example #1
 def hash_pastie(self):
     if self.pastie_content:
         try:
             self.md5 = hashlib.md5(self.pastie_content.encode('utf-8')).hexdigest()
             logger.debug('Pastie {site} {id} has md5: "{md5}"'.format(site=self.site.name, id=self.id, md5=self.md5))
         except Exception as e:
             logger.error('Pastie {site} {id} md5 problem: {e}'.format(site=self.site.name, id=self.id, e=e))
Example #2
 def try_add_supergen(self, supergen):
     if supergen:
         self.supergen_posts.append(supergen)
         return True
     else:
         logger.error("Failed to build supergen")
         return False
Example #3
 def run(self):
     self.db_conn = sqlite3.connect(self.filename)
     # create the db if it doesn't exist
     self.c = self.db_conn.cursor()
     try:
         # LATER maybe create a table per site. Lookups will be faster as less text-searching is needed
         self.c.execute('''
             CREATE TABLE IF NOT EXISTS pasties (
                 site TEXT,
                 id TEXT,
                 md5 TEXT,
                 url TEXT,
                 local_path TEXT,
                 timestamp DATE,
                 matches TEXT
                 )''')
         self.db_conn.commit()
     except sqlite3.DatabaseError as e:
         logger.error('Problem with the SQLite database {0}: {1}'.format(self.filename, e))
         return None
     # loop over the queue
     while not self.kill_received:
         try:
             # grabs pastie from queue
             pastie = self.queue.get()
             # add the pastie to the DB
             self.add_or_update(pastie)
             # signals to queue job is done
             self.queue.task_done()
         # catch unknown errors
         except Exception as e:
             logger.error("Thread for SQLite crashed unexpectectly, recovering...: {e}".format(e=e))
             logger.debug(traceback.format_exc())
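The worker above drains a shared queue and stores each pastie it receives. A minimal producer sketch, assuming Python's standard queue module and leaving the pastie objects themselves abstract (the names here are illustrative, not from the project):

import queue

pastie_queue = queue.Queue()

def enqueue_pasties(pasties):
    # hand each pastie to the SQLite worker thread; its run() loop picks
    # them up with get() and acknowledges each one with task_done()
    for pastie in pasties:
        pastie_queue.put(pastie)

# pastie_queue.join() would block until the worker has stored every queued pastie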
Example #4
 def run(self):
     while not self.kill_received:
         sleep_time = random.randint(self.update_min, self.update_max)
         try:
             # grabs site from queue
             logger.info(
                 'Downloading list of new pastes from {name}. '
                 'Will check again in {time} seconds'.format(
                     name=self.name, time=sleep_time))
             # get the list of last pasties, but reverse it
             # so we first have the old entries and then the new ones
             last_pasties = self.get_last_pasties()
             if last_pasties:
                 for pastie in reversed(last_pasties):
                     queues[self.name].put(pastie)  # add pastie to queue
                 logger.info("Found {amount} new pasties for site {site}. There are now {qsize} pasties to be downloaded.".format(amount=len(last_pasties),
                                                                                                       site=self.name,
                                                                                                       qsize=queues[self.name].qsize()))
         # catch unknown errors
         except Exception as e:
              msg = 'Thread for {name} crashed unexpectedly, '\
                   'recovering...: {e}'.format(name=self.name, e=e)
             logger.error(msg)
             logger.debug(traceback.format_exc())
         time.sleep(sleep_time)
Example #5
 def get_last_pasties(self):
     # reset the pasties list
     pasties = []
     # populate queue with data
     htmlPage, headers = download_url(self.archive_url)
     if not htmlPage:
         logger.warning("No HTML content for page {url}".format(url=self.archive_url))
         return False
     pasties_ids = re.findall(self.archive_regex, htmlPage)
     if pasties_ids:
         for pastie_id in pasties_ids:
             # check if the pastie was already downloaded
             # and remember that we've seen it
             if self.seen_pastie(pastie_id):
                 # do not append the seen things again in the queue
                 continue
             # pastie was not downloaded yet. Add it to the queue
             if self.pastie_classname:
                 class_name = globals()[self.pastie_classname]
                 pastie = class_name(self, pastie_id)
             else:
                 pastie = Pastie(self, pastie_id)
             pasties.append(pastie)
         return pasties
     logger.error("No last pasties matches for regular expression site:{site} regex:{regex}. Error in your regex? Dumping htmlPage \n {html}".format(site=self.name, regex=self.archive_regex, html=htmlPage.encode('utf8')))
     return False
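Each site supplies an archive_regex whose capture group extracts pastie IDs from the archive page, which is what the re.findall call above relies on. A standalone sketch with a made-up pattern and HTML:

import re

archive_regex = r'<a href="/(\w{8})">'  # hypothetical site-specific pattern
html_page = '<a href="/abcd1234">paste</a> <a href="/efgh5678">paste</a>'
print(re.findall(archive_regex, html_page))  # ['abcd1234', 'efgh5678']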
Example #6
    def ExtractTargetDirToPath(self, source_path_spec, configuration, dir_path=None, file_spec=None, output_path=None):
        # TODO(jbc): rename this function; a file may also be passed in.
        """Extract target directory to path

            Args:
                source_path_spec (dfvfs.PathSpec): path specification of the source file.
                configuration: configuration values.
                dir_path: path of the directory to extract.
                file_spec: find specification to use instead of dir_path.
                output_path: output path for the extracted directory.
        """
        try:
            if not file_spec:
                find_spec = file_system_searcher.FindSpec(
                    case_sensitive=False, location=dir_path,
                    location_separator='/')
            else:
                find_spec = file_spec

        except ValueError as exception:
            logger.error(
                'Unable to build find specification for path: "{0:s}" with '
                'error: {1!s}'.format(dir_path, exception))
            return

        path_spec_generator = self._path_spec_extractor.ExtractPathSpecs(
            [source_path_spec], find_specs=[find_spec], recurse_file_system=False,
            resolver_context=configuration.resolver_context)

        for path_spec in path_spec_generator:
            self.DirectoryTraversal(path_spec, output_path)
Example #7
async def remove_pugger(client, user, reason='No reason set'):
    '''Remove the specified role from the user'''

    # Skipping logs for now
    #    logMessage = '[!pugme]' + \
    #        ' User ' + message.author.name + '#' + message.author.discriminator + \
    #        ' asked to be a pugger.'
    #    logger.info(logMessage)

    # Set the user role. If not possible, log it.
    try:
        pugger_role = discord.utils.get(user.guild.roles,
                                        id=config.sfcpugger_role_id)
        await user.remove_roles(pugger_role, reason=reason)

        try:
            del puggers[user.id]
        except KeyError:
            pass

    except discord.Forbidden:
        logmsg = '[!pugme] Insufficient permission to remove the specified role from the user.'
        print(logmsg)
        logger.error(logmsg)

    except discord.HTTPException:
        logmsg = '[!pugme] HTTPException (?!)'
        print(logmsg)
        logger.error(logmsg)

    # Message handled
    return True
Example #8
async def add_pugger(client, user, reason='No reason set'):
    '''Add the user to the specified role'''

    # Skipping logs for now
    #    logMessage = '[!pugme]' + \
    #        ' User ' + message.author.name + '#' + message.author.discriminator + \
    #        ' asked to be a pugger.'
    #    logger.info(logMessage)

    # Set the user role. If not possible, log it.
    try:
        pugger_role = discord.utils.get(user.guild.roles,
                                        id=config.sfcpugger_role_id)
        await user.add_roles(pugger_role, reason=reason)
        puggers[user.id] = {
            'g': user.guild.id,
            'ts': datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        }

    except discord.Forbidden:
        logmsg = '[!pugme] Insufficient permission to add the specified role to the user.'
        print(logmsg)
        logger.error(logmsg)

    except discord.HTTPException:
        logmsg = '[!pugme] HTTPException (?!)'
        print(logmsg)
        logger.error(logmsg)

    # Message handled
    return True
Example #9
    def check_host_connection(host):
        """Checks to see if appetite can connect to the host"""
        if host.can_connect is None:
            host.can_connect = ConnManager.check_connection(host)

            if not host.can_connect:
                Logger.error("Can not connect to host", host=host.hostname)
        return host.can_connect
Example #10
    def LoadTargetFileToMemory(self,
                               source_path_spec,
                               configuration,
                               file_path=None,
                               file_spec=None,
                               data_stream_name=None):
        try:
            if not file_spec:
                find_spec = file_system_searcher.FindSpec(
                    case_sensitive=False,
                    location=file_path,
                    location_separator=source_path_spec.location)
            else:
                find_spec = file_spec
        except ValueError as exception:
            logger.error(
                'Unable to build find specification for path: "{0:s}" with '
                'error: {1!s}'.format(file_path, exception))
            return False

        path_spec_generator = self._path_spec_extractor.ExtractPathSpecs(
            [source_path_spec],
            find_specs=[find_spec],
            recurse_file_system=False,
            resolver_context=configuration.resolver_context)

        for path_spec in path_spec_generator:
            display_name = path_helper.PathHelper.GetDisplayNameForPathSpec(
                path_spec)

            try:
                file_entry = path_spec_resolver.Resolver.OpenFileEntry(
                    path_spec, resolver_context=configuration.resolver_context)

                if file_entry is None or not file_entry.IsFile():
                    logger.warning(
                        'Unable to open file entry with path spec: {0:s}'.
                        format(display_name))
                    return False

                if data_stream_name:
                    file_object = file_entry.GetFileObject(
                        data_stream_name=data_stream_name)

                    if not file_object:
                        return False

                    return file_object

                else:
                    file_object = file_entry.GetFileObject()

                    if not file_object:
                        return False

                    return file_object

            except KeyboardInterrupt:
                return False
Example #11
    def RecursiveDirOrFileSearch(self, path_spec, output_path):
        display_name = path_helper.PathHelper.GetDisplayNameForPathSpec(path_spec)

        file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)
        if file_entry is None:
            logger.warning(
                'Unable to open file entry with path spec: {0:s}'.format(
                    display_name))
            return

        if file_entry.IsDirectory():
            if not os.path.exists(output_path + os.sep + file_entry.name):
                os.mkdir(output_path + os.sep + file_entry.name)

            for sub_file_entry in file_entry.sub_file_entries:
                try:
                    if not sub_file_entry.IsAllocated():
                        continue

                except dfvfs_errors.BackEndError as exception:
                    logger.warning(
                        'Unable to process file: {0:s} with error: {1!s}'.format(
                            sub_file_entry.path_spec.comparable.replace(
                                '\n', ';'), exception))
                    continue

                if sub_file_entry.type_indicator == dfvfs_definitions.TYPE_INDICATOR_TSK:
                    if file_entry.IsRoot() and sub_file_entry.name == '$OrphanFiles':
                        continue

                self.RecursiveDirOrFileSearch(sub_file_entry.path_spec, output_path + os.sep + file_entry.name)

        if file_entry.IsFile():

            for data_stream in file_entry.data_streams:
                file_object = file_entry.GetFileObject(data_stream_name=data_stream.name)
                if not file_object:
                    return False

                try:
                    buffer_size = 65536
                    with open(output_path + os.sep + file_entry.name, 'wb') as output_file:
                        file_object.seek(0, os.SEEK_SET)
                        data = file_object.read(buffer_size)
                        while data:
                            output_file.write(data)
                            data = file_object.read(buffer_size)

                except IOError as exception:
                    print(display_name)
                    logger.error(
                        'Failed to extract file "{0:s}" : {1!s}'.format(display_name, exception))
                finally:
                    file_object.close()
Example #12
def load_proxies_from_file(filename):
    global proxies_list
    try:
        f = open(filename)
    except Exception as e:
        logger.error('Configuration problem: proxyfile "{file}" not found or not readable: {e}'.format(file=filename, e=e))
        return
    for line in f:
        line = line.strip()
        if line:  # LATER verify if the proxy line has the correct structure
            proxies_list.add(line)
    logger.debug('Found {count} proxies in file "{file}"'.format(file=filename, count=len(proxies_list)))
Example #13
def load_user_agents_from_file(filename):
    global user_agents_list
    try:
        f = open(filename)
    except Exception as e:
        logger.error('Configuration problem: user-agent-file "{file}" not found or not readable: {e}'.format(file=filename, e=e))
        return
    for line in f:
        line = line.strip()
        if line:
            user_agents_list.append(line)
    logger.debug('Found {count} UserAgents in file "{file}"'.format(file=filename, count=len(user_agents_list)))
Example #14
 def get_noise_machines(self, url):
     supergen_nms = []
     noise_machine_names = self.__parse_noise_machines_from_url(url)
     for noise_machine_name in noise_machine_names:
         if noise_machine_name not in self.__noise_machines:
             logger.error(
                 "Couldn't find noise machine {}. Consider scraping title at url: {}"
                 .format(noise_machine_name, url))
             # TODO: attempt scrape automatically?
         else:
             nm = self.__noise_machines[noise_machine_name]
             if nm:
                 supergen_nms.append(nm)
     return supergen_nms
Example #15
    def CreateTableWithSchema(self, _cursor, table_name=None, schema=None):
        """Create table with schema.

        Args:
            _cursor (Database object): database connection object.
            table_name (str): table name.
            schema (list): table schema.

        Returns:
            bool: True if database table was created.
        Raises:
            ValueError: If the database attribute is not valid.
        """
        if _cursor is None:
            raise ValueError('Invalid database cursor.')

        if table_name is None:
            raise ValueError('Invalid table name.')

        if schema is None:
            raise ValueError('Invalid schema.')

        blob_columns = ('RequestHeaders', 'ResponseHeaders', 'ExtraData', 'Data', 'Data2')
        columns = []
        for column in schema:
            # columns that hold binary payloads become BLOB, the rest TEXT
            column_type = 'BLOB' if column in blob_columns else 'TEXT'
            columns.append('`' + column + '` ' + column_type)

        query = 'CREATE TABLE ' + table_name + '(' + ', '.join(columns) + ');'

        try:
            _cursor.execute_query(query)
        except Exception as exception:
            logger.error(exception)
            return False

        return True
Example #16
 def __init__(self):
     try:
         self.pg_pool = pool.SimpleConnectionPool(1,
                                                  5,
                                                  user=DB.USER,
                                                  password=DB.PASSWORD,
                                                  host=DB.HOST,
                                                  database=DB.NAME)
         self.conn = self.pg_pool.getconn()
         self.cur = self.conn.cursor()
         logger.info('Connection to database succeeded')
     except Exception as e:
         logger.error(f'Connection to database failed: {e}')
     self.start_time = time.time()
Example #17
    def check_table_from_yaml(self, configuration, yaml_list, table_list):
        # Create all tables
        for count in range(0, len(yaml_list)):
            if not self.LoadSchemaFromYaml(yaml_list[count]):
                logger.error('cannot load schema from yaml: {0:s}'.format(table_list[count]))
                return False
            # If the table does not exist, create it
            if not configuration.cursor.check_table_exist(table_list[count]):
                ret = self.CreateTable(configuration.cursor, configuration.standalone_check)
                if not ret:
                    logger.error('cannot create database table name: {0:s}'.format(table_list[count]))
                    return False

        return True
Example #18
 def add(self, pastie):
     try:
         data = {'site': pastie.site.name,
                 'id': pastie.id,
                 'md5': pastie.md5,
                 'url': pastie.url,
                 'local_path': pastie.site.archive_dir + os.sep + pastie.site.pastie_id_to_filename(pastie.id),
                 'timestamp': datetime.now(),
                 'matches': pastie.matches_to_text()
                 }
         self.c.execute('INSERT INTO pasties VALUES (:site, :id, :md5, :url, :local_path, :timestamp, :matches)', data)
         self.db_conn.commit()
     except sqlite3.DatabaseError as e:
         logger.error('Cannot add pastie {site} {id} in the SQLite database: {error}'.format(site=pastie.site.name, id=pastie.id, error=e))
     else:
         logger.debug('Added pastie {site} {id} in the SQLite database.'.format(site=pastie.site.name, id=pastie.id))
Example #19
    def __load_noise_machine_info(self):
        data = io.read_json(ng_info_filename)
        if not data:
            logger.error("Could not load noise machine input file")
            return

        pattern = r"(?i)(?<=(.*NoiseMachines/)).+"
        for nm in data["noiseMachines"]:
            nm_name = re.search(pattern, nm["href"])
            # need to handle custom.php: [Can't Take The Sky From Me](https://mynoise.net/NoiseMachines/custom.php?l=3035403037323015253200&amp;m=CINEMATIC1~INTERGALACTIC1~BATTLE1~EASTASIA2~CINEMATIC3~CANYON5~EASTASIA6~CANYON7~EASTASIA7~CINEMATIC9&amp;d=0&amp;title=Can't%20Take%20The%20Sky%20From%20Me)",
            if (nm_name):
                if nm_name not in self.__noise_machines:
                    self.__noise_machines[nm_name] = nm
                else:
                    logger.info(
                        "Noise machine already exists for: {}".format(nm_name))
            else:
                logger.info("Could not parse noise machine HREF: {}".format(
                    nm["href"]))
def EVENTLOGMSALERTS(configuration):
    #db = database.Database()
    #db.open()

    ms_alerts_list = []
    ms_alerts_count = 0
    query = f"SELECT data, event_id, time_created, source, user_sid FROM lv1_os_win_evt_total WHERE (evd_id='{configuration.evidence_id}') and (event_id like '300' and source like '%OAlerts.evtx%')"
    #result_query = db.execute_query_mul(query)
    result_query = configuration.cursor.execute_query_mul(query)
    for result_data in result_query:
        ms_alerts_information = MS_Alerts_Information()
        try:
            ms_alerts_list.append(ms_alerts_information)
            ms_alerts_list[ms_alerts_count].event_id = result_data[1]
            ms_alerts_list[ms_alerts_count].time = result_data[2]
            ms_alerts_list[ms_alerts_count].source = result_data[3]
            ms_alerts_list[ms_alerts_count].user_sid = result_data[4]
            ms_alerts_list[ms_alerts_count].task = 'Alert'
            ms_alerts_list[ms_alerts_count].event_id_description = 'MS office program usage alert'

            try:
                root = ElementTree.fromstring(result_data[0])
                results = root.iter('{http://schemas.microsoft.com/win/2004/08/events/event}Data')
                data = []
                message = ''
                for result in results:
                    for txt in result.iter('{http://schemas.microsoft.com/win/2004/08/events/event}string'):
                        if txt.text != '\n':
                            data.append(txt.text)
                ms_alerts_list[ms_alerts_count].program_name = data[0]
                for i in range(1, len(data)-2):
                    message += data[i]
                ms_alerts_list[ms_alerts_count].message = message
                ms_alerts_list[ms_alerts_count].error_type = data[len(data)-2]
                ms_alerts_list[ms_alerts_count].program_version = data[len(data)-1]
            except Exception as exception:
                logger.error('EVENTLOGS MS Alerts Parsing Error: {0!s}'.format(exception))
            ms_alerts_count = ms_alerts_count + 1
        except Exception as exception:
            logger.error('EVENTLOGS MS Alerts Error: {0!s}'.format(exception))

    #db.close()

    return ms_alerts_list
Example #21
async def delete_unwanted_welcome_messages(client, message):
    '''Delete every message on #welcome that is not from a whitelisted user'''

    # Check to see if the user who sent the message is on the whitelist. If not, delete it.
    if (set([role.id for role in message.author.roles
             ]).isdisjoint(config.welcome_allowed_roles_msg)):
        try:
            await message.delete()

        except discord.Forbidden:
            logger.error(
                '[unwanted welcome msg] Insufficient permission to delete the specified user message.'
            )

        except discord.HTTPException:
            logger.error("[unwanted welcome msg] HTTPException (?!)")

    # Message Handled
    return True
Example #22
    def send_email_alert(self):
        msg = MIMEMultipart()
        alert = "Found hit for {matches} in pastie {url}".format(matches=self.matches_to_text(), url=self.url)
        # headers
        msg['Subject'] = yamlconfig['email']['subject'].format(subject=alert)
        msg['From'] = yamlconfig['email']['from']
        # build the list of recipients
        recipients = []
        recipients.append(yamlconfig['email']['to'])  # first the global alert email
        for match in self.matches:                    # per match, the custom additional email
            if 'to' in match and match['to']:
                recipients.extend(match['to'].split(","))
        msg['To'] = ','.join(recipients)  # here the list needs to be comma separated
        # message body including full paste rather than attaching it
        message = '''
I found a hit for a regular expression on one of the pastebin sites.

The site where the paste came from :        {site}
The original paste was located here:        {url}
And the regular expressions that matched:   {matches}

Below (after newline) is the content of the pastie:

{content}

        '''.format(site=self.site.name, url=self.url, matches=self.matches_to_regex(), content=self.pastie_content.encode('utf8'))
        msg.attach(MIMEText(message))
        # send out the mail
        try:
            s = smtplib.SMTP(yamlconfig['email']['server'], yamlconfig['email']['port'])
            if yamlconfig['email']['tls']:
                s.starttls()
            # login to the SMTP server if configured
            if 'username' in yamlconfig['email'] and yamlconfig['email']['username']:
                s.login(yamlconfig['email']['username'], yamlconfig['email']['password'])
            # send the mail
            s.sendmail(yamlconfig['email']['from'], recipients, msg.as_string())
            s.close()
        except smtplib.SMTPException as e:
            logger.error("ERROR: unable to send email: {0}".format(e))
        except Exception as e:
            logger.error("ERROR: unable to send email. Are your email setting correct?: {e}".format(e=e))
 def wrapper(*args, **kwargs):
     """ The wrapper function """
     try:
         return function(*args, **kwargs)
     except Exception as ex:
         exc_type, exc_value, exc_traceback = sys.exc_info()
         error("The traceback is ::::\n"+"" \
          .join(traceback.format_exception(exc_type, exc_value,
                                           exc_traceback)))
         error ("Exception Occurred: %s" %str(ex))
         try:
             img = args[0].take_screenshot()
             info("check screen shot %s" % img)
         except Exception as exc:
             info("not able to take screen shot : %s" % str(exc))
         try:
             args[0].exit_app()
          except Exception as exc:
             info("not able to exit the app : %s" % str(exc))
         raise Exception(str(ex))
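wrapper above closes over a free variable named function, so it is the inner function of an exception-logging decorator. A reconstructed outline under that assumption (the decorator name is hypothetical):

import functools

def log_and_reraise(function):
    # hypothetical enclosing decorator for the wrapper above
    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        try:
            return function(*args, **kwargs)
        except Exception as ex:
            # log the traceback, take a screenshot, exit the app (as above)
            raise Exception(str(ex))
    return wrapper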
Example #24
 def update(self, pastie):
     try:
         data = {'site': pastie.site.name,
                 'id': pastie.id,
                 'md5': pastie.md5,
                 'url': pastie.url,
                 'local_path': pastie.site.archive_dir + os.sep + pastie.site.pastie_id_to_filename(pastie.id),
                 'timestamp': datetime.now(),
                 'matches': pastie.matches_to_text()
                 }
         self.c.execute('''UPDATE pasties SET md5 = :md5,
                                         url = :url,
                                         local_path = :local_path,
                                         timestamp  = :timestamp,
                                         matches = :matches
                  WHERE site = :site AND id = :id''', data)
         self.db_conn.commit()
      except sqlite3.DatabaseError as e:
          logger.error('Cannot update pastie {site} {id} in the SQLite database: {error}'.format(site=pastie.site.name, id=pastie.id, error=e))
      else:
          logger.debug('Updated pastie {site} {id} in the SQLite database.'.format(site=pastie.site.name, id=pastie.id))
Example #25
 def run(self):
     while not self.kill_received:
         try:
             # grabs pastie from queue
             pastie = self.queue.get()
             pastie_content = pastie.fetch_and_process_pastie()
             logger.debug("Queue {name} size: {size}".format(
                 size=self.queue.qsize(), name=self.name))
             if pastie_content:
                 logger.debug(
                     "Saved new pastie from {0} "
                     "with id {1}".format(self.name, pastie.id))
             else:
                 # pastie already downloaded OR error ?
                 pass
             # signals to queue job is done
             self.queue.task_done()
         # catch unknown errors
         except Exception as e:
             msg = "ThreadPasties for {name} crashed unexpectectly, "\
                   "recovering...: {e}".format(name=self.name, e=e)
             logger.error(msg)
             logger.debug(traceback.format_exc())
Example #26
def parse_config_file(configfile):
    global yamlconfig
    try:
        yamlconfig = yaml.safe_load(open(configfile))
        for includes in yamlconfig.get("includes", []):
            yamlconfig.update(yaml.safe_load(open(includes)))
    except yaml.YAMLError as exc:
        logger.error("Error in configuration file:")
        if hasattr(exc, 'problem_mark'):
            mark = exc.problem_mark
            logger.error("error position: (%s:%s)" % (mark.line + 1, mark.column + 1))
            exit(1)
    # TODO verify validity of config parameters
    if yamlconfig['proxy']['random']:
        load_proxies_from_file(yamlconfig['proxy']['file'])
    if yamlconfig['user-agent']['random']:
        load_user_agents_from_file(yamlconfig['user-agent']['file'])
    if yamlconfig['mongo']['save']:
        try:
            from pymongo import MongoClient
            client = MongoClient(yamlconfig['mongo']['url']) 

            database = yamlconfig['mongo']['database']
            db = client[database]
            collection = yamlconfig['mongo']['collection']
            global mongo_col
            mongo_col = db[collection]


        except ImportError:
            exit('ERROR: Cannot import PyMongo. Are you sure it is installed?')
    if yamlconfig['redis']['queue']:
        try:
            import redis
        except ImportError:
            exit('ERROR: Cannot import the redis Python library. Are you sure it is installed?')
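The keys this function reads imply a configuration of roughly the following shape (a minimal sketch written as a Python dict; all file names and connection values are placeholders):

yamlconfig = {
    'proxy':      {'random': False, 'file': 'proxies.txt'},
    'user-agent': {'random': False, 'file': 'user-agents.txt'},
    'mongo':      {'save': False, 'url': 'mongodb://localhost:27017',
                   'database': 'pastes', 'collection': 'pasties'},
    'redis':      {'queue': False},
}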
Example #27
def run_model():
    req = request.get_json()

    try:
        beta = float(req['beta'])
        transportation = req['transportation']
        threshold = int(
            req['threshold']
        ) * 1000  # multiply to get the minute threshold to match distance matrix meter units
        bounds = req['bounds']
        logger.info(
            f'User parameters include beta: {beta}, transport: {transportation}, threshold: {threshold}'
        )
    except Exception as e:
        logger.error(f'Parameters provided are incorrect: {e}')
        return e

    supply = req['supply']
    demand = req['demand']
    capacity = req['capacity']

    scores = model.accessibility(bounds, beta, transportation, threshold,
                                 demand, supply, capacity)
    scores_col = str(list(scores.columns.values))
    scores_row = str(scores.index)
    max_score = scores['scores'].max()

    try:
        scores['boundary'] = scores['boundary'].apply(wkt.loads)
        features = scores.apply(
            lambda row: Feature(geometry=row['boundary'],
                                properties={
                                    'geouid': row['geouid'],
                                    'score': row['scores']
                                }),
            axis=1).tolist()
        feature_collection = FeatureCollection(
            score_vals=scores['scores'].tolist(), max=max_score, features=features)
        feature_collection = json.dumps(feature_collection)
        return feature_collection
    except Exception as e:
        logger.error(f'{scores_row}')
        logger.error(f'{scores_col}')
        logger.error(f'Could not return results as geojson: {e}')
        return e
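A sketch of the JSON payload this endpoint expects, inferred from the fields run_model reads (route, host, and all values are illustrative; the supply/demand/capacity shapes depend on the model):

import json
import urllib.request

payload = {
    'beta': 0.5,
    'transportation': 'driving',
    'threshold': 30,  # minutes; the endpoint multiplies by 1000
    'bounds': [[-79.6, 43.5], [-79.1, 43.9]],
    'supply': [],
    'demand': [],
    'capacity': [],
}
request = urllib.request.Request(
    'http://localhost:5000/run_model',  # hypothetical route
    data=json.dumps(payload).encode('utf-8'),
    headers={'Content-Type': 'application/json'})
# urllib.request.urlopen(request) would return the GeoJSON FeatureCollection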
Example #28
async def user_accepted_terms(client, message):
    '''Add the user to the specified role'''
    logMessage = '[!accept]' + \
        ' User ' + message.author.name + '#' + message.author.discriminator + \
        ' has accepted the terms.'
    logger.info(logMessage)

    # Set the user role. If not possible, log it.
    try:
        member_role = discord.utils.get(message.guild.roles,
                                        id=config.member_role_id)
        await message.author.add_roles(member_role,
                                       reason='User accepted terms')

    except discord.Forbidden:
        logger.error(
            '[!accept] Insufficient permission to add the specified role to the user.'
        )

    except discord.HTTPException:
        logger.error("[!accept] HTTPException (?!)")

    # Delete the user message. If not possible, log it.
    try:
        await message.delete()

    except discord.Forbidden:
        logger.error(
            '[!accept] Insufficient permission to delete the specified user message.'
        )

    except discord.HTTPException:
        logger.error("[!accept] HTTPException (?!)")

    # Message handled
    return True
Example #29
	def Connect(self, par_id, configuration, source_path_spec, knowledge_base):

		this_file_path = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'schema' + os.sep + 'kakaotalk_mobile' + os.sep

		# Load schema
		yaml_list = [this_file_path + 'lv1_app_kakaotalk_mobile_chatlogs.yaml',
					 this_file_path + 'lv1_app_kakaotalk_mobile_chatrooms.yaml',
					 this_file_path + 'lv1_app_kakaotalk_mobile_friends.yaml',
					 this_file_path + 'lv1_app_kakaotalk_mobile_channel_history.yaml',
					 this_file_path + 'lv1_app_kakaotalk_mobile_block_friends.yaml']

		table_list = ['lv1_app_kakaotalk_mobile_chatlogs',
					  'lv1_app_kakaotalk_mobile_chatrooms',
					  'lv1_app_kakaotalk_mobile_friends',
					  'lv1_app_kakaotalk_mobile_channel_history',
					  'lv1_app_kakaotalk_mobile_block_friends']

		# Create all tables
		for count in range(0, len(yaml_list)):
			if not self.LoadSchemaFromYaml(yaml_list[count]):
				logger.error('cannot load schema from yaml: {0:s}'.format(table_list[count]))
				return False
			# If the table does not exist, create it
			if not configuration.cursor.check_table_exist(table_list[count]):
				ret = self.CreateTable(configuration.cursor)
				if not ret:
					logger.error('cannot create database table name: {0:s}'.format(table_list[count]))
					return False
				query = f"ALTER TABLE {table_list[count]} CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;"
				configuration.cursor.execute_query(query)

		# extension -> should be changed to sig_type

		query = f"SELECT name, parent_path, extension, ctime, ctime_nano FROM file_info WHERE par_id like '{par_id}' and " \
				f"(name like 'Kakaotalk.db' or name like 'Kakaotalk2.db') and parent_path = 'root/data/com.kakao.talk/databases';"

		kakaotalk_db_files = configuration.cursor.execute_query_mul(query)

		if len(kakaotalk_db_files) == 0:
			return False

		kakaotalk_db_list = list()
		for kakaotalk_db in kakaotalk_db_files:
			kakaotalk_db_path = kakaotalk_db[1][kakaotalk_db[1].find('/'):] + '/' + kakaotalk_db[0]  # document full path
			fileExt = kakaotalk_db[2]
			fileName = kakaotalk_db[0]
			output_path = configuration.root_tmp_path + os.sep + configuration.case_id + os.sep + configuration.evidence_id + os.sep + par_id
			self.ExtractTargetFileToPath(
				source_path_spec=source_path_spec,
				configuration=configuration,
				file_path=kakaotalk_db_path,
				output_path=output_path)

			self.ExtractTargetFileToPath(
				source_path_spec=source_path_spec,
				configuration=configuration,
				file_path=kakaotalk_db_path + '-wal',
				output_path=output_path)

			self.ExtractTargetFileToPath(
				source_path_spec=source_path_spec,
				configuration=configuration,
				file_path=kakaotalk_db_path + '-journal',
				output_path=output_path)

			self.ExtractTargetFileToPath(
				source_path_spec=source_path_spec,
				configuration=configuration,
				file_path=kakaotalk_db_path + '-shm',
				output_path=output_path)

			kakaotalk_db_list.append(output_path + os.path.sep + fileName)  # for file deletion

		kakaotalk_mobile_decrypt.main(kakaotalk_db_list[0], kakaotalk_db_list[1])

		### Kakaotalk.db ###
		con = sqlite3.connect(kakaotalk_db_list[0])
		cur = con.cursor()

		# chatlogs
		cur.execute("SELECT * FROM chat_logs_dec;")
		data_chatlogs = cur.fetchall()
		insert_kakaotalk_mobile_chatlogs = []

		for row in data_chatlogs:
			insert_kakaotalk_mobile_chatlogs.append(tuple(
				[par_id, configuration.case_id, configuration.evidence_id, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11]]))

		query = "Insert into lv1_app_kakaotalk_mobile_chatlogs values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
		configuration.cursor.bulk_execute(query, insert_kakaotalk_mobile_chatlogs)

		# chatrooms
		cur.execute("SELECT * FROM chat_rooms_dec;")
		data_chatrooms = cur.fetchall()
		insert_kakaotalk_mobile_chatrooms = []

		for row in data_chatrooms:
			insert_kakaotalk_mobile_chatrooms.append(tuple(
				[par_id, configuration.case_id, configuration.evidence_id, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], row[15], row[16], row[17], row[18], row[19], row[20], row[21], row[22], row[23], row[24], row[25], row[26], row[27], row[28]]))

		query = "Insert into lv1_app_kakaotalk_mobile_chatrooms values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
		configuration.cursor.bulk_execute(query, insert_kakaotalk_mobile_chatrooms)

		### Kakaotalk2.db ###
		con2 = sqlite3.connect(kakaotalk_db_list[1])
		cur2 = con2.cursor()

		# friends
		cur2.execute("SELECT * FROM friends_dec;")
		data_friends = cur2.fetchall()
		insert_kakaotalk_mobile_friends = []

		for row in data_friends:
			insert_kakaotalk_mobile_friends.append(tuple(
				[par_id, configuration.case_id, configuration.evidence_id, row[0], row[1], row[2], row[3], row[4],
				 row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], row[15], row[16],
				 row[17], row[18], row[19], row[20], row[21], row[22], row[23], row[24], row[25], row[26], row[27],
				 row[28], row[29], row[30], row[31], row[32], row[33], row[34], row[35], row[36]]))

		query = "Insert into lv1_app_kakaotalk_mobile_friends values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
		configuration.cursor.bulk_execute(query, insert_kakaotalk_mobile_friends)

		# block_friends
		cur2.execute("SELECT * FROM block_friends;")
		data_block_friends = cur2.fetchall()
		insert_kakaotalk_mobile_block_friends = []

		for row in data_block_friends:
			insert_kakaotalk_mobile_block_friends.append(tuple(
				[par_id, configuration.case_id, configuration.evidence_id, row[0], row[1], row[2], row[3]]))

		query = "Insert into lv1_app_kakaotalk_mobile_block_friends values (%s, %s, %s, %s, %s, %s, %s);"
		configuration.cursor.bulk_execute(query, insert_kakaotalk_mobile_block_friends)

		# channel_history
		cur2.execute("SELECT * FROM channel_history;")
		data_channel_history = cur2.fetchall()
		insert_kakaotalk_mobile_channel_history = []

		for row in data_channel_history:
			insert_kakaotalk_mobile_channel_history.append(tuple(
				[par_id, configuration.case_id, configuration.evidence_id, row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7]]))

		query = "Insert into lv1_app_kakaotalk_mobile_channel_history values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
		configuration.cursor.bulk_execute(query, insert_kakaotalk_mobile_channel_history)


		os.remove(kakaotalk_db_list[0])
		os.remove(kakaotalk_db_list[1])

Example #30
    def Connect(self, par_id, configuration, source_path_spec, knowledge_base):
        """Connector to connect to ESE database modules.

		Args:
			configuration: configuration values.
			source_path_spec (dfvfs.PathSpec): path specification of the source file.
			knowledge_base (KnowledgeBase): knowledge base.

		"""

        self._configuration = configuration
        self._time_zone = knowledge_base.time_zone

        # Load Schema
        yaml_path = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'schema' + os.sep + 'esedb' \
                    + os.sep + 'lv1_os_win_esedb.yaml'
        if not self.LoadSchemaFromYaml(yaml_path):
            logger.error('cannot load schema from yaml: {0:s}'.format(
                self.TABLE_NAME))
            return False

        # Search artifact paths
        paths = self._schema['Paths']
        separator = self._schema['Path_Separator']
        environment_variables = knowledge_base.GetEnvironmentVariables()

        find_specs = self.BuildFindSpecs(paths, separator,
                                         environment_variables)
        if len(find_specs) < 1:
            return False

        esedb_file = pyesedb.file()
        for spec in find_specs:
            try:
                file_object = self.LoadTargetFileToMemory(
                    source_path_spec=source_path_spec,
                    configuration=configuration,
                    file_spec=spec)

                esedb_file.open_file_object(file_object)
            except IOError as exception:
                logger.debug(
                    '[{0:s}] unable to open file with error: {1!s}'.format(
                        self.NAME, exception))
                return

            path_spec_generator = self._path_spec_extractor.ExtractPathSpecs(
                [source_path_spec],
                find_specs=[spec],
                recurse_file_system=False,
                resolver_context=configuration.resolver_context)

            for path_spec in path_spec_generator:
                edb_path = path_spec.location
            try:
                esedb_parsers = esedb_parser.ESEDBParser.GetESEDBParserObjects(
                )
                table_names = frozenset(
                    esedb_parser.ESEDBParser.GetTableNames(esedb_file))

                for parser in esedb_parsers.values():

                    if not parser.required_tables.issubset(table_names):
                        continue

                    try:
                        parser.Process(database=esedb_file)

                        info = tuple([
                            par_id, configuration.case_id,
                            configuration.evidence_id
                        ])
                        # Internet Explorer
                        if 'Containers' in parser.required_tables:
                            # Internet Explorer History
                            if len(parser.GetHistoryRecords) > 0:
                                history_schema = ['par_id', 'case_id', 'evd_id'] + list(parser.GetHistorySchema) \
                                                 + ['source']
                                table_name = 'lv1_os_win_esedb_ie_history'
                                if not configuration.cursor.check_table_exist(
                                        table_name):
                                    ret = self.CreateTableWithSchema(
                                        configuration.cursor,
                                        table_name=table_name,
                                        schema=history_schema)
                                    if not ret:
                                        logger.error(
                                            'cannot create database table name: {0:s}'
                                            .format(table_name))
                                        return False

                                for record in parser.GetHistoryRecords:
                                    tmp_record = list(record)
                                    tmp_record[17] = tmp_record[17].replace(
                                        '"', '""')
                                    tmp_record[17] = tmp_record[17].replace(
                                        '\'', '\'\'')
                                    result = info + tuple(tmp_record) + tuple(
                                        [edb_path])
                                    query = self.InsertQueryBuilder(
                                        table_name=table_name,
                                        schema=history_schema,
                                        data=result)
                                    try:
                                        configuration.cursor.execute_query(
                                            query)
                                    except Exception as exception:
                                        logger.error(
                                            'database execution failed: {0!s}'.
                                            format(exception))

                            # Internet Explorer Content
                            if len(parser.GetContentRecords) > 0:
                                content_schema = ['par_id', 'case_id', 'evd_id'] + list(parser.GetContentSchema) \
                                                 + ['source']
                                table_name = 'lv1_os_win_esedb_ie_content'
                                if not configuration.cursor.check_table_exist(
                                        table_name):
                                    ret = self.CreateTableWithSchema(
                                        configuration.cursor,
                                        table_name=table_name,
                                        schema=content_schema)
                                    if not ret:
                                        logger.error(
                                            'cannot create database table name: {0!s}'
                                            .format(table_name))
                                        return False

                                for record in parser.GetContentRecords:
                                    tmp_record = list(record)
                                    if isinstance(tmp_record[17], bytes):
                                        tmp_record[17] = "Unknown"
                                    else:
                                        tmp_record[17] = tmp_record[
                                            17].replace('"', '""').replace(
                                                '\'', '\'\'')
                                    result = info + tuple(tmp_record) + tuple(
                                        [edb_path])
                                    query = self.InsertQueryBuilder(
                                        table_name=table_name,
                                        schema=content_schema,
                                        data=result)
                                    try:
                                        configuration.cursor.execute_query(
                                            query)
                                    except Exception as exception:
                                        logger.error(
                                            'database execution failed: {0!s}'.
                                            format(exception))

                            # Internet Explorer Cookies
                            if len(parser.GetCookiesRecords) > 0:
                                cookie_schema = ['par_id', 'case_id', 'evd_id'] + list(parser.GetCookiesSchema) \
                                                + ['source']
                                table_name = 'lv1_os_win_esedb_ie_cookies'
                                if not configuration.cursor.check_table_exist(
                                        table_name):
                                    ret = self.CreateTableWithSchema(
                                        configuration.cursor,
                                        table_name=table_name,
                                        schema=cookie_schema)
                                    if not ret:
                                        logger.error(
                                            'cannot create database table name: {0!s}'
                                            .format(table_name))
                                        return False

                                for record in parser.GetCookiesRecords:
                                    tmp_record = list(record)
                                    result = info + tuple(tmp_record) + tuple(
                                        [edb_path])
                                    query = self.InsertQueryBuilder(
                                        table_name=table_name,
                                        schema=cookie_schema,
                                        data=result)
                                    try:
                                        configuration.cursor.execute_query(
                                            query)
                                    except Exception as exception:
                                        logger.error(
                                            'database execution failed: {0!s}'.
                                            format(exception))

                            if len(parser.GetDownloadRecords) > 0:
                                download_schema = ['par_id', 'case_id', 'evd_id'] + list(parser.GetDownloadSchema) \
                                                  + ['source']
                                table_name = 'lv1_os_win_esedb_ie_download'
                                if not configuration.cursor.check_table_exist(
                                        table_name):
                                    ret = self.CreateTableWithSchema(
                                        configuration.cursor,
                                        table_name=table_name,
                                        schema=download_schema)
                                    if not ret:
                                        logger.error(
                                            'cannot create database table name: {0!s}'
                                            .format(table_name))
                                        return False

                                for record in parser.GetDownloadRecords:
                                    tmp_record = list(record)
                                    tmp_record[17] = tmp_record[17].replace(
                                        '"', '""')
                                    tmp_record[17] = tmp_record[17].replace(
                                        '\'', '\'\'')
                                    result = info + tuple(tmp_record) + tuple(
                                        [edb_path])
                                    query = self.InsertQueryBuilder(
                                        table_name=table_name,
                                        schema=download_schema,
                                        data=result)
                                    try:
                                        configuration.cursor.execute_query(
                                            query)
                                    except Exception as exception:
                                        logger.error(
                                            'database execution failed: {0!s}'.
                                            format(exception))

                    except errors.UnableToParseFile as exception:
                        logger.debug(
                            '[{0:s}] unable to parse file with error: {1!s}'.
                            format(self.NAME, exception))

            finally:
                esedb_file.close()
                file_object.close()
Example #31
    def Connect(self, par_id, configuration, source_path_spec, knowledge_base):
        """Connector to connect to Android User Apps modules.

        Args:
            par_id: partition id.
            configuration: configuration values.
            source_path_spec (dfvfs.PathSpec): path specification of the source file.
            knowledge_base (KnowledgeBase): knowledge base.

        """

        # Check Filesystem
        query = f"SELECT filesystem FROM partition_info WHERE par_id like '{par_id}'"
        filesystem = configuration.cursor.execute_query(query)

        if filesystem is None or filesystem[0] != "TSK_FS_TYPE_EXT4":
            print("No EXT filesystem.")
            return False

        this_file_path = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'schema' + os.sep + 'android' + os.sep

        ### Create LV1 Table ###
        # list of all yaml files
        yaml_list = [this_file_path + 'lv1_os_and_geodata.yaml']
        # list of all tables
        table_list = ['lv1_os_and_geodata']

        if not self.check_table_from_yaml(configuration, yaml_list, table_list):
            return False

        ### Load Application List ###
        if not self.LoadSchemaFromYaml(this_file_path + 'lv1_os_and_user_apps.yaml'):
            logger.error('cannot load schema from yaml: {0:s}'.format(self.NAME))
            return False

        # Search artifact paths
        paths = self._schema['Paths']
        separator = self._schema['Path_Separator']

        find_specs = self.BuildFindSpecs(paths, separator)
        if len(find_specs) < 1:
            return False

        output_path = configuration.root_tmp_path + os.sep + configuration.case_id + os.sep \
                      + configuration.evidence_id + os.sep + par_id + os.sep + 'AU2A_Raw_Files'

        if not os.path.exists(output_path):
            os.mkdir(output_path)

        for spec in find_specs:
            self.ExtractTargetDirToPath(source_path_spec=source_path_spec,
                                        configuration=configuration,
                                        file_spec=spec,
                                        output_path=output_path)

        results = android_user_apps.main(output_path)

        insert_geodata = list()
        for result in results:
            if result['title'] == "geodata":
                for data in result['data']:
                    if (data[2] == float(0) and data[3] == float(0)) or (
                            data[2] is None and data[3] is None):  # check longitude, latitude
                        continue

                    if data[1] is None:  # check timestamp
                        continue

                    if len(str(data[1])) == 10:  # Unixtime_seconds
                        time = str(datetime(1970, 1, 1) + timedelta(seconds=float(data[1]))).replace(' ', 'T') + 'Z'
                    elif len(str(data[1])) == 13:  # Unixtime_milliseconds
                        time = str(datetime(1970, 1, 1) + timedelta(milliseconds=float(data[1]))).replace(' ', 'T') + 'Z'
                    else:  # unknown timestamp resolution; skip the row
                        continue

                    insert_geodata.append(tuple([par_id, configuration.case_id, configuration.evidence_id,
                                                 data[0], time, data[2], data[3], data[4], data[5], data[6]]))

        query = "Insert into lv1_os_and_geodata values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
        configuration.cursor.bulk_execute(query, insert_geodata)
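The geodata loop above tells second- from millisecond-resolution epoch timestamps apart by digit count before formatting them as ISO strings. The same conversion as a standalone sketch:

from datetime import datetime, timedelta

def epoch_to_iso(ts):
    # 10 digits -> seconds, 13 digits -> milliseconds, as in the loop above
    if len(str(ts)) == 10:
        dt = datetime(1970, 1, 1) + timedelta(seconds=float(ts))
    elif len(str(ts)) == 13:
        dt = datetime(1970, 1, 1) + timedelta(milliseconds=float(ts))
    else:
        return None  # unrecognized resolution; the loop skips such rows
    return str(dt).replace(' ', 'T') + 'Z'

print(epoch_to_iso(1609459200))  # 2021-01-01T00:00:00Z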
Example #32
    def ExtractTargetFileToPath(self,
                                source_path_spec,
                                configuration,
                                file_path=None,
                                file_spec=None,
                                output_path=None,
                                data_stream_name=None):
        # TODO: handle the case where a find_spec is provided (see the Load method).

        try:
            if not file_spec:
                find_spec = file_system_searcher.FindSpec(
                    case_sensitive=False,
                    location=file_path,
                    location_separator=source_path_spec.location)
            else:
                find_spec = file_spec
        except ValueError as exception:
            logger.error(
                'Unable to build find specification for path: "{0:s}" with '
                'error: {1!s}'.format(file_path, exception))
            return False

        path_spec_generator = self._path_spec_extractor.ExtractPathSpecs(
            [source_path_spec],
            find_specs=[find_spec],
            recurse_file_system=False,
            resolver_context=configuration.resolver_context)

        for path_spec in path_spec_generator:
            display_name = path_helper.PathHelper.GetDisplayNameForPathSpec(
                path_spec)
            try:
                file_entry = path_spec_resolver.Resolver.OpenFileEntry(
                    path_spec, resolver_context=configuration.resolver_context)

                if file_entry is None or not file_entry.IsFile():
                    logger.warning(
                        'Unable to open file entry with path spec: {0:s}'.
                        format(display_name))
                    return False

                if data_stream_name:
                    file_object = file_entry.GetFileObject(
                        data_stream_name=data_stream_name)

                    if not file_object:
                        return False

                    try:
                        buffer_size = 65536
                        with open(
                                output_path + os.sep + file_entry.name + '_' +
                                data_stream_name, 'wb') as output_file:
                            file_object.seek(0, os.SEEK_SET)
                            data = file_object.read(buffer_size)
                            while data:
                                output_file.write(data)
                                data = file_object.read(buffer_size)

                    except IOError as exception:
                        logger.error(
                            'Failed to extract data stream "{0:s}" from '
                            '"{1:s}": {2!s}'.format(
                                data_stream_name, display_name, exception))
                        return False

                    finally:
                        file_object.close()

                else:
                    file_object = file_entry.GetFileObject()

                    if not file_object:
                        return False

                    try:
                        buffer_size = 65536
                        with open(os.path.join(output_path, file_entry.name),
                                  'wb') as output_file:
                            file_object.seek(0, os.SEEK_SET)
                            data = file_object.read(buffer_size)
                            while data:
                                output_file.write(data)
                                data = file_object.read(buffer_size)
                    except IOError as exception:
                        logger.error(
                            'Failed to extract file "{0:s}" : {1!s}'.format(
                                display_name, exception))
                    finally:
                        file_object.close()

            except KeyboardInterrupt:
                return False
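
Both extraction branches in ExtractTargetFileToPath share one pattern: seek the dfVFS file object back to the start, then copy it to disk in fixed-size chunks so multi-gigabyte evidence files never have to fit in memory. A generic sketch of that loop, assuming only that src is a readable, seekable binary stream:

import os

def copy_stream_to_file(src, dest_path, buffer_size=65536):
    """Copy a readable binary stream to dest_path in 64 KiB chunks."""
    src.seek(0, os.SEEK_SET)
    with open(dest_path, 'wb') as dest:
        data = src.read(buffer_size)
        while data:
            dest.write(data)
            data = src.read(buffer_size)

For plain Python streams, shutil.copyfileobj(src, dest, buffer_size) implements the same chunked copy.
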
Beispiel #33
0
def main():

    signal.signal(signal.SIGINT, signal_handle)
    signal.signal(signal.SIGTERM, signal_handle)

    parser = argparse.ArgumentParser()
    parser.add_argument("name")
    parser.add_argument("--verbose", action="store_true")
    parser.add_argument("--mqtt-host")
    parser.add_argument("--mqtt-port")
    parser.add_argument("--mqtt-user")
    parser.add_argument("--mqtt-pass")
    parser.add_argument("--mqtt-topic-req")
    parser.add_argument("--mqtt-topic-res")

    args = parser.parse_args()
    name = args.name

    load_dotenv()

    log_level = os.getenv("LOG_LEVEL", "info").lower()
    setLevel(args.verbose or log_level == 'debug')

    mqtt_host = args.mqtt_host or os.getenv("MQTT_HOST")
    mqtt_port = args.mqtt_port or os.getenv("MQTT_PORT")
    mqtt_user = args.mqtt_user or os.getenv("MQTT_USER")
    mqtt_pass = args.mqtt_pass or os.getenv("MQTT_PASS")

    mqtt_topic_req = args.mqtt_topic_req or os.getenv('MQTT_TOPIC_REQ')
    mqtt_topic_res = args.mqtt_topic_res or os.getenv('MQTT_TOPIC_RES')

    topic_req = f"{mqtt_topic_req}/{name}"
    topic_res = f"{mqtt_topic_res}/{name}"

    logger.debug("Starting MQTT")

    nextConnectionAt = datetime.now()
    connected = False

    HOME = os.getenv("HOME")

    pattern = re.compile(r'^Modify: (.*)\n')

    while True:

        now = datetime.now()

        if not connected and now > nextConnectionAt:
            try:

                @subscribe(
                    topic_req, {
                        "host": mqtt_host,
                        "port": int(mqtt_port),
                        "user": mqtt_user,
                        "pass": mqtt_pass
                    })
                def message_handle(payload, emit):

                    try:
                        if 'id' not in payload:
                            raise Exception("request id is not present")

                        if 'command' not in payload:
                            raise Exception("command is not present")

                        command = payload['command']

                        if command == 'status':
                            settings = read_config()
                            logger.info("settings: [%s]", settings)

                            found = glob.glob(
                                f"{HOME}/.pm2/pids/hackrf-control-*")
                            status = 'stopped'
                            uptime = None

                            if found:
                                status = 'online'

                                with open(found[0]) as fd:
                                    pid = fd.read()

                                out = subprocess.check_output(
                                    f"stat /proc/{pid} | grep Modify",
                                    shell=True,
                                    encoding="utf-8")
                                res = pattern.findall(out)

                                uptime = res[0] if res else None

                            emit(
                                topic_res, {
                                    'id': payload['id'],
                                    'settings': settings,
                                    'process': {
                                        'status': status,
                                        'uptime': uptime
                                    }
                                })

                        elif command == 'logs':

                            lines = payload.get('lines', 10)
                            out = subprocess.check_output(
                                f"tail {HOME}/.pm2/logs/hackrf-control-error.log -n {lines}",
                                shell=True,
                                encoding="utf-8")

                            data = []

                            for x in out.split('\n'):

                                created_at = x[0:23]

                                pos = x.find(" ", 24)
                                level = x[23:pos]

                                pos = x.find(" ", pos + 1)
                                content = x[pos:]

                                data.append({
                                    'created_at': created_at,
                                    'level': level,
                                    'content': content
                                })

                            emit(topic_res, {
                                'id': payload['id'],
                                'data': data
                            })

                        elif command == 'config':

                            if 'settings' not in payload:
                                raise Exception("settings is not present")

                            settings = payload['settings']
                            settings['_waveform'] = 'waveform' in settings

                            logger.info("settings: [%s]", settings)
                            save_config(settings)

                            emit(topic_res, {'id': payload['id']})

                        else:
                            emit(topic_res, {'id': payload['id']})

                    except Exception as ex:
                        logger.warning("%s", payload)
                        logger.error(ex)

                        emit(topic_res, {'id': payload.get('id'), 'error': str(ex)})

                logger.info("mqtt connected")
                connected = True

            except Exception as ex:
                logger.error(ex)

                connected = False
                nextConnectionAt = now + timedelta(seconds=10)

                logger.debug("Reconnecting mqtt at 10 seconds")

        time.sleep(0.1)
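
Every connection setting in main() is resolved the same way: a command-line flag wins, otherwise the matching environment variable (loaded via python-dotenv) is used. A minimal sketch of that precedence rule, with a hypothetical default added for illustration:

import argparse
import os

def resolve_setting(cli_value, env_name, default=None):
    """Prefer the CLI flag, then the environment variable, then a default."""
    return cli_value or os.getenv(env_name, default)

parser = argparse.ArgumentParser()
parser.add_argument("--mqtt-host")
args = parser.parse_args([])  # empty argv just for this demo

host = resolve_setting(args.mqtt_host, "MQTT_HOST", "localhost")
print(host)  # 'localhost' unless --mqtt-host or MQTT_HOST is set
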
Beispiel #34
0
    def Connect(self, par_id, configuration, source_path_spec, knowledge_base):
        """Connector to connect to ESE database modules.

		Args:
			par_id: partition id.
			configuration: configuration values.
			source_path_spec (dfvfs.PathSpec): path specification of the source file.
			knowledge_base (KnowledgeBase): knowledge base.

		"""

        this_file_path = os.path.dirname(
            os.path.abspath(__file__)) + os.sep + 'schema' + os.sep

        # list of all yaml files
        yaml_list = [
            this_file_path + 'lv1_os_win_searchdb_gthr.yaml',
            this_file_path + 'lv1_os_win_searchdb_gthrpth.yaml'
        ]

        # list of all tables
        table_list = [
            'lv1_os_win_searchdb_gthr', 'lv1_os_win_searchdb_gthrpth'
        ]

        if not self.check_table_from_yaml(configuration, yaml_list,
                                          table_list):
            return False

        query_separator = self.GetQuerySeparator(source_path_spec,
                                                 configuration)
        # TODO: change extension -> sig_type
        query = f"SELECT name, parent_path, extension, ctime, ctime_nano FROM file_info WHERE par_id='{par_id}' and " \
                f"parent_path = 'root{query_separator}ProgramData{query_separator}Microsoft{query_separator}" \
                f"Search{query_separator}Data{query_separator}Applications{query_separator}Windows' and name = 'Windows.edb';"

        searchdb_file = configuration.cursor.execute_query_mul(query)

        if len(searchdb_file) == 0:
            print("There are no searchdb files")
            return False

        # Search artifact paths
        path = f'{query_separator}ProgramData{query_separator}Microsoft{query_separator}Search' \
               f'{query_separator}Data{query_separator}Applications{query_separator}Windows{query_separator}Windows.edb'
        file_object = self.LoadTargetFileToMemory(
            source_path_spec=source_path_spec,
            configuration=configuration,
            file_path=path)
        try:
            results = searchdb_parser.main(database=file_object)
        except Exception as e:
            logger.error(str(e))
            print(str(e))
            return False
        file_object.close()
        if results is None:
            return False
        insert_searchdb_gthr = []
        insert_searchdb_gthrpth = []

        try:
            for idx, result in enumerate(results['SystemIndex_Gthr']):
                if idx == 0:
                    continue
                timestamp = struct.unpack('>Q', result[3])[0]  # last_modified
                try:
                    time = str(
                        datetime.utcfromtimestamp(timestamp / 10000000 -
                                                  11644473600)).replace(
                                                      ' ', 'T') + 'Z'
                    time = configuration.apply_time_zone(
                        time, knowledge_base.time_zone)
                except Exception:
                    time = None
                insert_searchdb_gthr.append(
                    tuple([
                        par_id,
                        configuration.case_id,
                        configuration.evidence_id,
                        str(result[0]),
                        str(result[1]),
                        str(result[2]),
                        time,
                        str(result[4]),
                        str(result[5]),
                        str(result[6]),
                        str(result[7]),
                        str(result[8]),
                        str(result[9]),
                        str(None),
                        str(result[11]),
                        str(result[12]),  # user_data blob: temporary string handling
                        str(result[13]),
                        str(result[14]),
                        str(result[15]),
                        str(result[16]),
                        str(result[17]),
                        str(result[18]),
                        str(result[19])
                    ]))
        except Exception as e:
            print(e)

        for idx, result in enumerate(results['SystemIndex_GthrPth']):
            if idx == 0:
                continue
            insert_searchdb_gthrpth.append(
                tuple([
                    par_id, configuration.case_id, configuration.evidence_id,
                    str(result[0]),
                    str(result[1]),
                    str(result[2]),
                    '/ProgramData/Microsoft/Search/Data/Applications/Windows/Windows.edb'
                ]))

        query = "Insert into lv1_os_win_searchdb_gthr values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, " \
                "%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
        configuration.cursor.bulk_execute(query, insert_searchdb_gthr)

        query = "Insert into lv1_os_win_searchdb_gthrpth values (%s, %s, %s, %s, %s, %s, %s);"
        configuration.cursor.bulk_execute(query, insert_searchdb_gthrpth)
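
The last_modified column in SystemIndex_Gthr is a big-endian 64-bit Windows FILETIME: 100-nanosecond ticks since 1601-01-01. The snippet converts it by dividing by 10,000,000 (ticks per second) and subtracting 11,644,473,600 seconds, the gap between the 1601 and 1970 epochs. The same conversion as a standalone sketch:

import struct
from datetime import datetime

def filetime_to_datetime(raw8):
    """Convert 8 big-endian bytes of Windows FILETIME
    (100 ns ticks since 1601-01-01) to a naive UTC datetime."""
    ticks = struct.unpack('>Q', raw8)[0]
    return datetime.utcfromtimestamp(ticks / 10000000 - 11644473600)

print(filetime_to_datetime(b'\x01\xd0\x00\x00\x00\x00\x00\x00'))  # a moment in late 2014
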
Beispiel #35
0
                def message_handle(payload, emit):

                    try:
                        if 'id' not in payload:
                            raise Exception("request id is not present")

                        if 'command' not in payload:
                            raise Exception("command is not present")

                        command = payload['command']

                        if command == 'status':
                            settings = read_config()
                            logger.info("settings: [%s]", settings)

                            found = glob.glob(
                                f"{HOME}/.pm2/pids/hackrf-control-*")
                            status = 'stopped'
                            uptime = None

                            if found:
                                status = 'online'

                                with open(found[0]) as fd:
                                    pid = fd.read()

                                out = subprocess.check_output(
                                    f"stat /proc/{pid} | grep Modify",
                                    shell=True,
                                    encoding="utf-8")
                                res = pattern.findall(out)

                                uptime = res[0] if res else None

                            emit(
                                topic_res, {
                                    'id': payload['id'],
                                    'settings': settings,
                                    'process': {
                                        'status': status,
                                        'uptime': uptime
                                    }
                                })

                        elif command == 'logs':

                            lines = payload.get('lines', 10)
                            out = subprocess.check_output(
                                f"tail {HOME}/.pm2/logs/hackrf-control-error.log -n {lines}",
                                shell=True,
                                encoding="utf-8")

                            data = []

                            for x in out.split('\n'):

                                created_at = x[0:23]

                                pos = x.find(" ", 24)
                                level = x[23:pos]

                                pos = x.find(" ", pos + 1)
                                content = x[pos:]

                                data.append({
                                    'created_at': created_at,
                                    'level': level,
                                    'content': content
                                })

                            emit(topic_res, {
                                'id': payload['id'],
                                'data': data
                            })

                        elif command == 'config':

                            if 'settings' not in payload:
                                raise Exception("settings is not present")

                            settings = payload['settings']
                            settings['_waveform'] = 'waveform' in settings

                            logger.info("settings: [%s]", settings)
                            save_config(settings)

                            emit(topic_res, {'id': payload['id']})

                        else:
                            emit(topic_res, {'id': payload['id']})

                    except Exception as ex:
                        logger.warning("%s", payload)
                        logger.error(ex)

                        emit(topic_res, {'id': payload.get('id'), 'error': str(ex)})
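
The 'logs' branch above parses each pm2 log line positionally: a fixed-width timestamp first, then the level up to the next space, then the message. A sketch of that find-based slicing on a hypothetical log line (the exact offsets depend on how pm2 formats its output):

line = "2021-03-01T09:13:20.123 ERROR oscillator failed to start"

created_at = line[0:23]   # fixed-width ISO timestamp
pos = line.find(" ", 24)  # space that ends the level token
level = line[24:pos]
content = line[pos + 1:]

print({'created_at': created_at, 'level': level, 'content': content})
# {'created_at': '2021-03-01T09:13:20.123', 'level': 'ERROR',
#  'content': 'oscillator failed to start'}
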
Beispiel #36
0
    def create_host_directories_and_tar(self):
        """Main packaging function

        Works in 3 parts:
          1. Validate app data and configurations
          2. Create tmp directories for each host with loaded apps and manifest
          3. Package (tar) up host tmp directories for distribution
        """

        Helpers.delete_path(self.tmp_folder)
        Helpers.create_path(self.tars_folder, True)

        self.repo_manager.set_commit_id()
        master_commit_log = self.repo_manager.get_commit_log()

        errors_found = False
        changes_found = False

        for host in self.appetite_hosts:  # pylint: disable=too-many-nested-blocks
            # Per host build apps folder and tar up based on class
            hostname = host.hostname
            apps = host.get_apps(self.args.refname)
            tarname = host.tarname

            apps = sorted(apps, key=lambda app: app.commit_id)

            tmp_hostname_dir = os.path.join(self.hosts_folder, hostname)
            tmp_hostname_meta = os.path.join(tmp_hostname_dir, Consts.META_DIR)

            apps_meta = []

            if len(apps) < 1:
                Logger.warn("Host with no apps", hostname=hostname)
                continue
            # Parse the remote meta file from the host
            # This file might not exist
            remote_meta_file = host.local_meta_file
            remote_metas_loaded = False
            if os.path.exists(remote_meta_file):
                try:
                    with open(remote_meta_file) as remote_data_file:
                        remote_metas_master = json.load(remote_data_file)
                        remote_metas_content = remote_metas_master['content'] \
                            if 'content' in remote_metas_master else remote_metas_master
                        remote_metas = [
                            AppetiteHost.create_app_from_object(
                                self.repo_manager, self.deployment_manager,
                                meta_data)
                            for meta_data in remote_metas_content
                        ]

                        remote_metas_loaded = True
                except Exception as exception:
                    Logger.error("Problems loading meta file",
                                 error=str(exception),
                                 path=remote_meta_file)
            elif not self.args.dryrun:
                Logger.warn("Local version of remote meta not found",
                            file=remote_meta_file)

            ordered_unique_apps = sorted(list(set(apps)),
                                         key=lambda single_app:
                                         (single_app.name, single_app.
                                          commit_id, single_app.method_name))

            for iapp in ordered_unique_apps:
                app_occurrences = apps.count(iapp)
                if app_occurrences > 1:
                    Logger.warn("Dup app found",
                                host=host.hostname,
                                app_info=iapp.app_key,
                                occurrences=app_occurrences)

            # Validate app data and configurations

            # Go through the apps and checks to see if there are any errors
            # This is where the remote meta is compared to the newly generated
            # lists of apps from the manifest
            for app in apps:
                raw_app_path = os.path.join(self.apps_folder, app.name)

                # Check the commit Id for problems
                if app.commit_id:
                    self.repo_manager.set_commit_id(app.commit_id)
                else:  # pylint: disable=else-if-used
                    if self.args.strict_commitids:
                        Logger.error("Application with missing commit Id",
                                     hostname=hostname,
                                     app=app.name)
                        errors_found = True
                        continue
                    else:
                        app._commit_id = master_commit_log['app_commit_id']  # pylint: disable=protected-access
                        self.repo_manager.set_commit_id(app.commit_id)

                # Checks if app listed in the manifest
                # exists with the correct commit id
                if Helpers.check_path(raw_app_path):
                    meta_to_append = None
                    app.refresh_version_info(self.args.refname,
                                             Consts.META_APP_UNCHANGED)
                    remote_meta = None

                    # Check to see what has changed
                    if remote_metas_loaded:
                        # Searches remote meta to see if application already exists
                        remote_meta = next((rmeta for rmeta in remote_metas
                                            if app.check_names(rmeta)), None)

                        if remote_meta:
                            # If app does exist on system, have the commit ids changed
                            if remote_meta.commit_id != app.commit_id:
                                meta_to_append = app.set_status_changed()
                            else:
                                # meta has not changed so use existing meta
                                meta_to_append = app.clone
                                meta_to_append.update_app_version(app)

                            # to track if an app is removed from the remote meta
                            remote_metas.remove(remote_meta)

                    if not meta_to_append:
                        # There is no remote meta so all files should be added
                        meta_to_append = app.set_status_added()

                    if remote_meta and meta_to_append:
                        meta_outcome = Helpers.debug_app_versions(
                            meta_to_append, remote_meta, meta_to_append.status)
                        Logger.debug("Check meta logic", outcome=meta_outcome)

                        if meta_to_append.has_changed:
                            Logger.info("App change", logic=meta_outcome)

                    apps_meta.append(meta_to_append)
                else:
                    Logger.error("Missing application",
                                 hostname=hostname,
                                 app=app.name,
                                 path=raw_app_path)
                    continue

            if remote_metas_loaded and len(remote_metas) > 0:
                # Any apps left in the remote meta do not exist in the current
                # manifest and should be deleted
                delete_list = []
                for deleted_app in remote_metas:
                    if deleted_app.method_info:
                        deleted_app.set_status_deleted()
                        # Added logic check to catch method changes
                        added_app_found = next((
                            app for app in apps_meta
                            if app.status == Consts.META_APP_ADDED and app.name
                            == deleted_app.name and app.method_info['path'] ==
                            deleted_app.method_info['path']), None)
                        if added_app_found:
                            added_app_found.set_status_changed()
                        else:
                            delete_list.append(deleted_app)
                    else:
                        Logger.error(
                            "Problems with method info for deleted app.",
                            hostname=hostname,
                            app=deleted_app.name)

                apps_meta += delete_list

            # Only do something if there has been a change
            if len([app for app in apps_meta if not app.is_unchanged]) < 1:
                continue

            # No point continuing if there is no connection to the host
            if not self.check_host_connection(host):
                continue

            # Clean command lines for auth params
            # This data is ingested so creds should be removed
            # apps_meta = [updated_app.clone for updated_app in apps_meta]

            if not self.args.disable_logging:
                for updated_app in apps_meta:
                    Logger.log_event(updated_app.to_dict)

            # Applications that actually needs to be updated
            tar_apps = sorted([
                updated_app for updated_app in apps_meta if updated_app.updated
            ],
                              key=lambda tar_app: tar_app.app)

            use_templating = self.template_values and self.args.templating

            # Checking will allow templating otherwise will skip steps
            Helpers.create_path(
                os.path.join(tmp_hostname_meta, Consts.HOST_LOGS_FOLDER_NAME),
                True)
            if len(tar_apps) > 0:
                # All error checks have been done above, build out
                # the hosts directory and tar up
                for updated_app in tar_apps:
                    app_path = os.path.join(tmp_hostname_dir,
                                            updated_app.method_info['path'])
                    Helpers.create_path(app_path, True)
                    raw_app_path = os.path.join(self.apps_folder,
                                                updated_app.name)

                    self.repo_manager.set_commit_id(updated_app.commit_id)

                    if updated_app.update_method_is_copy:
                        app_dest = os.path.join(app_path,
                                                updated_app.app_clean)
                    else:
                        app_dest = app_path

                    copy_tree(raw_app_path, app_dest)

                    lookups_inclusion_location = os.path.join(
                        app_dest, self.deployment_manager.inclusion_filename)

                    ignore_dir = os.path.join(app_dest, Consts.TMP_IGNORE_DIR)

                    # Ignore files/folders set in the global configurations
                    if self.args.install_ignore:
                        content_ignored_results = Helpers.move_regexed_files(
                            self.args.install_ignore.split(';'), app_dest,
                            ignore_dir)
                        files_included = content_ignored_results['files_moved']

                        if len(files_included) > 0:
                            Logger.error(
                                "Globally these files should not exist in the App. "
                                "The files have been removed from the install.",
                                files=files_included,
                                hostname=hostname,
                                app=updated_app.name)

                        # Users should not have the capability to include files from the
                        # global ignore.
                        Helpers.delete_path(ignore_dir)

                    # Defined folders/files are to move out of application.
                    # This is defined in the deploymentmethods.conf
                    # If an app is installed for the first time, all files should be included
                    if 'install_ignore' in updated_app.method_info and not updated_app.is_added:
                        Helpers.move_regexed_files(
                            updated_app.method_info['install_ignore'],
                            app_dest, ignore_dir)

                        # If there is a inclusion file, include files back into app.
                        # This is defined on a per app basis
                        if os.path.isfile(lookups_inclusion_location):
                            with open(lookups_inclusion_location, "r") as f:
                                lines = [l.strip() for l in f.readlines()]

                            lookup_inclusion_results = Helpers.move_regexed_files(
                                lines, ignore_dir, app_dest)

                            if lookup_inclusion_results['errors_found']:
                                Logger.warn(
                                    "Lookup inclusion error found",
                                    paths=lookup_inclusion_results[
                                        'path_errors'],
                                    hostname=hostname,
                                    app=updated_app.name,
                                    todo="Remove file/path from inclusion..")
                                # Problem with this app's inclusion list,
                                # move on to the next app
                                continue

                            updated_app.method_info['inclusions'] = \
                                lookup_inclusion_results['files_moved']

                            # Update objects with inclusions
                            updated_app.copy_value_to_method_info(
                                'inclusions', apps_meta)
                            os.remove(lookups_inclusion_location)

                    Helpers.delete_path(ignore_dir)

                    if use_templating and not updated_app.method_info[
                            'skip_templating']:
                        # Can template based on vars from templated
                        # values, hosts vars and app vars
                        Helpers.template_directory(app_dest, [
                            self.template_values, host.to_dict,
                            updated_app.to_dict
                        ])

                    # Should only change access and create version file if a whole app is copied
                    if updated_app.update_method_is_copy:
                        for host_path, host_dir, host_files in os.walk(
                                app_dest):  # pylint: disable=unused-variable
                            for host_file in host_files:
                                # Splunk apps can have active binaries in multiple languages
                                # This is a catch all to make sure apps have all the required
                                # permissions.
                                chmod = 0o755
                                os.chmod(os.path.join(host_path, host_file),
                                         chmod)

                        if not updated_app.method_info['no_appetite_changes']:
                            with open(
                                    os.path.join(
                                        app_dest,
                                        Helpers.get_app_version_filename()),
                                    "w") as f:
                                f.write(updated_app.to_json)

                            AppVersioning.create_app_version(
                                app_dest,
                                updated_app.commit_log['app_abbrev_commit_id'])
            apps_distro = Helpers.content_wrapper(apps_meta,
                                                  Consts.META_CURRENT,
                                                  hostname, self.track)

            # Meta file used as source of truth on instance
            master_meta = self.create_meta_files(tmp_hostname_meta, '',
                                                 apps_distro)
            # can be used to update and test manifest changes locally
            if self.args.dryrun:
                Helpers.create_path(host.local_meta_file)
                shutil.copy(master_meta, host.local_meta_file)

            # Always want clean logs ingested
            selected_apps = Helpers.select_and_update_apps(
                apps_meta, Consts.META_CURRENT, False)

            self.create_meta_log(tmp_hostname_meta, '', selected_apps,
                                 Helpers.get_utc())

            host.updates = Helpers.content_process(apps_meta,
                                                   Consts.META_UPDATED,
                                                   hostname, self.track, True)

            # Create the meta change file
            self.create_meta_files(tmp_hostname_meta, '_update',
                                   Helpers.content_convert(host.updates))

            # Clean updates file for logging
            selected_apps = Helpers.select_and_update_apps(
                apps_meta, Consts.META_UPDATED, True)

            self.create_meta_log(tmp_hostname_meta, '_update', selected_apps,
                                 Helpers.get_utc())

            Logger.info("Changes found",
                        updates=Helpers.content_wrapper(
                            apps_meta, Consts.META_UPDATED, hostname,
                            self.track, True))

            # Package (tar) up host tmp directories for distribution
            tar = tarfile.open(
                os.path.join(self.tars_folder, "%s.tar.gz" % tarname), "w:gz")
            tar.add(tmp_hostname_dir, arcname=os.path.basename(self.base_name))
            tar.close()

            changes_found = True

        if errors_found:
            sys.exit(1)

        self.repo_manager.set_commit_id()

        return changes_found
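
The packaging step at the end of create_host_directories_and_tar is a plain tarfile call: the host's tmp directory is added under a normalized arcname so every tarball unpacks to the same top-level folder. A minimal sketch with hypothetical paths:

import os
import tarfile

def tar_directory(src_dir, tar_path, arcname):
    """Gzip-tar src_dir so that it extracts as arcname/..."""
    with tarfile.open(tar_path, "w:gz") as tar:
        tar.add(src_dir, arcname=arcname)

# hypothetical host build directory, created here so the demo runs
os.makedirs("tmp/hosts/splunk-idx-01", exist_ok=True)
os.makedirs("tars", exist_ok=True)
tar_directory("tmp/hosts/splunk-idx-01", "tars/splunk-idx-01.tar.gz", "apps")
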
    def Connect(self, par_id, configuration, source_path_spec, knowledge_base):

        # Load schema
        yaml_path = os.path.dirname(
            os.path.abspath(__file__)
        ) + os.sep + 'schema' + os.sep + 'lv1_app_google_drive_path.yaml'
        if not self.LoadSchemaFromYaml(yaml_path):
            logger.error('cannot load schema from yaml: {0:s}'.format(
                self.NAME))
            return False

        # Search artifact paths
        paths = self._schema['Paths']
        separator = self._schema['Path_Separator']
        environment_variables = knowledge_base.GetEnvironmentVariables()

        find_specs = self.BuildFindSpecs(paths, separator,
                                         environment_variables)
        if len(find_specs) < 1:
            return False

        output_path = configuration.root_tmp_path + os.sep + configuration.case_id + os.sep + \
                      configuration.evidence_id + os.sep + par_id + os.sep + 'DB'
        tmp = configuration.root_tmp_path + os.sep + configuration.case_id + os.sep + \
                      configuration.evidence_id + os.sep + par_id
        if not os.path.exists(tmp):
            os.mkdir(tmp)
        if not os.path.exists(output_path):
            os.mkdir(output_path)

        google_drive_fs_path = ''
        for spec in find_specs:
            self.ExtractTargetDirToPath(source_path_spec=source_path_spec,
                                        configuration=configuration,
                                        file_spec=spec,
                                        output_path=output_path)
            path_spec_generator = self._path_spec_extractor.ExtractPathSpecs(
                [source_path_spec],
                find_specs=[spec],
                recurse_file_system=False,
                resolver_context=configuration.resolver_context)

            for path_spec in path_spec_generator:
                google_drive_fs_path = path_spec.location

        if not os.path.exists(output_path):
            print("There are no google drive files")
            return False

        this_file_path = os.path.dirname(
            os.path.abspath(__file__)) + os.sep + 'schema' + os.sep
        yaml_list = [this_file_path + 'lv1_app_google_drive_fschange.yaml']
        table_list = ['lv1_app_google_drive_fschange']

        if not self.check_table_from_yaml(configuration, yaml_list,
                                          table_list):
            return False

        try:
            f_data = []
            file_list = os.listdir(output_path)
            info = [par_id, configuration.case_id, configuration.evidence_id]

            for db in file_list:
                fs_data = gs.fschange_parse(output_path + os.sep, db)
                for d in fs_data:
                    f_data.append(info + d + [google_drive_fs_path])
        except Exception:
            return False

        if not f_data:
            return False

        query = f"INSERT INTO lv1_app_google_drive_fschange values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
        configuration.cursor.bulk_execute(query, f_data)
        shutil.rmtree(output_path)
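
Building the tmp/<case>/<evidence>/<partition>/DB chain one os.mkdir at a time is fragile; os.makedirs with exist_ok=True (Python 3.2+) creates the whole chain idempotently. A sketch of that alternative; the helper name is ours:

import os

def ensure_output_path(root_tmp, case_id, evidence_id, par_id, leaf='DB'):
    """Create root_tmp/case/evidence/partition/leaf, parents included.
    Safe to call again when the directories already exist."""
    output_path = os.path.join(root_tmp, case_id, evidence_id, par_id, leaf)
    os.makedirs(output_path, exist_ok=True)
    return output_path

print(ensure_output_path('tmp', 'case01', 'evd01', 'p1'))  # tmp/case01/evd01/p1/DB
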
Beispiel #38
0
    def Connect(self, par_id, configuration, source_path_spec, knowledge_base):
        this_file_path = (os.path.dirname(os.path.abspath(__file__)) + os.sep +
                          "schema" + os.sep + "evernote")
        # list of all yaml files
        yamls = [
            this_file_path + os.sep + "lv1_app_evernote_accounts.yaml",
            this_file_path + os.sep + "lv1_app_evernote_notes.yaml",
            this_file_path + os.sep + "lv1_app_evernote_workchats.yaml",
        ]
        # list of all tables
        tables = [
            "lv1_app_evernote_accounts",
            "lv1_app_evernote_notes",
            "lv1_app_evernote_workchats",
        ]
        # create all the tables
        if not self.check_table_from_yaml(configuration, yamls, tables):
            return False

        # TODO: change extension -> sig_type
        query = f"""
            SELECT name, parent_path, extension 
            FROM file_info 
            WHERE par_id = '{par_id}' 
            AND extension = 'exb'
            AND parent_path like '%Evernote%' > 0; 
        """
        evernote_db_query_results: List = configuration.cursor.execute_query_mul(
            query)

        if evernote_db_query_results == -1 or len(
                evernote_db_query_results) == 0:
            logger.error('db execution failed.')
            print("There are no evernote files")
            return False

        query_separator = self.GetQuerySeparator(source_path_spec,
                                                 configuration)
        path_separator = self.GetPathSeparator(source_path_spec)

        for file_name, parent_path, _ in evernote_db_query_results:
            file_path = (parent_path[parent_path.find(path_separator):] +
                         path_separator + file_name)

            output_path = (configuration.root_tmp_path + os.sep +
                           configuration.case_id + os.sep +
                           configuration.evidence_id + os.sep + par_id)

            if not os.path.exists(output_path):
                os.mkdir(output_path)
            self.ExtractTargetFileToPath(
                source_path_spec=source_path_spec,
                configuration=configuration,
                file_path=file_path,
                output_path=output_path,
            )

            parse_result = evernote_parser.main(output_path + os.sep +
                                                file_name)
            account = parse_result["user"]
            query = "INSERT INTO lv1_app_evernote_accounts values (%s, %s, %s, %s, %s, %s, %s)"

            account_tuple = tuple([
                par_id,
                configuration.case_id,
                configuration.evidence_id,
            ] + list(account.values()))
            configuration.cursor.execute_query(query, account_tuple)
            notes = parse_result["notes"]
            query = "INSERT INTO lv1_app_evernote_notes values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"

            for note in notes:
                note_tuple = tuple([
                    par_id,
                    configuration.case_id,
                    configuration.evidence_id,
                ] + list(note.values()))

                configuration.cursor.execute_query(query, note_tuple)

            workchats = parse_result["workchats"]
            query = "INSERT INTO lv1_app_evernote_workchats values (%s, %s, %s, %s, %s, %s, %s, %s, %s)"
            for workchat in workchats:
                workchat_tuple = tuple(
                    [
                        par_id,
                        configuration.case_id,
                        configuration.evidence_id,
                    ]
                    # json.dumps for stringify array
                    + [
                        json.dumps(column) if index > 3 else column
                        for index, column in enumerate(workchat.values())
                    ])

                configuration.cursor.execute_query(query, workchat_tuple)

            os.remove(output_path + os.sep + file_name)
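
The workchat insert JSON-encodes every column past index 3 so list- or dict-valued fields reach the database as strings while the leading scalar columns pass through untouched. A standalone sketch of that normalization, assuming (as the snippet does) that non-scalar values only appear from index 4 on:

import json

def normalize_row(ids, row_values, first_json_index=4):
    """Prefix bookkeeping ids, then JSON-encode the columns from
    first_json_index onward so arrays and objects become strings."""
    return tuple(ids) + tuple(
        json.dumps(value) if index >= first_json_index else value
        for index, value in enumerate(row_values))

row = ('chat-1', 'alice', 'bob', 1614590000, ['hi', 'there'])
print(normalize_row(('p1', 'case01', 'evd01'), row))
# ('p1', 'case01', 'evd01', 'chat-1', 'alice', 'bob', 1614590000, '["hi", "there"]')
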
    def Connect(self, configuration, source_path_spec, knowledge_base):
        """Connector to connect to Android User Apps modules.

        Args:
            configuration: configuration values.
            source_path_spec (dfvfs.PathSpec): path specification of the source file.
            knowledge_base (KnowledgeBase): knowledge base.

        """
        if source_path_spec.parent.type_indicator != dfvfs_definitions.TYPE_INDICATOR_TSK_PARTITION:
            par_id = configuration.partition_list['p1']
        else:
            par_id = configuration.partition_list[getattr(
                source_path_spec.parent, 'location', None)[1:]]

        if par_id is None:
            return False

        print(
            '[MODULE]: Android User Apps Analyzer Start! - partition ID(%s)' %
            par_id)

        this_file_path = os.path.dirname(os.path.abspath(
            __file__)) + os.sep + 'schema' + os.sep + 'android' + os.sep
        ### Create LV1 Table ###
        # list of all yaml files
        yaml_list = [this_file_path + 'lv1_os_and_geodata.yaml']
        # list of all tables
        table_list = ['lv1_os_and_geodata']

        if not self.check_table_from_yaml(configuration, yaml_list,
                                          table_list):
            return False

        ### Load Application List ###
        if not self.LoadSchemaFromYaml(this_file_path +
                                       'lv1_os_and_user_apps.yaml'):
            logger.error('cannot load schema from yaml: {0:s}'.format(
                self.NAME))
            return False

        # Search artifact paths
        paths = self._schema['Paths']
        separator = self._schema['Path_Separator']

        find_specs = self.BuildFindSpecs(paths, separator)
        if len(find_specs) < 1:
            return False

        if not configuration.standalone_check:
            output_path = configuration.root_tmp_path
        else:
            output_path = configuration.output_file_path
        output_path += os.sep + configuration.case_id + os.sep + configuration.evidence_id + os.sep + par_id\
                       + os.sep + 'AU2A_Raw_Files'

        for spec in find_specs:
            self.ExtractTargetDirToPath(source_path_spec=source_path_spec,
                                        configuration=configuration,
                                        file_spec=spec,
                                        output_path=output_path)

        results = android_user_apps.main(output_path)

        insert_geodata = list()
        for result in results:
            if result['title'] == "geodata":
                for data in result['data']:
                    if (data[2] == float(0) and data[3] == float(0)) or (
                            data[2] is None
                            and data[3] is None):  # check longitude, latitude
                        continue

                    if data[1] is None:  # check timestamp
                        continue

                    if len(str(data[1])) == 10:  # Unix time in seconds
                        time = str(
                            datetime(1970, 1, 1) +
                            timedelta(seconds=float(data[1]))).replace(
                                ' ', 'T') + 'Z'
                    elif len(str(data[1])) == 13:  # Unix time in milliseconds
                        time = str(
                            datetime(1970, 1, 1) +
                            timedelta(milliseconds=float(data[1]))).replace(
                                ' ', 'T') + 'Z'
                    else:  # unknown timestamp width: skip so 'time' is never stale
                        continue

                    insert_geodata.append(
                        tuple([
                            par_id, configuration.case_id,
                            configuration.evidence_id, data[0], time, data[2],
                            data[3], data[4], data[5], data[6]
                        ]))

        query = "Insert into lv1_os_and_geodata values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
        configuration.cursor.bulk_execute(query, insert_geodata)
    def Connect(self, par_id, configuration, source_path_spec, knowledge_base):
        """Connector to connect to Android Basic Apps modules.

        Args:
            par_id: partition id.
            configuration: configuration values.
            source_path_spec (dfvfs.PathSpec): path specification of the source file.
            knowledge_base (KnowledgeBase): knowledge base.

        """

        # Check Filesystem
        query = f"SELECT filesystem FROM partition_info WHERE par_id like '{par_id}'"
        filesystem = configuration.cursor.execute_query(query)

        if filesystem is None or filesystem[0] != "TSK_FS_TYPE_EXT4":
            print("No EXT filesystem.")
            return False

        yaml_list = []
        table_list = []

        # Load Schema
        if not self.LoadSchemaFromYaml(
                '../modules/schema/android/lv1_os_and_basic_apps.yaml'):
            logger.error('cannot load schema from yaml: {0:s}'.format(
                self.NAME))
            return False

        # Search artifact paths
        paths = self._schema['Paths']
        separator = self._schema['Path_Separator']

        find_specs = self.BuildFindSpecs(paths, separator)
        if len(find_specs) < 1:
            return False

        output_path = configuration.root_tmp_path + os.sep + configuration.case_id + os.sep + \
                      configuration.evidence_id + os.sep + par_id + os.sep + 'AB2A_Raw_Files'

        if not os.path.exists(output_path):
            os.mkdir(output_path)

        for spec in find_specs:
            self.ExtractTargetDirToPath(source_path_spec=source_path_spec,
                                        configuration=configuration,
                                        file_spec=spec,
                                        output_path=output_path)

        results = android_basic_apps.main(output_path)

        header = tuple(['par_id', 'case_id', 'evd_id'])
        header_data = tuple(
            [par_id, configuration.case_id, configuration.evidence_id])
        for result in results:
            if result['number_of_data'] > 0:
                table_name = 'lv1_os_and_basic_app_' + result['title']
                schema = header + result['data_header']

                if not configuration.cursor.check_table_exist(table_name):
                    ret = self.CreateTableWithSchema(
                        configuration.cursor, table_name, schema,
                        configuration.standalone_check)
                    if not ret:
                        logger.error(
                            'cannot create database table name: {0:s}'.format(
                                table_name))
                        return False

                header_len = len(header) + result['number_of_data_headers']
                query = f"Insert into {table_name} values ("
                for i in range(0, header_len):
                    if i == header_len - 1:
                        query += "%s);"
                    else:
                        query += "%s, "

                data_list = []
                for data in result['data']:
                    data = header_data + data
                    data_list.append(tuple(data))
                configuration.cursor.bulk_execute(query, data_list)
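
Because each result carries its own data_header, the INSERT statement above is assembled with exactly as many %s placeholders as there are columns. The character-by-character loop works, but joining a repeated list is the idiomatic way to build it (the table name below is illustrative):

def build_insert_query(table_name, column_count):
    """Build 'Insert into <table> values (%s, ..., %s);' with
    column_count placeholders for a parametrized bulk execute."""
    placeholders = ", ".join(["%s"] * column_count)
    return "Insert into {0} values ({1});".format(table_name, placeholders)

print(build_insert_query("lv1_os_and_basic_app_example", 5))
# Insert into lv1_os_and_basic_app_example values (%s, %s, %s, %s, %s);
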
Beispiel #41
0
def accessibility(bounds, beta, transportation, threshold, demand_col,
                  supply_col, capacity_col):

    demand_col_sql = '"' + demand_col + '"'
    supply_col_sql = '"' + supply_col + '"'
    capacity_col_sql = '"' + capacity_col + '"'

    try:
        xmin = float(bounds['_southWest']['lng'])
        ymin = float(bounds['_southWest']['lat'])
        xmax = float(bounds['_northEast']['lng'])
        ymax = float(bounds['_northEast']['lat'])
    except Exception as e:
        logger.error(f'Data provided is not correct: {e}')
        raise  # the bounds are unusable, so there is nothing left to compute

    # store in an array the geouids that are contained within the client's window view (bounding box)
    demand_query = """
        SELECT geouid, ST_AsText(ST_Transform(ST_Simplify(boundary,0.5), 4326)) as boundary, %s
        FROM demand
        WHERE ST_Contains(
            ST_Transform(
                ST_MakeEnvelope(%s, %s, %s, %s, 4326)
                , 3347)
            , demand.centroid)
        ORDER BY geouid;
    """ % (demand_col_sql, xmin, ymin, xmax, ymax)

    with db.DbConnect() as db_conn:
        db_conn.cur.execute(demand_query)
        demand = pd.DataFrame(
            db_conn.cur.fetchall(),
            columns=[desc[0] for desc in db_conn.cur.description])
        demand_array = np.array(demand[demand_col])
        geouid_array = np.array(demand['geouid'])

    # store in arrays the supply locations, counts and capacities that are contained within the client's window view (bounding box)
    supply_query = """
        SELECT geouid, %s::float, %s::float
        FROM poi
        WHERE ST_Contains(
            ST_Transform(
                ST_MakeEnvelope(%s, %s, %s, %s, 4326)
                , 3347)
            , poi.point)
        ORDER BY geouid;
    """ % (supply_col_sql, capacity_col_sql, xmin, ymin, xmax, ymax)

    with db.DbConnect() as db_conn:
        db_conn.cur.execute(supply_query)
        poi = pd.DataFrame(
            db_conn.cur.fetchall(),
            columns=[desc[0] for desc in db_conn.cur.description])
        poi_array = np.array(poi['geouid'])
        supply_array = np.array(poi[supply_col])
        capacity_array = np.array(poi[capacity_col])

    # script to derive column and population demand geouids that need to be in data frame
    cols = ", poiuid_".join(poi_array)
    ids = ", ".join(map(str, geouid_array))
    dist = ["poiuid_" + col + " <= " + str(threshold) for col in poi_array]
    where = " OR ".join(
        dist
    )  # where statement to get all distances equal to or within threshold

    # create data frame of distance matrix by first subsetting it based on the geouids and poiuids
    # filter also by distance threshold
    '''
    dm_query = """
        SELECT geouid, poiuid_%s
        FROM distance_matrix_%s
        WHERE geouid = ANY(ARRAY[%s]) AND (%s) 
        ORDER BY geouid;
    """ % (cols, transportation, ids, where)
    '''
    dm_query = """
        SELECT geouid, poiuid_%s
        FROM distance_matrix_%s
        WHERE (%s) 
        ORDER BY geouid;
    """ % (cols, transportation, where)

    with db.DbConnect() as db_conn:
        try:
            db_conn.cur.execute(dm_query)
        except Exception as e:
            # TO DO: Improve error call for when pouid's don't exist in distance matrix
            poiuid = str(e).split('"')[1]
            abort(
                500,
                f'Access could not be measured because {poiuid} is not accessible via the selected mode of transportation (likely a remote community or no data). Please pan to a different view to remove {poiuid} from view.'
            )
        distance_matrix = pd.DataFrame(
            db_conn.cur.fetchall(),
            columns=[desc[0] for desc in db_conn.cur.description])
        # filtering in pandas because db call is giving strange error
        distance_matrix = distance_matrix[distance_matrix.geouid.isin(
            geouid_array)]
        geouid_filtered_array = np.array(distance_matrix['geouid'])
        distance_matrix = distance_matrix.drop('geouid', axis=1)

    # store the population demand geouids that are within the threshold
    ids_filtered = ", ".join(map(str, geouid_filtered_array))

    # get the population demand data now with the filtered geouids
    '''
    demand_filtered_query = """
        SELECT geouid, ST_AsText(ST_Transform(ST_Simplify(boundary,0.5), 4326)) as boundary, pop
        FROM demand
        WHERE geouid = ANY(ARRAY[%s]) 
        ORDER BY geouid;
    """ % (ids_filtered)

    demand_filtered_query = """
    SELECT geouid, ST_AsText(ST_Transform(boundary, 4326)) as boundary, pop
    FROM demand
    ORDER BY geouid;
    """

    with db.DbConnect() as db_conn:
        db_conn.cur.execute(demand_filtered_query)
        demand_filtered = pd.DataFrame(db_conn.cur.fetchall(), columns=[desc[0] for desc in db_conn.cur.description])
        demand_filtered = demand_filtered[demand_filtered.geouid.isin(geouid_filtered_array)]
        demand_filtered_array = np.array(demand_filtered['pop'])
    '''
    # filtering with pandas because db call is giving strange error
    demand_filtered = demand[demand.geouid.isin(geouid_filtered_array)]
    demand_filtered_array = np.array(demand_filtered[demand_col])

    # geouid_array_len = str(len(geouid_array))
    # demand_array_len = str(len(demand_filtered_array))
    # poi_array_len = str(len(poi_array))
    # supply_array_len = str(len(supply_array))
    # distance_matrix_col_len = str(list(distance_matrix.columns.values))
    # distance_matrix_row_len = str(len(distance_matrix.index))

    # for now just to confirm subsetting is correct and lengths match what is in the distance matrix
    # logger.info(f'ARRAY LENGTH OF DEMAND CENTROID COUNTS: {geouid_array_len}')
    # logger.info(f'ARRAY LENGTH OF DEMAND POPULATION COUNTS BASED ON FILTERED DISTANCE THRESHOLD: {demand_array_len}')
    # logger.info(f'ARRAY LENGTH OF SUPPLY SITE COUNTS: {poi_array_len}')
    # logger.info(f'ARRAY LENGTH OF SUPPLY COUNTS: {supply_array_len}')
    # logger.info(f'COLUMNS OF DISTANCE MATRIX: {distance_matrix_col_len}')
    # logger.info(f'ROWS OF DISTANCE MATRIX: {distance_matrix_row_len}')

    try:
        model = aceso.ThreeStepFCA(decay_function='negative_power',
                                   decay_params={'beta': beta})
        demand_filtered['scores'] = model.calculate_accessibility_scores(
            distance_matrix=distance_matrix,
            demand_array=demand_filtered_array,
            supply_array=supply_array,
            capacity_array=capacity_array)
        logger.info(
            f'Successfully calculated accessibility scores with beta: {beta}, transport: {transportation}, threshold: {threshold}'
        )
    except Exception as e:
        logger.error(f'Failed to calculate accessibility scores: {e}')
        return e

    return demand_filtered
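
One caveat on the queries above: xmin/ymin/xmax/ymax come from the client and are spliced into the SQL with % string formatting. A safer variant binds the numeric bounds as parameters; here is a sketch assuming a psycopg2 connection to the same demand table (connection string, bounds, and the 'pop' column are placeholders). Identifiers such as the demand column still cannot be bound this way and need whitelisting or psycopg2.sql.Identifier:

import psycopg2

conn = psycopg2.connect("dbname=access")  # placeholder connection string
cur = conn.cursor()

xmin, ymin, xmax, ymax = -123.3, 48.4, -123.2, 48.5  # placeholder bounds

demand_query = """
    SELECT geouid,
           ST_AsText(ST_Transform(ST_Simplify(boundary, 0.5), 4326)) AS boundary,
           pop
    FROM demand
    WHERE ST_Contains(
        ST_Transform(ST_MakeEnvelope(%s, %s, %s, %s, 4326), 3347),
        demand.centroid)
    ORDER BY geouid;
"""
# the bounds travel as bound parameters instead of being formatted into the string
cur.execute(demand_query, (xmin, ymin, xmax, ymax))
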