Example #1
 def hash_pastie(self):
     if self.pastie_content:
         try:
             self.md5 = hashlib.md5(self.pastie_content.encode('utf-8')).hexdigest()
             logger.debug('Pastie {site} {id} has md5: "{md5}"'.format(site=self.site.name, id=self.id, md5=self.md5))
         except Exception as e:
             logger.error('Pastie {site} {id} md5 problem: {e}'.format(site=self.site.name, id=self.id, e=e))
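
The md5 stored here is simply the hex digest of the UTF-8 encoded pastie body. A quick standalone illustration (standard library only; the empty-string digest below is the well-known MD5 constant):

import hashlib

# MD5 of the empty string; a pastie body is hashed the same way as above.
print(hashlib.md5(''.encode('utf-8')).hexdigest())  # d41d8cd98f00b204e9800998ecf8427e
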
Example #2
 def run(self):
     while not self.kill_received:
         sleep_time = random.randint(self.update_min, self.update_max)
         try:
             # grabs site from queue
             logger.info(
                 'Downloading list of new pastes from {name}. '
                 'Will check again in {time} seconds'.format(
                     name=self.name, time=sleep_time))
             # get the list of last pasties, but reverse it
             # so we first have the old entries and then the new ones
             last_pasties = self.get_last_pasties()
             if last_pasties:
                 for pastie in reversed(last_pasties):
                     queues[self.name].put(pastie)  # add pastie to queue
                 logger.info(
                     "Found {amount} new pasties for site {site}. "
                     "There are now {qsize} pasties to be downloaded.".format(
                         amount=len(last_pasties),
                         site=self.name,
                         qsize=queues[self.name].qsize()))
         # catch unknown errors
         except Exception as e:
             msg = 'Thread for {name} crashed unexpectedly, '\
                   'recovering...: {e}'.format(name=self.name, e=e)
             logger.error(msg)
             logger.debug(traceback.format_exc())
         time.sleep(sleep_time)
Example #3
 def run(self):
     self.db_conn = sqlite3.connect(self.filename)
     # create the db if it doesn't exist
     self.c = self.db_conn.cursor()
     try:
         # LATER maybe create a table per site. Lookups will be faster as less text-searching is needed
         self.c.execute('''
             CREATE TABLE IF NOT EXISTS pasties (
                 site TEXT,
                 id TEXT,
                 md5 TEXT,
                 url TEXT,
                 local_path TEXT,
                 timestamp DATE,
                 matches TEXT
                 )''')
         self.db_conn.commit()
     except sqlite3.DatabaseError as e:
         logger.error('Problem with the SQLite database {0}: {1}'.format(self.filename, e))
         return None
     # loop over the queue
     while not self.kill_received:
         try:
             # grabs pastie from queue
             pastie = self.queue.get()
             # add the pastie to the DB
             self.add_or_update(pastie)
             # signals to queue job is done
             self.queue.task_done()
         # catch unknown errors
         except Exception as e:
             logger.error("Thread for SQLite crashed unexpectedly, recovering...: {e}".format(e=e))
             logger.debug(traceback.format_exc())
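
The pasties table created above is later filled with named-parameter INSERTs and UPDATEs (see Examples #9 and #11). A minimal standalone sketch against an in-memory database, with made-up values, just to show the schema and the binding style:

import sqlite3
from datetime import datetime

conn = sqlite3.connect(':memory:')
c = conn.cursor()
c.execute('''CREATE TABLE IF NOT EXISTS pasties (
    site TEXT, id TEXT, md5 TEXT, url TEXT,
    local_path TEXT, timestamp DATE, matches TEXT)''')
row = {'site': 'example-site', 'id': 'abc123', 'md5': 'd41d8cd98f00b204e9800998ecf8427e',
       'url': 'https://example.invalid/abc123', 'local_path': '/tmp/abc123',
       'timestamp': datetime.now().isoformat(), 'matches': ''}
c.execute('INSERT INTO pasties VALUES (:site, :id, :md5, :url, :local_path, :timestamp, :matches)', row)
conn.commit()
print(c.execute('SELECT site, id, md5 FROM pasties').fetchall())
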
Example #4
def _get_data(key: int, file_type: str, url: str) -> dict:
    try:
        return_object = None

        file = '{0}/{1}_{2}.json'.format(config['data']['directory'], key,
                                         file_type)
        abs_path = os.path.abspath(file)

        if os.path.isfile(abs_path):
            logger.debug('Returning {0} {1} data from file {2}'.format(
                key, file_type, abs_path))
            with open(abs_path) as f:
                return_object = json.load(f)
                return return_object

        result = requests.get(url)

        if result.status_code == requests.codes.ok:
            return_object = result.json()
            with open(abs_path, 'w') as f:
                logger.debug('Saving {0} {1} data to file {2}'.format(
                    key, file_type, abs_path))
                json.dump(return_object, f)

            return return_object

        raise ValueError('API call returned status code {0}'.format(
            result.status_code))
    except Exception as ex:
        logger.exception(ex)
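
Example #4 is a read-through file cache: return the JSON from disk if a cached file exists, otherwise fetch the URL, persist the body next to the other cached files, and return it. A hedged usage sketch; the key, file type, and URL are invented for illustration, and the call relies on the module-level config, logger, and requests used above:

# Hypothetical call; the first invocation downloads and caches, later ones read the file.
data = _get_data(2024, 'standings', 'https://example.invalid/api/standings/2024')
if data is not None:
    print(sorted(data.keys()))
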
Example #5
    def __init__(self, name, download_url, archive_url, archive_regex):
        threading.Thread.__init__(self)
        self.kill_received = False

        self.name = name
        self.download_url = download_url
        self.archive_url = archive_url
        self.archive_regex = archive_regex
        try:
            self.ip_addr = yamlconfig['network']['ip']
            true_socket = socket.socket
            socket.socket = make_bound_socket(self.ip_addr)
        except Exception:
            logger.debug("Using default IP address")

        self.save_dir = yamlconfig['archive']['dir'] + os.sep + name
        self.archive_dir = yamlconfig['archive']['dir-all'] + os.sep + name
        if yamlconfig['archive']['save'] and not os.path.exists(self.save_dir):
            os.makedirs(self.save_dir)
        if yamlconfig['archive']['save-all'] and not os.path.exists(self.archive_dir):
            os.makedirs(self.archive_dir)
        self.archive_compress = yamlconfig['archive']['compress']
        self.update_max = 30  # TODO set by config file
        self.update_min = 10  # TODO set by config file
        self.pastie_classname = None
        self.seen_pasties = deque('', 1000)  # max number of pasties ids in memory
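
make_bound_socket is referenced above but not shown; the surrounding try block monkey-patches socket.socket so that outgoing connections are bound to the IP address from yamlconfig['network']['ip']. A plausible sketch of such a factory (an assumption, not necessarily the project's actual implementation):

import socket

def make_bound_socket(source_ip):
    """Return a socket factory that binds new sockets to source_ip (sketch only)."""
    true_socket = socket.socket

    def bound_socket(*args, **kwargs):
        sock = true_socket(*args, **kwargs)
        sock.bind((source_ip, 0))  # port 0: let the OS choose the source port
        return sock

    return bound_socket
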
Example #6
 def run(self):
     while not self.kill_received:
         mtime = os.stat(self.filename).st_mtime
         if mtime != self.last_mtime:
             logger.debug('Proxy configuration file changed. Reloading proxy list.')
             proxies_lock.acquire()
             load_proxies_from_file(self.filename)
             self.last_mtime = mtime
             proxies_lock.release()
Example #7
def load_proxies_from_file(filename):
    global proxies_list
    try:
        f = open(filename)
    except Exception as e:
        logger.error('Configuration problem: proxyfile "{file}" not found or not readable: {e}'.format(file=filename, e=e))
        return
    for line in f:
        line = line.strip()
        if line:  # LATER verify if the proxy line has the correct structure
            proxies_list.add(line)
    logger.debug('Found {count} proxies in file "{file}"'.format(file=filename, count=len(proxies_list)))
Example #8
def load_user_agents_from_file(filename):
    global user_agents_list
    try:
        f = open(filename)
    except Exception as e:
        logger.error('Configuration problem: user-agent-file "{file}" not found or not readable: {e}'.format(file=filename, e=e))
        return
    for line in f:
        line = line.strip()
        if line:
            user_agents_list.append(line)
    logger.debug('Found {count} UserAgents in file "{file}"'.format(file=filename, count=len(user_agents_list)))
Example #9
 def add(self, pastie):
     try:
         data = {'site': pastie.site.name,
                 'id': pastie.id,
                 'md5': pastie.md5,
                 'url': pastie.url,
                 'local_path': pastie.site.archive_dir + os.sep + pastie.site.pastie_id_to_filename(pastie.id),
                 'timestamp': datetime.now(),
                 'matches': pastie.matches_to_text()
                 }
         self.c.execute('INSERT INTO pasties VALUES (:site, :id, :md5, :url, :local_path, :timestamp, :matches)', data)
         self.db_conn.commit()
     except sqlite3.DatabaseError as e:
         logger.error('Cannot add pastie {site} {id} in the SQLite database: {error}'.format(site=pastie.site.name, id=pastie.id, error=e))
     else:
         logger.debug('Added pastie {site} {id} in the SQLite database.'.format(site=pastie.site.name, id=pastie.id))
Example #10
def get(file: str = None) -> dict:
    global _secrets

    if _secrets is None:
        if file is None:
            file = 'secrets.yaml'

        if os.path.isfile(file):
            logger = modules.logger.get()
            abs_path = os.path.abspath(file)
            logger.debug('reading secrets from {0}'.format(abs_path))
            with open(abs_path, 'r') as f:
                _secrets = yaml.safe_load(f.read())
        else:
            raise ValueError('{0} is not a file'.format(file))
        
    return _secrets
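
Usage is a lazy, cached read: the first call parses the YAML file and every later call returns the cached dict (the file argument is ignored once _secrets is populated). A short sketch; the 'mqtt' and 'user' keys are hypothetical:

secrets = get()                                  # parses ./secrets.yaml on first use
mqtt_user = secrets.get('mqtt', {}).get('user')  # hypothetical keys, for illustration only
assert get('other.yaml') is secrets              # later calls return the cached dict
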
Example #11
 def update(self, pastie):
     try:
         data = {'site': pastie.site.name,
                 'id': pastie.id,
                 'md5': pastie.md5,
                 'url': pastie.url,
                 'local_path': pastie.site.archive_dir + os.sep + pastie.site.pastie_id_to_filename(pastie.id),
                 'timestamp': datetime.now(),
                 'matches': pastie.matches_to_text()
                 }
         self.c.execute('''UPDATE pasties SET md5 = :md5,
                                         url = :url,
                                         local_path = :local_path,
                                         timestamp  = :timestamp,
                                         matches = :matches
                  WHERE site = :site AND id = :id''', data)
         self.db_conn.commit()
     except sqlite3.DatabaseError as e:
         logger.error('Cannot update pastie {site} {id} in the SQLite database: {error}'.format(site=pastie.site.name, id=pastie.id, error=e))
     else:
         logger.debug('Updated pastie {site} {id} in the SQLite database.'.format(site=pastie.site.name, id=pastie.id))
Example #12
 def run(self):
     while not self.kill_received:
         try:
             # grabs pastie from queue
             pastie = self.queue.get()
             pastie_content = pastie.fetch_and_process_pastie()
             logger.debug("Queue {name} size: {size}".format(
                 size=self.queue.qsize(), name=self.name))
             if pastie_content:
                 logger.debug(
                     "Saved new pastie from {0} "
                     "with id {1}".format(self.name, pastie.id))
             else:
                 # pastie already downloaded OR error ?
                 pass
             # signals to queue job is done
             self.queue.task_done()
         # catch unknown errors
         except Exception as e:
             msg = "ThreadPasties for {name} crashed unexpectedly, "\
                   "recovering...: {e}".format(name=self.name, e=e)
             logger.error(msg)
             logger.debug(traceback.format_exc())
Example #13
    def Open(self, file_object, wal_file_object=None):
        """Opens a SQLite database file.

        Since pysqlite cannot read directly from a file-like object, a temporary
        copy of the file is made. After creating a copy of the database file, this
        function sets up a connection with the database and determines the names
        of the tables.

        Args:
            file_object (dfvfs.FileIO): file-like object.
            wal_file_object (Optional[dfvfs.FileIO]): file-like object for the
            Write-Ahead Log (WAL) file.

        Raises:
            IOError: if the file-like object cannot be read.
            OSError: if the file-like object cannot be read.
            sqlite3.DatabaseError: if the database cannot be parsed.
            ValueError: if the file-like object is missing.
        """
        if not file_object:
            raise ValueError('Missing file object.')

        # TODO: Current design copies the entire file into a buffer
        # that is parsed by each SQLite parser. This is not very efficient,
        # especially when many SQLite parsers are run against a relatively
        # large SQLite database. This temporary file that is created should
        # be usable by all SQLite parsers so the file should only be read
        # once in memory and then deleted when all SQLite parsers have completed.

        # TODO: Change this into a proper implementation using APSW
        # and virtual filesystems when that will be available.
        # Info: http://apidoc.apsw.googlecode.com/hg/vfs.html#vfs and
        # http://apidoc.apsw.googlecode.com/hg/example.html#example-vfs
        # Until then, just copy the file into a tempfile and parse it.
        temporary_file = tempfile.NamedTemporaryFile(
            delete=False, dir=self._temporary_directory)

        try:
            self._CopyFileObjectToTemporaryFile(file_object, temporary_file)
            self._temp_db_file_path = temporary_file.name

        except IOError:
            os.remove(temporary_file.name)
            raise

        finally:
            temporary_file.close()

        if wal_file_object:
            # Create WAL file using same filename so it is available for
            # sqlite3.connect()
            temporary_filename = '{0:s}-wal'.format(self._temp_db_file_path)
            temporary_file = open(temporary_filename, 'wb')
            try:
                self._CopyFileObjectToTemporaryFile(wal_file_object,
                                                    temporary_file)
                self._temp_wal_file_path = temporary_filename

            except IOError:
                os.remove(temporary_filename)
                raise

            finally:
                temporary_file.close()

        self._database = sqlite3.connect(self._temp_db_file_path)
        try:
            self._database.row_factory = sqlite3.Row
            cursor = self._database.cursor()

            sql_results = cursor.execute(self.SCHEMA_QUERY)

            self.schema = {
                table_name: ' '.join(query.split())
                for table_name, query in sql_results
            }

            for table_name in self.schema.keys():
                self.columns_per_table.setdefault(table_name, [])
                pragma_results = cursor.execute(
                    'PRAGMA table_info({0:s})'.format(table_name))

                for pragma_result in pragma_results:
                    self.columns_per_table[table_name].append(
                        pragma_result['name'])

        except sqlite3.DatabaseError as exception:
            self._database.close()
            self._database = None

            os.remove(self._temp_db_file_path)
            self._temp_db_file_path = ''
            if self._temp_wal_file_path:
                os.remove(self._temp_wal_file_path)
                self._temp_wal_file_path = ''

            logger.debug(
                'Unable to parse SQLite database: {0:s} with error: {1!s}'.
                format(self._filename, exception))
            raise

        self._is_open = True
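
The core trick in Open(), copying a file-like object into a named temporary file so that sqlite3.connect() can open it and then reading the schema from sqlite_master, also works on its own. A minimal, standard-library-only sketch of the same approach (not the project's actual helper); the caller remains responsible for closing the connection and removing the temporary file, mirroring the cleanup done above:

import os
import shutil
import sqlite3
import tempfile

def open_sqlite_from_file_object(file_object):
    """Copy a binary file-like object to a temp file and open it with sqlite3 (sketch)."""
    temporary_file = tempfile.NamedTemporaryFile(delete=False, suffix='.sqlite')
    try:
        shutil.copyfileobj(file_object, temporary_file)
    finally:
        temporary_file.close()
    try:
        connection = sqlite3.connect(temporary_file.name)
        tables = [row[0] for row in connection.execute(
            "SELECT name FROM sqlite_master WHERE type = 'table'")]
        return connection, tables, temporary_file.name
    except sqlite3.DatabaseError:
        os.remove(temporary_file.name)
        raise
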
Example #14
def main():

    signal.signal(signal.SIGINT, signal_handle)
    signal.signal(signal.SIGTERM, signal_handle)

    parser = argparse.ArgumentParser()
    parser.add_argument("name")
    parser.add_argument("--verbose", action="store_true")
    parser.add_argument("--mqtt-host")
    parser.add_argument("--mqtt-port")
    parser.add_argument("--mqtt-user")
    parser.add_argument("--mqtt-pass")
    parser.add_argument("--mqtt-topic-req")
    parser.add_argument("--mqtt-topic-res")

    args = parser.parse_args()
    name = args.name

    load_dotenv()

    log_level = os.getenv("LOG_LEVEL", "info").lower()
    setLevel(args.verbose or log_level == 'debug')

    mqtt_host = args.mqtt_host or os.getenv("MQTT_HOST")
    mqtt_port = args.mqtt_port or os.getenv("MQTT_PORT")
    mqtt_user = args.mqtt_user or os.getenv("MQTT_USER")
    mqtt_pass = args.mqtt_pass or os.getenv("MQTT_PASS")

    mqtt_topic_req = args.mqtt_topic_req or os.getenv('MQTT_TOPIC_REQ')
    mqtt_topic_res = args.mqtt_topic_res or os.getenv('MQTT_TOPIC_RES')

    topic_req = f"{mqtt_topic_req}/{name}"
    topic_res = f"{mqtt_topic_res}/{name}"

    logger.debug("Starting MQTT")

    nextConnectionAt = datetime.now()
    connected = False

    HOME = os.getenv("HOME")

    pattern = re.compile(r'^Modify: (.*)\n')

    while True:

        now = datetime.now()

        if not connected and now > nextConnectionAt:
            try:

                @subscribe(
                    topic_req, {
                        "host": mqtt_host,
                        "port": int(mqtt_port),
                        "user": mqtt_user,
                        "pass": mqtt_pass
                    })
                def message_handle(payload, emit):

                    try:
                        if 'id' not in payload:
                            raise Exception("request id is not present")

                        if 'command' not in payload:
                            raise Exception("command is not present")

                        command = payload['command']

                        if command == 'status':
                            settings = read_config()
                            logger.info("settings: [%s]", settings)

                            found = glob.glob(
                                f"{HOME}/.pm2/pids/hackrf-control-*")
                            status = 'stopped'
                            uptime = None

                            if found:
                                status = 'online'

                                with open(found[0]) as fd:
                                    pid = fd.read()

                                out = subprocess.check_output(
                                    f"stat /proc/{pid} | grep Modify",
                                    shell=True,
                                    encoding="utf-8")
                                res = pattern.findall(out)

                                uptime = res[0] if res else None

                            emit(
                                topic_res, {
                                    'id': payload['id'],
                                    'settings': settings,
                                    'process': {
                                        'status': status,
                                        'uptime': uptime
                                    }
                                })

                        elif command == 'logs':

                            lines = payload.get('lines', 10)
                            out = subprocess.check_output(
                                f"tail {HOME}/.pm2/logs/hackrf-control-error.log -n {lines}",
                                shell=True,
                                encoding="utf-8")

                            data = []

                            for x in out.split('\n'):

                                created_at = x[0:23]

                                pos = x.find(" ", 24)
                                level = x[23:pos]

                                pos = x.find(" ", pos + 1)
                                content = x[pos:]

                                data.append({
                                    'created_at': created_at,
                                    'level': level,
                                    'content': content
                                })

                            emit(topic_res, {
                                'id': payload['id'],
                                'data': data
                            })

                        elif command == 'config':

                            if 'settings' not in payload:
                                raise Exception("settings is not present")

                            settings = payload['settings']
                            settings['_waveform'] = 'waveform' in settings

                            logger.info("settings: [%s]", settings)
                            save_config(settings)

                            emit(topic_res, {'id': payload['id']})

                        else:
                            emit(topic_res, {'id': payload['id']})

                    except Exception as ex:
                        logger.warning("%s", payload)
                        logger.error(ex)

                        emit(topic_res, {'id': payload['id'], 'error': ex})

                logger.info("mqtt connected")
                connected = True

            except Exception as ex:
                logger.error(ex)

                connected = False
                nextConnectionAt = now + timedelta(seconds=10)

                logger.debug("Reconnecting to MQTT in 10 seconds")

        time.sleep(0.1)
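
The @subscribe decorator and the emit callback used above come from a project-specific helper rather than a public library. A rough, hedged equivalent of the same request/response wiring with paho-mqtt (an assumption about intent, not a drop-in replacement; paho-mqtt 2.x additionally expects a CallbackAPIVersion argument to Client):

import json
import paho.mqtt.client as mqtt

def start_mqtt(host, port, user, password, topic_req, topic_res, handle):
    """Sketch: subscribe to topic_req and answer on topic_res (paho-mqtt 1.x style)."""
    client = mqtt.Client()
    if user:
        client.username_pw_set(user, password)

    def on_connect(client, userdata, flags, rc):
        client.subscribe(topic_req)

    def on_message(client, userdata, msg):
        payload = json.loads(msg.payload.decode('utf-8'))

        def emit(topic, body):
            client.publish(topic, json.dumps(body, default=str))

        handle(payload, emit)  # e.g. a message_handle(payload, emit) like the one above

    client.on_connect = on_connect
    client.on_message = on_message
    client.connect(host, int(port))
    client.loop_forever()
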
Example #15
    def Connect(self, configuration, source_path_spec, knowledge_base):
        """Connector to connect to ESE database modules.

        Args:
            configuration: configuration values.
            source_path_spec (dfvfs.PathSpec): path specification of the source file.
            knowledge_base (KnowledgeBase): knowledge base.
        """
        if source_path_spec.parent.type_indicator != dfvfs_definitions.TYPE_INDICATOR_TSK_PARTITION:
            par_id = configuration.partition_list['p1']
        else:
            par_id = configuration.partition_list[getattr(
                source_path_spec.parent, 'location', None)[1:]]

        if par_id is None:
            return False

        print('[MODULE]: ESE Database Analyzer Start! - partition ID(%s)' %
              par_id)

        # Load Schema
        if not self.LoadSchemaFromYaml(
                '../modules/schema/esedb/lv1_os_win_esedb.yaml'):
            logger.error('cannot load schema from yaml: {0:s}'.format(
                self.TABLE_NAME))
            return False

        # Search artifact paths
        paths = self._schema['Paths']
        separator = self._schema['Path_Separator']
        environment_variables = knowledge_base.GetEnvironmentVariables()

        find_specs = self.BuildFindSpecs(paths, separator,
                                         environment_variables)
        if len(find_specs) < 1:
            return False

        esedb_file = pyesedb.file()
        for spec in find_specs:
            file_object = self.LoadTargetFileToMemory(
                source_path_spec=source_path_spec,
                configuration=configuration,
                file_spec=spec)
            try:
                esedb_file.open_file_object(file_object)
            except IOError as exception:
                logger.debug(
                    '[{0:s}] unable to open file with error: {1!s}'.format(
                        self.NAME, exception))
                return

            try:
                esedb_parsers = esedb_parser.ESEDBParser.GetESEDBParserObjects(
                )
                table_names = frozenset(
                    esedb_parser.ESEDBParser.GetTableNames(esedb_file))

                for parser in esedb_parsers.values():

                    if not parser.required_tables.issubset(table_names):
                        continue

                    try:
                        parser.Process(database=esedb_file)

                        info = tuple([
                            par_id, configuration.case_id,
                            configuration.evidence_id
                        ])
                        # internet explorer
                        if 'Containers' in parser.required_tables:

                            if len(parser.GetHistoryRecords) > 0:
                                table_name = 'lv1_os_win_esedb_ie_history'
                                if not configuration.cursor.check_table_exist(
                                        table_name):
                                    ret = self.CreateTableWithSchema(
                                        configuration.cursor,
                                        table_name=table_name,
                                        schema=tuple(
                                            ['par_id', 'case_id', 'evd_id']) +
                                        parser.GetHistorySchema)
                                    if not ret:
                                        logger.error(
                                            'cannot create database table name: {0:s}'
                                            .format(table_name))
                                        return False

                                for record in parser.GetHistoryRecords:
                                    tmp_record = list(record)
                                    tmp_record[17] = tmp_record[17].replace(
                                        '"', '""')
                                    tmp_record[17] = tmp_record[17].replace(
                                        '\'', '\'\'')
                                    result = info + tuple(tmp_record)
                                    query = self.InsertQueryBuilder(
                                        table_name=table_name,
                                        schema=tuple(
                                            ['par_id', 'case_id', 'evd_id']) +
                                        parser.GetHistorySchema,
                                        data=result)
                                    try:
                                        configuration.cursor.execute_query(
                                            query)
                                    except Exception as exception:
                                        logger.error(
                                            'database execution failed: {0!s}'.
                                            format(exception))

                            if len(parser.GetContentRecords) > 0:
                                table_name = 'lv1_os_win_esedb_ie_content'
                                if not configuration.cursor.check_table_exist(
                                        table_name):
                                    ret = self.CreateTableWithSchema(
                                        configuration.cursor,
                                        table_name=table_name,
                                        schema=tuple(
                                            ['par_id', 'case_id', 'evd_id']) +
                                        parser.GetContentSchema)
                                    if not ret:
                                        logger.error(
                                            'cannot create database table name: {0:s}'
                                            .format(table_name))
                                        return False

                                for record in parser.GetContentRecords:
                                    tmp_record = list(record)
                                    tmp_record[17] = tmp_record[17].replace(
                                        '"', '""')
                                    tmp_record[17] = tmp_record[17].replace(
                                        '\'', '\'\'')
                                    result = info + tuple(tmp_record)
                                    query = self.InsertQueryBuilder(
                                        table_name=table_name,
                                        schema=tuple(
                                            ['par_id', 'case_id', 'evd_id']) +
                                        parser.GetContentSchema,
                                        data=result)
                                    try:
                                        configuration.cursor.execute_query(
                                            query)
                                    except Exception as exception:
                                        logger.error(
                                            'database execution failed: {0!s}'.
                                            format(exception))

                            if len(parser.GetCookiesRecords) > 0:
                                table_name = 'lv1_os_win_esedb_ie_cookies'
                                if not configuration.cursor.check_table_exist(
                                        table_name):
                                    ret = self.CreateTableWithSchema(
                                        configuration.cursor,
                                        table_name=table_name,
                                        schema=tuple(
                                            ['par_id', 'case_id', 'evd_id']) +
                                        parser.GetCookiesSchema)
                                    if not ret:
                                        logger.error(
                                            'cannot create database table name: {0:s}'
                                            .format(table_name))
                                        return False

                                for record in parser.GetCookiesRecords:
                                    tmp_record = list(record)
                                    tmp_record[17] = tmp_record[17].replace(
                                        '"', '""')
                                    tmp_record[17] = tmp_record[17].replace(
                                        '\'', '\'\'')
                                    result = info + tuple(tmp_record)
                                    query = self.InsertQueryBuilder(
                                        table_name=table_name,
                                        schema=tuple(
                                            ['par_id', 'case_id', 'evd_id']) +
                                        parser.GetCookiesSchema,
                                        data=result)
                                    try:
                                        configuration.cursor.execute_query(
                                            query)
                                    except Exception as exception:
                                        logger.error(
                                            'database execution failed: {0!s}'.
                                            format(exception))

                            if len(parser.GetDownloadRecords) > 0:
                                table_name = 'lv1_os_win_esedb_ie_download'
                                if not configuration.cursor.check_table_exist(
                                        table_name):
                                    ret = self.CreateTableWithSchema(
                                        configuration.cursor,
                                        table_name=table_name,
                                        schema=tuple(
                                            ['par_id', 'case_id', 'evd_id']) +
                                        parser.GetDownloadSchema)
                                    if not ret:
                                        logger.error(
                                            'cannot create database table name: {0:s}'
                                            .format(table_name))
                                        return False

                                for record in parser.GetDownloadRecords:
                                    tmp_record = list(record)
                                    tmp_record[17] = tmp_record[17].replace(
                                        '"', '""')
                                    tmp_record[17] = tmp_record[17].replace(
                                        '\'', '\'\'')
                                    result = info + tuple(tmp_record)
                                    query = self.InsertQueryBuilder(
                                        table_name=table_name,
                                        schema=tuple(
                                            ['par_id', 'case_id', 'evd_id']) +
                                        parser.GetDownloadSchema,
                                        data=result)
                                    try:
                                        configuration.cursor.execute_query(
                                            query)
                                    except Exception as exception:
                                        logger.error(
                                            'database execution failed: {0!s}'.
                                            format(exception))

                    except errors.UnableToParseFile as exception:
                        logger.debug(
                            '[{0:s}] unable to parse file with error: {1!s}'.
                            format(self.NAME, exception))

            finally:
                esedb_file.close()
                file_object.close()
Example #16
                def message_handle(payload, emit):

                    try:
                        if 'id' not in payload:
                            raise Exception("request id is not present")

                        if 'command' not in payload:
                            raise Exception("command is not present")

                        command = payload['command']

                        if command == 'config':

                            logger.debug(
                                "Config Temperature %s", {
                                    'id': payload['id'],
                                    'status': 'Config max temperature',
                                    'data': {
                                        'temp': payload['temp']
                                    }
                                })

                            emit(
                                topic_res, {
                                    'id': payload['id'],
                                    'status': 'Config max temperature',
                                    'data': {
                                        'temp': payload['temp']
                                    }
                                })

                            try:
                                newTemp = str(payload['temp'])
                                data = {"temp_max": newTemp}

                                save_config(data)
                                #updateTemperature('.local/config/hackrf-sensors.json', 0, '{"temp_max":' + newTemp + "}")

                            except Exception as ex:
                                logger.warning("%s", payload)
                                logger.error(ex)

                                emit(topic_res, {
                                    'id': payload['id'],
                                    'error': ex
                                })

                        elif command == 'status':

                            # TODO: integrate the process that reads the machine temperature
                            # and pass it as a variable to emit(topic_res)

                            logger.debug("Getting sensors data")

                            emit(
                                topic_res, {
                                    'id': payload['id'],
                                    'status': 'Data sensors found',
                                    'data': {
                                        'temp': payload['temp']
                                    }
                                })

                        else:
                            emit(topic_res, {'id': payload['id']})

                    except Exception as ex:
                        logger.warning("%s", payload)
                        logger.error(ex)

                        emit(topic_res, {'id': payload['id'], 'error': ex})
Example #17
    def create_host_directories_and_tar(self):
        """Main packaging function

        Works in 3 parts:
          1. Validate app data and configurations
          2. Create tmp directories for each host with loaded apps and manifest
          3. Package (tar) up host tmp directories for distribution
        """

        Helpers.delete_path(self.tmp_folder)
        Helpers.create_path(self.tars_folder, True)

        self.repo_manager.set_commit_id()
        master_commit_log = self.repo_manager.get_commit_log()

        errors_found = False
        changes_found = False

        for host in self.appetite_hosts:  # pylint: disable=too-many-nested-blocks
            # Per host build apps folder and tar up based on class
            hostname = host.hostname
            apps = host.get_apps(self.args.refname)
            tarname = host.tarname

            apps = sorted(apps, key=lambda app: app.commit_id)

            tmp_hostname_dir = os.path.join(self.hosts_folder, hostname)
            tmp_hostname_meta = os.path.join(tmp_hostname_dir, Consts.META_DIR)

            apps_meta = []

            if len(apps) < 1:
                Logger.warn("Host with no apps", hostname=hostname)
                continue
            # Parse the remote meta file from the host
            # This file might not exist
            remote_meta_file = host.local_meta_file
            remote_metas_loaded = False
            if os.path.exists(remote_meta_file):
                try:
                    with open(remote_meta_file) as remote_data_file:
                        remote_metas_master = json.load(remote_data_file)
                        remote_metas_content = remote_metas_master['content'] \
                            if 'content' in remote_metas_master else remote_metas_master
                        remote_metas = [
                            AppetiteHost.create_app_from_object(
                                self.repo_manager, self.deployment_manager,
                                meta_data)
                            for meta_data in remote_metas_content
                        ]

                        remote_metas_loaded = True
                except Exception as exception:
                    Logger.error("Problems loading meta file",
                                 error=exception.message,
                                 path=remote_meta_file)
            elif not self.args.dryrun:
                Logger.warn("Local version of remote meta not found",
                            file=remote_meta_file)

            ordered_unique_apps = sorted(list(set(apps)),
                                         key=lambda single_app:
                                         (single_app.name, single_app.
                                          commit_id, single_app.method_name))

            for iapp in ordered_unique_apps:
                app_occurrences = apps.count(iapp)
                if app_occurrences > 1:
                    Logger.warn("Dup app found",
                                host=host.hostname,
                                app_info=iapp.app_key,
                                occurrences=app_occurrences)

            # Validate app data and configurations

            # Go through the apps and checks to see if there are any errors
            # This is where the remote meta is compared to the newly generated
            # lists of apps from the manifest
            for app in apps:
                raw_app_path = os.path.join(self.apps_folder, app.name)

                # Check the commit Id for problems
                if app.commit_id:
                    self.repo_manager.set_commit_id(app.commit_id)
                else:  # pylint: disable=else-if-used
                    if self.args.strict_commitids:
                        Logger.error("Application with missing commit Id",
                                     hostname=hostname,
                                     app=app.name)
                        errors_found = True
                        continue
                    else:
                        app._commit_id = master_commit_log['app_commit_id']  # pylint: disable=protected-access
                        self.repo_manager.set_commit_id(app.commit_id)

                # Checks if app listed in the manifest
                # exists with the correct commit id
                if Helpers.check_path(raw_app_path):
                    meta_to_append = None
                    app.refresh_version_info(self.args.refname,
                                             Consts.META_APP_UNCHANGED)
                    remote_meta = None

                    # Check to see what has changed
                    if remote_metas_loaded:
                        # Searches remote meta to see if application already exists
                        remote_meta = next((rmeta for rmeta in remote_metas
                                            if app.check_names(rmeta)), None)

                        if remote_meta:
                            # If app does exist on system, have the commit ids changed
                            if remote_meta.commit_id != app.commit_id:
                                meta_to_append = app.set_status_changed()
                            else:
                                # meta has not changed so use existing meta
                                meta_to_append = app.clone
                                meta_to_append.update_app_version(app)

                            # to track if an app is removed from the remote meta
                            remote_metas.remove(remote_meta)

                    if not meta_to_append:
                        # There is no remote meta so all files should be added
                        meta_to_append = app.set_status_added()

                    if remote_meta and meta_to_append:
                        meta_outcome = Helpers.debug_app_versions(
                            meta_to_append, remote_meta, meta_to_append.status)
                        Logger.debug("Check meta logic", outcome=meta_outcome)

                        if meta_to_append.has_changed:
                            Logger.info("App change", logic=meta_outcome)

                    apps_meta.append(meta_to_append)
                else:
                    Logger.error("Missing application",
                                 hostname=hostname,
                                 app=app.name,
                                 path=raw_app_path)
                    continue

            if remote_metas_loaded and len(remote_metas) > 0:
                # Any apps left in the remote meta do not exist in the current
                # manifest and should be deleted
                delete_list = []
                for deleted_app in remote_metas:
                    if deleted_app.method_info:
                        deleted_app.set_status_deleted()
                        # Added logic check to catch method changes
                        added_app_found = next((
                            app for app in apps_meta
                            if app.status == Consts.META_APP_ADDED and app.name
                            == deleted_app.name and app.method_info['path'] ==
                            deleted_app.method_info['path']), None)
                        if added_app_found:
                            added_app_found.set_status_changed()
                        else:
                            delete_list.append(deleted_app)
                    else:
                        Logger.error(
                            "Problems with method info for deleted app.",
                            hostname=hostname,
                            app=deleted_app.name)

                apps_meta += delete_list

            # Only do something if there has been a change
            if len([app for app in apps_meta if not app.is_unchanged]) < 1:
                continue

            # No point continuing if there is no connection to the host
            if not self.check_host_connection(host):
                continue

            # Clean command lines for auth params
            # This data is ingested so creds should be removed
            # apps_meta = [updated_app.clone for updated_app in apps_meta]

            if not self.args.disable_logging:
                for updated_app in apps_meta:
                    Logger.log_event(updated_app.to_dict)

            # Applications that actually needs to be updated
            tar_apps = sorted([
                updated_app for updated_app in apps_meta if updated_app.updated
            ],
                              key=lambda tar_app: tar_app.app)

            use_templating = self.template_values and self.args.templating

            # Checking will allow templating otherwise will skip steps
            Helpers.create_path(
                os.path.join(tmp_hostname_meta, Consts.HOST_LOGS_FOLDER_NAME),
                True)
            if len(tar_apps) > 0:
                # All error checks have been done above, build out
                # the hosts directory and tar up
                for updated_app in tar_apps:
                    app_path = os.path.join(tmp_hostname_dir,
                                            updated_app.method_info['path'])
                    Helpers.create_path(app_path, True)
                    raw_app_path = os.path.join(self.apps_folder,
                                                updated_app.name)

                    self.repo_manager.set_commit_id(updated_app.commit_id)

                    if updated_app.update_method_is_copy:
                        app_dest = os.path.join(app_path,
                                                updated_app.app_clean)
                    else:
                        app_dest = app_path

                    copy_tree(raw_app_path, app_dest)

                    lookups_inclusion_location = os.path.join(
                        app_dest, self.deployment_manager.inclusion_filename)

                    ignore_dir = os.path.join(app_dest, Consts.TMP_IGNORE_DIR)

                    # Ignore files/folders set in the global configurations
                    if self.args.install_ignore:
                        content_ignored_results = Helpers.move_regexed_files(
                            self.args.install_ignore.split(';'), app_dest,
                            ignore_dir)
                        files_included = content_ignored_results['files_moved']

                        if len(files_included) > 0:
                            Logger.error(
                                "Globally these files should not exist in the App. "
                                "The files have been removed from the install.",
                                files=files_included,
                                hostname=hostname,
                                app=updated_app.name)

                        # Users should not have the capability to include files from the
                        # global ignore.
                        Helpers.delete_path(ignore_dir)

                    # Defined folders/files are to move out of application.
                    # This is defined in the deploymentmethods.conf
                    # If an app is installed for the first time, all files should be included
                    if 'install_ignore' in updated_app.method_info and not updated_app.is_added:
                        Helpers.move_regexed_files(
                            updated_app.method_info['install_ignore'],
                            app_dest, ignore_dir)

                        # If there is a inclusion file, include files back into app.
                        # This is defined on a per app basis
                        if os.path.isfile(lookups_inclusion_location):
                            with open(lookups_inclusion_location, "r") as f:
                                lines = [l.strip() for l in f.readlines()]

                            lookup_inclusion_results = Helpers.move_regexed_files(
                                lines, ignore_dir, app_dest)

                            if lookup_inclusion_results['errors_found']:
                                Logger.warn(
                                    "Lookup inclusion error found",
                                    paths=lookup_inclusion_results[
                                        'path_errors'],
                                    hostname=hostname,
                                    app=updated_app.name,
                                    todo="Remove file/path from inclusion..")
                                # Problem with host inclusion,
                                # move to next host
                                continue

                            updated_app.method_info['inclusions'] = \
                                lookup_inclusion_results['files_moved']

                            # Update objects with inclusions
                            updated_app.copy_value_to_method_info(
                                'inclusions', apps_meta)
                            os.remove(lookups_inclusion_location)

                    Helpers.delete_path(ignore_dir)

                    if use_templating and not updated_app.method_info[
                            'skip_templating']:
                        # Can template based on vars from templated
                        # values, hosts vars and app vars
                        Helpers.template_directory(app_dest, [
                            self.template_values, host.to_dict,
                            updated_app.to_dict
                        ])

                    # Should only change access and create version file if a whole app is copied
                    if updated_app.update_method_is_copy:
                        for host_path, host_dir, host_files in os.walk(
                                app_dest):  # pylint: disable=unused-variable
                            for host_file in host_files:
                                # Splunk apps can have active binaries in multiple languages
                                # This is a catch all to make sure apps have all the required
                                # permissions.
                                chmod = 0755
                                os.chmod(os.path.join(host_path, host_file),
                                         chmod)

                        if not updated_app.method_info['no_appetite_changes']:
                            with open(
                                    os.path.join(
                                        app_dest,
                                        Helpers.get_app_version_filename()),
                                    "w") as f:
                                f.write(updated_app.to_json)

                            AppVersioning.create_app_version(
                                app_dest,
                                updated_app.commit_log['app_abbrev_commit_id'])
            apps_distro = Helpers.content_wrapper(apps_meta,
                                                  Consts.META_CURRENT,
                                                  hostname, self.track)

            # Meta file used as source of truth on instance
            master_meta = self.create_meta_files(tmp_hostname_meta, '',
                                                 apps_distro)
            # can be used to update and test manifest changes locally
            if self.args.dryrun:
                Helpers.create_path(host.local_meta_file)
                shutil.copy(master_meta, host.local_meta_file)

            # Always want clean logs ingested
            selected_apps = Helpers.select_and_update_apps(
                apps_meta, Consts.META_CURRENT, False)

            self.create_meta_log(tmp_hostname_meta, '', selected_apps,
                                 Helpers.get_utc())

            host.updates = Helpers.content_process(apps_meta,
                                                   Consts.META_UPDATED,
                                                   hostname, self.track, True)

            # Create the meta change file
            self.create_meta_files(tmp_hostname_meta, '_update',
                                   Helpers.content_convert(host.updates))

            # Clean updates file for logging
            selected_apps = Helpers.select_and_update_apps(
                apps_meta, Consts.META_UPDATED, True)

            self.create_meta_log(tmp_hostname_meta, '_update', selected_apps,
                                 Helpers.get_utc())

            Logger.info("Changes found",
                        updates=Helpers.content_wrapper(
                            apps_meta, Consts.META_UPDATED, hostname,
                            self.track, True))

            # Package (tar) up host tmp directories for distribution
            tar = tarfile.open(
                os.path.join(self.tars_folder, "%s.tar.gz" % tarname), "w:gz")
            tar.add(tmp_hostname_dir, arcname=os.path.basename(self.base_name))
            tar.close()

            changes_found = True

        if errors_found:
            sys.exit(1)

        self.repo_manager.set_commit_id()

        return changes_found
Example #18
def download_url(url, data=None, cookie=None, loop_client=0, loop_server=0):
    # Client errors (40x): if more than 5 recursions, give up on URL (used for the 404 case)
    if loop_client >= retries_client:
        return None, None
    # Server errors (50x): if more than 100 recursions, give up on URL
    if loop_server >= retries_server:
        return None, None
    try:
        opener = None
        # Random Proxy if set in config
        random_proxy = get_random_proxy()
        if random_proxy:
            proxyh = urllib2.ProxyHandler({'http': random_proxy})
            opener = urllib2.build_opener(proxyh, NoRedirectHandler())
        # We need to create an opener if it didn't exist yet
        if not opener:
            opener = urllib2.build_opener(NoRedirectHandler())
        # Random User-Agent if set in config
        user_agent = get_random_user_agent()
        opener.addheaders = [('Accept-Charset', 'utf-8')]
        if user_agent:
            opener.addheaders.append(('User-Agent', user_agent))
        if cookie:
            opener.addheaders.append(('Cookie', cookie))
        logger.debug(
            'Downloading url: {url} with proxy: {proxy} and user-agent: {ua}'.format(
                url=url, proxy=random_proxy, ua=user_agent))
        if data:
            response = opener.open(url, data)
        else:
            response = opener.open(url)
        htmlPage = unicode(response.read(), errors='replace')
        return htmlPage, response.headers
    except urllib2.HTTPError as e:
        failed_proxy(random_proxy)
        logger.warning("!!Proxy error on {0}.".format(url))
        if 404 == e.code:
            htmlPage = e.read()
            logger.warning("404 from proxy received for {url}. Waiting 1 minute".format(url=url))
            time.sleep(60)
            loop_client += 1
            logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_client, total=retries_client, url=url))
            return download_url(url, loop_client=loop_client)
        if 500 == e.code:
            htmlPage = e.read()
            logger.warning("500 from proxy received for {url}. Waiting 1 minute".format(url=url))
            time.sleep(60)
            loop_server += 1
            logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_server, total=retries_server, url=url))
            return download_url(url, loop_server=loop_server)
        if 504 == e.code:
            htmlPage = e.read()
            logger.warning("504 from proxy received for {url}. Waiting 1 minute".format(url=url))
            time.sleep(60)
            loop_server += 1
            logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_server, total=retries_server, url=url))
            return download_url(url, loop_server=loop_server)
        if 502 == e.code:
            htmlPage = e.read()
            logger.warning("502 from proxy received for {url}. Waiting 1 minute".format(url=url))
            time.sleep(60)
            loop_server += 1
            logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_server, total=retries_server, url=url))
            return download_url(url, loop_server=loop_server)
        if 403 == e.code:
            htmlPage = e.read()
            if 'Please slow down' in htmlPage or 'has temporarily blocked your computer' in htmlPage or 'blocked' in htmlPage:
                logger.warning("Slow down message received for {url}. Waiting 1 minute".format(url=url))
                time.sleep(60)
                return download_url(url)
        logger.warning("ERROR: HTTP Error ##### {e} ######################## {url}".format(e=e, url=url))
        return None, None
    except urllib2.URLError as e:
        logger.debug("ERROR: URL Error ##### {e} ######################## {url}".format(e=e, url=url))
        if random_proxy:  # remove proxy from the list if needed
            failed_proxy(random_proxy)
            logger.warning("Failed to download the page because of proxy error {0} trying again.".format(url))
            loop_server += 1
            logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_server, total=retries_server, url=url))
            return download_url(url, loop_server=loop_server)
        if 'timed out' in e.reason:
            logger.warning("Timed out or slow down for {url}. Waiting 1 minute".format(url=url))
            loop_server += 1
            logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_server, total=retries_server, url=url))
            time.sleep(60)
            return download_url(url, loop_server=loop_server)
        return None, None
    except socket.timeout:
        logger.debug("ERROR: timeout ############################# " + url)
        if random_proxy:  # remove proxy from the list if needed
            failed_proxy(random_proxy)
            logger.warning("Failed to download the page because of socket error {0} trying again.".format(url))
            loop_server += 1
            logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_server, total=retries_server, url=url))
            return download_url(url, loop_server=loop_server)
        return None, None
    except Exception as e:
        if random_proxy:  # remove proxy from the list if needed
            failed_proxy(random_proxy)
        logger.warning("Failed to download {0} because of an unexpected error ({1}), trying again.".format(url, e))
        loop_server += 1
        logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_server, total=retries_server, url=url))
        return download_url(url, loop_server=loop_server)
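The handler above retries by calling download_url again with bumped loop counters after each transient failure. A minimal, self-contained sketch of the same bounded-retry idea, using only the Python 3 standard library and hypothetical names (not the snippet's own helpers, proxies, or user-agent handling):

import time
import urllib.error
import urllib.request

def fetch_with_retries(url, retries=3, wait_seconds=60):
    # Download url, sleeping and retrying on transient 5xx responses.
    for attempt in range(1, retries + 1):
        try:
            with urllib.request.urlopen(url, timeout=30) as response:
                return response.read().decode('utf-8', errors='replace'), response.headers
        except urllib.error.HTTPError as e:
            if e.code in (500, 502, 504) and attempt < retries:
                time.sleep(wait_seconds)
                continue
            raise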
Beispiel #19
0
    def Connect(self, par_id, configuration, source_path_spec, knowledge_base):
        """Connector to connect to ESE database modules.

		Args:
			configuration: configuration values.
			source_path_spec (dfvfs.PathSpec): path specification of the source file.
			knowledge_base (KnowledgeBase): knowledge base.

		"""

        self._configuration = configuration
        self._time_zone = knowledge_base.time_zone

        # Load Schema
        yaml_path = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'schema' + os.sep + 'esedb' \
                    + os.sep + 'lv1_os_win_esedb.yaml'
        if not self.LoadSchemaFromYaml(yaml_path):
            logger.error('cannot load schema from yaml: {0:s}'.format(yaml_path))
            return False

        # Search artifact paths
        paths = self._schema['Paths']
        separator = self._schema['Path_Separator']
        environment_variables = knowledge_base.GetEnvironmentVariables()

        find_specs = self.BuildFindSpecs(paths, separator,
                                         environment_variables)
        if not find_specs:
            return False

        esedb_file = pyesedb.file()
        for spec in find_specs:
            try:
                file_object = self.LoadTargetFileToMemory(
                    source_path_spec=source_path_spec,
                    configuration=configuration,
                    file_spec=spec)

                esedb_file.open_file_object(file_object)
            except IOError as exception:
                logger.debug(
                    '[{0:s}] unable to open file with error: {1!s}'.format(
                        self.NAME, exception))
                return

            path_spec_generator = self._path_spec_extractor.ExtractPathSpecs(
                [source_path_spec],
                find_specs=[spec],
                recurse_file_system=False,
                resolver_context=configuration.resolver_context)

            for path_spec in path_spec_generator:
                edb_path = path_spec.location
            try:
                esedb_parsers = esedb_parser.ESEDBParser.GetESEDBParserObjects(
                )
                table_names = frozenset(
                    esedb_parser.ESEDBParser.GetTableNames(esedb_file))

                for parser in esedb_parsers.values():

                    if not parser.required_tables.issubset(table_names):
                        continue

                    try:
                        parser.Process(database=esedb_file)

                        info = tuple([
                            par_id, configuration.case_id,
                            configuration.evidence_id
                        ])
                        # Internet Explorer
                        if 'Containers' in parser.required_tables:
                            # Internet Explorer History
                            if len(parser.GetHistoryRecords) > 0:
                                history_schema = ['par_id', 'case_id', 'evd_id'] + list(parser.GetHistorySchema) \
                                                 + ['source']
                                table_name = 'lv1_os_win_esedb_ie_history'
                                if not configuration.cursor.check_table_exist(
                                        table_name):
                                    ret = self.CreateTableWithSchema(
                                        configuration.cursor,
                                        table_name=table_name,
                                        schema=history_schema)
                                    if not ret:
                                        logger.error(
                                            'cannot create database table name: {0:s}'
                                            .format(table_name))
                                        return False

                                for record in parser.GetHistoryRecords:
                                    tmp_record = list(record)
                                    tmp_record[17] = tmp_record[17].replace(
                                        '"', '""')
                                    tmp_record[17] = tmp_record[17].replace(
                                        '\'', '\'\'')
                                    result = info + tuple(tmp_record) + tuple(
                                        [edb_path])
                                    query = self.InsertQueryBuilder(
                                        table_name=table_name,
                                        schema=history_schema,
                                        data=result)
                                    try:
                                        configuration.cursor.execute_query(
                                            query)
                                    except Exception as exception:
                                        logger.error(
                                            'database execution failed: {0!s}'.
                                            format(exception))

                            # Internet Explorer Content
                            if len(parser.GetContentRecords) > 0:
                                content_schema = ['par_id', 'case_id', 'evd_id'] + list(parser.GetContentSchema) \
                                                 + ['source']
                                table_name = 'lv1_os_win_esedb_ie_content'
                                if not configuration.cursor.check_table_exist(
                                        table_name):
                                    ret = self.CreateTableWithSchema(
                                        configuration.cursor,
                                        table_name=table_name,
                                        schema=content_schema)
                                    if not ret:
                                        logger.error(
                                            'cannot create database table name: {0!s}'
                                            .format(table_name))
                                        return False

                                for record in parser.GetContentRecords:
                                    tmp_record = list(record)
                                    if isinstance(tmp_record[17], bytes):
                                        tmp_record[17] = "Unknown"
                                    else:
                                        tmp_record[17] = tmp_record[
                                            17].replace('"', '""').replace(
                                                '\'', '\'\'')
                                    result = info + tuple(tmp_record) + tuple(
                                        [edb_path])
                                    query = self.InsertQueryBuilder(
                                        table_name=table_name,
                                        schema=content_schema,
                                        data=result)
                                    try:
                                        configuration.cursor.execute_query(
                                            query)
                                    except Exception as exception:
                                        logger.error(
                                            'database execution failed: {0!s}'.
                                            format(exception))

                            # Internet Explorer Cookies
                            if len(parser.GetCookiesRecords) > 0:
                                cookie_schema = ['par_id', 'case_id', 'evd_id'] + list(parser.GetCookiesSchema) \
                                                + ['source']
                                table_name = 'lv1_os_win_esedb_ie_cookies'
                                if not configuration.cursor.check_table_exist(
                                        table_name):
                                    ret = self.CreateTableWithSchema(
                                        configuration.cursor,
                                        table_name=table_name,
                                        schema=cookie_schema)
                                    if not ret:
                                        logger.error(
                                            'cannot create database table name: {0!s}'
                                            .format(table_name))
                                        return False

                                for record in parser.GetCookiesRecords:
                                    tmp_record = list(record)
                                    result = info + tuple(tmp_record) + tuple(
                                        [edb_path])
                                    query = self.InsertQueryBuilder(
                                        table_name=table_name,
                                        schema=cookie_schema,
                                        data=result)
                                    try:
                                        configuration.cursor.execute_query(
                                            query)
                                    except Exception as exception:
                                        logger.error(
                                            'database execution failed: {0!s}'.
                                            format(exception))

                            # Internet Explorer Download
                            if len(parser.GetDownloadRecords) > 0:
                                download_schema = ['par_id', 'case_id', 'evd_id'] + list(parser.GetDownloadSchema) \
                                                  + ['source']
                                table_name = 'lv1_os_win_esedb_ie_download'
                                if not configuration.cursor.check_table_exist(
                                        table_name):
                                    ret = self.CreateTableWithSchema(
                                        configuration.cursor,
                                        table_name=table_name,
                                        schema=download_schema)
                                    if not ret:
                                        logger.error(
                                            'cannot create database table name: {0!s}'
                                            .format(table_name))
                                        return False

                                for record in parser.GetDownloadRecords:
                                    tmp_record = list(record)
                                    tmp_record[17] = tmp_record[17].replace(
                                        '"', '""')
                                    tmp_record[17] = tmp_record[17].replace(
                                        '\'', '\'\'')
                                    result = info + tuple(tmp_record) + tuple(
                                        [edb_path])
                                    query = self.InsertQueryBuilder(
                                        table_name=table_name,
                                        schema=download_schema,
                                        data=result)
                                    try:
                                        configuration.cursor.execute_query(
                                            query)
                                    except Exception as exception:
                                        logger.error(
                                            'database execution failed: {0!s}'.
                                            format(exception))

                    except errors.UnableToParseFile as exception:
                        logger.debug(
                            '[{0:s}] unable to parse file with error: {1!s}'.
                            format(self.NAME, exception))

            finally:
                esedb_file.close()
                file_object.close()
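The insert path above escapes single and double quotes by hand before handing a literal SQL string to configuration.cursor.execute_query. As a point of comparison only (not part of this module's API), a parameterized insert with the standard sqlite3 driver sidesteps the manual escaping; the table, schema, and values below are purely illustrative:

import sqlite3

def insert_row(conn, table_name, schema, row):
    # Placeholders let the driver handle quoting, so values never need quote-doubling.
    columns = ', '.join(schema)
    placeholders = ', '.join('?' for _ in schema)
    query = 'INSERT INTO {0} ({1}) VALUES ({2})'.format(table_name, columns, placeholders)
    conn.execute(query, row)
    conn.commit()

# Hypothetical usage:
# conn = sqlite3.connect('example.db')
# conn.execute('CREATE TABLE IF NOT EXISTS ie_history (par_id TEXT, url TEXT)')
# insert_row(conn, 'ie_history', ['par_id', 'url'], ('p1', "a 'quoted' value"))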
Beispiel #20
0
def main():

    read_config()
    signal.signal(signal.SIGINT, signal_handle)
    signal.signal(signal.SIGTERM, signal_handle)

    parser = argparse.ArgumentParser()
    parser.add_argument("name")
    parser.add_argument("--verbose", action="store_true")
    parser.add_argument("--mqtt-host")
    parser.add_argument("--mqtt-port")
    parser.add_argument("--mqtt-user")
    parser.add_argument("--mqtt-pass")
    parser.add_argument("--mqtt-topic-req")
    parser.add_argument("--mqtt-topic-res")

    args = parser.parse_args()
    name = args.name

    load_dotenv()

    log_level = os.getenv("LOG_LEVEL", "info").lower()
    setLevel(args.verbose or log_level == 'debug')

    mqtt_host = args.mqtt_host or os.getenv("MQTT_HOST")
    mqtt_port = args.mqtt_port or os.getenv("MQTT_PORT")
    mqtt_user = args.mqtt_user or os.getenv("MQTT_USER")
    mqtt_pass = args.mqtt_pass or os.getenv("MQTT_PASS")

    mqtt_topic_req = args.mqtt_topic_req or os.getenv('MQTT_TOPIC_REQ')
    mqtt_topic_res = args.mqtt_topic_res or os.getenv('MQTT_TOPIC_RES')

    topic_req = f"{mqtt_topic_req}/{name}"
    topic_res = f"{mqtt_topic_res}/{name}"

    logger.debug("Starting MQTT")

    nextConnectionAt = datetime.now()
    connected = False

    HOME = os.getenv("HOME")

    pattern = re.compile(r'^Modify: (.*)\n')

    while True:

        now = datetime.now()

        if not connected and now > nextConnectionAt:
            try:

                @subscribe(
                    topic_req, {
                        "host": mqtt_host,
                        "port": int(mqtt_port),
                        "user": mqtt_user,
                        "pass": mqtt_pass
                    })
                def message_handle(payload, emit):

                    try:
                        if 'id' not in payload:
                            raise Exception("request id is not present")

                        if 'command' not in payload:
                            raise Exception("command is not present")

                        command = payload['command']

                        if command == 'config':

                            logger.debug(
                                "Config Temperature %s", {
                                    'id': payload['id'],
                                    'status': 'Config max temperature',
                                    'data': {
                                        'temp': payload['temp']
                                    }
                                })

                            emit(
                                topic_res, {
                                    'id': payload['id'],
                                    'status': 'Config max temperature',
                                    'data': {
                                        'temp': payload['temp']
                                    }
                                })

                            try:
                                newTemp = str(payload['temp'])
                                data = {"temp_max": newTemp}

                                save_config(data)
                                #updateTemperature('.local/config/hackrf-sensors.json', 0, '{"temp_max":' + newTemp + "}")

                            except Exception as ex:
                                logger.warning("%s", payload)
                                logger.error(ex)

                                emit(topic_res, {
                                    'id': payload['id'],
                                    'error': str(ex)
                                })

                        elif command == 'status':

                            # Integrate the process that obtains the machine temperature
                            # and pass it as a variable to emit(topic_res)

                            logger.debug("Getting sensors data")

                            emit(
                                topic_res, {
                                    'id': payload['id'],
                                    'status': 'Data sensors found',
                                    'data': {
                                        'temp': payload['temp']
                                    }
                                })

                        else:
                            emit(topic_res, {'id': payload['id']})

                    except Exception as ex:
                        logger.warning("%s", payload)
                        logger.error(ex)

                        emit(topic_res, {'id': payload['id'], 'error': str(ex)})

                logger.info("mqtt connected")
                connected = True

            except Exception as ex:
                logger.error(ex)

                connected = False
                nextConnectionAt = now + timedelta(seconds=10)

                logger.debug("Reconnecting mqtt at 10 seconds")

        time.sleep(0.1)
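The loop above throttles reconnection attempts with a nextConnectionAt timestamp rather than blocking on the broker. A stripped-down sketch of that scheduling pattern, with a hypothetical connect() callable standing in for the MQTT subscription setup:

import time
from datetime import datetime, timedelta

def run_until_connected(connect, retry_seconds=10, tick=0.1):
    # Try connect() at most once per retry window until it stops raising.
    next_attempt = datetime.now()
    connected = False
    while not connected:
        now = datetime.now()
        if now > next_attempt:
            try:
                connect()  # assumed to raise on failure, like the subscribe() setup above
                connected = True
            except Exception:
                next_attempt = now + timedelta(seconds=retry_seconds)
        time.sleep(tick)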