Example #1
0
File: sqlite.py  Project: naaya17/carpe
    def Parse(self, filename, file_object, configuration):
        """Parses a SQLite database file entry.

        Args:
            filename (str): name of the file.
            file_object (dfvfs.FileIO): file-like object.
            configuration: configuration values; root_tmp_path is used to
                build the temporary working directory.

        Raises:
            UnableToParseFile: when the file cannot be parsed.
        """
        # All temporary copies made by this parser live under
        # <root_tmp_path>/and_smsmms.
        temporary_directory_path = configuration.root_tmp_path + os.sep + 'and_smsmms'
        database = SQLiteDatabase(filename,
                                  temporary_directory=temporary_directory_path)

        try:
            database.Open(file_object)

        except (IOError, ValueError, sqlite3.DatabaseError) as exception:
            logger.warning(
                'unable to open SQLite database with error: {0!s}'.format(
                    exception))
            # NOTE(review): file_object is closed on failure only — confirm
            # the caller owns (and closes) it on the success path.
            file_object.close()
            return

        # Presumably opens the database together with its write-ahead log
        # (WAL) when one exists — confirm in _OpenDatabaseWithWAL.
        database_wal, wal_file_entry = self._OpenDatabaseWithWAL()
Example #2
0
File: sqlite.py  Project: naaya17/carpe
    def Close(self):
        """Closes the database connection and cleans up the temporary files.

        Resets the cached schema, closes the underlying connection if it is
        open, then best-effort removes the temporary copies of the database
        and of its write-ahead log (WAL). Removal failures are logged and
        otherwise ignored so Close() never raises.
        """
        self.schema = {}

        if self._is_open:
            self._database.close()
        self._database = None

        # Both temporary copies (main database file and WAL file) get the
        # same best-effort removal; the previous code duplicated this block.
        for temp_file_path in (self._temp_db_file_path,
                               self._temp_wal_file_path):
            if os.path.exists(temp_file_path):
                try:
                    os.remove(temp_file_path)
                except (OSError, IOError) as exception:
                    logger.warning((
                        'Unable to remove temporary copy: {0:s} of SQLite '
                        'database: {1:s} with error: {2!s}').format(
                            temp_file_path, self._filename, exception))

        self._temp_db_file_path = ''
        self._temp_wal_file_path = ''

        self._is_open = False
Example #3
0
 def get_last_pasties(self):
     """Fetch the archive page and queue a Pastie for every unseen pastie id."""
     html_content, _headers = download_url(self.archive_url)
     if not html_content:
         logger.warning("No HTML content for page {url}".format(url=self.archive_url))
         return False
     ids_found = re.findall(self.archive_regex, html_content)
     if not ids_found:
         logger.error("No last pasties matches for regular expression site:{site} regex:{regex}. Error in your regex? Dumping htmlPage \n {html}".format(site=self.name, regex=self.archive_regex, html=html_content.encode('utf8')))
         return False
     queue = []
     for candidate_id in ids_found:
         # Skip anything already downloaded; seen_pastie() also records it.
         if self.seen_pastie(candidate_id):
             continue
         # Unseen pastie: instantiate the configured class, or the default.
         if self.pastie_classname:
             pastie_cls = globals()[self.pastie_classname]
             queue.append(pastie_cls(self, candidate_id))
         else:
             queue.append(Pastie(self, candidate_id))
     return queue
Example #4
0
    def LoadTargetFileToMemory(self,
                               source_path_spec,
                               configuration,
                               file_path=None,
                               file_spec=None,
                               data_stream_name=None):
        """Resolves a target file inside the source and returns its
        file-like object.

        Args:
            source_path_spec (dfvfs.PathSpec): path specification of the
                source to search within.
            configuration: configuration values providing resolver_context.
            file_path (Optional[str]): path of the file to find; used when
                file_spec is not supplied.
            file_spec (Optional[dfvfs.FindSpec]): pre-built find
                specification.
            data_stream_name (Optional[str]): name of the data stream to
                open; when omitted the default data stream is used.

        Returns:
            dfvfs.FileIO: file-like object on success, False on failure,
            None when no matching path specification was found.
        """
        try:
            if not file_spec:
                # NOTE(review): location_separator is populated from
                # source_path_spec.location — confirm this is intentional.
                find_spec = file_system_searcher.FindSpec(
                    case_sensitive=False,
                    location=file_path,
                    location_separator=source_path_spec.location)
            else:
                find_spec = file_spec
        except ValueError as exception:
            logger.error(
                'Unable to build find specification for path: "{0:s}" with '
                'error: {1!s}'.format(file_path, exception))
            # Without a valid find specification the search below would die
            # with an unrelated NameError; bail out explicitly instead.
            return False

        path_spec_generator = self._path_spec_extractor.ExtractPathSpecs(
            [source_path_spec],
            find_specs=[find_spec],
            recurse_file_system=False,
            resolver_context=configuration.resolver_context)

        for path_spec in path_spec_generator:
            display_name = path_helper.PathHelper.GetDisplayNameForPathSpec(
                path_spec)

            try:
                file_entry = path_spec_resolver.Resolver.OpenFileEntry(
                    path_spec, resolver_context=configuration.resolver_context)

                if file_entry is None or not file_entry.IsFile():
                    logger.warning(
                        'Unable to open file entry with path spec: {0:s}'.
                        format(display_name))
                    return False

                # The two original branches only differed in whether a data
                # stream name was passed through to GetFileObject().
                if data_stream_name:
                    file_object = file_entry.GetFileObject(
                        data_stream_name=data_stream_name)
                else:
                    file_object = file_entry.GetFileObject()

                if not file_object:
                    return False

                return file_object

            except KeyboardInterrupt:
                return False
Example #5
0
    def BuildFindSpecs(self, paths, path_separator, environment_variables=None):
        """Builds find specifications from path filters.

        Args:
          paths (list[str]): paths of files to find, expressed as regular
              expressions whose segments are split on path_separator.
          path_separator (str): path segment separator.
          environment_variables (Optional[list[EnvironmentVariableArtifact]]):
              environment variables.

        Returns:
          list[dfvfs.FindSpec]: find specifications for the file source type.
        """
        # Since paths are regular expressions the path separator must be
        # escaped when it is a regex metacharacter (backslash). This is
        # invariant across paths, so do it once instead of per iteration.
        if path_separator == '\\':
            path_separator = '\\\\'

        find_specs = []
        for path in paths:
            expand_path = False
            path_segments = path.split(path_separator)
            for index, path_segment in enumerate(path_segments):
                if len(path_segment) <= 2:
                    continue

                if path_segment[0] == '{' and path_segment[-1] == '}':
                    # Rewrite legacy path expansion attributes, such as
                    # {systemroot} into %SystemRoot%.
                    path_segment = '%{0:s}%'.format(path_segment[1:-1])
                    path_segments[index] = path_segment

                if path_segment[0] == '%' and path_segment[-1] == '%':
                    expand_path = True

            if expand_path:
                path_segments = path_helper.PathHelper.ExpandWindowsPathSegments(
                    path_segments, environment_variables)

            if path_segments[0] != '':
                logger.warning(
                    'The path filter must be defined as an absolute path: ''{0:s}'.format(path))
                continue

            # Strip the root path segment.
            path_segments.pop(0)

            # Guard against a path consisting of the separator alone, which
            # leaves no segments at all after the root is stripped.
            if not path_segments or not path_segments[-1]:
                logger.warning(
                    'Empty last path segment in path: {0:s}'.format(path))
                continue

            find_spec = file_system_searcher.FindSpec(
                case_sensitive=False, location_regex=path_segments)

            find_specs.append(find_spec)

        return find_specs
Example #6
0
    def RecursiveDirOrFileSearch(self, path_spec, output_path):
        """Recursively exports a file entry (and its sub entries) to disk.

        Directories are mirrored under output_path and their allocated sub
        entries processed recursively; files have every data stream copied
        out in 64 KiB chunks.

        Args:
            path_spec (dfvfs.PathSpec): path specification to export.
            output_path (str): directory the entry is written into.

        Returns:
            False when a data stream could not be opened, None otherwise.
        """
        display_name = path_helper.PathHelper.GetDisplayNameForPathSpec(path_spec)

        file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)
        if file_entry is None:
            logger.warning(
                'Unable to open file entry with path spec: {0:s}'.format(
                    display_name))
            return

        if file_entry.IsDirectory():
            if not os.path.exists(output_path + os.sep + file_entry.name):
                os.mkdir(output_path + os.sep + file_entry.name)

            for sub_file_entry in file_entry.sub_file_entries:
                try:
                    if not sub_file_entry.IsAllocated():
                        continue

                except dfvfs_errors.BackEndError as exception:
                    logger.warning(
                        'Unable to process file: {0:s} with error: {1!s}'.format(
                            sub_file_entry.path_spec.comparable.replace(
                                '\n', ';'), exception))
                    continue

                # Skip TSK orphan files at the file system root.
                if sub_file_entry.type_indicator == dfvfs_definitions.TYPE_INDICATOR_TSK:
                    if file_entry.IsRoot() and sub_file_entry.name == '$OrphanFiles':
                        continue

                self.RecursiveDirOrFileSearch(sub_file_entry.path_spec, output_path + os.sep + file_entry.name)

        if file_entry.IsFile():

            for data_stream in file_entry.data_streams:
                file_object = file_entry.GetFileObject(data_stream_name=data_stream.name)
                if not file_object:
                    return False

                try:
                    buffer_size = 65536
                    # Context manager guarantees the output file is closed
                    # even when a read or write fails part way through (the
                    # previous open()/close() pair leaked it on IOError).
                    with open(output_path + os.sep + file_entry.name, 'wb') as output_file:
                        file_object.seek(0, os.SEEK_SET)
                        data = file_object.read(buffer_size)
                        while data:
                            output_file.write(data)
                            data = file_object.read(buffer_size)

                except IOError as exception:
                    print(display_name)
                    logger.error(
                        'Failed to extract file "{0:s}" : {1!s}'.format(display_name, exception))
                finally:
                    file_object.close()
Example #7
0
    def scrol_right_to_left(self, retry=2, indx=None, ret_all_data=False):
        """
        Scroll from right to left.
        @args:
            retry : Number of scroll retries after reaching to left
            indx  : If you pass a low value it will try to scroll more, which
                    results in bigger scrolling.
            ret_all_data : True if you want to get all data found during the
                           scroll, else False
        """
        ele = "//android.widget.HorizontalScrollView[1]/" + \
              "android.widget.LinearLayout[1]//android.widget.TextView[1]"
        for echtry in range(4):
            try:
                ele_obj = self.get_element_by_xpath(ele)
                break
            except Exception as excp:
                warning(str(excp))
                if self.is_item_visible("setting-popup-cancel-btn"):
                    info("cancel button shown up, clicking on it")
                    self.click_on("setting-popup-cancel-btn")
                self.wait_for(1)
        else:
            raise Exception("not able to access the horizontal scroll element")

        list_text = list_text_old = [ech_ele.text for ech_ele in ele_obj]
        # Floor division: len()/2 yields a float on Python 3 and would raise
        # a TypeError when used to index ele_obj below.
        indx = len(ele_obj) // 2 if indx is None else indx
        from_pt = ele_obj[0].location
        from_pt = (from_pt['x'], from_pt['y'])
        to_pt = ele_obj[indx].location
        to_pt = (to_pt['x'], to_pt['y'])

        tmp_retry = retry
        while tmp_retry > 0:
            self.scroll('H', frm_pt=from_pt, to_pt=to_pt)
            ele_obj = self.get_element_by_xpath(ele)
            list_text_new = [ech_ele.text for ech_ele in ele_obj]
            if list_text_new[:2] == list_text_old[:2]:
                # Leading items unchanged: assume the view did not move and
                # burn one retry.
                tmp_retry -= 1
            else:
                if ret_all_data:
                    output = self.contains(list_text_old[:2], list_text_new)
                    if output == False:
                        info(str(list_text_old[:2]) + " not in " + \
                             str(list_text_new))
                        list_text[0:0] = list_text_new
                    else:
                        # Prepend only the freshly revealed prefix.
                        list_text[0:0] = list_text_new[:output[0]]

                list_text_old = list_text_new
                tmp_retry = retry

        if ret_all_data:
            return list_text
Example #8
0
 def clear_value(ele_obj):
     """
     Clear out the element text.
     @arg :
         ele_obj : element object whose text needs to be cleared out
     """
     attempts_left = 7
     while attempts_left:
         attempts_left -= 1
         try:
             current_text = str(ele_obj.text).strip()
             if not current_text:
                 info("object got clear out")
                 break
             ele_obj.click()
             ele_obj.clear()
         except Exception as ex:
             warning("Got exception while clear out value :: " + str(ex))
Example #9
0
  def ExpandGlobStars(cls, path, path_separator):
    """Expands globstars "**" in a path.

    A globstar "**" will recursively match all files and zero or more
    directories and subdirectories.

    By default the maximum recursion depth is 10 subdirectories, a numeric
    values after the globstar, such as "**5", can be used to define the maximum
    recursion depth.

    Args:
      path (str): path to be expanded.
      path_separator (str): path segment separator.

    Returns:
      list[str]: String path expanded for each glob.
    """
    segments = path.split(path_separator)
    final_index = len(segments) - 1

    expanded_paths = []
    for current_index, segment in enumerate(segments):
      depth = None
      if segment.startswith('**'):
        suffix = segment[2:]
        if not suffix:
          # Bare "**" uses the default recursion depth.
          depth = 10
        else:
          try:
            depth = int(suffix, 10)
          except (TypeError, ValueError):
            logger.warning((
                'Globstar with suffix "{0:s}" in path "{1:s}" not '
                'supported.').format(segment, path))

      elif '**' in segment:
        logger.warning((
            'Globstar with prefix "{0:s}" in path "{1:s}" not '
            'supported.').format(segment, path))

      if depth is None:
        continue

      if depth <= 1 or depth > cls._RECURSIVE_GLOB_LIMIT:
        logger.warning((
            'Globstar "{0:s}" in path "{1:s}" exceed recursion maximum '
            'recursion depth, limiting to: {2:d}.').format(
                segment, path, cls._RECURSIVE_GLOB_LIMIT))
        depth = cls._RECURSIVE_GLOB_LIMIT

      # Replace the globstar segment by 1..depth "*" segments, keeping the
      # surrounding segments intact.
      prefix_segments = list(segments[:current_index])
      if current_index < final_index:
        trailing_segments = segments[current_index + 1:]
      else:
        trailing_segments = []
      for star_count in range(1, depth + 1):
        rewritten = prefix_segments + ['*'] * star_count + trailing_segments
        expanded_paths.append(path_separator.join(rewritten))

    return expanded_paths or [path]
Example #10
0
    def ExtractTargetFileToPath(self,
                                source_path_spec,
                                configuration,
                                file_path=None,
                                file_spec=None,
                                output_path=None,
                                data_stream_name=None):
        """Extracts a target file from the source into an output directory.

        Args:
            source_path_spec (dfvfs.PathSpec): path specification of the
                source to search within.
            configuration: configuration values providing resolver_context.
            file_path (Optional[str]): path of the file to find; used when
                file_spec is not supplied.
            file_spec (Optional[dfvfs.FindSpec]): pre-built find
                specification.
            output_path (Optional[str]): directory the file is written into.
            data_stream_name (Optional[str]): name of the data stream to
                extract; when omitted the default data stream is used and
                the output file name has no stream suffix.

        Returns:
            bool: False on failure, None when extraction completed or no
            matching path specification was found.
        """
        # TODO: handle the case where find_spec is supplied; see
        # LoadTargetFileToMemory.
        try:
            if not file_spec:
                find_spec = file_system_searcher.FindSpec(
                    case_sensitive=False,
                    location=file_path,
                    location_separator=source_path_spec.location)
            else:
                find_spec = file_spec
        except ValueError as exception:
            logger.error(
                'Unable to build find specification for path: "{0:s}" with '
                'error: {1!s}'.format(file_path, exception))
            # Without a valid find specification the search below would die
            # with an unrelated NameError; bail out explicitly instead.
            return False

        path_spec_generator = self._path_spec_extractor.ExtractPathSpecs(
            [source_path_spec],
            find_specs=[find_spec],
            recurse_file_system=False,
            resolver_context=configuration.resolver_context)

        for path_spec in path_spec_generator:
            display_name = path_helper.PathHelper.GetDisplayNameForPathSpec(
                path_spec)
            try:
                file_entry = path_spec_resolver.Resolver.OpenFileEntry(
                    path_spec, resolver_context=configuration.resolver_context)

                if file_entry is None or not file_entry.IsFile():
                    logger.warning(
                        'Unable to open file entry with path spec: {0:s}'.
                        format(display_name))
                    return False

                # The two original branches duplicated the chunked copy; the
                # only differences are the output file name and the name used
                # in the error message.
                if data_stream_name:
                    file_object = file_entry.GetFileObject(
                        data_stream_name=data_stream_name)
                    output_file_path = (
                        output_path + os.sep + file_entry.name + '_' +
                        data_stream_name)
                    error_name = data_stream_name
                else:
                    file_object = file_entry.GetFileObject()
                    output_file_path = output_path + os.sep + file_entry.name
                    error_name = display_name

                if not file_object:
                    return False

                try:
                    buffer_size = 65536
                    # Context manager guarantees the output file is closed
                    # even when a read or write raises part way through.
                    with open(output_file_path, 'wb') as output_file:
                        file_object.seek(0, os.SEEK_SET)
                        data = file_object.read(buffer_size)
                        while data:
                            output_file.write(data)
                            data = file_object.read(buffer_size)

                except IOError as exception:
                    logger.error(
                        'Failed to extract file "{0:s}" : {1!s}'.format(
                            error_name, exception))
                    # Unified: both branches now report failure consistently
                    # (the default-stream branch previously logged only).
                    return False

                finally:
                    file_object.close()

            except KeyboardInterrupt:
                return False
Example #11
0
File: index.py  Project: Matias-alt/hack
                def message_handle(payload, emit):
                    """Handles one incoming control request and emits a reply.

                    Args:
                        payload (dict): request; expected keys 'id' and
                            'command' ('status', 'logs' or 'config').
                        emit: callable(topic, dict) used to publish the
                            response on topic_res.
                    """
                    try:
                        if 'id' not in payload:
                            raise Exception("request id is not present")

                        if 'command' not in payload:
                            raise Exception("command is not present")

                        command = payload['command']

                        if command == 'status':
                            settings = read_config()
                            logger.info("settings: [%s]", settings)

                            # A pm2 pid file existing is taken to mean the
                            # hackrf-control process is running.
                            found = glob.glob(
                                f"{HOME}/.pm2/pids/hackrf-control-*")
                            status = 'stopped'
                            uptime = None

                            if found:
                                status = 'online'

                                with open(found[0]) as fd:
                                    pid = fd.read()

                                # Extract the Modify timestamp of /proc/<pid>
                                # and report it as the process uptime value.
                                out = subprocess.check_output(
                                    f"stat /proc/{pid} | grep Modify",
                                    shell=True,
                                    encoding="utf-8")
                                res = pattern.findall(out)

                                uptime = res[0] if res else None

                            emit(
                                topic_res, {
                                    'id': payload['id'],
                                    'settings': settings,
                                    'process': {
                                        'status': status,
                                        'uptime': uptime
                                    }
                                })

                        elif command == 'logs':

                            # Tail the last N lines of the pm2 error log
                            # (default 10).
                            lines = payload.get('lines', 10)
                            out = subprocess.check_output(
                                f"tail {HOME}/.pm2/logs/hackrf-control-error.log -n {lines}",
                                shell=True,
                                encoding="utf-8")

                            data = []

                            # Split each log line into timestamp (fixed-width
                            # prefix), level, and free-form content.
                            for x in out.split('\n'):

                                created_at = x[0:23]

                                pos = x.find(" ", 24)
                                level = x[23:pos]

                                pos = x.find(" ", pos + 1)
                                content = x[pos:]

                                data.append({
                                    'created_at': created_at,
                                    'level': level,
                                    'content': content
                                })

                            emit(topic_res, {
                                'id': payload['id'],
                                'data': data
                            })

                        elif command == 'config':

                            if 'settings' not in payload:
                                raise Exception("settings is not present")

                            settings = payload['settings']
                            # Remember whether a waveform was supplied.
                            settings['_waveform'] = 'waveform' in settings

                            logger.info("settings: [%s]", settings)
                            save_config(settings)

                            emit(topic_res, {'id': payload['id']})

                        else:
                            # Unknown command: acknowledge with the id only.
                            emit(topic_res, {'id': payload['id']})

                    except Exception as ex:
                        logger.warning("%s", payload)
                        logger.error(ex)

                        emit(topic_res, {'id': payload['id'], 'error': ex})
Example #12
0
def download_url(url, data=None, cookie=None, loop_client=0, loop_server=0):
    """Download a URL (optionally POSTing data), retrying through proxies.

    Args:
        url: URL to fetch.
        data: optional POST body; None issues a GET.
        cookie: optional Cookie header value.
        loop_client: client-error (40x) retries performed so far.
        loop_server: server-error (50x) retries performed so far.

    Returns:
        (htmlPage, headers) on success, (None, None) on failure.
    """
    # Client errors (40x): if more than 5 recursions, give up on URL (used for the 404 case)
    if loop_client >= retries_client:
        return None, None
    # Server errors (50x): if more than 100 recursions, give up on URL
    if loop_server >= retries_server:
        return None, None
    # Pre-bind so the generic except handler below never hits a NameError
    # when get_random_proxy() itself raises.
    random_proxy = None
    try:
        opener = None
        # Random Proxy if set in config
        random_proxy = get_random_proxy()
        if random_proxy:
            proxyh = urllib2.ProxyHandler({'http': random_proxy})
            opener = urllib2.build_opener(proxyh, NoRedirectHandler())
        # We need to create an opener if it didn't exist yet
        if not opener:
            opener = urllib2.build_opener(NoRedirectHandler())
        # Random User-Agent if set in config
        user_agent = get_random_user_agent()
        opener.addheaders = [('Accept-Charset', 'utf-8')]
        if user_agent:
            opener.addheaders.append(('User-Agent', user_agent))
        if cookie:
            opener.addheaders.append(('Cookie', cookie))
        logger.debug(
            'Downloading url: {url} with proxy: {proxy} and user-agent: {ua}'.format(
                url=url, proxy=random_proxy, ua=user_agent))
        if data:
            response = opener.open(url, data)
        else:
            response = opener.open(url)
        htmlPage = unicode(response.read(), errors='replace')
        return htmlPage, response.headers
    except urllib2.HTTPError as e:
        failed_proxy(random_proxy)
        logger.warning("!!Proxy error on {0}.".format(url))
        if 404 == e.code:
            htmlPage = e.read()
            logger.warning("404 from proxy received for {url}. Waiting 1 minute".format(url=url))
            time.sleep(60)
            loop_client += 1
            logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_client, total=retries_client, url=url))
            # Propagate data and cookie so a retried POST stays a POST.
            return download_url(url, data=data, cookie=cookie, loop_client=loop_client, loop_server=loop_server)
        if e.code in (500, 502, 504):
            # All retryable server errors share the same back-off handling;
            # the per-code copies only differed in the logged status code.
            htmlPage = e.read()
            logger.warning("{code} from proxy received for {url}. Waiting 1 minute".format(code=e.code, url=url))
            time.sleep(60)
            loop_server += 1
            logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_server, total=retries_server, url=url))
            return download_url(url, data=data, cookie=cookie, loop_client=loop_client, loop_server=loop_server)
        if 403 == e.code:
            htmlPage = e.read()
            if 'Please slow down' in htmlPage or 'has temporarily blocked your computer' in htmlPage or 'blocked' in htmlPage:
                logger.warning("Slow down message received for {url}. Waiting 1 minute".format(url=url))
                time.sleep(60)
                # Deliberately restarts with fresh retry counters.
                return download_url(url, data=data, cookie=cookie)
        logger.warning("ERROR: HTTP Error ##### {e} ######################## {url}".format(e=e, url=url))
        return None, None
    except urllib2.URLError as e:
        logger.debug("ERROR: URL Error ##### {e} ######################## ".format(e=e, url=url))
        if random_proxy:  # remove proxy from the list if needed
            failed_proxy(random_proxy)
            logger.warning("Failed to download the page because of proxy error {0} trying again.".format(url))
            loop_server += 1
            logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_server, total=retries_server, url=url))
            return download_url(url, data=data, cookie=cookie, loop_client=loop_client, loop_server=loop_server)
        if 'timed out' in e.reason:
            logger.warning("Timed out or slow down for {url}. Waiting 1 minute".format(url=url))
            loop_server += 1
            logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_server, total=retries_server, url=url))
            time.sleep(60)
            return download_url(url, data=data, cookie=cookie, loop_client=loop_client, loop_server=loop_server)
        return None, None
    except socket.timeout:
        logger.debug("ERROR: timeout ############################# " + url)
        if random_proxy:  # remove proxy from the list if needed
            failed_proxy(random_proxy)
            logger.warning("Failed to download the page because of socket error {0} trying again.".format(url))
            loop_server += 1
            logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_server, total=retries_server, url=url))
            return download_url(url, data=data, cookie=cookie, loop_client=loop_client, loop_server=loop_server)
        return None, None
    except Exception as e:
        failed_proxy(random_proxy)
        logger.warning("Failed to download the page because of other HTTPlib error proxy error {0} trying again.".format(url))
        loop_server += 1
        logger.warning("Retry {nb}/{total} for {url}".format(nb=loop_server, total=retries_server, url=url))
        return download_url(url, data=data, cookie=cookie, loop_client=loop_client, loop_server=loop_server)
Example #13
0
                def message_handle(payload, emit):
                    """Handles a temperature-sensor control request.

                    Args:
                        payload (dict): request; expected keys 'id' and
                            'command' ('config' or 'status'); both commands
                            also read payload['temp'].
                        emit: callable(topic, dict) used to publish the
                            response on topic_res.
                    """
                    try:
                        if 'id' not in payload:
                            raise Exception("request id is not present")

                        if 'command' not in payload:
                            raise Exception("command is not present")

                        command = payload['command']

                        if command == 'config':

                            logger.debug(
                                "Config Temperature %s", {
                                    'id': payload['id'],
                                    'status': 'Config max temperature',
                                    'data': {
                                        'temp': payload['temp']
                                    }
                                })

                            # Acknowledge before persisting the new value.
                            emit(
                                topic_res, {
                                    'id': payload['id'],
                                    'status': 'Config max temperature',
                                    'data': {
                                        'temp': payload['temp']
                                    }
                                })

                            try:
                                # Persist the new maximum temperature.
                                newTemp = str(payload['temp'])
                                data = {"temp_max": newTemp}

                                save_config(data)
                                #updateTemperature('.local/config/hackrf-sensors.json', 0, '{"temp_max":' + newTemp + "}")

                            except Exception as ex:
                                logger.warning("%s", payload)
                                logger.error(ex)

                                emit(topic_res, {
                                    'id': payload['id'],
                                    'error': ex
                                })

                        elif command == 'status':

                            # TODO: integrate the process that reads the
                            # machine temperature and pass it as a variable
                            # to emit(topic_res).
                            # NOTE(review): until then this echoes
                            # payload['temp'] back; a missing 'temp' key is
                            # reported through the outer error handler.

                            logger.debug("Getting sensors data")

                            emit(
                                topic_res, {
                                    'id': payload['id'],
                                    'status': 'Data sensors found',
                                    'data': {
                                        'temp': payload['temp']
                                    }
                                })

                        else:
                            # Unknown command: acknowledge with the id only.
                            emit(topic_res, {'id': payload['id']})

                    except Exception as ex:
                        logger.warning("%s", payload)
                        logger.error(ex)

                        emit(topic_res, {'id': payload['id'], 'error': ex})