Example No. 1
    def wrapper(self, *args, **kwargs):
        # We assume that the first argument to instancemethod is a URL-like object;
        # that is, either a string or a urllib2.Request.
        url_object = args[0]
        data = kwargs.get("data")

        # If this is a urllib2.Request...
        if isinstance(url_object, urllib2.Request):
            # If this is a GET HTTP method...
            if url_object.get_method() == "GET":
                # ...then you should check with TUF.
                url = url_object.get_full_url()
            else:
                # ...otherwise, revert to default behaviour.
                Logger.warn(
                    NON_GET_HTTP_METHOD_MESSAGE.format(
                        method=url_object.get_method(),
                        url=url_object.get_full_url()))
                return instancemethod(self, *args, **kwargs)
        # ...otherwise, we assume this is a string.
        else:
            url = url_object

        updater = __updater_controller.get(url)

        # If TUF has not been configured for this URL...
        if updater is None:
            # ...then revert to default behaviour.
            return instancemethod(self, *args, **kwargs)
        else:
            # ...otherwise, use TUF to get this document.
            return updater.open(url, data=data)
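Only the inner wrapper is shown above; the enclosing decorator that supplies instancemethod and the module-level __updater_controller are not part of the example. Below is a minimal sketch of what that outer layer might look like, assuming a decorator applied to an instance method such as urllib2.OpenerDirector.open; the name interpose and the patching line are illustrative assumptions, not taken from the source.

import functools
import urllib2

def interpose(instancemethod):
    # Hypothetical outer decorator (name assumed): closes over the original
    # instance method and returns the TUF-aware wrapper from Example No. 1.
    @functools.wraps(instancemethod)
    def wrapper(self, *args, **kwargs):
        # The TUF dispatch logic from Example No. 1 would go here; this
        # sketch simply falls through to the original method.
        return instancemethod(self, *args, **kwargs)
    return wrapper

# Usage sketch: route OpenerDirector.open through the wrapper.
urllib2.OpenerDirector.open = interpose(urllib2.OpenerDirector.open)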
Example No. 2
def __urllib2_urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
    """Create a file-like object for the specified URL to read from."""

    # We assume that the url argument is a URL-like object;
    # that is, either a string or a urllib2.Request.

    updater = None

    # If this is a urllib2.Request...
    if isinstance(url, urllib2.Request):
        # If this is a GET HTTP method...
        if url.get_method() == "GET":
            # ...then you should check with TUF.
            updater = __updater_controller.get(url.get_full_url())
        else:
            # ...otherwise, revert to default behaviour.
            Logger.warn(
                NON_GET_HTTP_METHOD_MESSAGE.format(method=url.get_method(),
                                                   url=url.get_full_url()))
            return urllib2.urlopen(url, data=data, timeout=timeout)
    else:
        # ...otherwise, we assume this is a string.
        updater = __updater_controller.get(url)

    if updater is None:
        return urllib2.urlopen(url, data=data, timeout=timeout)
    else:
        response = updater.open(url, data=data)
        # See urllib2.AbstractHTTPHandler.do_open
        # TODO: let Updater handle this
        response.msg = ""
        return response
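For this replacement opener to take effect, something has to install __urllib2_urlopen in place of the stock urllib2.urlopen. A minimal sketch of one way to do that follows; the install/uninstall helper names are assumptions, and whether the original module patches urllib2 directly or exposes the function under another name is not shown in the example.

import urllib2

_original_urlopen = urllib2.urlopen  # keep a handle on the stock implementation

def install():
    # Monkey-patch urllib2 so callers transparently go through TUF whenever
    # an updater is configured for the requested URL (assumed mechanism).
    urllib2.urlopen = __urllib2_urlopen

def uninstall():
    # Restore default behaviour.
    urllib2.urlopen = _original_urlopen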
Example No. 3
def main(svc_input, configs):
    logger = Logger("query log", verbose=True)
    log_file_name = "log%s_%s.txt" % (svc_input.replace("?", "#"), DateTimeUtil.get_current_datetime(is_date=True))
    log_file_path = WindowsUtil.convert_win_path(os.path.join(temp_dir, log_file_name))
    logger.info("[Query started] %s" % svc_input)
    try:
        # Open the local zip archive holding matching warranty history records
        history_zip = ZipFileSVC(zip_file_path=history_zipfile, mode='a')
        start_time = DateTimeUtil.get_current_datetime()
        # Generate every possible query code for this input
        svc_generator = SVCGenerator(svc_input, logger)
        logger.info("Generated all possible query codes: %s" % len(svc_generator.target_svc_set))
        # Filter the target query codes against the locally recorded history of invalid query codes
        existed_svc = history_zip.find_file_regex(svc_generator.regex)
        svc_generator.generate_target_svc_batch(existed_svc, invalid_history_file_path)
        # Call the Dell query API and convert the API data into entity objects
        output_dell_asset_list = []
        if svc_generator.target_svc_set:
            batch = Batch(logger, configs)
            api_dell_asset_list = batch.begin(svc_generator.target_svc_set)
            output_dell_asset_list = api_dell_asset_list
            logger.info("Got %s results in total from the API" % (len(api_dell_asset_list)))
            logger.info("Serializing entity objects to local temporary TXT files")
            temp_text_files_path = DellAsset.serialize_txt_batch(api_dell_asset_list, temp_dir)
            logger.info("Adding serialized temporary files to the local zip history, total: %s" % len(temp_text_files_path))
            history_zip.add_new_file_batch(temp_text_files_path)
            logger.info("Deleting %s temporary TXT files" % len(temp_text_files_path))
            for file_path in temp_text_files_path:
                FileUtil.delete_file(file_path)
            logger.info("Merging entity objects from the API with those from the history records")
        else:
            logger.warn("Target query code set is empty; exporting results from history records only")
        for svc in svc_generator.existed_svc_set:
            dell_asset_content = history_zip.get_member_content(file_name="%s.txt" % svc)
            output_dell_asset_list.append(DellAsset.deserialize_txt(dell_asset_content))
        logger.info("Added history records; %s results in total" % (len(output_dell_asset_list)))
        excel_output_path = WindowsUtil.convert_win_path(os.path.join(excel_dir, "%s.xlsx" % svc_generator.get_file_name()))
        DellAsset.save_as_excel_batch(output_dell_asset_list, excel_output_path)
        if FileUtil.is_path_existed(excel_output_path):
            logger.info("Saved results to an Excel document successfully")
            end_time = DateTimeUtil.get_current_datetime()
            logger.info("Total time elapsed: %s" % DateTimeUtil.datetime_diff(start_time, end_time))
            logger.info("[Query finished] %s results in total, saved at: %s" % (len(output_dell_asset_list), excel_output_path))
        else:
            logger.error("[Failed to save results] %s" % excel_output_path)
    except Exception as e:
        # If the program fails with an error, email a report with the log attached
        logger.error("[Query failed] A report has been sent; please wait for a fix")
        logger.error("%s\n%s" % (e, traceback.format_exc()))
        logger.save(log_file_path)
        email_api_key = configs["email_api_key"]
        email = Email(email_api_key, subject="[Query failed] %s %s" % (DateTimeUtil.get_current_datetime(is_date=True), svc_input))
        email.add_attachment(log_file_path)
        email.send(cc_mode=logger.has_error)
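A minimal sketch of a command-line entry point that could drive main(); the configs file name and its layout, beyond the "email_api_key" key read above, are assumptions for illustration.

import json
import sys

if __name__ == "__main__":
    # First argument: the service-tag input; main() replaces "?" with "#"
    # when building the log file name, so "?" appears to act as a wildcard.
    svc_input = sys.argv[1]
    # Load the runtime configuration; it must at least provide "email_api_key".
    with open("configs.json") as config_file:
        configs = json.load(config_file)
    main(svc_input, configs)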
Example No. 4
    def get(self, url):
        """Get an Updater, if any, for this URL.

    Assumptions:
      - @url is a string."""

        GENERIC_WARNING_MESSAGE = "No updater or interposition for url={url}"
        DIFFERENT_NETLOC_MESSAGE = "We have an updater for netloc={netloc1} but not for netlocs={netloc2}"
        HOSTNAME_FOUND_MESSAGE = "Found updater for hostname={hostname}"
        HOSTNAME_NOT_FOUND_MESSAGE = "No updater for hostname={hostname}"

        updater = None

        try:
            parsed_url = urlparse.urlparse(url)
            hostname = parsed_url.hostname
            port = parsed_url.port or 80
            netloc = parsed_url.netloc
            network_location = "{hostname}:{port}".format(hostname=hostname,
                                                          port=port)

            # Sometimes parsed_url.netloc does not have a port (e.g. 80),
            # so we do a double check.
            network_locations = set((netloc, network_location))

            updater = self.__updaters.get(hostname)

            if updater is None:
                Logger.warn(
                    HOSTNAME_NOT_FOUND_MESSAGE.format(hostname=hostname))

            else:

                # Ensure that the updater is meant for this (hostname, port).
                if updater.configuration.network_location in network_locations:
                    Logger.info(
                        HOSTNAME_FOUND_MESSAGE.format(hostname=hostname))
                    # Raises an exception in case we do not recognize how to
                    # transform this URL for TUF. In that case, there will be no
                    # updater for this URL.
                    target_filepath = updater.get_target_filepath(url)

                else:
                    # Same hostname, but different (not user-specified) port.
                    Logger.warn(
                        DIFFERENT_NETLOC_MESSAGE.format(
                            netloc1=updater.configuration.network_location,
                            netloc2=network_locations))
                    updater = None

        except:
            Logger.exception(GENERIC_WARNING_MESSAGE.format(url=url))
            updater = None

        finally:
            if updater is None:
                Logger.warn(GENERIC_WARNING_MESSAGE.format(url=url))

            return updater
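The netloc double check exists because urlparse leaves the default port out of netloc, so an updater configured as "example.com:80" would otherwise miss a bare "example.com" netloc. A small Python 2 illustration of the values involved; the URL is made up.

import urlparse

parsed = urlparse.urlparse("http://example.com/packages/foo.tar.gz")
print(parsed.netloc)      # example.com  (no port in the netloc)
print(parsed.hostname)    # example.com
print(parsed.port)        # None

# Both forms go into the set, so an updater whose configured network
# location is either "example.com" or "example.com:80" will match.
network_location = "{hostname}:{port}".format(hostname=parsed.hostname,
                                              port=parsed.port or 80)
print(set((parsed.netloc, network_location)))  # contains both forms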