Example #1
    def test_punt_socket_traffic_multi_ports_single_socket(self):
        """ Punt socket traffic multi ports and single socket"""

        for p in self.ports:
            self.portsCheck[p] = 0

        #
        # create a stream with a random packet count per port
        #
        pkts = list()
        for _ in range(0, self.nr_packets):
            # choose port from port list
            p = random.choice(self.ports)
            pkts.append((
                Ether(src=self.pg0.remote_mac,
                      dst=self.pg0.local_mac) /
                IPv6(src=self.pg0.remote_ip6, dst=self.pg0.local_ip6) /
                inet6.UDP(sport=9876, dport=p) /
                Raw(b'\xa5' * 100)))
            self.portsCheck[p] += 1

        #
        # no punt socket
        #
        punts = self.vapi.punt_socket_dump(is_ip6=1)
        self.assertEqual(len(punts), 0)

        #
        # configure a punt socket
        #
        self.socket_client_create(b"%s/socket_multi" %
                                  six.ensure_binary(self.tempdir))
        for p in self.ports:
            self.vapi.punt_socket_register(p,
                                           b"%s/socket_multi" %
                                           six.ensure_binary(self.tempdir),
                                           is_ip4=0)
        punts = self.vapi.punt_socket_dump(is_ip6=1)
        self.assertEqual(len(punts), len(self.ports))

        for p in self.ports:
            self.logger.debug("Send %s packets to port %d",
                              str(self.portsCheck[p]), p)
        #
        # expect punt socket and no packets on pg0
        #
        self.vapi.cli("clear errors")
        self.vapi.cli("clear trace")
        self.pg0.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg0.get_capture(0)
        self.logger.info(self.vapi.cli("show trace"))
        self.socket_client_close()

        for p in self.ports:
            self.assertEqual(self.portsCheck[p], 0)
            self.vapi.punt_socket_deregister(p, is_ip4=0)
        punts = self.vapi.punt_socket_dump(is_ip6=1)
        self.assertEqual(len(punts), 0)
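
The b"%s" % six.ensure_binary(self.tempdir) pattern above is needed because bytes %-formatting (PEP 461, Python 3.5+) rejects str operands. A minimal sketch, assuming a str tempdir:

import six

tempdir = "/tmp/vpp-test"  # str under Python 3
path = b"%s/socket_multi" % six.ensure_binary(tempdir)
assert path == b"/tmp/vpp-test/socket_multi"
# b"%s" % tempdir would raise TypeError on Python 3, while ensure_binary
# keeps the same expression working on Python 2, where str is already bytes.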
Example #2
 def test_ensure_binary_raise(self):
     converted_unicode = six.ensure_binary(self.UNICODE_EMOJI, encoding='utf-8', errors='strict')
     converted_binary = six.ensure_binary(self.BINARY_EMOJI, encoding="utf-8", errors='strict')
     if six.PY2:
         # PY2: unicode -> str
         assert converted_unicode == self.BINARY_EMOJI and isinstance(converted_unicode, str)
         # PY2: str -> str
         assert converted_binary == self.BINARY_EMOJI and isinstance(converted_binary, str)
     else:
         # PY3: str -> bytes
         assert converted_unicode == self.BINARY_EMOJI and isinstance(converted_unicode, bytes)
         # PY3: bytes -> bytes
         assert converted_binary == self.BINARY_EMOJI and isinstance(converted_binary, bytes)
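
The test above pins down ensure_binary's contract: text is encoded (UTF-8 by default) and bytes pass through unchanged. A minimal standalone sketch of the same behaviour:

import six

s = u"caf\u00e9"
b = six.ensure_binary(s)          # str -> bytes, UTF-8 by default
assert b == b"caf\xc3\xa9"
assert six.ensure_binary(b) is b  # bytes are returned untouched
assert six.ensure_str(b) == s     # ensure_str round-trips back to text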
Example #3
 def add_vpp_config(self):
     rv = self._test.vapi.memif_socket_filename_add_del(
         1, self.socket_id, self.socket_filename)
     if self.add_default_folder:
         self.socket_filename = b"%s/%s" % (
             six.ensure_binary(self._test.tempdir, encoding='utf-8'),
             self.socket_filename)
     return rv
Example #4
    def test_punt_socket_dump(self):
        """ Punt socket registration """

        punts = self.vapi.punt_socket_dump(is_ip6=1)
        self.assertEqual(len(punts), 0)

        #
        # configure a punt socket
        #
        self.vapi.punt_socket_register(1111, b"%s/socket_1111" %
                                       six.ensure_binary(self.tempdir),
                                       is_ip4=0)
        self.vapi.punt_socket_register(2222, b"%s/socket_2222" %
                                       six.ensure_binary(self.tempdir),
                                       is_ip4=0)
        punts = self.vapi.punt_socket_dump(is_ip6=1)
        self.assertEqual(len(punts), 2)
        self.assertEqual(punts[0].punt.l4_port, 1111)
        self.assertEqual(punts[1].punt.l4_port, 2222)

        #
        # deregister a punt socket
        #
        self.vapi.punt_socket_deregister(1111, is_ip4=0)
        punts = self.vapi.punt_socket_dump(is_ip6=1)
        self.assertEqual(len(punts), 1)

        #
        # configure a punt socket again
        #
        self.vapi.punt_socket_register(1111, b"%s/socket_1111" %
                                       six.ensure_binary(self.tempdir),
                                       is_ip4=0)
        punts = self.vapi.punt_socket_dump(is_ip6=1)
        self.assertEqual(len(punts), 2)

        #
        # deregister all punt sockets
        #
        self.vapi.punt_socket_deregister(1111, is_ip4=0)
        self.vapi.punt_socket_deregister(2222, is_ip4=0)
        self.vapi.punt_socket_deregister(3333, is_ip4=0)
        punts = self.vapi.punt_socket_dump(is_ip6=1)
        self.assertEqual(len(punts), 0)
Example #5
 def _create_netlogon_query(self, domain, msgid):
     """Create a netlogon query for `domain'."""
     client = ldap.Client()
     hostname = misc.hostname()
     filter = '(&(DnsDomain=%s)(Host=%s)(NtVer=\\06\\00\\00\\00))' % \
              (domain, hostname)
     attrs = ('NetLogon',)
     query = client.create_search_request('', filter, attrs=attrs,
                                          scope=ldap.SCOPE_BASE, msgid=msgid)
     return six.ensure_binary(query)
Example #6
def _do_create_snapshot(data):
    snapshot_name = data["snapshot_name"]
    work_dir = snapshot_dir + "/workdir/%s" % snapshot_name

    try:
        if not os.path.exists(work_dir):
            os.makedirs(work_dir)

        # Open / initialize files
        filename_target = "%s/%s" % (snapshot_dir, snapshot_name)
        filename_work = "%s/%s.work" % (work_dir, snapshot_name)

        open(filename_target, "w").close()

        def get_basic_tarinfo(name):
            tarinfo = tarfile.TarInfo(name)
            tarinfo.mtime = time.time()
            tarinfo.uid = 0
            tarinfo.gid = 0
            tarinfo.mode = 0o644
            tarinfo.type = tarfile.REGTYPE
            return tarinfo

        # Initialize the snapshot tar file and populate with initial information
        tar_in_progress = tarfile.open(filename_work, "w")

        for key in ["comment", "created_by", "type"]:
            tarinfo = get_basic_tarinfo(key)
            encoded_value = data[key].encode("utf-8")
            tarinfo.size = len(encoded_value)
            tar_in_progress.addfile(tarinfo, io.BytesIO(encoded_value))

        tar_in_progress.close()

        # Process domains (sorted)
        subtar_info = {}

        for name, info in sorted(_get_default_backup_domains().items()):
            prefix = info.get("prefix", "")
            filename_subtar = "%s.tar.gz" % name
            path_subtar = "%s/%s" % (work_dir, filename_subtar)

            paths = ["." if x[1] == "" else x[1] for x in info.get("paths", [])]
            command = [
                "tar", "czf", path_subtar, "--ignore-failed-read", "--force-local", "-C", prefix
            ] + paths

            proc = subprocess.Popen(
                command,
                stdin=None,
                close_fds=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=prefix,
                encoding="utf-8",
            )
            _stdout, stderr = proc.communicate()
            exit_code = proc.wait()
            # Allow exit codes 0 and 1 (files changed during backup)
            if exit_code not in [0, 1]:
                raise MKGeneralException(
                    "Error while creating backup of %s (Exit Code %d) - %s.\n%s" %
                    (name, exit_code, stderr, command))

            subtar_hash = sha256(open(path_subtar, "rb").read()).hexdigest()
            subtar_signed = sha256(six.ensure_binary(subtar_hash) + _snapshot_secret()).hexdigest()
            subtar_info[filename_subtar] = (subtar_hash, subtar_signed)

            # Append tar.gz subtar to snapshot
            command = ["tar", "--append", "--file=" + filename_work, filename_subtar]
            proc = subprocess.Popen(
                command,
                cwd=work_dir,
                close_fds=True,
            )
            proc.communicate()
            exit_code = proc.wait()

            if os.path.exists(filename_subtar):
                os.unlink(filename_subtar)

            if exit_code != 0:
                raise MKGeneralException("Error on adding backup domain %s to tarfile" % name)

        # Now add the info file which contains hashes and signed hashes for
        # each of the subtars
        info = ''.join(['%s %s %s\n' % (k, v[0], v[1]) for k, v in subtar_info.items()]) + '\n'
        # io.BytesIO needs bytes, so encode the checksums text first
        info_bytes = six.ensure_binary(info)

        tar_in_progress = tarfile.open(filename_work, "a")
        tarinfo = get_basic_tarinfo("checksums")
        tarinfo.size = len(info_bytes)
        tar_in_progress.addfile(tarinfo, io.BytesIO(info_bytes))
        tar_in_progress.close()

        shutil.move(filename_work, filename_target)

    finally:
        shutil.rmtree(work_dir)
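
The checksums file written above pairs each subtar's SHA-256 with a keyed digest of that hash plus a secret. A hedged sketch of the matching verification step (the _verify_subtar name is hypothetical; _snapshot_secret is the helper used above):

from hashlib import sha256

import six

def _verify_subtar(path_subtar, expected_hash, expected_signed):
    # Recompute both digests and compare against the checksums entry.
    with open(path_subtar, "rb") as f:
        subtar_hash = sha256(f.read()).hexdigest()
    subtar_signed = sha256(six.ensure_binary(subtar_hash) + _snapshot_secret()).hexdigest()
    return subtar_hash == expected_hash and subtar_signed == expected_signed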
Example #7
    def add_file(self,
                 name,
                 kind=tarfile.REGTYPE,
                 content=None,
                 link=None,
                 file_content=None,
                 uid=0,
                 gid=0,
                 uname='',
                 gname='',
                 mtime=None,
                 mode=None):
        """Add a file to the current tar.

    Args:
      name: the name of the file to add.
      kind: the type of the file to add, see tarfile.*TYPE.
      content: a textual content to put in the file.
      link: if the file is a link, the destination of the link.
      file_content: file to read the content from. Provide either this
          or `content` to specify the content of the file.
      uid: owner user identifier.
      gid: owner group identifier.
      uname: owner user name.
      gname: owner group name.
      mtime: modification time to put in the archive.
      mode: unix permission mode of the file, default 0644 (0755 for
          directories).
    """
        if file_content and os.path.isdir(file_content):
            # Recurse into directory
            self.add_dir(name, file_content, uid, gid, uname, gname, mtime,
                         mode)
            return
        if not (name == self.root_directory or name.startswith('/')
                or name.startswith(self.root_directory + '/')):
            name = os.path.join(self.root_directory, name)
        if kind == tarfile.DIRTYPE:
            name = name.rstrip('/')
            if name in self.directories:
                return
        if mtime is None:
            mtime = self.default_mtime

        components = name.rsplit('/', 1)
        if len(components) > 1:
            d = components[0]
            self.add_file(d,
                          tarfile.DIRTYPE,
                          uid=uid,
                          gid=gid,
                          uname=uname,
                          gname=gname,
                          mtime=mtime,
                          mode=0o755)
        tarinfo = tarfile.TarInfo(name)
        tarinfo.mtime = mtime
        tarinfo.uid = uid
        tarinfo.gid = gid
        tarinfo.uname = uname
        tarinfo.gname = gname
        tarinfo.type = kind
        if mode is None:
            tarinfo.mode = 0o644 if kind == tarfile.REGTYPE else 0o755
        else:
            tarinfo.mode = mode
        if link:
            tarinfo.linkname = link
        if content:
            content_bytes = six.ensure_binary(content, 'utf-8')
            tarinfo.size = len(content_bytes)
            self._addfile(tarinfo, io.BytesIO(content_bytes))
        elif file_content:
            with open(file_content, 'rb') as f:
                tarinfo.size = os.fstat(f.fileno()).st_size
                self._addfile(tarinfo, f)
        else:
            if kind == tarfile.DIRTYPE:
                self.directories.add(name)
            self._addfile(tarinfo)
Example #8
    def test_punt_socket_traffic_single_port_single_socket(self):
        """ Punt socket traffic single port single socket"""

        port = self.ports[0]

        p = (Ether(src=self.pg0.remote_mac,
                   dst=self.pg0.local_mac) /
             IPv6(src=self.pg0.remote_ip6, dst=self.pg0.local_ip6) /
             inet6.UDP(sport=9876, dport=port) /
             Raw(b'\xa5' * 100))

        pkts = p * self.nr_packets
        self.portsCheck[port] = self.nr_packets

        punts = self.vapi.punt_socket_dump(is_ip6=1)
        self.assertEqual(len(punts), 0)

        #
        # expect ICMPv6 - destination unreachable for all packets
        #
        self.vapi.cli("clear trace")
        self.pg0.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        # FIXME - when punt socket deregister is implemented
        # rx = self.pg0.get_capture(self.nr_packets)
        # for p in rx:
        #     self.assertEqual(int(p[IPv6].nh), 58)                # ICMPv6
        #     self.assertEqual(int(p[ICMPv6DestUnreach].code),4)  # unreachable

        #
        # configure a punt socket
        #
        self.socket_client_create(b"%s/socket_%d" % (
            six.ensure_binary(self.tempdir), port))
        self.vapi.punt_socket_register(port, b"%s/socket_%d" % (
            six.ensure_binary(self.tempdir), port), is_ip4=0)
        punts = self.vapi.punt_socket_dump(is_ip6=1)
        self.assertEqual(len(punts), 1)

        self.logger.debug("Sending %s packets to port %d",
                          str(self.portsCheck[port]), port)
        #
        # expect punt socket and no packets on pg0
        #
        self.vapi.cli("clear errors")
        self.vapi.cli("clear trace")
        self.pg0.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.pg0.get_capture(0)
        self.logger.info(self.vapi.cli("show trace"))
        self.socket_client_close()
        self.assertEqual(self.portsCheck[port], 0)

        #
        # remove punt socket. expect ICMP - dest. unreachable for all packets
        #
        self.vapi.punt_socket_deregister(port, is_ip4=0)
        punts = self.vapi.punt_socket_dump(is_ip6=1)
        self.assertEqual(len(punts), 0)
        self.pg0.add_stream(pkts)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
Example #9
def helm_chart(kubeconfig, cluster, docker_image, image_pull_policy):  # pylint: disable=redefined-outer-name,unused-argument
    print('--- \033[32m:helm: Installing Helm chart\033[0m')

    # Install helm chart
    try:
        check_output('''kubectl create namespace dagster-test''', shell=True)

        print(
            'Creating k8s test objects ConfigMap test-env-configmap and Secret test-env-secret'
        )
        config.load_kube_config(config_file=kubeconfig)
        kube_api = client.CoreV1Api()

        configmap = client.V1ConfigMap(
            api_version='v1',
            kind='ConfigMap',
            data={'TEST_ENV_VAR': 'foobar'},
            metadata=client.V1ObjectMeta(name='test-env-configmap'),
        )
        kube_api.create_namespaced_config_map(namespace='dagster-test',
                                              body=configmap)

        # Secret values are expected to be base64 encoded
        secret_val = six.ensure_str(
            base64.b64encode(six.ensure_binary('foobar')))
        secret = client.V1Secret(
            api_version='v1',
            kind='Secret',
            data={'TEST_SECRET_ENV_VAR': secret_val},
            metadata=client.V1ObjectMeta(name='test-env-secret'),
        )
        kube_api.create_namespaced_secret(namespace='dagster-test',
                                          body=secret)

        try:
            repository, tag = docker_image.split(':')

            helm_config = {
                'imagePullPolicy': image_pull_policy,
                'dagit': {
                    'image': {
                        'repository': repository,
                        'tag': tag
                    },
                    'env': {
                        'TEST_SET_ENV_VAR': 'test_dagit_env_var'
                    },
                    'env_config_maps': ['test-env-configmap'],
                    'env_secrets': ['test-env-secret'],
                },
                'job_runner': {
                    'image': {
                        'repository': repository,
                        'tag': tag
                    },
                    'env': {
                        'TEST_SET_ENV_VAR': 'test_job_runner_env_var'
                    },
                    'env_config_maps': ['test-env-configmap'],
                    'env_secrets': ['test-env-secret'],
                },
                'serviceAccount': {
                    'name': 'dagit-admin'
                },
                'postgresqlPassword': '******',
                'postgresqlDatabase': 'test',
                'postgresqlUser': '******',
                'celery': {
                    'extraWorkerQueues': [
                        {
                            'name': 'extra-queue-1',
                            'replicaCount': 1
                        },
                        {
                            'name': 'extra-queue-2',
                            'replicaCount': 2
                        },
                    ]
                },
            }
            helm_config_yaml = yaml.dump(helm_config, default_flow_style=False)

            helm_cmd = [
                'helm',
                'install',
                '--namespace',
                'dagster-test',
                '-f',
                '-',
                'dagster',
                'helm/dagster/',
            ]

            print('Running Helm Install: \n', helm_cmd, '\nwith config:\n',
                  helm_config_yaml)

            p = subprocess.Popen(
                helm_cmd,
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
            stdout, stderr = p.communicate(six.ensure_binary(helm_config_yaml))
            print('\n\nstdout:\n', six.ensure_str(stdout))
            print('\n\nstderr:\n', six.ensure_str(stderr))
            assert p.returncode == 0

            # Wait for Dagit pod to be ready (won't actually stay up w/out js rebuild)
            success, _ = wait_for_pod('dagit')
            assert success

            # Wait for additional Celery worker queues to become ready
            pods = kube_api.list_namespaced_pod(namespace='dagster-test')
            for extra_queue in helm_config['celery']['extraWorkerQueues']:
                pod_names = [
                    p.metadata.name for p in pods.items
                    if extra_queue['name'] in p.metadata.name
                ]
                assert len(pod_names) == extra_queue['replicaCount']
                for pod in pod_names:
                    success, _ = wait_for_pod(pod)
                    assert success

            yield

        finally:
            print('Uninstalling helm chart')
            check_output(
                ['helm uninstall dagster --namespace dagster-test'],
                shell=True,
                cwd=os.path.join(git_repository_root(),
                                 'python_modules/libraries/dagster-k8s/'),
            )
    finally:
        if not IS_BUILDKITE:
            print('Deleting namespace')
            check_output(
                ['kubectl delete namespace dagster-test'],
                shell=True,
            )
            print('Deleted namespace')
Example #10
def _run_deprecated_conversion_binary(model_flags_str,
                                      conversion_flags_str,
                                      input_data_str,
                                      debug_info_str=None):
  """Convert `input_data_str` using deprecated conversion binary.

  Args:
    model_flags_str: Serialized proto describing model properties, see
      `model_flags.proto`.
    conversion_flags_str: Serialized proto describing TFLite converter
      properties, see `toco/toco_flags.proto`.
    input_data_str: Input data in serialized form (e.g. a graphdef is common)
    debug_info_str: Serialized `GraphDebugInfo` proto describing logging
      information. (default None)

  Returns:
    Converted model in serialized form (e.g. a TFLITE model is common).
  Raises:
    ConverterError: When cannot find the deprecated conversion binary.
    RuntimeError: When conversion fails, an exception is raised with the error
      message embedded.
  """
  if distutils.spawn.find_executable(_deprecated_conversion_binary) is None:
    raise ConverterError("""Could not find `toco_from_protos` binary, make sure
your virtualenv bin directory or pip local bin directory is in your path.
In particular, if you have installed TensorFlow with --user, make sure you
add the install directory to your path.

For example:
Linux: export PATH=$PATH:~/.local/bin/
Mac: export PATH=$PATH:~/Library/Python/<version#>/bin

Alternative, use virtualenv.""")
  # Windows and TemporaryFile are not that useful together,
  # since you cannot have two readers/writers. So we have to
  # make the temporaries and close and delete them explicitly.
  conversion_filename, model_filename, input_filename, output_filename = (None,
                                                                          None,
                                                                          None,
                                                                          None)
  try:
    # Build all input files
    with _tempfile.NamedTemporaryFile(delete=False) as fp_conversion, \
             _tempfile.NamedTemporaryFile(delete=False) as fp_model, \
             _tempfile.NamedTemporaryFile(delete=False) as fp_input, \
             _tempfile.NamedTemporaryFile(delete=False) as fp_debug:
      conversion_filename = fp_conversion.name
      input_filename = fp_input.name
      model_filename = fp_model.name
      debug_filename = fp_debug.name

      fp_model.write(model_flags_str)
      fp_conversion.write(conversion_flags_str)
      fp_input.write(six.ensure_binary(input_data_str))
      debug_info_str = debug_info_str if debug_info_str else ""
      # fp_debug.write() needs bytes, but `debug_info_str` may arrive as
      # either str or bytes (some "convert_test" subtests pass a str, which
      # would otherwise raise "TypeError: a bytes-like object is required,
      # not 'str'"), so normalize it before writing.
      fp_debug.write(six.ensure_binary(debug_info_str))

    # Reserve an output file
    with _tempfile.NamedTemporaryFile(delete=False) as fp:
      output_filename = fp.name

    # Run
    cmd = [
        _deprecated_conversion_binary,
        model_filename,
        conversion_filename,
        input_filename,
        output_filename,
        "--debug_proto_file={}".format(debug_filename),
    ]
    cmdline = " ".join(cmd)
    is_windows = _platform.system() == "Windows"
    proc = _subprocess.Popen(
        cmdline,
        shell=True,
        stdout=_subprocess.PIPE,
        stderr=_subprocess.STDOUT,
        close_fds=not is_windows)
    stdout, stderr = proc.communicate()
    exitcode = proc.returncode
    if exitcode == 0:
      with open(output_filename, "rb") as fp:
        return fp.read()
    else:
      stdout = _try_convert_to_unicode(stdout)
      stderr = _try_convert_to_unicode(stderr)
      raise ConverterError("See console for info.\n%s\n%s\n" % (stdout, stderr))
  finally:
    # Must manually cleanup files.
    for filename in [
        conversion_filename, input_filename, model_filename, output_filename
    ]:
      try:
        _os.unlink(filename)
      except (OSError, TypeError):
        pass
Example #11
def mk_repr(x: Any) -> bytes:
    return base64.b64encode(ensure_binary(repr(x)))
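
A hypothetical inverse (mk_eval below is not part of the source; it assumes mk_repr from the example above is in scope) would base64-decode the payload and literal_eval it; note that repr()/literal_eval only round-trips plain Python literals:

import ast
import base64

from six import ensure_str

def mk_eval(b):
    # type: (bytes) -> Any -- hypothetical counterpart to mk_repr
    return ast.literal_eval(ensure_str(base64.b64decode(b)))

assert mk_eval(mk_repr({'a': 1})) == {'a': 1}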
Example #12
def harvest_objects_import(context, data_dict):
    '''
    Reimports the existing harvest objects, specified by either source_id,
    harvest_object_id or package_id.

    It performs the import stage with the last fetched objects, optionally
    belonging to a certain source.

    Please note that no objects will be fetched from the remote server.

    It will only affect the last fetched objects already present in the
    database.

    :param source_id: the id of the harvest source to import
    :type source_id: string
    :param guid: the guid of the harvest object to import
    :type guid: string
    :param harvest_object_id: the id of the harvest object to import
    :type harvest_object_id: string
    :param package_id: the id or name of the package to import
    :type package_id: string
    '''
    log.info('Harvest objects import: %r', data_dict)
    check_access('harvest_objects_import', context, data_dict)

    session = context['session']
    source_id = data_dict.get('source_id')
    guid = data_dict.get('guid')
    harvest_object_id = data_dict.get('harvest_object_id')
    package_id_or_name = data_dict.get('package_id')

    segments = context.get('segments')

    join_datasets = context.get('join_datasets', True)

    if guid:
        last_objects_ids = \
            session.query(HarvestObject.id) \
                   .filter(HarvestObject.guid == guid) \
                   .filter(HarvestObject.current == True)  # noqa: E712

    elif source_id:
        source = HarvestSource.get(source_id)
        if not source:
            log.error('Harvest source %s does not exist', source_id)
            raise NotFound('Harvest source %s does not exist' % source_id)

        if not source.active:
            log.warn('Harvest source %s is not active.', source_id)
            raise Exception('This harvest source is not active')

        last_objects_ids = \
            session.query(HarvestObject.id) \
                   .join(HarvestSource) \
                   .filter(HarvestObject.source == source) \
                   .filter(HarvestObject.current == True)  # noqa: E712

    elif harvest_object_id:
        last_objects_ids = \
            session.query(HarvestObject.id) \
                   .filter(HarvestObject.id == harvest_object_id)
    elif package_id_or_name:
        last_objects_ids = (
            session.query(HarvestObject.id).join(Package).filter(
                HarvestObject.current == True)  # noqa: E712
            .filter(Package.state == u'active').filter(
                or_(Package.id == package_id_or_name,
                    Package.name == package_id_or_name)))
        join_datasets = False
    else:
        last_objects_ids = \
            session.query(HarvestObject.id) \
                   .filter(HarvestObject.current == True)  # noqa: E712

    if join_datasets:
        last_objects_ids = last_objects_ids.join(Package) \
            .filter(Package.state == u'active')

    last_objects_ids = last_objects_ids.all()

    last_objects_count = 0

    for obj_id in last_objects_ids:
        if segments and \
                str(hashlib.md5(six.ensure_binary(obj_id[0])).hexdigest())[0] not in segments:
            continue

        obj = session.query(HarvestObject).get(obj_id)

        for harvester in PluginImplementations(IHarvester):
            if harvester.info()['name'] == obj.source.type:
                if hasattr(harvester, 'force_import'):
                    harvester.force_import = True
                harvester.import_stage(obj)
                break
        last_objects_count += 1
    log.info('Harvest objects imported: %s', last_objects_count)
    return last_objects_count
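
The segments check above shards objects by the first hex digit of the MD5 of their id, so an import run can be split across up to 16 workers. A minimal sketch of that bucketing (the _segment_of name is hypothetical):

import hashlib

import six

def _segment_of(obj_id):
    # The first hex digit of the MD5 picks one of 16 buckets, '0'-'f'.
    return hashlib.md5(six.ensure_binary(obj_id)).hexdigest()[0]

assert _segment_of(u"some-object-id") in "0123456789abcdef"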
Example #13
def check_server(force=False, change_user=False, notify=False):
    log.debug("checkServer Called")

    settings = xbmcaddon.Addon()
    server_url = ""
    something_changed = False
    du = DownloadUtils()

    if force is False:
        # if not forcing use server details from settings
        svr = du.get_server()
        if svr is not None:
            server_url = svr

    # if the server is not set then try to detect it
    if server_url == "":

        # scan for local server
        server_info = get_server_details()

        addon = xbmcaddon.Addon()
        server_icon = addon.getAddonInfo('icon')

        server_list = []
        for server in server_info:
            server_item = xbmcgui.ListItem(
                server.get("Name", string_load(30063)))
            sub_line = server.get("Address")
            server_item.setLabel2(sub_line)
            server_item.setProperty("address", server.get("Address"))
            art = {"Thumb": server_icon}
            server_item.setArt(art)
            server_list.append(server_item)

        if len(server_list) > 0:
            return_index = xbmcgui.Dialog().select('{} : {}'.format(
                __addon_name__, string_load(30166)),
                                                   server_list,
                                                   useDetails=True)
            if return_index != -1:
                server_url = server_info[return_index]["Address"]

        if not server_url:
            return_index = xbmcgui.Dialog().yesno(
                __addon_name__, '{}\n{}'.format(string_load(30282),
                                                string_load(30370)))
            if not return_index:
                xbmc.executebuiltin("ActivateWindow(Home)")
                return

            while True:
                kb = xbmc.Keyboard()
                kb.setHeading(string_load(30372))
                if server_url:
                    kb.setDefault(server_url)
                else:
                    kb.setDefault("http://<server address>:8096")
                kb.doModal()
                if kb.isConfirmed():
                    server_url = kb.getText()
                else:
                    xbmc.executebuiltin("ActivateWindow(Home)")
                    return

                public_lookup_url = "%s/System/Info/Public?format=json" % (
                    server_url)

                log.debug("Testing_Url: {0}".format(public_lookup_url))
                progress = xbmcgui.DialogProgress()
                progress.create('{} : {}'.format(__addon_name__,
                                                 string_load(30376)))
                progress.update(0, string_load(30377))
                result = du.download_url(public_lookup_url, authenticate=False)
                progress.close()

                if result:
                    xbmcgui.Dialog().ok(
                        '{} : {}'.format(__addon_name__, string_load(30167)),
                        server_url)
                    break
                else:
                    return_index = xbmcgui.Dialog().yesno(
                        '{} : {}'.format(__addon_name__, string_load(30135)),
                        server_url, string_load(30371))
                    if not return_index:
                        xbmc.executebuiltin("ActivateWindow(Home)")
                        return

        log.debug("Selected server: {0}".format(server_url))
        settings.setSetting("server_address", server_url)
        something_changed = True

    # do we need to change the user
    user_details = load_user_details(settings)
    current_username = user_details.get("username", "")
    current_username = py2_decode(current_username)

    # if asked or we have no current user then show user selection screen
    if something_changed or change_user or len(current_username) == 0:

        # stop playback when switching users
        xbmc.Player().stop()
        du = DownloadUtils()

        # get a list of users
        log.debug("Getting user list")
        result = du.download_url(server_url + "/Users/Public?format=json",
                                 authenticate=False)

        log.debug("jsonData: {0}".format(py2_decode(result)))

        selected_id = -1
        users = []
        for user in result:
            config = user.get("Configuration")
            if config is not None:
                if config.get("IsHidden", False) is False:
                    name = user.get("Name")
                    admin = user.get("Policy", {}).get("IsAdministrator",
                                                       False)

                    time_ago = ""
                    last_active = user.get("LastActivityDate")
                    if last_active:
                        last_active_date = datetime_from_string(last_active)
                        log.debug(
                            "LastActivityDate: {0}".format(last_active_date))
                        ago = datetime.now() - last_active_date
                        log.debug("LastActivityDate: {0}".format(ago))
                        days = divmod(int(ago.total_seconds()), 86400)
                        hours = divmod(days[1], 3600)
                        minutes = divmod(hours[1], 60)
                        log.debug("LastActivityDate: {0} {1} {2}".format(
                            days[0], hours[0], minutes[0]))
                        if days[0]:
                            time_ago += " %sd" % days[0]
                        if hours[0]:
                            time_ago += " %sh" % hours[0]
                        if minutes[0]:
                            time_ago += " %sm" % minutes[0]
                        time_ago = time_ago.strip()
                        if not time_ago:
                            time_ago = "Active: now"
                        else:
                            time_ago = "Active: %s ago" % time_ago
                        log.debug("LastActivityDate: {0}".format(time_ago))

                    user_item = xbmcgui.ListItem(name)
                    user_image = du.get_user_artwork(user, 'Primary')
                    if not user_image:
                        user_image = "DefaultUser.png"
                    art = {"Thumb": user_image}
                    user_item.setArt(art)
                    user_item.setLabel2("TEST")

                    sub_line = time_ago

                    if user.get("HasPassword", False) is True:
                        sub_line += ", Password"
                        user_item.setProperty("secure", "true")

                        m = hashlib.md5()
                        m.update(ensure_binary(name))
                        hashed_username = m.hexdigest()
                        saved_password = settings.getSetting(
                            "saved_user_password_" + hashed_username)
                        if saved_password:
                            sub_line += ": Saved"

                    else:
                        user_item.setProperty("secure", "false")

                    if admin:
                        sub_line += ", Admin"
                    else:
                        sub_line += ", User"

                    user_item.setProperty("manual", "false")
                    user_item.setLabel2(sub_line)
                    users.append(user_item)

                    if current_username == name:
                        selected_id = len(users) - 1

        if current_username:
            selection_title = string_load(
                30180) + " (" + current_username + ")"
        else:
            selection_title = string_load(30180)

        # add manual login
        user_item = xbmcgui.ListItem(string_load(30365))
        art = {"Thumb": "DefaultUser.png"}
        user_item.setArt(art)
        user_item.setLabel2(string_load(30366))
        user_item.setProperty("secure", "true")
        user_item.setProperty("manual", "true")
        users.append(user_item)

        return_value = xbmcgui.Dialog().select(selection_title,
                                               users,
                                               preselect=selected_id,
                                               autoclose=20000,
                                               useDetails=True)

        if return_value > -1 and return_value != selected_id:

            something_changed = True
            selected_user = users[return_value]
            secured = selected_user.getProperty("secure") == "true"
            manual = selected_user.getProperty("manual") == "true"
            selected_user_name = selected_user.getLabel()

            log.debug("Selected User Name: {0} : {1}".format(
                return_value, selected_user_name))

            if manual:
                kb = xbmc.Keyboard()
                kb.setHeading(string_load(30005))
                if current_username:
                    kb.setDefault(current_username)
                kb.doModal()
                if kb.isConfirmed():
                    selected_user_name = kb.getText()
                    log.debug("Manual entered username: {0}".format(
                        selected_user_name))
                else:
                    return

            if secured:
                # we need a password, check the settings first
                m = hashlib.md5()
                m.update(selected_user_name.encode())
                hashed_username = m.hexdigest()
                saved_password = settings.getSetting("saved_user_password_" +
                                                     hashed_username)
                allow_password_saving = settings.getSetting(
                    "allow_password_saving") == "true"

                # if not saving passwords but have a saved ask to clear it
                if not allow_password_saving and saved_password:
                    clear_password = xbmcgui.Dialog().yesno(
                        string_load(30368), string_load(30369))
                    if clear_password:
                        settings.setSetting(
                            "saved_user_password_" + hashed_username, "")

                if saved_password:
                    log.debug("Saving username and password: {0}".format(
                        selected_user_name))
                    log.debug("Using stored password for user: {0}".format(
                        hashed_username))
                    save_user_details(settings, selected_user_name,
                                      saved_password)

                else:
                    kb = xbmc.Keyboard()
                    kb.setHeading(string_load(30006))
                    kb.setHiddenInput(True)
                    kb.doModal()
                    if kb.isConfirmed():
                        log.debug("Saving username and password: {0}".format(
                            selected_user_name))
                        save_user_details(settings, selected_user_name,
                                          kb.getText())

                        # should we save the password
                        if allow_password_saving:
                            save_password = xbmcgui.Dialog().yesno(
                                string_load(30363), string_load(30364))
                            if save_password:
                                log.debug(
                                    "Saving password for fast user switching: {0}"
                                    .format(hashed_username))
                                settings.setSetting(
                                    "saved_user_password_" + hashed_username,
                                    kb.getText())
            else:
                log.debug("Saving username with no password: {0}".format(
                    selected_user_name))
                save_user_details(settings, selected_user_name, "")

        if something_changed:
            home_window = HomeWindow()
            home_window.clear_property("userid")
            home_window.clear_property("AccessToken")
            home_window.clear_property("userimage")
            home_window.clear_property("embycon_widget_reload")
            du = DownloadUtils()
            du.authenticate()
            du.get_user_id()
            xbmc.executebuiltin("ActivateWindow(Home)")
            if "estuary_embycon" in xbmc.getSkinDir():
                xbmc.executebuiltin("SetFocus(9000, 0, absolute)")
            xbmc.executebuiltin("ReloadSkin()")
Example #14
def mk_repr(x):
    # type: (Any) -> bytes
    r = pickle.dumps(x) if config.wato_legacy_eval else six.ensure_binary(repr(x))
    return base64.b64encode(r)
Example #15
 def NoMoredata(self, data):
     if self.IsAlive:
         if self.action == "listall":
             self.request.setHeader("content-type",
                                    "application/json; charset=utf-8")
             try:
                 data = self.parseAll()
                 self.request.write(six.ensure_binary(json.dumps(data)))
             except Exception as exc:
                 self.request.setResponseCode(http.INTERNAL_SERVER_ERROR)
                 self.request.write(
                     six.ensure_binary(
                         json.dumps({
                             "result": False,
                             "request": self.request.path,
                             "exception": repr(exc)
                         })))
         elif self.action == "full":
             try:
                 data = open("/tmp/opkg.tmp", 'r').read()
                 self.request.write(six.ensure_binary(data))
             except Exception as exc:
                 self.request.setResponseCode(http.INTERNAL_SERVER_ERROR)
                 self.request.write(six.ensure_binary(repr(exc)))
         else:
             nresult = ""
             if self.action == "list":
                 for a in self.ResultString.split("\n"):
                     if a.count(" - ") > 0:
                         nresult += a + "\n"
             else:
                 for a in self.ResultString.split("\n"):
                     if a.count(" - ") > 0:
                         if nresult[-1:] == "\n":
                             nresult += a
                         else:
                             nresult += "\n" + a
                     else:
                         nresult += a + "\n"
             nresult = nresult.replace("\n\n", "\n")
             nresult = nresult.replace("\n ", " ")
             # default rpl to the unfiltered list so it is always
             # defined for the json/html branches below
             rpl = nresult.split("\n")
             if self.filter is not None:
                 pl = rpl
                 add = True
                 rpl = []
                 for p in pl:
                     if p.count(" - ") > 0:
                         add = True
                         name = p.split(' - ')[0]
                         for f in self.filter:
                             if name.endswith('-' + f):
                                 add = False
                     if add:
                         rpl.append(p)
             if self.json:
                 data = []
                 data.append({"result": True, "packages": rpl})
                 self.request.setHeader("content-type",
                                        "application/json; charset=utf-8")
                 self.request.write(six.ensure_binary(json.dumps(data)))
             else:
                 nresult = '\n'.join(rpl)
                 nresult = nresult.replace('\n', '<br>\n')
                 nresult = six.ensure_binary(nresult)
                 self.request.write(b"<html><body>\n")
                 self.request.write(nresult)
                 self.request.write(b"</body></html>\n")
         self.request.finish()
     return server.NOT_DONE_YET
Example #16
 def ShowError(self, request, text):
     request.setResponseCode(http.OK)
     request.write(six.ensure_binary(text))
     request.finish()
     return server.NOT_DONE_YET
Example #17
 def validate_mtime(self, mtime, path):
     return self.packet(
         TRANSLATE_VALIDATE_MTIME,
         struct.pack('L', int(mtime)) + six.ensure_binary(path))
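
Note that struct.pack('L', ...) uses the platform's native unsigned-long size and byte order, so the packet layout can vary between hosts. A small sketch of the payload built above, with a fixed-layout alternative:

import struct

import six

mtime = 1700000000
path = u"/var/www/index.html"
payload = struct.pack('L', int(mtime)) + six.ensure_binary(path)
# 'L' in native mode is 4 or 8 bytes depending on the platform; something
# like struct.pack('<Q', int(mtime)) would pin an 8-byte little-endian
# layout if both ends of the protocol must agree.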
Example #18
 def expand_request_header(self, name, value):
     assert isinstance(name, (str, bytes))
     assert isinstance(value, (str, bytes))
     return self.packet(
         TRANSLATE_EXPAND_REQUEST_HEADER,
         six.ensure_binary(name) + b':' + six.ensure_binary(value))
Example #19
 def response_header(self, name, value):
     assert isinstance(name, (str, bytes))
     assert isinstance(value, (str, bytes))
     return self.packet(
         TRANSLATE_REQUEST_HEADER,
         six.ensure_binary(name) + b':' + six.ensure_binary(value))
Example #20
def doMerge(outputio,
            files,
            authoropts=[],
            titleopt=None,
            descopt=None,
            tags=[],
            languages=['en'],
            titlenavpoints=True,
            originalnavpoints=True,
            flattentoc=False,
            printtimes=False,
            coverjpgpath=None,
            keepmetadatafiles=False,
            source=None):
    '''
    outputio = output file name or BytesIO.
    files = list of input file names or BytesIOs.
    authoropts = list of authors to use, otherwise add from all input
    titleopt = title, otherwise '<first title> Anthology'
    descopt = description, otherwise '<title> by <author>' list for all input
    tags = dc:subject tags to include, otherwise none.
    languages = dc:language tags to include
    titlenavpoints if true, put in a new TOC entry for each epub, nesting each epub's chapters under it
    originalnavpoints if true, include the original TOCs from each epub
    flattentoc if true, flatten TOC down to one level only.
    coverjpgpath, Path to a jpg to use as cover image.
    '''

    printt = partial(cond_print,printtimes)

    ## Python 2.5 ZipFile is rather more primitive than later
    ## versions.  It can operate on a file, or on a BytesIO, but
    ## not on an open stream.  OTOH, I suspect we would have had
    ## problems with closing and opening again to change the
    ## compression type anyway.

    filecount=0
    t = time()

    ## Write mimetype file, must be first and uncompressed.
    ## Older versions of python(2.4/5) don't allow you to specify
    ## compression by individual file.
    ## Overwrite if existing output file.
    outputepub = ZipFile(outputio, "w", compression=ZIP_STORED, allowZip64=True)
    outputepub.debug = 3
    outputepub.writestr("mimetype", "application/epub+zip")
    outputepub.close()

    ## Re-open file for content.
    outputepub = ZipFile(outputio, "a", compression=ZIP_DEFLATED, allowZip64=True)
    outputepub.debug = 3

    ## Create META-INF/container.xml file.  The only thing it does is
    ## point to content.opf
    containerdom = getDOMImplementation().createDocument(None, "container", None)
    containertop = containerdom.documentElement
    containertop.setAttribute("version","1.0")
    containertop.setAttribute("xmlns","urn:oasis:names:tc:opendocument:xmlns:container")
    rootfiles = containerdom.createElement("rootfiles")
    containertop.appendChild(rootfiles)
    rootfiles.appendChild(newTag(containerdom,"rootfile",{"full-path":"content.opf",
                                                          "media-type":"application/oebps-package+xml"}))
    outputepub.writestr("META-INF/container.xml",containerdom.toprettyxml(indent='   ',encoding='utf-8'))

    ## Process input epubs.

    items = [] # list of (id, href, type) tuples(all strings) -- From .opfs' manifests
    items.append(("ncx","toc.ncx","application/x-dtbncx+xml")) ## we'll generate the toc.ncx file,
                                                               ## but it needs to be in the items manifest.
    itemrefs = [] # list of strings -- idrefs from .opfs' spines
    navmaps = [] # list of navMap DOM elements -- TOC data for each from toc.ncx files
    is_ffdl_epub = [] # list of t/f

    itemhrefs = {} # hash of item[id]s to itemref[href]s -- to find true start of book(s).
    firstitemhrefs = []

    booktitles = [] # list of strings -- Each book's title
    allauthors = [] # list of lists of strings -- Each book's list of authors.

    filelist = []

    printt("prep output:%s"%(time()-t))
    t = time()

    booknum=1
    firstmetadom = None
    for file in files:
        if file is None: continue

        book = "%d" % booknum
        bookdir = "%d/" % booknum
        bookid = "a%d" % booknum

        epub = ZipFile(file, 'r')

        ## Find the .opf file.
        container = epub.read("META-INF/container.xml")
        containerdom = parseString(container)
        rootfilenodelist = containerdom.getElementsByTagNameNS("*","rootfile")
        rootfilename = rootfilenodelist[0].getAttribute("full-path")

        ## Save the path to the .opf file--hrefs inside it are relative to it.
        relpath = get_path_part(rootfilename)

        metadom = parseString(epub.read(rootfilename))
        # logger.debug("metadom:%s"%epub.read(rootfilename))
        if booknum==1 and not source:
            try:
                firstmetadom = metadom.getElementsByTagNameNS("*","metadata")[0]
                source=firstmetadom.getElementsByTagName("dc:source")[0].firstChild.data.encode("utf-8")
            except:
                source=""

        # if the epub was ever edited with Sigil, it changed the unique-identifier,
        # but dc:contributor was left.
        #is_ffdl_epub.append(metadom.documentElement.getAttribute('unique-identifier') == "fanficdownloader-uid")
        is_ffdl_epub.append(False)

        for c in metadom.getElementsByTagName("dc:contributor"):
            # logger.debug("dc:contributor:%s"%getText(c.childNodes))
            if c.getAttribute("opf:role") == "bkp" and \
                    getText(c.childNodes) in ["fanficdownloader [http://fanficdownloader.googlecode.com]",
                                              "FanFicFare [https://github.com/JimmXinu/FanFicFare]"]:
                is_ffdl_epub[-1] = True # set last.
                break

        ## Save indiv book title
        try:
            booktitles.append(metadom.getElementsByTagName("dc:title")[0].firstChild.data)
        except:
            booktitles.append("(Title Missing)")

        ## Save authors.
        authors=[]
        for creator in metadom.getElementsByTagName("dc:creator"):
            try:
                if( creator.getAttribute("opf:role") == "aut" or not creator.hasAttribute("opf:role") and creator.firstChild != None):
                    authors.append(creator.firstChild.data)
            except:
                pass
        if len(authors) == 0:
            authors.append("(Author Missing)")
        allauthors.append(authors)

        if keepmetadatafiles:
            itemid=bookid+"rootfile"
            itemhref = rootfilename
            href=bookdir+itemhref
            logger.debug("write rootfile %s to %s"%(itemhref,href))
            outputepub.writestr(href,
                                epub.read(itemhref))
            items.append((itemid,href,"origrootfile/xml"))

        # spin through the manifest--only place there are item tags.
        # Correction--only place there *should* be item tags.  But
        # somebody found one that did.
        manifesttag=metadom.getElementsByTagNameNS("*","manifest")[0]
        for item in manifesttag.getElementsByTagNameNS("*","item"):
            itemid=bookid+item.getAttribute("id")
            itemhref = normpath(unquote(item.getAttribute("href"))) # remove %20, etc.
            href=bookdir+relpath+itemhref
            # if item.getAttribute("properties") == "nav":
            #     # epub3 TOC file is only one with this type--as far as I know.
            #     # grab the whole navmap, deal with it later.
            # el
            if item.getAttribute("media-type") == "application/x-dtbncx+xml":
                # epub2 TOC file is only one with this type--as far as I know.
                # grab the whole navmap, deal with it later.
                tocdom = parseString(epub.read(normpath(relpath+item.getAttribute("href"))))

                # update all navpoint ids with bookid for uniqueness.
                for navpoint in tocdom.getElementsByTagNameNS("*","navPoint"):
                    navpoint.setAttribute("id",bookid+navpoint.getAttribute("id"))

                # update all content paths with bookdir for uniqueness.
                for content in tocdom.getElementsByTagNameNS("*","content"):
                    content.setAttribute("src",normpath(bookdir+relpath+content.getAttribute("src")))

                navmaps.append(tocdom.getElementsByTagNameNS("*","navMap")[0])

                if keepmetadatafiles:
                    logger.debug("write toc.ncx %s to %s"%(relpath+itemhref,href))
                    outputepub.writestr(href,
                                        epub.read(normpath(relpath+itemhref)))
                    items.append((itemid,href,"origtocncx/xml"))
            else:
                #href=href.encode('utf8')
                logger.debug("item id: %s -> %s:"%(itemid,href))
                itemhrefs[itemid] = href
                if href not in filelist:
                    try:
                        outputepub.writestr(href,
                                            epub.read(normpath(relpath+itemhref)))
                        if re.match(r'.*/(file|chapter)\d+\.x?html',href):
                            filecount+=1
                        items.append((itemid,href,item.getAttribute("media-type")))
                        filelist.append(href)
                    except KeyError as ke: # Skip missing files.
                        logger.info("Skipping missing file %s (%s)"%(href,relpath+itemhref))
                        del itemhrefs[itemid]

        itemreflist = metadom.getElementsByTagNameNS("*","itemref")
        # logger.debug("itemhrefs:%s"%itemhrefs)
        logger.debug("bookid:%s"%bookid)
        logger.debug("itemreflist[0].getAttribute(idref):%s"%itemreflist[0].getAttribute("idref"))

        # Looking for the first item in itemreflist that wasn't
        # discarded due to missing files.
        for itemref in itemreflist:
            idref = bookid+itemref.getAttribute("idref")
            if idref in itemhrefs:
                firstitemhrefs.append(itemhrefs[idref])
                break

        for itemref in itemreflist:
            itemrefs.append(bookid+itemref.getAttribute("idref"))
            # logger.debug("adding to itemrefs:%s"%itemref.toprettyxml())

        booknum = booknum + 1

    printt("after file loop:%s"%(time()-t))
    t = time()

    ## create content.opf file.
    uniqueid="epubmerge-uid-%d" % time() # real sophisticated uid scheme.
    contentdom = getDOMImplementation().createDocument(None, "package", None)
    package = contentdom.documentElement

    package.setAttribute("version","2.0")
    package.setAttribute("xmlns","http://www.idpf.org/2007/opf")
    package.setAttribute("unique-identifier","epubmerge-id")
    metadata=newTag(contentdom,"metadata",
                    attrs={"xmlns:dc":"http://purl.org/dc/elements/1.1/",
                           "xmlns:opf":"http://www.idpf.org/2007/opf"})
    metadata.appendChild(newTag(contentdom,"dc:identifier",text=uniqueid,attrs={"id":"epubmerge-id"}))
    if( titleopt is None ):
        titleopt = booktitles[0]+" Anthology"
    metadata.appendChild(newTag(contentdom,"dc:title",text=titleopt))

    # If cmdline authors, use those instead of those collected from the epubs
    # (allauthors kept for TOC & description gen below.
    if( len(authoropts) > 1  ):
        useauthors=[authoropts]
    else:
        useauthors=allauthors

    usedauthors=dict()
    for authorlist in useauthors:
        for author in authorlist:
            if( author not in usedauthors ):
                usedauthors[author]=author
                metadata.appendChild(newTag(contentdom,"dc:creator",
                                            attrs={"opf:role":"aut"},
                                            text=author))

    metadata.appendChild(newTag(contentdom,"dc:contributor",text="epubmerge",attrs={"opf:role":"bkp"}))
    metadata.appendChild(newTag(contentdom,"dc:rights",text="Copyrights as per source stories"))

    for l in languages:
        metadata.appendChild(newTag(contentdom,"dc:language",text=l))

    if not descopt:
        # created now, but not filled in until TOC generation to save loops.
        description = newTag(contentdom,"dc:description",text="Anthology containing:\n")
    else:
        description = newTag(contentdom,"dc:description",text=descopt)
    metadata.appendChild(description)

    if source:
        metadata.appendChild(newTag(contentdom,"dc:identifier",
                                    attrs={"opf:scheme":"URL"},
                                    text=source))
        metadata.appendChild(newTag(contentdom,"dc:source",
                                    text=source))

    for tag in tags:
        metadata.appendChild(newTag(contentdom,"dc:subject",text=tag))

    package.appendChild(metadata)

    manifest = contentdom.createElement("manifest")
    package.appendChild(manifest)

    spine = newTag(contentdom,"spine",attrs={"toc":"ncx"})
    package.appendChild(spine)

    if coverjpgpath:
        # in case coverjpg isn't a jpg:
        coverext = 'jpg'
        covertype = 'image/jpeg'
        try:
            coverext = coverjpgpath.split('.')[-1].lower()
            covertype = imagetypes.get(coverext,covertype)
        except:
            pass
        logger.debug("coverjpgpath:%s coverext:%s covertype:%s"%(coverjpgpath,coverext,covertype))
        # <meta name="cover" content="cover.jpg"/>
        metadata.appendChild(newTag(contentdom,"meta",{"name":"cover",
                                                       "content":"coverimageid"}))
        guide = newTag(contentdom,"guide")
        guide.appendChild(newTag(contentdom,"reference",attrs={"type":"cover",
                                                   "title":"Cover",
                                                   "href":"cover.xhtml"}))
        package.appendChild(guide)

        manifest.appendChild(newTag(contentdom,"item",
                                    attrs={'id':"coverimageid",
                                           'href':"cover."+coverext,
                                           'media-type':covertype}))

        # Note that the id of the cover xhtml *must* be 'cover'
        # for it to work on Nook.
        manifest.appendChild(newTag(contentdom,"item",
                                    attrs={'id':"cover",
                                           'href':"cover.xhtml",
                                           'media-type':"application/xhtml+xml"}))

        spine.appendChild(newTag(contentdom,"itemref",
                                 attrs={"idref":"cover",
                                        "linear":"yes"}))

    for item in items:
        # logger.debug("new item:%s %s %s"%item)
        (id,href,type)=item
        manifest.appendChild(newTag(contentdom,"item",
                                       attrs={'id':id,
                                              'href':href,
                                              'media-type':type}))

    for itemref in itemrefs:
        # logger.debug("itemref:%s"%itemref)
        spine.appendChild(newTag(contentdom,"itemref",
                                    attrs={"idref":itemref,
                                           "linear":"yes"}))

    ## create toc.ncx file
    tocncxdom = getDOMImplementation().createDocument(None, "ncx", None)
    ncx = tocncxdom.documentElement
    ncx.setAttribute("version","2005-1")
    ncx.setAttribute("xmlns","http://www.daisy.org/z3986/2005/ncx/")
    head = tocncxdom.createElement("head")
    ncx.appendChild(head)
    head.appendChild(newTag(tocncxdom,"meta",
                            attrs={"name":"dtb:uid", "content":uniqueid}))
    depthnode = newTag(tocncxdom,"meta",
                            attrs={"name":"dtb:depth", "content":"4"})
    head.appendChild(depthnode)
    head.appendChild(newTag(tocncxdom,"meta",
                            attrs={"name":"dtb:totalPageCount", "content":"0"}))
    head.appendChild(newTag(tocncxdom,"meta",
                            attrs={"name":"dtb:maxPageNumber", "content":"0"}))

    docTitle = tocncxdom.createElement("docTitle")
    docTitle.appendChild(newTag(tocncxdom,"text",text=titleopt))
    ncx.appendChild(docTitle)

    tocnavMap = tocncxdom.createElement("navMap")
    ncx.appendChild(tocnavMap)

    booknum=0

    printt("wrote initial metadata:%s"%(time()-t))
    t = time()

    for navmap in navmaps:
        depthnavpoints = navmap.getElementsByTagNameNS("*","navPoint") # for checking more than one TOC entry

        # logger.debug( [ x.toprettyxml() for x in navmap.childNodes ] )
        ## only gets top level TOC entries.  sub entries carried inside.
        navpoints = [ x for x in navmap.childNodes if isinstance(x,Element) and x.tagName=="navPoint" ]
        # logger.debug("len(navpoints):%s"%len(navpoints))
        # logger.debug( [ x.toprettyxml() for x in navpoints ] )
        newnav = None
        if titlenavpoints:
            newnav = newTag(tocncxdom,"navPoint",{"id":"book%03d"%booknum})
            navlabel = newTag(tocncxdom,"navLabel")
            newnav.appendChild(navlabel)
            # For purposes of TOC titling & desc, use first book author.  Skip adding author if only one.
            if len(usedauthors) > 1:
                title = booktitles[booknum]+" by "+allauthors[booknum][0]
            else:
                title = booktitles[booknum]

            navlabel.appendChild(newTag(tocncxdom,"text",text=title))
            # Find the first 'spine' item's content for the title navpoint.
            # Many epubs have the first chapter as first navpoint, so we can't just
            # copy that anymore.
            newnav.appendChild(newTag(tocncxdom,"content",
                                      {"src":firstitemhrefs[booknum]}))

            # logger.debug("newnav:%s"%newnav.toprettyxml())
            tocnavMap.appendChild(newnav)
            # logger.debug("tocnavMap:%s"%tocnavMap.toprettyxml())
        else:
            newnav = tocnavMap

        if not descopt and len(allauthors[booknum]) > 0:
            description.appendChild(contentdom.createTextNode(booktitles[booknum]+" by "+allauthors[booknum][0]+"\n"))

        # If only one TOC point(total, not top level), or if not
        # including title nav point, include sub book TOC entries.
        if originalnavpoints and (len(depthnavpoints) > 1 or not titlenavpoints):
            for navpoint in navpoints:
                # logger.debug("navpoint:%s"%navpoint.toprettyxml())
                newnav.appendChild(navpoint)
                navpoint.is_ffdl_epub = is_ffdl_epub[booknum]

        booknum = booknum + 1
        # end of navmaps loop.


    maxdepth = 0
    contentsrcs = {}
    removednodes = []
    ## Force strict ordering of playOrder, stripping out some.
    playorder=0
    # logger.debug("tocncxdom:%s"%tocncxdom.toprettyxml())
    for navpoint in tocncxdom.getElementsByTagNameNS("*","navPoint"):
        # logger.debug("navpoint:%s"%navpoint.toprettyxml())
        if navpoint in removednodes:
            continue
        # need content[src] to compare for dups.  epub wants dup srcs to have same playOrder.
        contentsrc = None
        for n in navpoint.childNodes:
            if isinstance(n,Element) and n.tagName == "content":
                contentsrc = n.getAttribute("src")
                logger.debug("contentsrc: %s"%contentsrc)
                break

        if( contentsrc not in contentsrcs ):

            parent = navpoint.parentNode
            try:
                # if the epub was ever edited with Sigil, it changed
                # the id, but the file name is the same.
                if navpoint.is_ffdl_epub and \
                        ( navpoint.getAttribute("id").endswith('log_page') \
                              or contentsrc.endswith("log_page.xhtml") ):
                    logger.debug("Doing sibs 'filter' 1")
                    sibs = [ x for x in parent.childNodes if isinstance(x,Element) and x.tagName=="navPoint" ]
                    # if only logpage and one chapter, remove them from TOC and just show story.
                    if len(sibs) == 2:
                        parent.removeChild(navpoint)
                        logger.debug("Removing %s:"% sibs[0].getAttribute("playOrder"))
                        parent.removeChild(sibs[1])
                        removednodes.append(sibs[1])
            except:
                pass

            # New src, new number.
            contentsrcs[contentsrc] = navpoint.getAttribute("id")
            playorder += 1
            navpoint.setAttribute("playOrder","%d" % playorder)
            logger.debug("playorder:%d:"%playorder)

            # need to know depth of deepest navpoint for <meta name="dtb:depth" content="2"/>
            npdepth = 1
            dp = navpoint.parentNode
            while dp and dp.tagName != "navMap":
                npdepth += 1
                dp = dp.parentNode

            if npdepth > maxdepth:
                maxdepth = npdepth
        else:
            # same content, look for ffdl and title_page and/or single chapter.

            # easier to just set it now, even if the node gets removed later.
            navpoint.setAttribute("playOrder","%d" % playorder)
            logger.debug("playorder:%d:"%playorder)

            parent = navpoint.parentNode
            try:
                # if the epub was ever edited with Sigil, it changed
                # the id, but the file name is the same.
                if navpoint.is_ffdl_epub and \
                        ( navpoint.getAttribute("id").endswith('title_page') \
                              or contentsrc.endswith("title_page.xhtml") ):
                    parent.removeChild(navpoint)
                    logger.debug("Doing sibs 'filter' 2")
                    sibs = [ x for x in parent.childNodes if isinstance(x,Element) and x.tagName=="navPoint" ]
                    # if only one chapter after removing title_page, remove it too.
                    if len(sibs) == 1:
                        logger.debug("Removing %s:"% sibs[0].getAttribute("playOrder"))
                        parent.removeChild(sibs[0])
                        removednodes.append(sibs[0])
            except:
                pass


    if flattentoc:
        maxdepth = 1
        # already have play order and pesky dup/single chapters
        # removed, just need to flatten.
        flattocnavMap = tocncxdom.createElement("navMap")
        for n in tocnavMap.getElementsByTagNameNS("*","navPoint"):
            flattocnavMap.appendChild(n)

        ncx.replaceChild(flattocnavMap,tocnavMap)

    printt("navmap/toc maddess:%s"%(time()-t))
    t = time()

    depthnode.setAttribute("content","%d"%maxdepth)

    ## content.opf written now due to description being filled in
    ## during TOC generation to save loops.
    contentxml = contentdom.toprettyxml(indent='   ',encoding='utf-8')
    # tweak for brain damaged Nook STR.  Nook insists on name before content.
    contentxml = contentxml.replace(ensure_binary('<meta content="coverimageid" name="cover"/>'),
                                    ensure_binary('<meta name="cover" content="coverimageid"/>'))
    outputepub.writestr("content.opf",contentxml)
    outputepub.writestr("toc.ncx",tocncxdom.toprettyxml(indent='   ',encoding='utf-8'))

    printt("wrote opf/ncx files:%s"%(time()-t))
    t = time()

    if coverjpgpath:
        # write, not write string.  Pulling from file.
        outputepub.write(coverjpgpath,"cover."+coverext)

        outputepub.writestr("cover.xhtml",'''
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en"><head><title>Cover</title><style type="text/css" title="override_css">
@page {padding: 0pt; margin:0pt}
body { text-align: center; padding:0pt; margin: 0pt; }
div { margin: 0pt; padding: 0pt; }
</style></head><body><div>
<img src="cover.'''+coverext+'''" alt="cover"/>
</div></body></html>
''')

    # Declare all the files as created by Windows; otherwise, when
    # this runs in appengine, unzipping gives the files 000 perms.
    for zf in outputepub.filelist:
        zf.create_system = 0
    outputepub.close()

    printt("closed outputepub:%s"%(time()-t))
    t = time()

    return (source,filecount)
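
The merge code above calls a newTag(dom, name, attrs, text) helper that is defined elsewhere in epubmerge. A minimal sketch of what such a helper presumably does, for readers following the OPF/NCX generation (the implementation details are an assumption):

def newTag(dom, name, attrs=None, text=None):
    # Sketch of epubmerge's element-builder helper: create the element,
    # set any attributes, and append an optional text node.
    tag = dom.createElement(name)
    if attrs is not None:
        for attr, value in attrs.items():
            tag.setAttribute(attr, value)
    if text is not None:
        tag.appendChild(dom.createTextNode(text))
    return tag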
Example #21
0
    def add(self, name, data, compress=None, mode=None, skip_duplicates=False):
        """
        Add a new member to the jar archive, with the given name and the given
        data.
        The compress option indicates how the given data should be compressed
        (one of JAR_STORED or JAR_DEFLATED), or compressed according
        to the default defined when creating the JarWriter (None). True and
        False are allowed values for backwards compatibility, mapping,
        respectively, to JAR_DEFLATED and JAR_STORED.
        When the data should be compressed, it is only really compressed if
        the compressed size is smaller than the uncompressed size.
        The mode option gives the unix permissions that should be stored
        for the jar entry.
        If a duplicate member is found, skip_duplicates set to True prevents
        an exception from being raised.
        The given data may be a buffer, a file-like instance, a Deflater or a
        JarFileReader instance. The latter two make it possible to avoid
        decompressing data only to recompress it.
        """
        name = mozpath.normsep(six.ensure_text(name))

        if name in self._contents and not skip_duplicates:
            raise JarWriterError("File %s already in JarWriter" % name)
        if compress is None:
            compress = self._compress
        if compress is True:
            compress = JAR_DEFLATED
        if compress is False:
            compress = JAR_STORED
        if isinstance(data,
                      (JarFileReader, Deflater)) and data.compress == compress:
            deflater = data
        else:
            deflater = Deflater(compress, compress_level=self._compress_level)
            if isinstance(data, (six.binary_type, six.string_types)):
                deflater.write(data)
            elif hasattr(data, "read"):
                try:
                    data.seek(0)
                except (UnsupportedOperation, AttributeError):
                    pass
                deflater.write(data.read())
            else:
                raise JarWriterError("Don't know how to handle %s" %
                                     type(data))
        # Fill a central directory entry for this new member.
        entry = JarCdirEntry()
        entry["creator_version"] = 20
        if mode is not None:
            # Set creator host system (upper byte of creator_version)
            # to 3 (Unix) so mode is honored when there is one.
            entry["creator_version"] |= 3 << 8
            entry["external_attr"] = (mode & 0xFFFF) << 16
        if deflater.compressed:
            entry["min_version"] = 20  # Version 2.0 supports deflated streams
            entry["general_flag"] = 2  # Max compression
            entry["compression"] = deflater.compress
        else:
            entry["min_version"] = 10  # Version 1.0 for stored streams
            entry["general_flag"] = 0
            entry["compression"] = JAR_STORED
        # January 1st, 2010. See bug 592369.
        entry["lastmod_date"] = ((2010 - 1980) << 9) | (1 << 5) | 1
        entry["lastmod_time"] = 0
        entry["crc32"] = deflater.crc32
        entry["compressed_size"] = deflater.compressed_size
        entry["uncompressed_size"] = deflater.uncompressed_size
        entry["filename"] = six.ensure_binary(name)
        self._contents[name] = entry, deflater.compressed_data
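
A hedged usage sketch for the add() method above, assuming mozpack.mozjar's JarWriter supports the context-manager protocol and exports JAR_DEFLATED (both appear true in recent mozbuild, but verify against your tree):

from mozpack.mozjar import JAR_DEFLATED, JarWriter

with JarWriter('example.jar') as jar:
    # Explicit compression choice for one member.
    jar.add('chrome/content/hello.txt', b'hello', compress=JAR_DEFLATED)
    # mode is stored as unix permissions in the zip central directory.
    jar.add('bin/run.sh', b'#!/bin/sh\necho hi\n', mode=0o755)
    # A second add() with the same name raises JarWriterError
    # unless skip_duplicates=True.
    jar.add('chrome/content/hello.txt', b'bye', skip_duplicates=True)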
Example #22
0
def toco_convert_protos(model_flags_str,
                        toco_flags_str,
                        input_data_str,
                        debug_info_str=None,
                        enable_mlir_converter=False):
    """Convert `input_data_str` according to model and toco parameters.

  Unless you know what you are doing consider using
  the more friendly `tf.compat.v1.lite.toco_convert`.

  Args:
    model_flags_str: Serialized proto describing model properties, see
      `toco/model_flags.proto`.
    toco_flags_str: Serialized proto describing conversion properties, see
      `toco/toco_flags.proto`.
    input_data_str: Input data in serialized form (e.g. a graphdef is common)
    debug_info_str: Serialized `GraphDebugInfo` proto describing logging
      information. (default None)
    enable_mlir_converter: Enables MLIR-based conversion instead of the default
      TOCO conversion. (default False)
  Returns:
    Converted model in serialized form (e.g. a TFLITE model is common).
  Raises:
    ConverterError: When conversion fails in TFLiteConverter, usually due to
      ops not being supported.
    RuntimeError: When conversion fails, an exception is raised with the error
      message embedded.
  """
    # TODO(aselle): When toco does not use fatal errors for failure, we can
    # switch this on.
    if not _toco_from_proto_bin:
        try:
            model_str = wrap_toco.wrapped_toco_convert(model_flags_str,
                                                       toco_flags_str,
                                                       input_data_str,
                                                       debug_info_str,
                                                       enable_mlir_converter)
            return model_str
        except Exception as e:
            raise ConverterError(str(e))

    # Windows and TemporaryFile are not that useful together,
    # since you cannot have two readers/writers. So we have to
    # make the temporaries and close and delete them explicitly.
    toco_filename, model_filename, input_filename, output_filename = (None,
                                                                      None,
                                                                      None,
                                                                      None)
    try:
        # Build all input files
        with _tempfile.NamedTemporaryFile(delete=False) as fp_toco, \
                 _tempfile.NamedTemporaryFile(delete=False) as fp_model, \
                 _tempfile.NamedTemporaryFile(delete=False) as fp_input, \
                 _tempfile.NamedTemporaryFile(delete=False) as fp_debug:
            toco_filename = fp_toco.name
            input_filename = fp_input.name
            model_filename = fp_model.name
            debug_filename = fp_debug.name

            fp_model.write(model_flags_str)
            fp_toco.write(toco_flags_str)
            fp_input.write(six.ensure_binary(input_data_str))
            debug_info_str = debug_info_str if debug_info_str else ""
            # if debug_info_str contains a "string value", then the call to
            # fp_debug.write(debug_info_str) will fail with the following error
            #
            # TypeError: a bytes-like object is required, not 'str'
            #
            # Some of the subtests within the "convert_test" unit-test fail
            # with the error shown above. So watch out for that scenario and
            # convert debug_info_str to bytes where needed
            if not isinstance(debug_info_str, bytes):
                fp_debug.write(debug_info_str.encode("utf-8"))
            else:
                fp_debug.write(debug_info_str)

        # Reserve an output file
        with _tempfile.NamedTemporaryFile(delete=False) as fp:
            output_filename = fp.name

        # Run
        cmd = [
            _toco_from_proto_bin,
            model_filename,
            toco_filename,
            input_filename,
            output_filename,
            "--debug_proto_file={}".format(debug_filename),
        ]
        if enable_mlir_converter:
            cmd.append("--enable_mlir_converter")
        cmdline = " ".join(cmd)
        is_windows = _platform.system() == "Windows"
        proc = _subprocess.Popen(cmdline,
                                 shell=True,
                                 stdout=_subprocess.PIPE,
                                 stderr=_subprocess.STDOUT,
                                 close_fds=not is_windows)
        stdout, stderr = proc.communicate()
        exitcode = proc.returncode
        if exitcode == 0:
            with open(output_filename, "rb") as fp:
                return fp.read()
        else:
            stdout = _try_convert_to_unicode(stdout)
            stderr = _try_convert_to_unicode(stderr)
            raise ConverterError("See console for info.\n%s\n%s\n" %
                                 (stdout, stderr))
    finally:
        # Must manually cleanup files.
        for filename in [
                toco_filename, input_filename, model_filename, output_filename
        ]:
            try:
                _os.unlink(filename)
            except (OSError, TypeError):
                pass
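
The str-versus-bytes comment around fp_debug.write() above describes exactly the case six.ensure_binary handles; a standalone illustration (the payload value is hypothetical):

import os
import tempfile

import six

payload = 'serialized proto or other text'  # may arrive as str or bytes
with tempfile.NamedTemporaryFile(delete=False) as fp:
    # fp is opened in binary mode, so fp.write(payload) raises
    # TypeError for str on Python 3; ensure_binary encodes str to
    # UTF-8 bytes and passes bytes through untouched.
    fp.write(six.ensure_binary(payload))
os.unlink(fp.name)  # clean up the temporary file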
Example #23
0
    def render(self, request):
        action = getUrlArg(request, "action", "download")
        file = getUrlArg(request, "file")

        if file is not None:
            filename = lenient_force_utf_8(file)
            filename = sanitise_filename_slashes(os.path.realpath(filename))

            if not os.path.exists(filename):
                return "File '%s' not found" % (filename)

            if action == "stream":
                name = getUrlArg(request, "name", "stream")
                port = config.OpenWebif.port.value
                proto = 'http'
                if request.isSecure():
                    port = config.OpenWebif.https_port.value
                    proto = 'https'
                ourhost = request.getHeader('host')
                m = re.match(r'.+:(\d+)$', ourhost)
                if m is not None:
                    port = m.group(1)

                response = "#EXTM3U\n#EXTVLCOPT--http-reconnect=true\n#EXTINF:-1,%s\n%s://%s:%s/file?action=download&file=%s" % (
                    name, proto, request.getRequestHostname(), port,
                    quote(filename))
                request.setHeader("Content-Disposition",
                                  'attachment;filename="%s.m3u"' % name)
                request.setHeader("Content-Type", "application/x-mpegurl")
                return response
            elif action == "delete":
                request.setResponseCode(http.OK)
                return "TODO: DELETE FILE: %s" % (filename)
            elif action == "download":
                request.setHeader(
                    "Content-Disposition",
                    "attachment;filename=\"%s\"" % (filename.split('/')[-1]))
                rfile = static.File(six.ensure_binary(filename),
                                    defaultType="application/octet-stream")
                return rfile.render(request)
            else:
                return "wrong action parameter"

        path = getUrlArg(request, "dir")
        if path is not None:
            pattern = getUrlArg(request, "pattern", "*")
            nofiles = getUrlArg(request, "nofiles") is not None
            directories = []
            files = []
            request.setHeader("content-type",
                              "application/json; charset=utf-8")
            if fileExists(path):
                if path == '/':
                    path = ''
                try:
                    files = glob.glob(path + '/' + pattern)
                except:  # nosec # noqa: E722
                    files = []
                files.sort()
                tmpfiles = files[:]
                for x in tmpfiles:
                    if os.path.isdir(x):
                        directories.append(x + '/')
                        files.remove(x)
                if nofiles:
                    files = []
                return json.dumps(
                    {
                        "result": True,
                        "dirs": directories,
                        "files": files
                    },
                    indent=2)
            else:
                return json.dumps(
                    {
                        "result": False,
                        "message": "path %s not exits" % (path)
                    },
                    indent=2)
Example #24
0
def is_psv(buf):
    '''If the buffer is a PSV file then return True.'''
    buf_rows = six.BytesIO(six.ensure_binary(buf))
    table_set = messytables.CSVTableSet(buf_rows, delimiter='|')
    return _is_spreadsheet(table_set, 'PSV')
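
A hedged usage sketch, assuming messytables is installed and the _is_spreadsheet helper is importable alongside is_psv:

print(is_psv(b'name|qty\nwidget|3\n'))  # expected: True for pipe-delimited data
print(is_psv(u'name|qty\nwidget|3\n'))  # text also works; ensure_binary encodes it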
Example #25
0
 def email_hash(self):
     e = ''
     if self.email:
         e = self.email.strip().lower().encode('utf8')
     return md5(six.ensure_binary(e)).hexdigest()
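
The normalize-then-hash pattern above is the usual Gravatar-style email digest; the same steps as a self-contained sketch:

import hashlib

import six

def email_digest(email):
    # Strip whitespace and lowercase first, so equivalent addresses
    # hash to the same value.
    normalized = email.strip().lower() if email else ''
    return hashlib.md5(six.ensure_binary(normalized)).hexdigest()

print(email_digest('  Alice@Example.COM '))  # same digest as 'alice@example.com'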
Example #26
0
def _Utf8ToBase64(s):
    """Encode a utf-8 string as a base 64 string."""
    return six.ensure_text(base64.b64encode(six.ensure_binary(s)))
  def CreateObject(self,
                   bucket_uri=None,
                   object_name=None,
                   contents=None,
                   prefer_json_api=False,
                   encryption_key=None,
                   mode=None,
                   mtime=None,
                   uid=None,
                   gid=None,
                   storage_class=None,
                   gs_idempotent_generation=0,
                   kms_key_name=None):
    """Creates a test object.

    Args:
      bucket_uri: The URI of the bucket to place the object in. If not
          specified, a new temporary bucket is created.
      object_name: The name to use for the object. If not specified, a temporary
          test object name is constructed.
      contents: The contents to write to the object. If not specified, the key
          is not written to, which means that it isn't actually created
          yet on the server.
      prefer_json_api: If true, use the JSON creation functions where possible.
      encryption_key: AES256 encryption key to use when creating the object,
          if any.
      mode: The POSIX mode for the object. Must be a base-8 3-digit integer
          represented as a string.
      mtime: The modification time of the file in POSIX time (seconds since
          UTC 1970-01-01). If not specified, this defaults to the current
          system time.
      uid: A POSIX user ID.
      gid: A POSIX group ID.
      storage_class: String representing the storage class to use for the
          object.
      gs_idempotent_generation: For use when overwriting an object for which
          you know the previously uploaded generation. Create GCS object
          idempotently by supplying this generation number as a precondition
          and assuming the current object is correct on precondition failure.
          Defaults to 0 (new object); to disable, set to None.
      kms_key_name: Fully-qualified name of the KMS key that should be used to
          encrypt the object. Note that this is currently only valid for 'gs'
          objects.

    Returns:
      A StorageUri for the created object.
    """
    bucket_uri = bucket_uri or self.CreateBucket()
    # checking for valid types - None or unicode/binary text
    if contents is not None:
      if not isinstance(contents, (six.binary_type, six.text_type)):
        raise TypeError('contents must be either None, text, or bytes, not {}'.format(
            type(contents)))
      contents = six.ensure_binary(contents)
    if (contents and bucket_uri.scheme == 'gs' and
        (prefer_json_api or encryption_key or kms_key_name)):

      object_name = object_name or self.MakeTempName('obj')
      json_object = self.CreateObjectJson(
          contents=contents,
          bucket_name=bucket_uri.bucket_name,
          object_name=object_name,
          encryption_key=encryption_key,
          mtime=mtime,
          storage_class=storage_class,
          gs_idempotent_generation=gs_idempotent_generation,
          kms_key_name=kms_key_name)
      object_uri = bucket_uri.clone_replace_name(object_name)
      # pylint: disable=protected-access
      # Need to update the StorageUri with the correct values while
      # avoiding creating a versioned string.

      md5 = (Base64ToHexHash(json_object.md5Hash),
             json_object.md5Hash.strip('\n"\''))
      object_uri._update_from_values(None,
                                     json_object.generation,
                                     True,
                                     md5=md5)
      # pylint: enable=protected-access
      return object_uri

    bucket_uri = bucket_uri or self.CreateBucket()
    object_name = object_name or self.MakeTempName('obj')
    key_uri = bucket_uri.clone_replace_name(object_name)
    if contents is not None:
      if bucket_uri.scheme == 'gs' and gs_idempotent_generation is not None:
        try:
          key_uri.set_contents_from_string(contents,
                                           headers={
                                               'x-goog-if-generation-match':
                                               str(gs_idempotent_generation)
                                           })
        except StorageResponseError as e:
          if e.status == 412:
            pass
          else:
            raise
      else:
        key_uri.set_contents_from_string(contents)
    custom_metadata_present = (mode is not None or mtime is not None or
                               uid is not None or gid is not None)
    if custom_metadata_present:
      self.SetPOSIXMetadata(bucket_uri.scheme,
                            bucket_uri.bucket_name,
                            object_name,
                            atime=None,
                            mtime=mtime,
                            uid=uid,
                            gid=gid,
                            mode=mode)
    return key_uri
def download(url, file, writeProgress=None, contextFactory=None,
	*args, **kwargs):
	"""Download a remote file and provide current-/total-length.

	@param file: path to file on filesystem, or file-like object.
	@param writeProgress: function or list of functions taking two parameters (pos, length)

	See HTTPDownloader to see what extra args can be passed if remote file
	is accessible via http or https. Both Backends should offer supportPartial.
	"""

	scheme, host, port, path, username, password = _parse(url)

	if scheme == 'ftp':
		from .FTPProgressDownloader import FTPProgressDownloader

		if not (username and password):
			username = '******'
			password = '******'

		client = FTPProgressDownloader(
			host,
			port,
			path,
			file,
			username,
			password,
			writeProgress,
			*args,
			**kwargs
		)
		return client.deferred

	# We force username and password here as we lack a satisfying input method
	if username and password:
		from six import PY3, ensure_binary
		from base64 import b64encode

		# twisted will crash if we don't rewrite this ;-)
		url = scheme + '://' + host + ':' + str(port) + path

		base64string = "%s:%s" % (username, password)
		base64string = b64encode(ensure_binary(base64string))
		if PY3:
			base64string = base64string.decode()
		AuthHeaders = {"Authorization": "Basic %s" % base64string}

		if "headers" in kwargs:
			kwargs["headers"].update(AuthHeaders)
		else:
			kwargs["headers"] = AuthHeaders

	from .HTTPProgressDownloader import HTTPProgressDownloader
	from twisted.internet import reactor

	factory = HTTPProgressDownloader(url, file, writeProgress, *args, **kwargs)
	if scheme == 'https':
		from twisted.internet import ssl
		if contextFactory is None:
			contextFactory = ssl.ClientContextFactory()
		reactor.connectSSL(host, port, factory, contextFactory)
	else:
		reactor.connectTCP(host, port, factory)

	return factory.deferred
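
The Authorization header built inside download() is standard HTTP Basic auth (RFC 7617); the same step as a standalone sketch:

from base64 import b64encode

from six import ensure_binary, ensure_text

def basic_auth_header(username, password):
    # base64("user:pass"), sent as 'Authorization: Basic <token>'.
    token = b64encode(ensure_binary('%s:%s' % (username, password)))
    return {'Authorization': 'Basic %s' % ensure_text(token)}

print(basic_auth_header('user', 'secret'))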
Example #29
0
 def _fourcc(self):
     return six.ensure_binary(
         struct.pack('!I', self._buffer.read_bits(32)), encoding='ascii'
     )
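
For reference, the same packing outside the bit-reader context; the 32-bit value below is a hypothetical example:

import struct

import six

# 0x66747970 is the big-endian spelling of the ASCII FOURCC 'ftyp'.
fourcc = six.ensure_binary(struct.pack('!I', 0x66747970), encoding='ascii')
print(fourcc)  # b'ftyp'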
Example #30
0
    def _rewrite(self, original_model: rewriter.ModelDescription,
                 rewritten_model: rewriter.ModelDescription):
        """Rewrites the provided model.

    Args:
      original_model: A `ModelDescription` specifying the original model to be
        rewritten.
      rewritten_model: A `ModelDescription` specifying the format and location
        of the rewritten model.

    Raises:
      ValueError: If the model could not be successfully rewritten.
    """
        if rewritten_model.model_type not in [
                rewriter.ModelType.TFLITE_MODEL, rewriter.ModelType.ANY_MODEL
        ]:
            raise ValueError(
                'TFLiteConverter can only convert to the TFLite format.')

        # TODO(dzats): We create a temporary directory with a SavedModel that does
        # not contain an assets or assets.extra directory. Remove this when the
        # TFLite converter can convert models having these directories.
        tmp_model_dir = os.path.join(six.ensure_text(rewritten_model.path),
                                     'tmp-rewrite-' + str(int(time.time())))
        if fileio.exists(tmp_model_dir):
            raise ValueError(
                'TFLiteConverter is unable to create a unique path '
                'for the temp rewriting directory.')

        fileio.makedirs(tmp_model_dir)
        _create_tflite_compatible_saved_model(
            six.ensure_text(original_model.path), tmp_model_dir)

        converter = _create_tflite_converter(
            saved_model_path=tmp_model_dir,
            quantization_optimizations=self._quantization_optimizations,
            quantization_supported_types=self._quantization_supported_types,
            # TODO(b/175699054): Enable once data API is landed.
            input_data=self._input_data)
        tflite_model = converter.convert()

        output_path = os.path.join(six.ensure_text(rewritten_model.path),
                                   self._filename)
        with fileio.open(six.ensure_text(output_path), 'wb') as f:
            f.write(six.ensure_binary(tflite_model))
        fileio.rmtree(tmp_model_dir)

        copy_pairs = []
        if self._copy_assets:
            src = os.path.join(six.ensure_text(original_model.path),
                               tf.saved_model.ASSETS_DIRECTORY)
            dst = os.path.join(six.ensure_text(rewritten_model.path),
                               tf.saved_model.ASSETS_DIRECTORY)
            if fileio.isdir(src):
                fileio.mkdir(dst)
                copy_pairs.append((src, dst))
        if self._copy_assets_extra:
            src = os.path.join(six.ensure_text(original_model.path),
                               EXTRA_ASSETS_DIRECTORY)
            dst = os.path.join(six.ensure_text(rewritten_model.path),
                               EXTRA_ASSETS_DIRECTORY)
            if fileio.isdir(src):
                fileio.mkdir(dst)
                copy_pairs.append((src, dst))
        for src, dst in copy_pairs:
            io_utils.copy_dir(src, dst)
Example #31
0
 def _pack_unpack(format_str, *args):
     """wrapper for struct.pack function that converts unicode format string to 'str'"""
     binary_format_str = six.ensure_binary(format_str)
     return f(binary_format_str, *args)
 def getPLSContent(self):
     print("loading PLS of stream ", self.name, self.url)
     getPage(six.ensure_binary(self.url)).addCallback(self._gotPLSContent).addErrback(self._errorPLSContent)
Example #33
0
 def __call__(self, r):
     r.headers[b'Authorization'] = ensure_binary('Bearer {}'.format(
         self.token))
     return r
Example #34
0
 def sha1(self):
     return hashlib.sha1(six.ensure_binary(self.data)).hexdigest()
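
Equivalent standalone call, hashing text after encoding it:

import hashlib

import six

data = u'any text or bytes payload'
print(hashlib.sha1(six.ensure_binary(data)).hexdigest())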
def generate_signed_url(service_account_file,
                        bucket_name,
                        object_name,
                        subresource=None,
                        expiration=604800,
                        http_method='GET',
                        query_parameters=None,
                        headers=None):

    if expiration > 604800:
        print('Expiration Time can\'t be longer than 604800 seconds (7 days).')
        sys.exit(1)

    escaped_object_name = quote(six.ensure_binary(object_name), safe=b'/~')
    canonical_uri = '/{}'.format(escaped_object_name)

    datetime_now = datetime.datetime.utcnow()
    request_timestamp = datetime_now.strftime('%Y%m%dT%H%M%SZ')
    datestamp = datetime_now.strftime('%Y%m%d')

    google_credentials = service_account.Credentials.from_service_account_file(
        service_account_file)
    client_email = google_credentials.service_account_email
    credential_scope = '{}/auto/storage/goog4_request'.format(datestamp)
    credential = '{}/{}'.format(client_email, credential_scope)

    if headers is None:
        headers = dict()
    host = '{}.storage.googleapis.com'.format(bucket_name)
    headers['host'] = host

    canonical_headers = ''
    ordered_headers = collections.OrderedDict(sorted(headers.items()))
    for k, v in ordered_headers.items():
        lower_k = str(k).lower()
        strip_v = str(v).lower()
        canonical_headers += '{}:{}\n'.format(lower_k, strip_v)

    signed_headers = ''
    for k, _ in ordered_headers.items():
        lower_k = str(k).lower()
        signed_headers += '{};'.format(lower_k)
    signed_headers = signed_headers[:-1]  # remove trailing ';'

    if query_parameters is None:
        query_parameters = dict()
    query_parameters['X-Goog-Algorithm'] = 'GOOG4-RSA-SHA256'
    query_parameters['X-Goog-Credential'] = credential
    query_parameters['X-Goog-Date'] = request_timestamp
    query_parameters['X-Goog-Expires'] = expiration
    query_parameters['X-Goog-SignedHeaders'] = signed_headers
    if subresource:
        query_parameters[subresource] = ''

    canonical_query_string = ''
    ordered_query_parameters = collections.OrderedDict(
        sorted(query_parameters.items()))
    for k, v in ordered_query_parameters.items():
        encoded_k = quote(str(k), safe='')
        encoded_v = quote(str(v), safe='')
        canonical_query_string += '{}={}&'.format(encoded_k, encoded_v)
    canonical_query_string = canonical_query_string[:-1]  # remove trailing '&'

    canonical_request = '\n'.join([
        http_method, canonical_uri, canonical_query_string, canonical_headers,
        signed_headers, 'UNSIGNED-PAYLOAD'
    ])

    canonical_request_hash = hashlib.sha256(
        canonical_request.encode()).hexdigest()

    string_to_sign = '\n'.join([
        'GOOG4-RSA-SHA256', request_timestamp, credential_scope,
        canonical_request_hash
    ])

    # signer.sign() signs using RSA-SHA256 with PKCS1v15 padding
    signature = binascii.hexlify(
        google_credentials.signer.sign(string_to_sign)).decode()

    # scheme_and_host = '{}://{}'.format('https', host)
    # signed_url = '{}{}?{}&x-goog-signature={}'.format(
    #    canonical_uri, canonical_query_string, signature)
    signed_url = '{}?{}&x-goog-signature={}'.format(canonical_uri,
                                                    canonical_query_string,
                                                    signature)

    return signed_url
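
A hedged call sketch for the V4 signing helper above; the key file, bucket, and object names are hypothetical placeholders:

url = generate_signed_url(
    service_account_file='service-account.json',  # hypothetical key file
    bucket_name='my-bucket',
    object_name='path/to/object.txt',
    expiration=3600,  # one hour, well under the 604800-second cap
    http_method='GET')
# The helper returns path + query string; prepend the virtual-hosted host.
print('https://my-bucket.storage.googleapis.com' + url)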
Example #36
0
  def CreateTempFile(self,
                     tmpdir=None,
                     contents=None,
                     file_name=None,
                     mtime=None,
                     mode=NA_MODE,
                     uid=NA_ID,
                     gid=NA_ID):
    """Creates a temporary file on disk.

    Note: if mode, uid, or gid are present, they must be validated by
    ValidateFilePermissionAccess and ValidatePOSIXMode before calling this
    function.

    Args:
      tmpdir: The temporary directory to place the file in. If not specified, a
              new temporary directory is created.
      contents: The contents to write to the file. If not specified, a test
                string is constructed and written to the file. Since the file
                is opened 'wb', the contents must be bytes.
      file_name: The name to use for the file. If not specified, a temporary
                 test file name is constructed. This can also be a tuple, where
                 ('dir', 'foo') means to create a file named 'foo' inside a
                 subdirectory named 'dir'.
      mtime: The modification time of the file in POSIX time (seconds since
             UTC 1970-01-01). If not specified, this defaults to the current
             system time.
      mode: The POSIX mode for the file. Must be a base-8 3-digit integer
            represented as a string.
      uid: A POSIX user ID.
      gid: A POSIX group ID.

    Returns:
      The path to the new temporary file.
    """

    tmpdir = six.ensure_str(tmpdir or self.CreateTempDir())
    file_name = file_name or self.MakeTempName(str('file'))
    if isinstance(file_name, (six.text_type, six.binary_type)):
      fpath = os.path.join(tmpdir, six.ensure_str(file_name))
    else:
      file_name = map(six.ensure_str, file_name)
      fpath = os.path.join(tmpdir, *file_name)
    if not os.path.isdir(os.path.dirname(fpath)):
      os.makedirs(os.path.dirname(fpath))
    if isinstance(fpath, six.binary_type):
      fpath = fpath.decode(UTF8)

    with open(fpath, 'wb') as f:
      contents = (contents if contents is not None else self.MakeTempName(
          str('contents')))
      if isinstance(contents, bytearray):
        contents = bytes(contents)
      else:
        contents = six.ensure_binary(contents)
      f.write(contents)
    if mtime is not None:
      # Set the atime and mtime to be the same.
      os.utime(fpath, (mtime, mtime))
    if uid != NA_ID or int(gid) != NA_ID:
      os.chown(fpath, uid, int(gid))
    if int(mode) != NA_MODE:
      os.chmod(fpath, int(mode, 8))
    return fpath
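
The bytearray special case near the end exists because six.ensure_binary only accepts text or bytes; a quick illustration:

import six

six.ensure_binary(u'text')   # -> b'text'
six.ensure_binary(b'bytes')  # -> b'bytes' (passed through)
try:
    six.ensure_binary(bytearray(b'raw'))
except TypeError:
    # bytearray is neither text nor bytes to six, hence the
    # bytes(contents) conversion before calling ensure_binary.
    print('bytearray must be converted with bytes() first')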
Example #37
0
    def generate_ansible_command(self):
        """
        Given that the ``RunnerConfig`` preparation methods have been run to gather the inputs this method
        will generate the ``ansible`` or ``ansible-playbook`` command that will be used by the
        :py:class:`ansible_runner.runner.Runner` object to start the process
        """
        if self.binary is not None:
            base_command = self.binary
            self.execution_mode = ExecutionMode.RAW
        elif self.module is not None:
            base_command = 'ansible'
            self.execution_mode = ExecutionMode.ANSIBLE
        else:
            base_command = 'ansible-playbook'
            self.execution_mode = ExecutionMode.ANSIBLE_PLAYBOOK

        exec_list = [base_command]

        try:
            if self.cmdline_args:
                cmdline_args = self.cmdline_args
            else:
                cmdline_args = self.loader.load_file('env/cmdline', string_types, encoding=None)

            if six.PY2:
                cmdline_args = ensure_binary(cmdline_args, encoding='utf-8')

            args = shlex.split(cmdline_args)
            exec_list.extend(args)
        except ConfigurationError:
            pass

        if self.inventory is None:
            pass
        elif isinstance(self.inventory, list):
            for i in self.inventory:
                exec_list.append("-i")
                exec_list.append(i)
        else:
            exec_list.append("-i")
            exec_list.append(self.inventory)

        if self.limit is not None:
            exec_list.append("--limit")
            exec_list.append(self.limit)

        if self.loader.isfile('env/extravars'):
            exec_list.extend(['-e', '@{}'.format(self.loader.abspath('env/extravars'))])

        if self.extra_vars:
            if isinstance(self.extra_vars, dict) and self.extra_vars:
                exec_list.extend(
                    [
                        '-e',
                        '%s' % ' '.join(
                            ["{}=\"{}\"".format(k, self.extra_vars[k]) for k in self.extra_vars]
                        )
                    ]
                )
            elif self.loader.isfile(self.extra_vars):
                exec_list.extend(['-e', '@{}'.format(self.loader.abspath(self.extra_vars))])

        if self.verbosity:
            v = 'v' * self.verbosity
            exec_list.append('-{}'.format(v))

        if self.tags:
            exec_list.extend(['--tags', '{}'.format(self.tags)])

        if self.skip_tags:
            exec_list.extend(['--skip-tags', '{}'.format(self.skip_tags)])

        if self.forks:
            exec_list.extend(['--forks', '{}'.format(self.forks)])

        # Other parameters
        if self.execution_mode == ExecutionMode.ANSIBLE_PLAYBOOK:
            exec_list.append(self.playbook)
        elif self.execution_mode == ExecutionMode.ANSIBLE:
            exec_list.append("-m")
            exec_list.append(self.module)

            if self.module_args is not None:
                exec_list.append("-a")
                exec_list.append(self.module_args)

            if self.host_pattern is not None:
                exec_list.append(self.host_pattern)

        return exec_list
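
The extra-vars branch above flattens a dict into a single -e argument; the same join as a standalone sketch (the playbook name is a placeholder):

extra_vars = {'env': 'staging', 'version': '1.2.3'}
arg = ' '.join('{}="{}"'.format(k, extra_vars[k]) for k in extra_vars)
exec_list = ['ansible-playbook', '-e', arg, 'site.yml']
print(exec_list)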
Example #38
0
def post_ratelimited(protocol,
                     session,
                     url,
                     headers,
                     data,
                     allow_redirects=False,
                     stream=False,
                     timeout=None):
    """
    There are two error-handling policies implemented here: a fail-fast policy intended for stand-alone scripts which
    fails on all responses except HTTP 200. The other policy is intended for long-running tasks that need to respect
    rate-limiting errors from the server and paper over outages of up to 1 hour.

    Wrap POST requests in a try-catch loop with a lot of error handling logic and some basic rate-limiting. If a request
    fails, and some conditions are met, the loop waits in increasing intervals, up to 1 hour, before trying again. The
    reason for this is that servers often malfunction for short periods of time, either because of ongoing data
    migrations or other maintenance tasks, misconfigurations or heavy load, or because the connecting user has hit a
    throttling policy limit.

    If the loop exited early, consumers of exchangelib that don't implement their own rate-limiting code could quickly
    swamp such a server with new requests. That would only make things worse. Instead, it's better if the request loop
    waits patiently until the server is functioning again.

    If the connecting user has hit a throttling policy, then the server will start to malfunction in many interesting
    ways, but never actually tell the user what is happening. There is no way to distinguish this situation from other
    malfunctions. The only cure is to stop making requests.

    The contract on sessions here is to return the session that ends up being used, or to retire the session if we
    intend to raise an exception. We give up after a max_wait timeout, not after a number of retries.

    An additional resource on handling throttling policies and client back off strategies:
        https://msdn.microsoft.com/en-us/library/office/jj945066(v=exchg.150).aspx#bk_ThrottlingBatch
    """
    thread_id = get_ident()
    wait = 10  # seconds
    retry = 0
    redirects = 0
    # In Python 2, we want this to be a 'str' object so logging doesn't break (all formatting arguments are 'str').
    # We activated 'unicode_literals' at the top of this file, so it would be a 'unicode' object unless we convert
    # to 'str' explicitly. This is a no-op for Python 3.
    log_msg = str('''\
Retry: %(retry)s
Waited: %(wait)s
Timeout: %(timeout)s
Session: %(session_id)s
Thread: %(thread_id)s
Auth type: %(auth)s
URL: %(url)s
HTTP adapter: %(adapter)s
Allow redirects: %(allow_redirects)s
Response time: %(response_time)s
Status code: %(status_code)s
Request headers: %(request_headers)s
Response headers: %(response_headers)s
Request data: %(xml_request)s
Response data: %(xml_response)s
''')
    log_vals = dict(
        retry=retry,
        wait=wait,
        timeout=protocol.TIMEOUT,
        session_id=session.session_id,
        thread_id=thread_id,
        auth=session.auth,
        url=url,
        adapter=session.get_adapter(url),
        allow_redirects=allow_redirects,
        response_time=None,
        status_code=None,
        request_headers=headers,
        response_headers=None,
        xml_request=data,
        xml_response=None,
    )
    try:
        while True:
            _back_off_if_needed(protocol.credentials.back_off_until)
            log.debug(
                'Session %s thread %s: retry %s timeout %s POST\'ing to %s after %ss wait',
                session.session_id, thread_id, retry, protocol.TIMEOUT, url,
                wait)
            d_start = time_func()
            # Always create a dummy response for logging purposes, in case we fail in the following
            r = DummyResponse(url=url, headers={}, request_headers=headers)
            try:
                data = ensure_binary(data)
            except UnicodeDecodeError:
                try:
                    data = data.decode('utf-8').encode('utf-8')
                except UnicodeDecodeError:
                    import chardet
                    encoding_info = chardet.detect(data)
                    data = data.decode('utf-8').encode(
                        encoding_info['encoding'])
            try:
                r = session.post(url=url,
                                 headers=headers,
                                 data=data,
                                 allow_redirects=False,
                                 timeout=(timeout or protocol.TIMEOUT),
                                 stream=stream)
            except CONNECTION_ERRORS as e:
                log.debug(
                    'Session %s thread %s: connection error POST\'ing to %s',
                    session.session_id, thread_id, url)
                r = DummyResponse(url=url,
                                  headers={'TimeoutException': e},
                                  request_headers=headers)
            finally:
                log_vals.update(
                    retry=retry,
                    wait=wait,
                    timeout=(timeout or protocol.TIMEOUT),
                    session_id=session.session_id,
                    url=str(r.url),
                    response_time=time_func() - d_start,
                    status_code=r.status_code,
                    request_headers=r.request.headers,
                    response_headers=None if stream else r.headers,
                    xml_request=data,
                    xml_response=None if stream else r.content,
                )
            log.debug(log_msg, log_vals)
            if _may_retry_on_error(r, protocol, wait):
                log.info(
                    "Session %s thread %s: Connection error on URL %s (code %s). Cool down %s secs",
                    session.session_id, thread_id, r.url, r.status_code, wait)
                time.sleep(wait)  # Increase delay for every retry
                retry += 1
                wait *= 2
                session = protocol.renew_session(session)
                continue
            if r.status_code in (301, 302):
                url, redirects = _redirect_or_fail(r, redirects,
                                                   allow_redirects)
                continue
            break
    except (RateLimitError, RedirectError) as e:
        log.warning(e.value)
        protocol.retire_session(session)
        raise
    except Exception as e:
        # Let higher layers handle this. Add full context for better debugging.
        log.error(str('%s: %s\n%s'), e.__class__.__name__, str(e),
                  log_msg % log_vals)
        protocol.retire_session(session)
        raise
    if r.status_code == 500 and r.content and is_xml(r.content):
        # Some genius at Microsoft thinks it's OK to send a valid SOAP response as an HTTP 500
        log.debug('Got status code %s but trying to parse content anyway',
                  r.status_code)
    elif r.status_code != 200:
        protocol.retire_session(session)
        _raise_response_errors(r, protocol, log_msg,
                               log_vals)  # Always raises an exception
    log.debug('Session %s thread %s: Useful response from %s',
              session.session_id, thread_id, url)
    return r, session
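
The retry loop above doubles its wait on every failed attempt; the bare back-off schedule, extracted for clarity (the one-hour ceiling is enforced elsewhere via max_wait and is assumed here):

MAX_WAIT = 3600  # assumed one-hour ceiling, per the docstring

wait, retry = 10, 0
while wait <= MAX_WAIT:
    print('retry %d: would sleep %ds before re-POSTing' % (retry, wait))
    retry += 1
    wait *= 2  # 10, 20, 40, ... seconds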
Example #39
0
 def test_errors_and_encoding(self):
     six.ensure_binary(self.UNICODE_EMOJI, encoding='latin-1', errors='ignore')
     with py.test.raises(UnicodeEncodeError):
         six.ensure_binary(self.UNICODE_EMOJI, encoding='latin-1', errors='strict')
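
The behaviour the test above pins down, shown as a standalone sketch (an emoji that latin-1 cannot represent):

import six

emoji = u'\U0001F600'
print(six.ensure_binary(emoji, encoding='latin-1', errors='ignore'))  # b''
try:
    six.ensure_binary(emoji, encoding='latin-1', errors='strict')
except UnicodeEncodeError as e:
    print('strict mode raises:', e)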