Example #1
    def _load_metadata(self, metadata):
        the_metadata = metadata

        if the_metadata is None:
            the_metadata = {}

        if isinstance(the_metadata, six.string_types):
            try:
                if os.path.isfile(the_metadata):
                    with open(the_metadata, 'r') as f:
                        the_metadata = json.load(f)
                else:
                    req = requests.get(the_metadata)
                    req.raise_for_status()
                    the_metadata = req.json()
            except (IOError,
                    ValueError,
                    requests.exceptions.RequestException) as e:
                msg = 'Unable to load JSON at \'{0}\''.format(metadata)
                six.raise_from(DataPackageException(msg), e)

        if hasattr(the_metadata, 'read'):
            try:
                the_metadata = json.load(the_metadata)
            except ValueError as e:
                six.raise_from(DataPackageException(str(e)), e)

        if not isinstance(the_metadata, dict):
            msg = 'Data must be a \'dict\', but was a \'{0}\''
            raise DataPackageException(msg.format(type(the_metadata).__name__))

        return the_metadata
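The pattern above is worth a standalone illustration: six.raise_from(new, cause) behaves like Python 3's "raise new from cause", setting new.__cause__ so the original traceback is reported as the direct cause. A minimal, self-contained sketch (the exception class here is a stand-in, not the real datapackage one):

import json
import six

class DataPackageException(Exception):
    pass

def load_metadata(text):
    try:
        return json.loads(text)
    except ValueError as e:
        # On Python 3 this sets __cause__, chaining the tracebacks;
        # on Python 2 it degrades to a plain raise of the new exception.
        six.raise_from(DataPackageException('Unable to load JSON'), e)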
Example #2
    def delete_plan(self, plan_name):
        """Deletes a plan and associated files

        Deletes a plan by deleting the container matching plan_name. It
        will not delete the plan if a stack exists with the same name.

        Raises StackInUseError if a stack with the same name as plan_name
        exists.

        :param plan_name: The name of the container to delete
        :type plan_name: str
        """
        # heat throws HTTPNotFound if the stack is not found
        try:
            stack = self.heatclient.stacks.get(plan_name)
            if stack is not None:
                raise exception.StackInUseError(name=plan_name)
        except heatexceptions.HTTPNotFound:
            try:
                self.plan_store.delete(plan_name)
            except swiftexceptions.ClientException as ce:
                LOG.exception("Swift error deleting plan.")
                if ce.http_status == 404:
                    six.raise_from(exception.PlanDoesNotExistError(
                        name=plan_name), ce)
                raise
            except Exception:
                LOG.exception("Error deleting plan.")
                raise
Example #3
    def __call__(self, data):
        """
        Validate data against source schema, then apply transformation, and
        finally validate resulting data against target schema.

        :param data: Data to transform
        :type data: any

        :return: Transformed data
        :rtype: any

        :raises JsonValidationError: if source data or target data isn't valid
        :raises JsonTransformationError: if transformation has gone wrong
        """

        self.source.validate(data)

        try:
            result = self.patch.apply(data)

        except JsonPatchException as err:
            raise_from(
                JsonTransformationError(str(err)),
                err
            )

        self.target.validate(result)

        return result
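For context, the patch object here follows the jsonpatch library's API, where JsonPatch.apply returns a transformed copy of the document and raises JsonPatchException on failure. A hedged usage sketch:

import jsonpatch

patch = jsonpatch.JsonPatch([{'op': 'add', 'path': '/total', 'value': 0}])
result = patch.apply({'items': []})
# result == {'items': [], 'total': 0}; the input dict is left unmodified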
Example #4
def to_bytes(proto, string):
    try:
        return struct.pack('>H', int(string, 10))
    except ValueError as exc:
        six.raise_from(ValueError("Not a base 10 integer"), exc)
    except struct.error as exc:
        six.raise_from(ValueError("Integer not in range(65536)"), exc)
Example #5
    def validate_plan(self, plan_name):
        """Validate Plan

        This private method provides validations to ensure a plan
        meets the proper criteria before allowed to persist in storage.

        :param plan_files: The files to import into the container.
        :type plan_files: dict
        :returns boolean
        """

        plan = self.get_plan(plan_name)
        # there can only be up to one root-template file in metadata
        rt = {k: v for (k, v) in plan.files.items()
              if v.get('meta', {}).get('file-type') == 'root-template'}
        if len(rt) > 1:
            raise exception.TooManyRootTemplatesError()

        # the plan needs to be validated with heat to ensure it conforms
        template, environment, files = templates.process_plan_data(plan.files)
        try:
            self.heatclient.stacks.validate(
                template=template,
                files=files,
                environment=environment,
                show_nested=True)
        except heatexceptions.HTTPBadRequest as exc:
            LOG.exception("Error validating the plan.")
            six.raise_from(exception.HeatValidationFailedError(msg=exc), exc)

        # no validation issues found
        return True
Example #6
def raise_with_cause(exc_cls, message, *args, **kwargs):
    """Helper to raise + chain exceptions (when able) and associate a *cause*.

    NOTE(harlowja): Since in py3.x exceptions can be chained (due to
    :pep:`3134`) we should try to raise the desired exception with the given
    *cause* (or extract a *cause* from the current stack if able) so that the
    exception formats nicely in old and new versions of python. Since py2.x
    does **not** support exception chaining (or formatting) our root exception
    class has a :py:meth:`~taskflow.exceptions.TaskFlowException.pformat`
    method that can be used to get *similar* information instead (and this
    function makes sure to retain the *cause* in that case as well so
    that the :py:meth:`~taskflow.exceptions.TaskFlowException.pformat` method
    shows them).

    :param exc_cls: the :py:class:`~taskflow.exceptions.TaskFlowException`
                    class to raise.
    :param message: the text/str message that will be passed to
                    the exceptions constructor as its first positional
                    argument.
    :param args: any additional positional arguments to pass to the
                 exceptions constructor.
    :param kwargs: any additional keyword arguments to pass to the
                   exceptions constructor.
    """
    if not issubclass(exc_cls, TaskFlowException):
        raise ValueError("Subclass of taskflow exception is required")
    if 'cause' not in kwargs:
        exc_type, exc, exc_tb = sys.exc_info()
        if exc is not None:
            kwargs['cause'] = exc
        del(exc_type, exc, exc_tb)
    six.raise_from(exc_cls(message, *args, **kwargs), kwargs.get('cause'))
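A hedged usage sketch of raise_with_cause (FooException is a hypothetical TaskFlowException subclass, risky_operation a hypothetical callable): when invoked inside an except block with no explicit cause, it picks up the in-flight exception via sys.exc_info() and chains it.

class FooException(TaskFlowException):
    pass

try:
    risky_operation()
except IOError:
    # The IOError currently being handled becomes kwargs['cause'].
    raise_with_cause(FooException, "operation failed")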
Example #7
    def save(self):
        parent_product = self.parent_product
        current_products = set(parent_product.get_package_child_to_quantity_map())
        selected_products, removed_products, selected_quantities = self.get_selected_and_removed()

        with atomic():
            try:
                clear_existing_package(parent_product)
                parent_product.make_package(package_def=selected_quantities)
            except ImpossibleProductModeException as ipme:
                six.raise_from(
                    Problem(
                        _("Unable to make package %(product)s: %(error)s") %
                        {"product": parent_product, "error": ipme}
                    ), ipme
                )

        products_to_add = selected_products - current_products
        products_to_remove = current_products & removed_products

        message_parts = []
        if products_to_add:
            message_parts.append(_("New: %d") % len(products_to_add))
        if products_to_remove:
            message_parts.append(_("Removed: %d") % len(products_to_remove))
        if message_parts and self.request:
            messages.success(self.request, ", ".join(message_parts))
Example #8
    def __init__(self, wfs, features, **kwargs):
        """
        Parameters
        ----------
        wfs: string or :class:`owslib.wfs.WebFeatureService` instance
            The WebFeatureService instance, or URL of a WFS service, from which
            to retrieve the geometries.
        features: string or list of strings
            The typename(s) of features available from the web service that
            will be retrieved. Somewhat analogous to layers in WMS/WMTS.

        Other Parameters
        ----------------
        **kwargs
            Keyword arguments to be used when drawing this feature.

        """
        try:
            from cartopy.io.ogc_clients import WFSGeometrySource
        except ImportError as e:
            six.raise_from(ImportError(
                'WFSFeature requires additional dependencies. If installed '
                'via pip, try `pip install cartopy[ows]`.\n'), e)

        self.source = WFSGeometrySource(wfs, features)
        crs = self.source.default_projection()
        super(WFSFeature, self).__init__(crs, **kwargs)
        # Default kwargs
        self._kwargs.setdefault('edgecolor', 'black')
        self._kwargs.setdefault('facecolor', 'none')
Example #9
def convert(credentials):
    """Convert oauth2client credentials to google-auth credentials.

    This class converts:

    - :class:`oauth2client.client.OAuth2Credentials` to
      :class:`google.oauth2.credentials.Credentials`.
    - :class:`oauth2client.client.GoogleCredentials` to
      :class:`google.oauth2.credentials.Credentials`.
    - :class:`oauth2client.service_account.ServiceAccountCredentials` to
      :class:`google.oauth2.service_account.Credentials`.
    - :class:`oauth2client.service_account._JWTAccessCredentials` to
      :class:`google.oauth2.service_account.Credentials`.
    - :class:`oauth2client.contrib.gce.AppAssertionCredentials` to
      :class:`google.auth.compute_engine.Credentials`.
    - :class:`oauth2client.contrib.appengine.AppAssertionCredentials` to
      :class:`google.auth.app_engine.Credentials`.

    Returns:
        google.auth.credentials.Credentials: The converted credentials.

    Raises:
        ValueError: If the credentials could not be converted.
    """

    credentials_class = type(credentials)

    try:
        return _CLASS_CONVERSION_MAP[credentials_class](credentials)
    except KeyError as caught_exc:
        new_exc = ValueError(_CONVERT_ERROR_TMPL.format(credentials_class))
        six.raise_from(new_exc, caught_exc)
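The conversion itself is a dispatch on the credential's concrete class. A self-contained sketch of the same map-lookup pattern with stand-in classes (the real _CLASS_CONVERSION_MAP entries are the oauth2client/google-auth pairs listed in the docstring):

import six

class LegacyCredentials(object):       # stand-in for an oauth2client class
    def __init__(self, token):
        self.token = token

class NewCredentials(object):          # stand-in for a google-auth class
    def __init__(self, token):
        self.token = token

_CLASS_CONVERSION_MAP = {
    LegacyCredentials: lambda c: NewCredentials(c.token),
}

def convert(credentials):
    try:
        return _CLASS_CONVERSION_MAP[type(credentials)](credentials)
    except KeyError as caught_exc:
        new_exc = ValueError('Cannot convert {}'.format(type(credentials)))
        six.raise_from(new_exc, caught_exc)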
Example #10
def string_iter(string):
    if not string.startswith(u'/'):
        raise exceptions.StringParseError("Must begin with /", string)
    # consume trailing slashes
    string = string.rstrip(u'/')
    sp = string.split(u'/')

    # skip the first element, since it starts with /
    sp.pop(0)
    while sp:
        element = sp.pop(0)
        try:
            proto = protocol_with_name(element)
            codec = codec_by_name(proto.codec)
        except (ImportError, exceptions.ProtocolNotFoundError) as exc:
            six.raise_from(exceptions.StringParseError("Unknown Protocol", string, element), exc)
        value = None
        if codec.SIZE != 0:
            if len(sp) < 1:
                raise exceptions.StringParseError("Protocol requires address", string, proto.name)
            if codec.IS_PATH:
                value = "/" + "/".join(sp)
                if not six.PY2:
                    sp.clear()
                else:
                    sp = []
            else:
                value = sp.pop(0)
        yield proto, codec, value
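A hedged usage sketch, assuming the py-multiaddr helpers imported by this module are available: the generator walks protocol/value pairs left to right, and a path-style codec greedily consumes the remainder of the string.

for proto, codec, value in string_iter(u'/ip4/127.0.0.1/tcp/80'):
    print(proto.name, value)
# ip4 127.0.0.1
# tcp 80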
Example #11
def wait_until_render_complete(driver):
    '''Wait for a Bokeh document to load and finish rendering in the given
    Selenium webdriver, logging a warning if rendering does not complete in
    time and reporting any severe browser console errors.
    '''
    from selenium.webdriver.support.ui import WebDriverWait
    from selenium.common.exceptions import TimeoutException

    def is_bokeh_loaded(driver):
        return driver.execute_script('''
            const b = window.Bokeh;
            return b && b.documents && b.documents.length > 0;
        ''')

    try:
        WebDriverWait(driver, 5, poll_frequency=0.1).until(is_bokeh_loaded)
    except TimeoutException as e:
        raise_from(RuntimeError('Bokeh was not loaded in time. Something may have gone wrong.'), e)

    driver.execute_script(_WAIT_SCRIPT)

    def is_bokeh_render_complete(driver):
        return driver.execute_script('return window._bokeh_render_complete;')

    try:
        WebDriverWait(driver, 5, poll_frequency=0.1).until(is_bokeh_render_complete)
    except TimeoutException:
        log.warning("The webdriver raised a TimeoutException while waiting for "
                    "a 'bokeh:idle' event to signify that the layout has rendered. "
                    "Something may have gone wrong.")
    finally:
        browser_logs = driver.get_log('browser')
        severe_errors = [entry for entry in browser_logs if entry.get('level') == 'SEVERE']
        if len(severe_errors) > 0:
            log.warning("There were severe browser errors that may have affected your export: {}".format(severe_errors))
Example #12
    def details(self, sent_id):
        """
        Retrieve additional details about a given sentence.

        :param sent_id:
            A valid sentence ID.

        :raises MissingDataError:
            Raised if detailed sentence information was not loaded into the
            reader.

        :raises InvalidIDError:
            Raised if an invalid sentence ID is passed.

        :returns:
            Returns a tuple of the form:

            ``(username, date_added, date_modified)``

            All three are strings. 
        """
        if self._detailed_info_dict is None:
            raise MissingDataError('Detailed information not loaded.')

        try:
            return self._detailed_info_dict[sent_id]
        except KeyError as e:
            raise_from(InvalidIDError('Detailed information not found for '
                                      'sentence ID {}'.format(sent_id)), e)
Example #13
    def parse_sentence(self, text):
        """
        Takes a Tanaka corpus formatted sentence and parses it into tagged
        :class:`TatoebaIndexReader.WordClass` (by default :class:`TanakaWord`)
        word objects.

        :param text:
            A Tanaka-corpus formatted sentence.

        :return:
            Returns a :py:class:`list` of :class:`TatoebaIndexReader.WordClass`
            objects representing a given sentence.
        """
        words = self.sentence_splitter(text)
        sentence = []
        for word in words:
            if not len(word):
                continue

            try:
                wobj = self.WordClass.from_text(word)
            except InvalidEntryError as e:
                msg = u'Failed to interpret word {w} in sentence {s}'
                raise_from(InvalidEntryError(msg.format(w=word, s=text)), e)

            sentence.append(wobj)

        return sentence
Example #14
    def _get_profile(self, profile_id):
        '''dict: Return the profile with the received ID as a dict (None if it
        doesn't exist).'''
        profile_metadata = self._registry.get(profile_id)
        if not profile_metadata:
            return

        path = self._get_absolute_path(profile_metadata.get('schema_path'))
        url = profile_metadata.get('schema')
        if path:
            try:
                return self._load_json_file(path)
            except IOError as local_exc:
                if not url:
                    raise local_exc

                try:
                    return self._load_json_url(url)
                except IOError:
                    msg = (
                        'Error loading profile locally at "{path}" '
                        'and remotely at "{url}".'
                    ).format(path=path, url=url)
                    six.raise_from(IOError(msg), local_exc)
        elif url:
            return self._load_json_url(url)
Example #15
 def _get(self, *args, **kwargs):
     try:
         response = self._session.get(*args, **kwargs)
         response.raise_for_status()
     except requests.RequestException as e:
         six.raise_from(NetworkError, e)
     return response
Example #16
 def assert_reset(self, asserted):
     """Assert or de-assert target reset line"""
     try:
         self._link.drive_nreset(asserted)
         self._nreset_state = asserted
     except STLinkException as exc:
         six.raise_from(self._convert_exception(exc), exc)
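The _convert_exception helper is not shown in these snippets; presumably it maps low-level probe errors onto the library's own exception hierarchy before raise_from chains the original as the cause. A hypothetical sketch of that shape (ProbeError is an assumed name, not confirmed by the source):

@staticmethod
def _convert_exception(exc):
    # Translate a transport-level error into a library-level one,
    # leaving the original available as the chained cause.
    if isinstance(exc, STLinkException):
        return ProbeError(str(exc))
    return exc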
Example #17
    def __call__(self, url, method='GET', body=None, headers=None,
                 timeout=None, **kwargs):
        """Make an HTTP request using requests.

        Args:
            url (str): The URI to be requested.
            method (str): The HTTP method to use for the request. Defaults
                to 'GET'.
            body (bytes): The payload / body in HTTP request.
            headers (Mapping[str, str]): Request headers.
            timeout (Optional[int]): The number of seconds to wait for a
                response from the server. If not specified or if None, the
                requests default timeout will be used.
            kwargs: Additional arguments passed through to the underlying
                requests :meth:`~requests.Session.request` method.

        Returns:
            google.auth.transport.Response: The HTTP response.

        Raises:
            google.auth.exceptions.TransportError: If any exception occurred.
        """
        try:
            _LOGGER.debug('Making request: %s %s', method, url)
            response = self.session.request(
                method, url, data=body, headers=headers, timeout=timeout,
                **kwargs)
            return _Response(response)
        except requests.exceptions.RequestException as caught_exc:
            new_exc = exceptions.TransportError(caught_exc)
            six.raise_from(new_exc, caught_exc)
Example #18
 def swo_start(self, baudrate):
     """! @brief Start receiving SWO data at the given baudrate."""
     try:
         self._link.swo_configure(True, baudrate)
         self._link.swo_control(True)
     except DAPAccess.Error as exc:
         six.raise_from(self._convert_exception(exc), exc)
Example #19
 def connect(self, protocol=None):
     """Initialize DAP IO pins for JTAG or SWD"""
     try:
         self._link.enter_debug(stlink.STLink.Protocol.SWD)
         self._is_connected = True
     except STLinkException as exc:
         six.raise_from(self._convert_exception(exc), exc)
Example #20
 def assert_reset(self, asserted):
     """! @brief Assert or de-assert target reset line"""
     try:
         self._invalidate_cached_registers()
         self._link.assert_reset(asserted)
     except DAPAccess.Error as exc:
         six.raise_from(self._convert_exception(exc), exc)
Example #21
    def read_dp(self, addr, now=True):
        """! @brief Read a DP register.
        
        @param self
        @param addr Integer register address being one of (0x0, 0x4, 0x8, 0xC).
        @param now If True, read the register and return its value directly;
            if False, return a callback that completes the read when invoked.
        
        @todo Handle auto DPBANKSEL.
        """
        reg_id = self.REG_ADDR_TO_ID_MAP[self.DP, addr]
        
        try:
            result = self._link.read_reg(reg_id, now=now)
        except DAPAccess.Error as error:
            self._invalidate_cached_registers()
            six.raise_from(self._convert_exception(error), error)

        # Read callback returned for async reads.
        def read_dp_result_callback():
            try:
                return result()
            except DAPAccess.Error as error:
                self._invalidate_cached_registers()
                six.raise_from(self._convert_exception(error), error)

        return result if now else read_dp_result_callback
Example #22
    def _config_operator_list_modifiers(self, attr_name, values, line, line_no,
                                        op,
                                        section):
        """Execute + <list>, -<list> or =<list> line in config file

            Used in the extension and directories section
            :param attr_name: The instance attribute to change
            :param values: The values from the config file
            :param line: The full line from the config file
            :param line_no: the line number of this line
            :param op: The actual operation (one of +,- or =)
            :param section: The section of the config file
        """
        if op == '=':
            setattr(self, attr_name, values)
            return

        if not values:
            six.raise_from(ConfigError(
                'Invalid value in [{section}] section :'
                ' \'{line}\' on line {line_no}'.format(
                    section=section, line=line, line_no=line_no)), None)

        if op == '+':
            setattr(self, attr_name, getattr(self, attr_name) | values)
        elif op == '-':
            setattr(self, attr_name, getattr(self, attr_name) - values)
        else:
            six.raise_from(ConfigError(
                'Invalid operator in [{section}] section :'
                ' \'{line}\' on line {line_no}'.format(
                    section=section, line=line, line_no=line_no)), None)
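The three operators map directly onto plain set operations on the existing attribute value, for example:

current = {'.py', '.txt'}
current | {'.md'}    # '+' adds to the set      -> {'.py', '.txt', '.md'}
current - {'.txt'}   # '-' removes from the set -> {'.py'}
{'.rst'}             # '=' replaces it outright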
Example #23
    def _config_line_directories_section(self, line, line_no):
        """Called for each line in the directories section
           line is in one of 3 formats:
                = <new directory list>
                - <remove directory list>
                + <additional directory list>

            :param line: The full line from the config file
            :param line_no : The line number in the config file

            :raises ConfigError: When an invalid line is detected
        """
        # Extract Attribute name from the config sections data
        attr_name = \
            self.config_sections_and_attrs['directories']['directories'][0]
        op, direct = line[0], set(
            d.strip() for d in line[1:].strip().split(','))

        direct = direct if direct != {''} else set()

        if direct:
            # Identify null strings
            if any(d == '' for d in direct):
                six.raise_from(ConfigError(
                    'Invalid value in [directories] section :'
                    ' \'{}\' on line {}'.format(
                        line, line_no)), None)

        self._config_operator_list_modifiers(
            attr_name, direct, line, line_no, op, 'directories')
Example #24
    def acquire(self):
        """
        Attempt to acquire the lock.

        If the lock is successfully acquired, this method returns a context
        manager that will automatically release the lock when exited. If the
        lock cannot be acquired, an ``UnableToAcquireLock`` error will be
        raised.
        """
        try:
            self.backend.acquire(self.key, self.duration, self.routing_key)
        except Exception as error:
            six.raise_from(
                UnableToAcquireLock('Unable to acquire {!r} due to error: {}'.format(self, error)),
                error
            )

        @contextmanager
        def releaser():
            try:
                yield
            finally:
                self.release()

        return releaser()
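A hedged usage sketch (lock and do_work are hypothetical names): because acquire() returns the releaser context manager, callers can hold the lock for exactly the span of a with block, and release happens even if the body raises.

with lock.acquire():
    do_work()
# lock.release() has run here, success or failure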
Example #25
    def _config_line_reports_section(self, line, line_no):
        """Called for each line in the directories section
           line is in one of 3 formats:
                = <new directory list>
                - <remove directory list>
                + <additional directory list

            :param line: The full line from the config file
            :param line_no : The line number in the config file

            :raises ConfigError: When an invalid line is detected"""
        attrs_options = self.config_sections_and_attrs['reports']
        try:
            option, _, value = (x.strip() for x in line.partition('='))
            if option != 'verbose':
                value = value.lower() in ['true', 'yes']
            else:
                try:
                    value = int(value)
                except ValueError:
                    six.raise_from(ConfigError(
                        'Invalid value in section [reports] :'
                        ' \'{}\' on line {}'.format(
                            line, line_no)), None)
            setattr(self, attrs_options[option][0], value)
        except KeyError:
            six.raise_from(ConfigError(
                'Invalid option in section [reports] :'
                ' \'{}\' on line {}'.format(
                    line, line_no)), None)
Example #26
    def _load_schema(self, schema, registry):
        the_schema = schema

        if isinstance(schema, six.string_types):
            try:
                the_schema = registry.get(schema)
                if not the_schema:
                    if os.path.isfile(schema):
                        with open(schema, 'r') as f:
                            the_schema = json.load(f)
                    else:
                        req = requests.get(schema)
                        req.raise_for_status()
                        the_schema = req.json()
            except (IOError, ValueError, requests.exceptions.RequestException) as ex:
                message = 'Unable to load profile at "{0}"'
                six.raise_from(
                    exceptions.ValidationError(message.format(schema)),
                    ex
                )

        elif isinstance(the_schema, dict):
            the_schema = copy.deepcopy(the_schema)
        else:
            message = 'Schema must be a "dict", but was a "{0}"'
            raise exceptions.ValidationError(message.format(type(the_schema).__name__))

        return the_schema
Example #27
 def get_changes(self, resource):
     if resource not in self.changes:
         try:
             self.changes[resource] = list(self.get_plan(resource).get_actions())
         except Exception as e:
             six.raise_from(errors.Error("{}: {}".format(resource, e)), e)
     return self.changes[resource]
Example #28
    def _config_line_extensions_section(self, line, line_no):
        """Called for each line in the extensions section
            line is in one of 3 formats:
                 = <new directory list>
                 - <remove directory list>
                 + <additional directory list>

             :param line: The full line from the config file
             :param line_no : The line number in the config file
         """
        # Extract Attribute name from the config sections data
        section_data = self.config_sections_and_attrs['extensions']
        attr_name = section_data['extensions'][0]

        op, ext = line[0], set(e.strip() for e in line[1:].strip().split(','))

        ext = ext if ext != {''} else set()

        if ext:
            # Identify null strings or single-character values (e.g. a lone dot)
            if any(len(e) <= 1 for e in ext):
                six.raise_from(ConfigError(
                    'Invalid value in [extension] section :'
                    ' \'{}\' on line {}'.format(
                        line, line_no)), None)

            # Identify extension values without a leading dot
            if any(e[0] != '.' for e in ext):
                six.raise_from(ConfigError(
                    'Invalid value in [extension] section :'
                    ' \'{}\' on line {}'.format(
                        line, line_no)), None)

        self._config_operator_list_modifiers(
            attr_name, ext, line, line_no, op, 'extension')
Example #29
def raise_with_cause(exc_cls, message, *args, **kwargs):
    """Helper to raise + chain exceptions (when able) and associate a *cause*.

    NOTE(harlowja): Since in py3.x exceptions can be chained (due to
    :pep:`3134`) we should try to raise the desired exception with the given
    *cause* (or extract a *cause* from the current stack if able) so that the
    exception formats nicely in old and new versions of python. Since py2.x
    does **not** support exception chaining (or formatting) the exception
    class provided should take a ``cause`` keyword argument (which it may
    discard if it wants) to its constructor which can then be
    inspected/retained on py2.x to get *similar* information as would be
    automatically included/obtainable in py3.x.

    :param exc_cls: the exception class to raise (typically one derived
                    from :py:class:`.CausedByException` or equivalent).
    :param message: the text/str message that will be passed to
                    the exceptions constructor as its first positional
                    argument.
    :param args: any additional positional arguments to pass to the
                 exceptions constructor.
    :param kwargs: any additional keyword arguments to pass to the
                   exceptions constructor.

    .. versionadded:: 1.6
    """
    if 'cause' not in kwargs:
        exc_type, exc, exc_tb = sys.exc_info()
        try:
            if exc is not None:
                kwargs['cause'] = exc
        finally:
            # Leave no references around (especially with regards to
            # tracebacks and any variables that it retains internally).
            del(exc_type, exc, exc_tb)
    six.raise_from(exc_cls(message, *args, **kwargs), kwargs.get('cause'))
Example #30
 def reset(self):
     """! @brief Reset the target"""
     try:
         self._invalidate_cached_registers()
         self._link.reset()
     except DAPAccess.Error as exc:
         six.raise_from(self._convert_exception(exc), exc)
Example #31
    def _pasteObjects(self, cp, cb_maxsize=0):
        """Paste previously copied objects into the current object.

        ``cp`` is the list of objects to paste as encoded by ``_cb_encode``.
        If calling _pasteObjects from python code, pass the result of a
        previous call to manage_cutObjects or manage_copyObjects as the first
        argument.

        ``cb_maxsize`` is the maximum size of the JSON representation of the
        object list. Set it to a non-zero value to prevent DoS attacks with
        huge object lists or zlib bombs.

        This method sends IObjectCopiedEvent and IObjectClonedEvent
        or IObjectWillBeMovedEvent and IObjectMovedEvent.

        Returns tuple of (operator, list of {'id': orig_id, 'new_id': new_id}).
        Where `operator` is 0 for a copy operation and 1 for a move operation.
        """
        if cp is None:
            raise CopyError('No clipboard data found.')

        try:
            op, mdatas = _cb_decode(cp, cb_maxsize)
        except Exception as e:
            six.raise_from(CopyError('Clipboard Error'), e)

        oblist = []
        app = self.getPhysicalRoot()
        for mdata in mdatas:
            m = loadMoniker(mdata)
            try:
                ob = m.bind(app)
            except ConflictError:
                raise
            except Exception:
                raise CopyError('Item Not Found')
            self._verifyObjectPaste(ob, validate_src=op + 1)
            oblist.append(ob)

        result = []
        if op == 0:
            # Copy operation
            for ob in oblist:
                orig_id = ob.getId()
                if not ob.cb_isCopyable():
                    raise CopyError('Not Supported')

                try:
                    ob._notifyOfCopyTo(self, op=0)
                except ConflictError:
                    raise
                except Exception:
                    raise CopyError('Copy Error')

                id = self._get_id(orig_id)
                result.append({'id': orig_id, 'new_id': id})

                orig_ob = ob
                ob = ob._getCopy(self)
                ob._setId(id)
                notify(ObjectCopiedEvent(ob, orig_ob))

                self._setObject(id, ob)
                ob = self._getOb(id)
                ob.wl_clearLocks()

                ob._postCopy(self, op=0)

                compatibilityCall('manage_afterClone', ob, ob)

                notify(ObjectClonedEvent(ob))

        elif op == 1:
            # Move operation
            for ob in oblist:
                orig_id = ob.getId()
                if not ob.cb_isMoveable():
                    raise CopyError('Not Supported')

                try:
                    ob._notifyOfCopyTo(self, op=1)
                except ConflictError:
                    raise
                except Exception:
                    raise CopyError('Move Error')

                if not sanity_check(self, ob):
                    raise CopyError("This object cannot be pasted into itself")

                orig_container = aq_parent(aq_inner(ob))
                if aq_base(orig_container) is aq_base(self):
                    id = orig_id
                else:
                    id = self._get_id(orig_id)
                result.append({'id': orig_id, 'new_id': id})

                notify(ObjectWillBeMovedEvent(ob, orig_container, orig_id,
                                              self, id))

                # try to make ownership explicit so that it gets carried
                # along to the new location if needed.
                ob.manage_changeOwnershipType(explicit=1)

                try:
                    orig_container._delObject(orig_id, suppress_events=True)
                except TypeError:
                    orig_container._delObject(orig_id)
                    warnings.warn(
                        "%s._delObject without suppress_events is discouraged."
                        % orig_container.__class__.__name__,
                        DeprecationWarning)
                ob = aq_base(ob)
                ob._setId(id)

                try:
                    self._setObject(id, ob, set_owner=0, suppress_events=True)
                except TypeError:
                    self._setObject(id, ob, set_owner=0)
                    warnings.warn(
                        "%s._setObject without suppress_events is discouraged."
                        % self.__class__.__name__, DeprecationWarning)
                ob = self._getOb(id)

                notify(ObjectMovedEvent(ob, orig_container, orig_id, self, id))
                notifyContainerModified(orig_container)
                if aq_base(orig_container) is not aq_base(self):
                    notifyContainerModified(self)

                ob._postCopy(self, op=1)
                # try to make ownership implicit if possible
                ob.manage_changeOwnershipType(explicit=0)

        return op, result
Example #32
def k_feature_routing_function(input_data, tree_parameters, tree_biases, layer_num, max_nodes, num_features_per_node, random_seed, name=None):
  r"""  Returns the probability that each input will reach each leaf node.  Each

    decision is made based on k features.

    layer_num: The layer number of this tree.
    max_nodes: The number of nodes in the tree.
    num_features_per_node: The number of features each node can use to make a
     decision.
    random_seed: The base random seed.

    input_data: The training batch's features as a 2-d tensor; `input_data[i][j]`
     gives the j-th feature of the i-th input.
    tree_parameters: `tree_parameters[i]` gives the weight of
     the logistic regression model that translates from node features to
     probabilities.
    tree_biases: `tree_biases[i]` gives the bias of the logistic
     regression model that translates from node features to
     probabilities.
    tree_features: `tree_features[i]` gives the decision feature for node i.

    probabilities: `probabilities[i][j]` is the probability that input i
     will reach node j.

  Args:
    input_data: A `Tensor` of type `float32`.
    tree_parameters: A `Tensor` of type `float32`.
    tree_biases: A `Tensor` of type `float32`.
    layer_num: An `int`.
    max_nodes: An `int`.
    num_features_per_node: An `int`.
    random_seed: An `int`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `float32`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    layer_num = _execute.make_int(layer_num, "layer_num")
    max_nodes = _execute.make_int(max_nodes, "max_nodes")
    num_features_per_node = _execute.make_int(num_features_per_node, "num_features_per_node")
    random_seed = _execute.make_int(random_seed, "random_seed")
    _, _, _op = _op_def_lib._apply_op_helper(
        "KFeatureRoutingFunction", input_data=input_data,
        tree_parameters=tree_parameters, tree_biases=tree_biases,
        layer_num=layer_num, max_nodes=max_nodes,
        num_features_per_node=num_features_per_node, random_seed=random_seed,
        name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("layer_num", _op.get_attr("layer_num"), "max_nodes",
              _op.get_attr("max_nodes"), "num_features_per_node",
              _op.get_attr("num_features_per_node"), "random_seed",
              _op.get_attr("random_seed"))
    _execute.record_gradient(
      "KFeatureRoutingFunction", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result

  else:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "KFeatureRoutingFunction", name, _ctx._post_execution_callbacks,
        input_data, tree_parameters, tree_biases, "layer_num", layer_num,
        "max_nodes", max_nodes, "num_features_per_node",
        num_features_per_node, "random_seed", random_seed)
      return _result
    except _core._FallbackException:
      return k_feature_routing_function_eager_fallback(
          input_data, tree_parameters, tree_biases, layer_num=layer_num,
          max_nodes=max_nodes, num_features_per_node=num_features_per_node,
          random_seed=random_seed, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
Example #33
def k_feature_gradient(input_data, tree_parameters, tree_biases, routes, layer_num, random_seed, name=None):
  r"""    Computes the derivative of the routing loss with respect to each decision

      node.  Each decision node is constrained to make a decision based on only
      k features.

      layer_num: The layer number of this tree.
      random_seed: The base random seed.

      input_data: The training batch's features as a 2-d tensor;
       `input_data[i][j]` gives the j-th feature of the i-th input.
      tree_parameters: `tree_parameters[i]` gives the weight of
       the logistic regression model that translates from node features to
       probabilities.
      tree_biases: `tree_biases[i]` gives the bias of the logistic
       regression model that translates from node features to
       probabilities.
      routes: The routes computed by routing_function_op.

      routing_gradient: `routing_gradient` provides du / df, where u is the
       routing function and f is the (vector of) decision functions.  A decision
       function f_i computes the routing decision at node i.

      data_gradient: `data_gradient` provides df / dx, where f is the (vector
       of) decision functions and x is a batch of data.

      weights_gradient: `weights_gradient` provides df / dw, where f is the
       (vector of) decision functions and w is the matrix of parameters that
       determine how instances are routed through a tree.

      f_i, the decision function at node i, is parameterized by t_i (parameters)
      and b_i (bias) and takes data x as input.  This op is called in
      training_ops.py to compute du / df, and we use that to compute

      du / dx = du / df * df / dx,
      du / dt = du / df * df / dt, and
      du / db = du / df * df / db.

  Args:
    input_data: A `Tensor` of type `float32`.
    tree_parameters: A `Tensor` of type `float32`.
    tree_biases: A `Tensor` of type `float32`.
    routes: A `Tensor` of type `float32`.
    layer_num: An `int`.
    random_seed: An `int`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (routing_gradient, data_gradient, weight_gradient).

    routing_gradient: A `Tensor` of type `float32`.
    data_gradient: A `Tensor` of type `float32`.
    weight_gradient: A `Tensor` of type `float32`.
  """
  _ctx = _context._context
  if _ctx is None or not _ctx._eager_context.is_eager:
    layer_num = _execute.make_int(layer_num, "layer_num")
    random_seed = _execute.make_int(random_seed, "random_seed")
    _, _, _op = _op_def_lib._apply_op_helper(
        "KFeatureGradient", input_data=input_data,
        tree_parameters=tree_parameters, tree_biases=tree_biases,
        routes=routes, layer_num=layer_num, random_seed=random_seed,
        name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("layer_num", _op.get_attr("layer_num"), "random_seed",
              _op.get_attr("random_seed"))
    _execute.record_gradient(
      "KFeatureGradient", _inputs_flat, _attrs, _result, name)
    _result = _KFeatureGradientOutput._make(_result)
    return _result

  else:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "KFeatureGradient", name, _ctx._post_execution_callbacks, input_data,
        tree_parameters, tree_biases, routes, "layer_num", layer_num,
        "random_seed", random_seed)
      _result = _KFeatureGradientOutput._make(_result)
      return _result
    except _core._FallbackException:
      return k_feature_gradient_eager_fallback(
          input_data, tree_parameters, tree_biases, routes,
          layer_num=layer_num, random_seed=random_seed, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
Example #34
from __future__ import absolute_import, print_function

# checks
try:
    import tensorflow
    del tensorflow
except ModuleNotFoundError as e:
    from six import raise_from
    raise_from(RuntimeError('Please install TensorFlow: https://www.tensorflow.org/install/'), e)

try:
    import keras
    del keras
except ModuleNotFoundError as e:
    if e.name in {'theano','cntk'}:
        from six import raise_from
        raise_from(RuntimeError(
            "Keras is configured to use the '%s' backend, which is not installed. "
            "Please change it to use 'tensorflow' instead: "
            "https://keras.io/getting-started/faq/#where-is-the-keras-configuration-file-stored" % e.name
        ), e)
    else:
        raise e

import keras.backend as K
if K.backend() != 'tensorflow':
    raise NotImplementedError(
            "Keras is configured to use the '%s' backend, which is currently not supported. "
            "Please configure Keras to use 'tensorflow' instead: "
            "https://keras.io/getting-started/faq/#where-is-the-keras-configuration-file-stored" % K.backend()
        )
Example #35
 def _check_schema(self):
     try:
         self._validator.check_schema(self._schema)
     except jsonschema.exceptions.SchemaError as ex:
         six.raise_from(
             exceptions.ValidationError('Profile is invalid: %s' % ex), ex)
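check_schema validates the schema itself against the validator's metaschema, so a malformed profile is caught before any data is validated against it. For instance, with jsonschema's Draft4Validator:

import jsonschema

jsonschema.Draft4Validator.check_schema({'type': 'object'})  # fine
jsonschema.Draft4Validator.check_schema({'type': 42})        # raises SchemaError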
Example #36
# *************** Dependencies *********
INSTALL_REQUIRES = [
    'decopatch', 'makefun>=1.5', 'funcsigs;python_version<"3.3"', 'six'
]
DEPENDENCY_LINKS = []
SETUP_REQUIRES = ['pytest-runner', 'setuptools_scm', 'six']
TESTS_REQUIRE = ['pytest', 'pytest-logging', 'pandas', 'tabulate']
EXTRAS_REQUIRE = {}

# simple check
try:
    from setuptools_scm import get_version
except Exception as e:
    from six import raise_from
    raise_from(
        Exception(
            "Required packages for setup not found. Please install 'setuptools_scm'"
        ), e)

# ************** ID card *****************
DISTNAME = 'pytest-harvest'
DESCRIPTION = 'Store data created during your pytest tests execution, and retrieve it at the end of the session, ' \
              'e.g. for applicative benchmarking purposes.'
MAINTAINER = 'Sylvain MARIE'
MAINTAINER_EMAIL = '*****@*****.**'
URL = 'https://github.com/smarie/python-pytest-harvest'
LICENSE = 'BSD 3-Clause'
LICENSE_LONG = 'License :: OSI Approved :: BSD License'

version_for_download_url = get_version()
DOWNLOAD_URL = URL + '/tarball/' + version_for_download_url
Example #37
def decode_token(token):
    try:
        return jwt.decode(token, JWT_SECRET, algorithms=[JWT_ALGORITHM])
    except JWTError as e:
        six.raise_from(Unauthorized, e)
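A hedged round-trip sketch, assuming the JWTError import comes from python-jose (whose jose.jwt module provides encode/decode) and that JWT_SECRET/JWT_ALGORITHM are module-level settings:

from jose import jwt

JWT_SECRET, JWT_ALGORITHM = 'secret', 'HS256'
token = jwt.encode({'sub': 'alice'}, JWT_SECRET, algorithm=JWT_ALGORITHM)
decode_token(token)            # -> {'sub': 'alice'}
decode_token(token + 'x')      # -> Unauthorized, chained to the JWTError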
Example #38
def ragged_gather(params_nested_splits, params_dense_values, indices, OUTPUT_RAGGED_RANK, name=None):
  r"""Gather ragged slices from `params` axis `0` according to `indices`.

  Outputs a `RaggedTensor` output composed from `output_dense_values` and
  `output_nested_splits`, such that:

  ```python
  output.shape = indices.shape + params.shape[1:]
  output.ragged_rank = indices.shape.ndims + params.ragged_rank
  output[i...j, d0...dn] = params[indices[i...j], d0...dn]
  ```

  where

  * `params =
     ragged.from_nested_row_splits(params_dense_values, params_nested_splits)`
     provides the values that should be gathered.
  * `indices` is a dense tensor with dtype `int32` or `int64`, indicating which
     values should be gathered.
  * `output =
     ragged.from_nested_row_splits(output_dense_values, output_nested_splits)`
     is the output tensor.

  (Note: This c++ op is used to implement the higher-level python
  `tf.ragged.gather` op, which also supports ragged indices.)

  Args:
    params_nested_splits: A list of at least 1 `Tensor` objects with the same type in: `int32`, `int64`.
      The `nested_row_splits` tensors that define the row-partitioning for the
      `params` RaggedTensor input.
    params_dense_values: A `Tensor`.
      The `flat_values` for the `params` RaggedTensor. There was a terminology change
      at the python level from dense_values to flat_values, so dense_values is the
      deprecated name.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      Indices in the outermost dimension of `params` of the values that should be
      gathered.
    OUTPUT_RAGGED_RANK: An `int` that is `>= 0`.
      The ragged rank of the output RaggedTensor. `output_nested_splits` will contain
      this number of `row_splits` tensors. This value should equal
      `indices.shape.ndims + params.ragged_rank - 1`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (output_nested_splits, output_dense_values).

    output_nested_splits: A list of `OUTPUT_RAGGED_RANK` `Tensor` objects with the same type as `params_nested_splits`.
    output_dense_values: A `Tensor`. Has the same type as `params_dense_values`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "RaggedGather", name, _ctx.post_execution_callbacks,
        params_nested_splits, params_dense_values, indices,
        "OUTPUT_RAGGED_RANK", OUTPUT_RAGGED_RANK)
      _result = _RaggedGatherOutput._make(_result)
      return _result
    except _core._FallbackException:
      try:
        return ragged_gather_eager_fallback(
            params_nested_splits, params_dense_values, indices,
            OUTPUT_RAGGED_RANK=OUTPUT_RAGGED_RANK, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if not isinstance(params_nested_splits, (list, tuple)):
    raise TypeError(
        "Expected list for 'params_nested_splits' argument to "
        "'ragged_gather' Op, not %r." % params_nested_splits)
  _attr_PARAMS_RAGGED_RANK = len(params_nested_splits)
  OUTPUT_RAGGED_RANK = _execute.make_int(OUTPUT_RAGGED_RANK, "OUTPUT_RAGGED_RANK")
  _, _, _op = _op_def_lib._apply_op_helper(
        "RaggedGather", params_nested_splits=params_nested_splits,
                        params_dense_values=params_dense_values,
                        indices=indices,
                        OUTPUT_RAGGED_RANK=OUTPUT_RAGGED_RANK, name=name)
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("Tvalues", _op._get_attr_type("Tvalues"), "Tindices",
            _op._get_attr_type("Tindices"), "Tsplits",
            _op._get_attr_type("Tsplits"), "PARAMS_RAGGED_RANK",
            _op.get_attr("PARAMS_RAGGED_RANK"), "OUTPUT_RAGGED_RANK",
            _op.get_attr("OUTPUT_RAGGED_RANK"))
  _execute.record_gradient(
      "RaggedGather", _inputs_flat, _attrs, _result, name)
  _result = [_result[:OUTPUT_RAGGED_RANK]] + _result[OUTPUT_RAGGED_RANK:]
  _result = _RaggedGatherOutput._make(_result)
  return _result
Example #39
def process_input_v4(tree_handle,
                     stats_handle,
                     input_data,
                     sparse_input_indices,
                     sparse_input_values,
                     sparse_input_shape,
                     input_labels,
                     input_weights,
                     leaf_ids,
                     random_seed,
                     input_spec,
                     params,
                     name=None):
    r"""Add labels to stats after traversing the tree for each example.

  Outputs node ids that are finished.

  Args:
    tree_handle: A `Tensor` of type `resource`. The handle to the tree.
    stats_handle: A `Tensor` of type `resource`. The handle to the stats.
    input_data: A `Tensor` of type `float32`.
      The training batch's features as a 2-d tensor; `input_data[i][j]`
      gives the j-th feature of the i-th input.
    sparse_input_indices: A `Tensor` of type `int64`.
      The indices tensor from the SparseTensor input.
    sparse_input_values: A `Tensor` of type `float32`.
      The values tensor from the SparseTensor input.
    sparse_input_shape: A `Tensor` of type `int64`.
      The shape tensor from the SparseTensor input.
    input_labels: A `Tensor` of type `float32`.
      The training batch's labels as a 1 or 2-d tensor.
      'input_labels[i][j]' gives the j-th label/target for the i-th input.
    input_weights: A `Tensor` of type `float32`.
      The training batch's weights as a 1-d tensor.
      'input_weights[i]' gives the weight for the i-th input.
    leaf_ids: A `Tensor` of type `int32`.
      `leaf_ids[i]` is the leaf id for input i.
    random_seed: An `int`.
    input_spec: A `string`.
    params: A `string`. A serialized TensorForestParams proto.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `int32`.
    A 1-d tensor of node ids that have finished and are ready to
    grow.
  """
    _ctx = _context._context
    if _ctx is None or not _ctx._eager_context.is_eager:
        random_seed = _execute.make_int(random_seed, "random_seed")
        input_spec = _execute.make_str(input_spec, "input_spec")
        params = _execute.make_str(params, "params")
        _, _, _op = _op_def_lib._apply_op_helper(
            "ProcessInputV4",
            tree_handle=tree_handle,
            stats_handle=stats_handle,
            input_data=input_data,
            sparse_input_indices=sparse_input_indices,
            sparse_input_values=sparse_input_values,
            sparse_input_shape=sparse_input_shape,
            input_labels=input_labels,
            input_weights=input_weights,
            leaf_ids=leaf_ids,
            random_seed=random_seed,
            input_spec=input_spec,
            params=params,
            name=name)
        _result = _op.outputs[:]
        _inputs_flat = _op.inputs
        _attrs = ("random_seed", _op.get_attr("random_seed"), "input_spec",
                  _op.get_attr("input_spec"), "params", _op.get_attr("params"))
        _execute.record_gradient("ProcessInputV4", _inputs_flat, _attrs,
                                 _result, name)
        _result, = _result
        return _result

    else:
        try:
            _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
                _ctx._context_handle, _ctx._eager_context.device_name,
                "ProcessInputV4", name, _ctx._post_execution_callbacks,
                tree_handle, stats_handle, input_data, sparse_input_indices,
                sparse_input_values, sparse_input_shape, input_labels,
                input_weights, leaf_ids, "random_seed", random_seed,
                "input_spec", input_spec, "params", params)
            return _result
        except _core._FallbackException:
            return process_input_v4_eager_fallback(tree_handle,
                                                   stats_handle,
                                                   input_data,
                                                   sparse_input_indices,
                                                   sparse_input_values,
                                                   sparse_input_shape,
                                                   input_labels,
                                                   input_weights,
                                                   leaf_ids,
                                                   random_seed=random_seed,
                                                   input_spec=input_spec,
                                                   params=params,
                                                   name=name,
                                                   ctx=_ctx)
        except _core._NotOkStatusException as e:
            if name is not None:
                message = e.message + " name: " + name
            else:
                message = e.message
            _six.raise_from(_core._status_to_exception(e.code, message), None)
Example #40
from __future__ import absolute_import

import functools
import logging
import numbers
import os
import time

try:
    import requests
except ImportError as caught_exc:  # pragma: NO COVER
    import six

    six.raise_from(
        ImportError(
            "The requests library is not installed, please install the "
            "requests package to use the requests transport."),
        caught_exc,
    )
import requests.adapters  # pylint: disable=ungrouped-imports
import requests.exceptions  # pylint: disable=ungrouped-imports
from requests.packages.urllib3.util.ssl_ import (
    create_urllib3_context,
)  # pylint: disable=ungrouped-imports
import six  # pylint: disable=ungrouped-imports

from google.auth import environment_vars
from google.auth import exceptions
from google.auth import transport
import google.auth.transport._mtls_helper
from google.oauth2 import service_account

_LOGGER = logging.getLogger(__name__)
Example #41
    def save(self, file_or_path):
        """"Validates and saves this Data Package contents into a zip file.

        It creates a zip file into ``file_or_path`` with the contents of this
        Data Package and its resources. Every resource whose content lives in
        the local filesystem will be copied to the zip file. Consider the
        following Data Package descriptor::

            {
                "name": "gdp",
                "resources": [
                    {"name": "local", "format": "CSV", "path": "data.csv"},
                    {"name": "inline", "data": [4, 8, 15, 16, 23, 42]},
                    {"name": "remote", "url": "http://someplace.com/data.csv"}
                ]
            }

        The final structure of the zip file will be::

            ./datapackage.json
            ./data/local.csv

        With the contents of `datapackage.json` being the same as returned by
        :func:`to_json`.

        The resources' file names are generated based on their `name` and
        `format` fields if they exist. If the resource has no `name`,
        `resource-X` will be used, where `X` is the index of the resource in
        the `resources` list (starting at zero). If the resource has a
        `format`, it'll be lowercased and appended to the `name`, becoming
        "`name.format`".

        Args:
            file_or_path (string or file-like object): The file path or a
                file-like object where the contents of this Data Package will
                be saved into.

        Raises:
            ValidationError: If the Data Package is invalid.
            DataPackageException: If there was some error writing the package.

        """
        self.validate()

        def arcname(resource):
            basename = resource.descriptor.get('name')
            resource_format = resource.descriptor.get('format')
            if not basename:
                index = self.resources.index(resource)
                basename = 'resource-{index}'.format(index=index)
            if resource_format:
                basename = '.'.join([basename, resource_format.lower()])
            return os.path.join('data', basename)

        try:
            with zipfile.ZipFile(file_or_path, 'w') as z:
                descriptor = json.loads(self.to_json())
                for i, resource in enumerate(self.resources):
                    path = None
                    if resource.source_type == 'local':
                        path = os.path.abspath(resource.source)
                    if path:
                        path_inside_dp = arcname(resource)
                        z.write(path, path_inside_dp)
                        descriptor['resources'][i]['path'] = path_inside_dp
                z.writestr('datapackage.json', json.dumps(descriptor))
        except (IOError, zipfile.BadZipfile, zipfile.LargeZipFile) as e:
            six.raise_from(exceptions.DataPackageException(e), e)
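A hedged usage sketch for `save()`: it assumes the class is importable as `datapackage.DataPackage` (as in older datapackage-py releases) and reuses the inline resource from the docstring's descriptor:

```python
from datapackage import DataPackage  # assumed import path

pkg = DataPackage({
    "name": "gdp",
    "resources": [{"name": "inline", "data": [4, 8, 15, 16, 23, 42]}],
})
pkg.save("gdp.zip")  # validates, then writes datapackage.json into the zip
```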
Example No. 42
def __getattr__(self, name):
    try:
        return self[name]
    except KeyError:
        raise_from(AttributeError(self.__attr_error__.format(name)), None)
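A self-contained sketch of the pattern above: a dict subclass whose missing keys surface as `AttributeError` (so `hasattr()` behaves correctly), with the `KeyError` context suppressed by passing `None` as the cause. The `AttrDict` name and error template are illustrative:

```python
from six import raise_from

class AttrDict(dict):
    __attr_error__ = "'AttrDict' object has no attribute '{}'"

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise_from(AttributeError(self.__attr_error__.format(name)), None)

d = AttrDict(host="localhost")
print(d.host)              # -> localhost
print(hasattr(d, "port"))  # -> False (AttributeError, not KeyError)
```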
Example No. 43
def file_id_from_run_output(name, job_id, run_id, regex=False, client=None):
    """Find the file ID of a File run output with the name "name"

    The run output is required to have type "File".
    If using an approximate match and multiple names match the
    provided string, return only the first file ID.

    Parameters
    ----------
    name : str
        The "name" field of the run output you wish to retrieve
    job_id : int
    run_id : int
    regex : bool, optional
        If False (the default), require an exact string match between
        ``name`` and the name of the run output. If True, search for a
        name which matches the regular expression ``name`` and
        retrieve the first found.
    client : :class:`civis.APIClient`, optional
        If not provided, an :class:`civis.APIClient` object will be
        created from the :envvar:`CIVIS_API_KEY`.

    Returns
    -------
    file_id : int
        The ID of a Civis File with name matching ``name``

    Raises
    ------
    IOError
        If the provided job ID and run ID combination can't be found
    FileNotFoundError
        If the run exists, but ``name`` isn't in its run outputs

    See Also
    --------
    APIClient.scripts.list_containers.runs_outputs
    """
    client = APIClient() if client is None else client
    # Retrieve run outputs
    try:
        outputs = client.scripts.list_containers_runs_outputs(job_id, run_id)
    except CivisAPIError as err:
        if err.status_code == 404:
            six.raise_from(IOError('Could not find job/run ID {}/{}'
                           .format(job_id, run_id)), err)
        else:
            raise

    # Find file in the run outputs.
    if not regex:
        # Require an exact match on the "name" string.
        obj = find_one(outputs, name=name, object_type='File')
    else:
        # Search for a filename which matches the "name" regular expression.
        obj_matches = [o for o in outputs
                       if re.search(name, o.name) and o.object_type == 'File']
        if len(obj_matches) > 1:
            log.warning('Found %s matches to "%s". Returning the first.',
                        len(obj_matches), name)
        obj = None if not obj_matches else obj_matches[0]
    if obj is None:
        prefix = "A file containing the pattern" if regex else "File"
        raise FileNotFoundError('{} "{}" is not an output of job/run ID '
                                '{}/{}.'.format(prefix, name, job_id, run_id))
    return obj['object_id']
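A hedged usage sketch; the import path reflects where civis-python exposes this helper as far as I know, and the output name and IDs below are placeholders:

```python
from civis.io import file_id_from_run_output  # assumed import path

file_id = file_id_from_run_output("predictions.csv", job_id=123, run_id=456)
print(file_id)  # the Civis File ID of that run output
```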
Example No. 44
def sparse_feature_cross(indices,
                         values,
                         shapes,
                         dense,
                         hashed_output,
                         num_buckets,
                         out_type,
                         internal_type,
                         name=None):
    r"""Generates sparse cross form a list of sparse tensors.

  The op takes two lists, one of 2D `SparseTensor` and one of 2D `Tensor`, each
  representing features of one feature column. It outputs a 2D `SparseTensor` with
  the batchwise crosses of these features.

  For example, if the inputs are

      inputs[0]: SparseTensor with shape = [2, 2]
      [0, 0]: "a"
      [1, 0]: "b"
      [1, 1]: "c"

      inputs[1]: SparseTensor with shape = [2, 1]
      [0, 0]: "d"
      [1, 0]: "e"

      inputs[2]: Tensor [["f"], ["g"]]

  then the output will be

      shape = [2, 2]
      [0, 0]: "a_X_d_X_f"
      [1, 0]: "b_X_e_X_g"
      [1, 1]: "c_X_e_X_g"

  if hashed_output=true then the output will be

      shape = [2, 2]
      [0, 0]: HashCombine(
                  Fingerprint64("f"), HashCombine(
                      Fingerprint64("d"), Fingerprint64("a")))
      [1, 0]: HashCombine(
                  Fingerprint64("g"), HashCombine(
                      Fingerprint64("e"), Fingerprint64("b")))
      [1, 1]: HashCombine(
                  Fingerprint64("g"), HashCombine(
                      Fingerprint64("e"), Fingerprint64("c")))

  Args:
    indices: A list of `Tensor` objects with type `int64`.
      2-D.  Indices of each input `SparseTensor`.
    values: A list of `Tensor` objects with types from: `int64`, `string`.
      1-D.   values of each `SparseTensor`.
    shapes: A list with the same length as `indices` of `Tensor` objects with type `int64`.
      1-D.   Shapes of each `SparseTensor`.
    dense: A list of `Tensor` objects with types from: `int64`, `string`.
      2-D.    Columns represented by dense `Tensor`.
    hashed_output: A `bool`.
    num_buckets: An `int` that is `>= 0`.
    out_type: A `tf.DType` from: `tf.int64, tf.string`.
    internal_type: A `tf.DType` from: `tf.int64, tf.string`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (output_indices, output_values, output_shape).

    output_indices: A `Tensor` of type `int64`. 2-D.  Indices of the concatenated `SparseTensor`.
    output_values: A `Tensor` of type `out_type`. 1-D.  Non-empty values of the concatenated or hashed
      `SparseTensor`.
    output_shape: A `Tensor` of type `int64`. 1-D.  Shape of the concatenated `SparseTensor`.
  """
    _ctx = _context._context
    if _ctx is None or not _ctx._eager_context.is_eager:
        if not isinstance(indices, (list, tuple)):
            raise TypeError("Expected list for 'indices' argument to "
                            "'sparse_feature_cross' Op, not %r." % indices)
        _attr_N = len(indices)
        if not isinstance(shapes, (list, tuple)):
            raise TypeError("Expected list for 'shapes' argument to "
                            "'sparse_feature_cross' Op, not %r." % shapes)
        if len(shapes) != _attr_N:
            raise ValueError(
                "List argument 'shapes' to 'sparse_feature_cross' Op with length %d "
                "must match length %d of argument 'indices'." %
                (len(shapes), _attr_N))
        hashed_output = _execute.make_bool(hashed_output, "hashed_output")
        num_buckets = _execute.make_int(num_buckets, "num_buckets")
        out_type = _execute.make_type(out_type, "out_type")
        internal_type = _execute.make_type(internal_type, "internal_type")
        _, _, _op = _op_def_lib._apply_op_helper("SparseFeatureCross",
                                                 indices=indices,
                                                 values=values,
                                                 shapes=shapes,
                                                 dense=dense,
                                                 hashed_output=hashed_output,
                                                 num_buckets=num_buckets,
                                                 out_type=out_type,
                                                 internal_type=internal_type,
                                                 name=name)
        _result = _op.outputs[:]
        _inputs_flat = _op.inputs
        _attrs = ("N", _op.get_attr("N"), "hashed_output",
                  _op.get_attr("hashed_output"), "num_buckets",
                  _op.get_attr("num_buckets"), "sparse_types",
                  _op.get_attr("sparse_types"), "dense_types",
                  _op.get_attr("dense_types"), "out_type",
                  _op.get_attr("out_type"), "internal_type",
                  _op.get_attr("internal_type"))
        _execute.record_gradient("SparseFeatureCross", _inputs_flat, _attrs,
                                 _result, name)
        _result = _SparseFeatureCrossOutput._make(_result)
        return _result

    else:
        try:
            _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
                _ctx._context_handle, _ctx._eager_context.device_name,
                "SparseFeatureCross", name, _ctx._post_execution_callbacks,
                indices, values, shapes, dense, "hashed_output", hashed_output,
                "num_buckets", num_buckets, "out_type", out_type,
                "internal_type", internal_type)
            _result = _SparseFeatureCrossOutput._make(_result)
            return _result
        except _core._FallbackException:
            return sparse_feature_cross_eager_fallback(
                indices,
                values,
                shapes,
                dense,
                hashed_output=hashed_output,
                num_buckets=num_buckets,
                out_type=out_type,
                internal_type=internal_type,
                name=name,
                ctx=_ctx)
        except _core._NotOkStatusException as e:
            if name is not None:
                message = e.message + " name: " + name
            else:
                message = e.message
            _six.raise_from(_core._status_to_exception(e.code, message), None)
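The string cross in the docstring can be reproduced in plain Python: for each batch row, take the Cartesian product of each input's values and join with `_X_`. A small sketch using the docstring's example values:

```python
import itertools

batch = [
    [["a"], ["d"], ["f"]],       # row 0: values per input column
    [["b", "c"], ["e"], ["g"]],  # row 1
]
for row in batch:
    print(["_X_".join(combo) for combo in itertools.product(*row)])
# -> ['a_X_d_X_f']
# -> ['b_X_e_X_g', 'c_X_e_X_g']
```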
Example No. 45
def _load_registry(self):
    try:
        return datapackage_registry.Registry()
    except DataPackageRegistryException as e:
        six.raise_from(SchemaError(e), e)
Example No. 46
    def create_reservation(self, region, availability_zone, reservation):
        """Create a reservation in an Availability Zone.

        On success:
            Return True, reservation_id
        On Failure:
            Return False, ''
        """
        try:
            reservation_translated = {
                translate_to_api_names(k): v for k, v in reservation.items()
            }

            try:
                duration = parse_duration(self.ttl)
            except Exception as err:
                six.raise_from(AnsibleError("could not parse duration {}".format(self.ttl)), err)

            if len(self.tags) > 0:
                tag_spec = [{
                    'ResourceType': 'capacity-reservation',
                    'Tags': self.tags,
                }]
            else:
                tag_spec = []

            response = self.clients[region].create_capacity_reservation(
                AvailabilityZone=availability_zone,
                EndDate=datetime.datetime.utcnow() + duration,
                EndDateType='limited',
                InstanceMatchCriteria='open',
                TagSpecifications=tag_spec,
                **reservation_translated
            )

            check_response(response)
            display.vvvv("response: %s" % pp.pformat(response))

            display.vvv(pp.pformat(response))
        except botocore.exceptions.ClientError as err:
            if 'InstanceLimitExceeded' in str(err):
                display.display("InstanceLimitExceeded %s - %d * %s"
                                % (availability_zone,
                                  reservation['instance_count'],
                                  reservation['instance_type']))
                return False, ''

            if 'InsufficientInstanceCapacity' in str(err):
                display.display("InsufficientInstanceCapacity %s - %d * %s"
                                % (availability_zone, reservation['instance_count'],
                                    reservation['instance_type']))

                return False, ''

            display.error(pp.pformat(err))
            six.raise_from(AnsibleError("Client Error while creating reservation."), err)

        if response['ResponseMetadata']['HTTPStatusCode'] == 200:
            # if success, continue
            r_id = response['CapacityReservation']['CapacityReservationId']
            display.display("Reservation created: %s (%s)" %(r_id, availability_zone))
            return True, r_id

        return False, ''
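The error handling above separates expected capacity failures (report and return `False, ''`) from genuine client errors (wrap in `AnsibleError` and chain). A minimal standalone sketch of that classify-then-chain shape, with a stand-in `AnsibleError` so it runs without Ansible:

```python
import six

class AnsibleError(Exception):  # stand-in for ansible.errors.AnsibleError
    pass

def handle_client_error(err):
    msg = str(err)
    if 'InstanceLimitExceeded' in msg or 'InsufficientInstanceCapacity' in msg:
        return False, ''  # expected capacity failure: report, do not raise
    # Anything else is unexpected: wrap it, keeping the original chained.
    six.raise_from(AnsibleError("Client Error while creating reservation."), err)
```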
Example No. 47
def block_lstm(seq_len_max, x, cs_prev, h_prev, w, wci, wcf, wco, b, forget_bias=1, cell_clip=3, use_peephole=False, name=None):
  r"""Computes the LSTM cell forward propagation for all the time steps.

  This is equivalent to applying LSTMBlockCell in a loop, like so:

  ```python
  for x1 in unpack(x):
    i1, cs1, f1, o1, ci1, co1, h1 = LSTMBlock(
      x1, cs_prev, h_prev, w, wci, wcf, wco, b)
    cs_prev = cs1
    h_prev = h1
    i.append(i1)
    cs.append(cs1)
    f.append(f1)
    o.append(o1)
    ci.append(ci1)
    co.append(co1)
    h.append(h1)
  return pack(i), pack(cs), pack(f), pack(o), pack(ci), pack(co), pack(h)
  ```

  Args:
    seq_len_max: A `Tensor` of type `int64`.
      Maximum time length actually used by this input. Outputs are padded
      with zeros beyond this length.
    x: A `Tensor`. Must be one of the following types: `half`, `float32`.
      The sequence input to the LSTM, shape (timelen, batch_size, num_inputs).
    cs_prev: A `Tensor`. Must have the same type as `x`.
      Value of the initial cell state.
    h_prev: A `Tensor`. Must have the same type as `x`.
      Initial output of cell (to be used for peephole).
    w: A `Tensor`. Must have the same type as `x`. The weight matrix.
    wci: A `Tensor`. Must have the same type as `x`.
      The weight matrix for input gate peephole connection.
    wcf: A `Tensor`. Must have the same type as `x`.
      The weight matrix for forget gate peephole connection.
    wco: A `Tensor`. Must have the same type as `x`.
      The weight matrix for output gate peephole connection.
    b: A `Tensor`. Must have the same type as `x`. The bias vector.
    forget_bias: An optional `float`. Defaults to `1`. The forget gate bias.
    cell_clip: An optional `float`. Defaults to `3`.
      Value to clip the 'cs' value to.
    use_peephole: An optional `bool`. Defaults to `False`.
      Whether to use peephole weights.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (i, cs, f, o, ci, co, h).

    i: A `Tensor`. Has the same type as `x`. The input gate over the whole time sequence.
    cs: A `Tensor`. Has the same type as `x`. The cell state before the tanh over the whole time sequence.
    f: A `Tensor`. Has the same type as `x`. The forget gate over the whole time sequence.
    o: A `Tensor`. Has the same type as `x`. The output gate over the whole time sequence.
    ci: A `Tensor`. Has the same type as `x`. The cell input over the whole time sequence.
    co: A `Tensor`. Has the same type as `x`. The cell after the tanh over the whole time sequence.
    h: A `Tensor`. Has the same type as `x`. The output h vector over the whole time sequence.
  """
  _ctx = _context._context
  if _ctx is not None and _ctx._eager_context.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name, "BlockLSTM",
        name, _ctx._post_execution_callbacks, seq_len_max, x, cs_prev, h_prev,
        w, wci, wcf, wco, b, "forget_bias", forget_bias, "cell_clip",
        cell_clip, "use_peephole", use_peephole)
      _result = _BlockLSTMOutput._make(_result)
      return _result
    except _core._FallbackException:
      try:
        return block_lstm_eager_fallback(
            seq_len_max, x, cs_prev, h_prev, w, wci, wcf, wco, b,
            forget_bias=forget_bias, cell_clip=cell_clip,
            use_peephole=use_peephole, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
      except (TypeError, ValueError):
        result = _dispatch.dispatch(
              block_lstm, seq_len_max=seq_len_max, x=x, cs_prev=cs_prev,
                          h_prev=h_prev, w=w, wci=wci, wcf=wcf, wco=wco, b=b,
                          forget_bias=forget_bias, cell_clip=cell_clip,
                          use_peephole=use_peephole, name=name)
        if result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
          return result
        raise
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if forget_bias is None:
    forget_bias = 1
  forget_bias = _execute.make_float(forget_bias, "forget_bias")
  if cell_clip is None:
    cell_clip = 3
  cell_clip = _execute.make_float(cell_clip, "cell_clip")
  if use_peephole is None:
    use_peephole = False
  use_peephole = _execute.make_bool(use_peephole, "use_peephole")
  try:
    _, _, _op = _op_def_lib._apply_op_helper(
        "BlockLSTM", seq_len_max=seq_len_max, x=x, cs_prev=cs_prev,
                     h_prev=h_prev, w=w, wci=wci, wcf=wcf, wco=wco, b=b,
                     forget_bias=forget_bias, cell_clip=cell_clip,
                     use_peephole=use_peephole, name=name)
  except (TypeError, ValueError):
    result = _dispatch.dispatch(
          block_lstm, seq_len_max=seq_len_max, x=x, cs_prev=cs_prev,
                      h_prev=h_prev, w=w, wci=wci, wcf=wcf, wco=wco, b=b,
                      forget_bias=forget_bias, cell_clip=cell_clip,
                      use_peephole=use_peephole, name=name)
    if result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
      return result
    raise
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("forget_bias", _op.get_attr("forget_bias"), "cell_clip",
            _op.get_attr("cell_clip"), "use_peephole",
            _op.get_attr("use_peephole"), "T", _op.get_attr("T"))
  _execute.record_gradient(
      "BlockLSTM", _inputs_flat, _attrs, _result, name)
  _result = _BlockLSTMOutput._make(_result)
  return _result
Example No. 48
def _check_schema(self):
    try:
        self._validator.check_schema(self._schema)
    except jsonschema.exceptions.SchemaError as e:
        six.raise_from(SchemaError.create_from(e), e)
Example No. 49
def block_lstm_grad(seq_len_max, x, cs_prev, h_prev, w, wci, wcf, wco, b, i, cs, f, o, ci, co, h, cs_grad, h_grad, use_peephole, name=None):
  r"""Computes the LSTM cell backward propagation for the entire time sequence.

  This implementation is to be used in conjunction of LSTMBlock.

  Args:
    seq_len_max: A `Tensor` of type `int64`.
      Maximum time length actually used by this input. Outputs are padded
      with zeros beyond this length.
    x: A `Tensor`. Must be one of the following types: `half`, `float32`.
      The sequence input to the LSTM, shape (timelen, batch_size, num_inputs).
    cs_prev: A `Tensor`. Must have the same type as `x`.
      Value of the initial cell state.
    h_prev: A `Tensor`. Must have the same type as `x`.
      Initial output of cell (to be used for peephole).
    w: A `Tensor`. Must have the same type as `x`. The weight matrix.
    wci: A `Tensor`. Must have the same type as `x`.
      The weight matrix for input gate peephole connection.
    wcf: A `Tensor`. Must have the same type as `x`.
      The weight matrix for forget gate peephole connection.
    wco: A `Tensor`. Must have the same type as `x`.
      The weight matrix for output gate peephole connection.
    b: A `Tensor`. Must have the same type as `x`. The bias vector.
    i: A `Tensor`. Must have the same type as `x`.
      The input gate over the whole time sequence.
    cs: A `Tensor`. Must have the same type as `x`.
      The cell state before the tanh over the whole time sequence.
    f: A `Tensor`. Must have the same type as `x`.
      The forget gate over the whole time sequence.
    o: A `Tensor`. Must have the same type as `x`.
      The output gate over the whole time sequence.
    ci: A `Tensor`. Must have the same type as `x`.
      The cell input over the whole time sequence.
    co: A `Tensor`. Must have the same type as `x`.
      The cell after the tanh over the whole time sequence.
    h: A `Tensor`. Must have the same type as `x`.
      The output h vector over the whole time sequence.
    cs_grad: A `Tensor`. Must have the same type as `x`.
      The current gradient of cs.
    h_grad: A `Tensor`. Must have the same type as `x`.
      The gradient of h vector.
    use_peephole: A `bool`. Whether to use peephole weights.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (x_grad, cs_prev_grad, h_prev_grad, w_grad, wci_grad, wcf_grad, wco_grad, b_grad).

    x_grad: A `Tensor`. Has the same type as `x`. The gradient of x to be back-propped.
    cs_prev_grad: A `Tensor`. Has the same type as `x`. The gradient of cs_prev to be back-propped.
    h_prev_grad: A `Tensor`. Has the same type as `x`. The gradient of h_prev to be back-propped.
    w_grad: A `Tensor`. Has the same type as `x`. The gradient for w to be back-propped.
    wci_grad: A `Tensor`. Has the same type as `x`. The gradient for wci to be back-propped.
    wcf_grad: A `Tensor`. Has the same type as `x`. The gradient for wcf to be back-propped.
    wco_grad: A `Tensor`. Has the same type as `x`. The gradient for wco to be back-propped.
    b_grad: A `Tensor`. Has the same type as `x`. The gradient for w to be back-propped.
  """
  _ctx = _context._context
  if _ctx is not None and _ctx._eager_context.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "BlockLSTMGrad", name, _ctx._post_execution_callbacks, seq_len_max, x,
        cs_prev, h_prev, w, wci, wcf, wco, b, i, cs, f, o, ci, co, h, cs_grad,
        h_grad, "use_peephole", use_peephole)
      _result = _BlockLSTMGradOutput._make(_result)
      return _result
    except _core._FallbackException:
      try:
        return block_lstm_grad_eager_fallback(
            seq_len_max, x, cs_prev, h_prev, w, wci, wcf, wco, b, i, cs, f, o,
            ci, co, h, cs_grad, h_grad, use_peephole=use_peephole, name=name,
            ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
      except (TypeError, ValueError):
        result = _dispatch.dispatch(
              block_lstm_grad, seq_len_max=seq_len_max, x=x, cs_prev=cs_prev,
                               h_prev=h_prev, w=w, wci=wci, wcf=wcf, wco=wco,
                               b=b, i=i, cs=cs, f=f, o=o, ci=ci, co=co, h=h,
                               cs_grad=cs_grad, h_grad=h_grad,
                               use_peephole=use_peephole, name=name)
        if result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
          return result
        raise
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  use_peephole = _execute.make_bool(use_peephole, "use_peephole")
  try:
    _, _, _op = _op_def_lib._apply_op_helper(
        "BlockLSTMGrad", seq_len_max=seq_len_max, x=x, cs_prev=cs_prev,
                         h_prev=h_prev, w=w, wci=wci, wcf=wcf, wco=wco, b=b,
                         i=i, cs=cs, f=f, o=o, ci=ci, co=co, h=h,
                         cs_grad=cs_grad, h_grad=h_grad,
                         use_peephole=use_peephole, name=name)
  except (TypeError, ValueError):
    result = _dispatch.dispatch(
          block_lstm_grad, seq_len_max=seq_len_max, x=x, cs_prev=cs_prev,
                           h_prev=h_prev, w=w, wci=wci, wcf=wcf, wco=wco, b=b,
                           i=i, cs=cs, f=f, o=o, ci=ci, co=co, h=h,
                           cs_grad=cs_grad, h_grad=h_grad,
                           use_peephole=use_peephole, name=name)
    if result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
      return result
    raise
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("use_peephole", _op.get_attr("use_peephole"), "T",
            _op.get_attr("T"))
  _execute.record_gradient(
      "BlockLSTMGrad", _inputs_flat, _attrs, _result, name)
  _result = _BlockLSTMGradOutput._make(_result)
  return _result
Example No. 50
def lstm_block_cell_grad(x, cs_prev, h_prev, w, wci, wcf, wco, b, i, cs, f, o, ci, co, cs_grad, h_grad, use_peephole, name=None):
  r"""Computes the LSTM cell backward propagation for 1 timestep.

  This implementation is to be used in conjunction of LSTMBlockCell.

  Args:
    x: A `Tensor`. Must be one of the following types: `half`, `float32`.
      The input to the LSTM cell, shape (batch_size, num_inputs).
    cs_prev: A `Tensor`. Must have the same type as `x`.
      The previous cell state.
    h_prev: A `Tensor`. Must have the same type as `x`. The previous h state.
    w: A `Tensor`. Must have the same type as `x`. The weight matrix.
    wci: A `Tensor`. Must have the same type as `x`.
      The weight matrix for input gate peephole connection.
    wcf: A `Tensor`. Must have the same type as `x`.
      The weight matrix for forget gate peephole connection.
    wco: A `Tensor`. Must have the same type as `x`.
      The weight matrix for output gate peephole connection.
    b: A `Tensor`. Must have the same type as `x`. The bias vector.
    i: A `Tensor`. Must have the same type as `x`. The input gate.
    cs: A `Tensor`. Must have the same type as `x`.
      The cell state before the tanh.
    f: A `Tensor`. Must have the same type as `x`. The forget gate.
    o: A `Tensor`. Must have the same type as `x`. The output gate.
    ci: A `Tensor`. Must have the same type as `x`. The cell input.
    co: A `Tensor`. Must have the same type as `x`. The cell after the tanh.
    cs_grad: A `Tensor`. Must have the same type as `x`.
      The current gradient of cs.
    h_grad: A `Tensor`. Must have the same type as `x`.
      The gradient of h vector.
    use_peephole: A `bool`. Whether the cell uses peephole connections.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (cs_prev_grad, dicfo, wci_grad, wcf_grad, wco_grad).

    cs_prev_grad: A `Tensor`. Has the same type as `x`. The gradient of cs to be back-propped.
    dicfo: A `Tensor`. Has the same type as `x`. The derivative wrt to [i, cs, f, o].
    wci_grad: A `Tensor`. Has the same type as `x`. The gradient for wci to be back-propped.
    wcf_grad: A `Tensor`. Has the same type as `x`. The gradient for wcf to be back-propped.
    wco_grad: A `Tensor`. Has the same type as `x`. The gradient for wco to be back-propped.
  """
  _ctx = _context._context
  if _ctx is not None and _ctx._eager_context.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "LSTMBlockCellGrad", name, _ctx._post_execution_callbacks, x, cs_prev,
        h_prev, w, wci, wcf, wco, b, i, cs, f, o, ci, co, cs_grad, h_grad,
        "use_peephole", use_peephole)
      _result = _LSTMBlockCellGradOutput._make(_result)
      return _result
    except _core._FallbackException:
      try:
        return lstm_block_cell_grad_eager_fallback(
            x, cs_prev, h_prev, w, wci, wcf, wco, b, i, cs, f, o, ci, co,
            cs_grad, h_grad, use_peephole=use_peephole, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
      except (TypeError, ValueError):
        result = _dispatch.dispatch(
              lstm_block_cell_grad, x=x, cs_prev=cs_prev, h_prev=h_prev, w=w,
                                    wci=wci, wcf=wcf, wco=wco, b=b, i=i,
                                    cs=cs, f=f, o=o, ci=ci, co=co,
                                    cs_grad=cs_grad, h_grad=h_grad,
                                    use_peephole=use_peephole, name=name)
        if result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
          return result
        raise
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  use_peephole = _execute.make_bool(use_peephole, "use_peephole")
  try:
    _, _, _op = _op_def_lib._apply_op_helper(
        "LSTMBlockCellGrad", x=x, cs_prev=cs_prev, h_prev=h_prev, w=w,
                             wci=wci, wcf=wcf, wco=wco, b=b, i=i, cs=cs, f=f,
                             o=o, ci=ci, co=co, cs_grad=cs_grad,
                             h_grad=h_grad, use_peephole=use_peephole,
                             name=name)
  except (TypeError, ValueError):
    result = _dispatch.dispatch(
          lstm_block_cell_grad, x=x, cs_prev=cs_prev, h_prev=h_prev, w=w,
                                wci=wci, wcf=wcf, wco=wco, b=b, i=i, cs=cs,
                                f=f, o=o, ci=ci, co=co, cs_grad=cs_grad,
                                h_grad=h_grad, use_peephole=use_peephole,
                                name=name)
    if result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
      return result
    raise
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("use_peephole", _op.get_attr("use_peephole"), "T",
            _op.get_attr("T"))
  _execute.record_gradient(
      "LSTMBlockCellGrad", _inputs_flat, _attrs, _result, name)
  _result = _LSTMBlockCellGradOutput._make(_result)
  return _result
Example No. 51
def init(
    job_type=None,
    dir=None,
    config=None,  # TODO(jhr): type is a union for argparse/absl
    project=None,
    entity=None,
    reinit=None,
    tags=None,
    group=None,
    name=None,
    notes=None,
    magic=None,  # TODO(jhr): type is union
    config_exclude_keys=None,
    config_include_keys=None,
    anonymous=None,
    mode=None,
    allow_val_change=None,
    resume=None,
    force=None,
    tensorboard=None,  # alias for sync_tensorboard
    sync_tensorboard=None,
    monitor_gym=None,
    save_code=None,
    id=None,
    settings=None,
):
    """Initialize a wandb Run.

    Args:
        entity: the personal user or team to use for the Run (alias: team).
        project: project name for the Run.

    Raises:
        Exception: if there was a problem initializing the Run.

    Returns:
        A wandb Run object.

    """
    assert not wandb._IS_INTERNAL_PROCESS
    kwargs = locals()
    error_seen = None
    except_exit = None
    try:
        wi = _WandbInit()
        wi.setup(kwargs)
        except_exit = wi.settings._except_exit
        try:
            run = wi.init()
            except_exit = wi.settings._except_exit
        except (KeyboardInterrupt, Exception) as e:
            if not isinstance(e, KeyboardInterrupt):
                sentry_exc(e)
            getcaller()
            assert logger
            if wi.settings.problem == "fatal":
                raise
            if wi.settings.problem == "warn":
                pass
            # TODO(jhr): figure out how to make this RunDummy
            run = None
    except UsageError:
        raise
    except KeyboardInterrupt as e:
        assert logger
        logger.warning("interrupted", exc_info=e)
        six.raise_from(Exception("interrupted"), e)
    except Exception as e:
        error_seen = e
        traceback.print_exc()
        assert logger
        logger.error("error", exc_info=e)
        # Need to build delay into this sentry capture because our exit hooks
        # mess with sentry's ability to send out errors before the program ends.
        sentry_exc(e, delay=True)
        # reraise(*sys.exc_info())
        # six.raise_from(Exception("problem"), e)
    finally:
        if error_seen:
            wandb.termerror("Abnormal program exit")
            if except_exit:
                os._exit(-1)
            six.raise_from(Exception("problem"), error_seen)
    return run
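A hedged usage sketch of `init()`; the project and run names are placeholders, and `wandb.log` is shown only as the typical follow-up call:

```python
import wandb

run = wandb.init(project="my-project", name="baseline", tags=["demo"])
wandb.log({"loss": 0.42})  # log a metric to the initialized Run
```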
Example No. 52
def lstm_block_cell(x, cs_prev, h_prev, w, wci, wcf, wco, b, forget_bias=1, cell_clip=3, use_peephole=False, name=None):
  r"""Computes the LSTM cell forward propagation for 1 time step.

  This implementation uses 1 weight matrix and 1 bias vector, and there's an
  optional peephole connection.

  This kernel op implements the following mathematical equations:

  ```python
  xh = [x, h_prev]
  [i, f, ci, o] = xh * w + b
  f = f + forget_bias

  if not use_peephole:
    wci = wcf = wco = 0

  i = sigmoid(cs_prev * wci + i)
  f = sigmoid(cs_prev * wcf + f)
  ci = tanh(ci)

  cs = ci .* i + cs_prev .* f
  cs = clip(cs, cell_clip)

  o = sigmoid(cs * wco + o)
  co = tanh(cs)
  h = co .* o
  ```

  Args:
    x: A `Tensor`. Must be one of the following types: `half`, `float32`.
      The input to the LSTM cell, shape (batch_size, num_inputs).
    cs_prev: A `Tensor`. Must have the same type as `x`.
      Value of the cell state at previous time step.
    h_prev: A `Tensor`. Must have the same type as `x`.
      Output of the previous cell at previous time step.
    w: A `Tensor`. Must have the same type as `x`. The weight matrix.
    wci: A `Tensor`. Must have the same type as `x`.
      The weight matrix for input gate peephole connection.
    wcf: A `Tensor`. Must have the same type as `x`.
      The weight matrix for forget gate peephole connection.
    wco: A `Tensor`. Must have the same type as `x`.
      The weight matrix for output gate peephole connection.
    b: A `Tensor`. Must have the same type as `x`. The bias vector.
    forget_bias: An optional `float`. Defaults to `1`. The forget gate bias.
    cell_clip: An optional `float`. Defaults to `3`.
      Value to clip the 'cs' value to.
    use_peephole: An optional `bool`. Defaults to `False`.
      Whether to use peephole weights.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (i, cs, f, o, ci, co, h).

    i: A `Tensor`. Has the same type as `x`. The input gate.
    cs: A `Tensor`. Has the same type as `x`. The cell state before the tanh.
    f: A `Tensor`. Has the same type as `x`. The forget gate.
    o: A `Tensor`. Has the same type as `x`. The output gate.
    ci: A `Tensor`. Has the same type as `x`. The cell input.
    co: A `Tensor`. Has the same type as `x`. The cell after the tanh.
    h: A `Tensor`. Has the same type as `x`. The output h vector.
  """
  _ctx = _context._context
  if _ctx is not None and _ctx._eager_context.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "LSTMBlockCell", name, _ctx._post_execution_callbacks, x, cs_prev,
        h_prev, w, wci, wcf, wco, b, "forget_bias", forget_bias, "cell_clip",
        cell_clip, "use_peephole", use_peephole)
      _result = _LSTMBlockCellOutput._make(_result)
      return _result
    except _core._FallbackException:
      try:
        return lstm_block_cell_eager_fallback(
            x, cs_prev, h_prev, w, wci, wcf, wco, b, forget_bias=forget_bias,
            cell_clip=cell_clip, use_peephole=use_peephole, name=name,
            ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
      except (TypeError, ValueError):
        result = _dispatch.dispatch(
              lstm_block_cell, x=x, cs_prev=cs_prev, h_prev=h_prev, w=w,
                               wci=wci, wcf=wcf, wco=wco, b=b,
                               forget_bias=forget_bias, cell_clip=cell_clip,
                               use_peephole=use_peephole, name=name)
        if result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
          return result
        raise
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  if forget_bias is None:
    forget_bias = 1
  forget_bias = _execute.make_float(forget_bias, "forget_bias")
  if cell_clip is None:
    cell_clip = 3
  cell_clip = _execute.make_float(cell_clip, "cell_clip")
  if use_peephole is None:
    use_peephole = False
  use_peephole = _execute.make_bool(use_peephole, "use_peephole")
  try:
    _, _, _op = _op_def_lib._apply_op_helper(
        "LSTMBlockCell", x=x, cs_prev=cs_prev, h_prev=h_prev, w=w, wci=wci,
                         wcf=wcf, wco=wco, b=b, forget_bias=forget_bias,
                         cell_clip=cell_clip, use_peephole=use_peephole,
                         name=name)
  except (TypeError, ValueError):
    result = _dispatch.dispatch(
          lstm_block_cell, x=x, cs_prev=cs_prev, h_prev=h_prev, w=w, wci=wci,
                           wcf=wcf, wco=wco, b=b, forget_bias=forget_bias,
                           cell_clip=cell_clip, use_peephole=use_peephole,
                           name=name)
    if result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
      return result
    raise
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("forget_bias", _op.get_attr("forget_bias"), "cell_clip",
            _op.get_attr("cell_clip"), "use_peephole",
            _op.get_attr("use_peephole"), "T", _op.get_attr("T"))
  _execute.record_gradient(
      "LSTMBlockCell", _inputs_flat, _attrs, _result, name)
  _result = _LSTMBlockCellOutput._make(_result)
  return _result
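The equations in the docstring translate directly into NumPy. A reference sketch (illustrative only, not the fused kernel): gate ordering follows the docstring, `w` is assumed to have shape `(num_inputs + cell_size, 4 * cell_size)`, and the peephole weights have shape `(cell_size,)`:

```python
import numpy as np

def _sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def lstm_block_cell_ref(x, cs_prev, h_prev, w, wci, wcf, wco, b,
                        forget_bias=1.0, cell_clip=3.0, use_peephole=False):
    xh = np.concatenate([x, h_prev], axis=1)          # xh = [x, h_prev]
    i, f, ci, o = np.split(xh.dot(w) + b, 4, axis=1)  # gate pre-activations
    f = f + forget_bias
    if not use_peephole:
        wci = wcf = wco = 0.0
    i = _sigmoid(cs_prev * wci + i)
    f = _sigmoid(cs_prev * wcf + f)
    ci = np.tanh(ci)
    cs = np.clip(ci * i + cs_prev * f, -cell_clip, cell_clip)
    o = _sigmoid(cs * wco + o)
    co = np.tanh(cs)
    h = co * o
    return i, cs, f, o, ci, co, h
```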
Example No. 53
def stochastic_hard_routing_gradient(input_data,
                                     tree_parameters,
                                     tree_biases,
                                     path_probability,
                                     path,
                                     tree_depth,
                                     name=None):
    r"""  Computes the derivative of the routing loss with respect to each decision

    node.

    tree_depth: The depth of the decision tree.

    input_data: The training batch's features as a 2-d tensor; `input_data[i][j]`
     gives the j-th feature of the i-th input
    tree_parameters: `tree_parameters[i]` gives the weight of
     the logistic regression model that translates from node features to
     probabilities.
    tree_biases: `tree_biases[i]` gives the bias of the logistic
     regression model that translates from node features to
     probabilities.
    path_probability: `path_probability[i]` gives the probability of reaching each
     node in `path[i]`.
    path: `path[i][j]` gives the jth node in the path taken by the ith data
     instance.

    routing_gradient: `routing_gradient` provides du / df, where u is the routing
     function and f is the (vector of) decision functions.  A decision function
     f_i computes the routing decision at node i.
    data_gradient: `data_gradient` provides df / dx, where f is the (vector
     of) decision functions and x is a batch of data.
    parameter_gradient: `parameter_gradient` provides df / dw, where f is the
     (vector of) decision functions and w is the matrix of parameters that
     determine how instances are routed through a tree.
    bias_gradient: `bias_gradient` provides df / db, where f is the
     (vector of) decision functions and b is the vector of bias parameters that
     determine how instances are routed through a tree.

    f_i is parameterized by t_i (parameters) and b_i (bias) and takes data x as
    input.  This op is called in training_ops.py to compute du / df, and we use
    that to compute

       du / dx = du / df * df / dx,
       du / dt = du / df * df / dt, and
       du / db = du / df * df / db.

  Args:
    input_data: A `Tensor` of type `float32`.
    tree_parameters: A `Tensor` of type `float32`.
    tree_biases: A `Tensor` of type `float32`.
    path_probability: A `Tensor` of type `float32`.
    path: A `Tensor` of type `int32`.
    tree_depth: An `int`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (routing_gradient, data_gradient, parameter_gradient, bias_gradient).

    routing_gradient: A `Tensor` of type `float32`.
    data_gradient: A `Tensor` of type `float32`.
    parameter_gradient: A `Tensor` of type `float32`.
    bias_gradient: A `Tensor` of type `float32`.
  """
    _ctx = _context._context
    if _ctx is None or not _ctx._eager_context.is_eager:
        tree_depth = _execute.make_int(tree_depth, "tree_depth")
        _, _, _op = _op_def_lib._apply_op_helper(
            "StochasticHardRoutingGradient",
            input_data=input_data,
            tree_parameters=tree_parameters,
            tree_biases=tree_biases,
            path_probability=path_probability,
            path=path,
            tree_depth=tree_depth,
            name=name)
        _result = _op.outputs[:]
        _inputs_flat = _op.inputs
        _attrs = ("tree_depth", _op.get_attr("tree_depth"))
        _execute.record_gradient("StochasticHardRoutingGradient", _inputs_flat,
                                 _attrs, _result, name)
        _result = _StochasticHardRoutingGradientOutput._make(_result)
        return _result

    else:
        try:
            _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
                _ctx._context_handle, _ctx._eager_context.device_name,
                "StochasticHardRoutingGradient", name,
                _ctx._post_execution_callbacks, input_data, tree_parameters,
                tree_biases, path_probability, path, "tree_depth", tree_depth)
            _result = _StochasticHardRoutingGradientOutput._make(_result)
            return _result
        except _core._FallbackException:
            return stochastic_hard_routing_gradient_eager_fallback(
                input_data,
                tree_parameters,
                tree_biases,
                path_probability,
                path,
                tree_depth=tree_depth,
                name=name,
                ctx=_ctx)
        except _core._NotOkStatusException as e:
            if name is not None:
                message = e.message + " name: " + name
            else:
                message = e.message
            _six.raise_from(_core._status_to_exception(e.code, message), None)
Example No. 54
def roll(input, shift, axis, name=None):
    r"""Rolls the elements of a tensor along an axis.

  The elements are shifted positively (towards larger indices) by the offset of
  `shift` along the dimension of `axis`. Negative `shift` values will shift
  elements in the opposite direction. Elements that roll past the last position
  will wrap around to the first and vice versa. Multiple shifts along multiple
  axes may be specified.

  For example:

  ```
  # 't' is [0, 1, 2, 3, 4]
  roll(t, shift=2, axis=0) ==> [3, 4, 0, 1, 2]

  # shifting along multiple dimensions
  # 't' is [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]
  roll(t, shift=[1, -2], axis=[0, 1]) ==> [[7, 8, 9, 5, 6], [2, 3, 4, 0, 1]]

  # shifting along the same axis multiple times
  # 't' is [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]
  roll(t, shift=[2, -3], axis=[1, 1]) ==> [[1, 2, 3, 4, 0], [6, 7, 8, 9, 5]]
  ```

  Args:
    input: A `Tensor`.
    shift: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      Dimension must be 0-D or 1-D. `shift[i]` specifies the number of places by which
      elements are shifted positively (towards larger indices) along the dimension
      specified by `axis[i]`. Negative shifts will roll the elements in the opposite
      direction.
    axis: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      Dimension must be 0-D or 1-D. `axis[i]` specifies the dimension in which
      the shift `shift[i]` should occur. If the same axis is referenced more than once, the
      total shift for that axis will be the sum of all the shifts that belong to that
      axis.
    name: A name for the operation (optional).

  Returns:
    A `Tensor`. Has the same type as `input`.
  """
    _ctx = _context._context or _context.context()
    if _ctx is not None and _ctx._thread_local_data.is_eager:
        try:
            _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
                _ctx._context_handle, _ctx._thread_local_data.device_name,
                "Roll", name, _ctx._post_execution_callbacks, input, shift,
                axis)
            return _result
        except _core._FallbackException:
            try:
                return roll_eager_fallback(input,
                                           shift,
                                           axis,
                                           name=name,
                                           ctx=_ctx)
            except _core._SymbolicException:
                pass  # Add nodes to the TensorFlow graph.
        except _core._NotOkStatusException as e:
            if name is not None:
                message = e.message + " name: " + name
            else:
                message = e.message
            _six.raise_from(_core._status_to_exception(e.code, message), None)
    # Add nodes to the TensorFlow graph.
    _, _, _op = _op_def_lib._apply_op_helper("Roll",
                                             input=input,
                                             shift=shift,
                                             axis=axis,
                                             name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("T", _op.get_attr("T"), "Tshift", _op.get_attr("Tshift"),
              "Taxis", _op.get_attr("Taxis"))
    _execute.record_gradient("Roll", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result
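`numpy.roll` uses the same positive-shift-toward-larger-indices convention, so the docstring's examples can be checked directly:

```python
import numpy as np

t = np.arange(5)
print(np.roll(t, shift=2, axis=0))
# -> [3 4 0 1 2]

t2 = np.arange(10).reshape(2, 5)
print(np.roll(t2, shift=(1, -2), axis=(0, 1)))
# -> [[7 8 9 5 6]
#     [2 3 4 0 1]]
```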
Example No. 55
def routing_gradient(input_data,
                     tree_parameters,
                     tree_biases,
                     routes,
                     max_nodes,
                     name=None):
    r"""  Computes the derivative of the routing loss with respect to each decision

    node.

    max_nodes: The number of nodes in the tree.

    tree_parameters: `tree_parameters[i]` gives the weight of
     the logistic regression model that translates from node features to
     probabilities.
    tree_biases: `tree_biases[i]` gives the bias of the logistic
     regression model that translates from node features to
     probabilities.
    routes: The routes computed by routing_function_op.

    routing_gradient: `routing_gradient` provides du / df, where u is the routing
     function and f is the (vector of) decision functions.  A decision function
     f_i computes the routing decision at node i.

     f_i is parameterized by t_i (parameters) and b_i (bias) and takes data x as
     input.  This op is called in training_ops.py to compute du / df, and we use
     that to compute

       du / dx = du / df * df / dx,
       du / dt = du / df * df / dt, and
       du / db = du / df * df / db.

  Args:
    input_data: A `Tensor` of type `float32`.
    tree_parameters: A `Tensor` of type `float32`.
    tree_biases: A `Tensor` of type `float32`.
    routes: A `Tensor` of type `float32`.
    max_nodes: An `int`.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `float32`.
  """
    _ctx = _context._context
    if _ctx is None or not _ctx._eager_context.is_eager:
        max_nodes = _execute.make_int(max_nodes, "max_nodes")
        _, _, _op = _op_def_lib._apply_op_helper(
            "RoutingGradient",
            input_data=input_data,
            tree_parameters=tree_parameters,
            tree_biases=tree_biases,
            routes=routes,
            max_nodes=max_nodes,
            name=name)
        _result = _op.outputs[:]
        _inputs_flat = _op.inputs
        _attrs = ("max_nodes", _op.get_attr("max_nodes"))
        _execute.record_gradient("RoutingGradient", _inputs_flat, _attrs,
                                 _result, name)
        _result, = _result
        return _result

    else:
        try:
            _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
                _ctx._context_handle, _ctx._eager_context.device_name,
                "RoutingGradient", name, _ctx._post_execution_callbacks,
                input_data, tree_parameters, tree_biases, routes, "max_nodes",
                max_nodes)
            return _result
        except _core._FallbackException:
            return routing_gradient_eager_fallback(input_data,
                                                   tree_parameters,
                                                   tree_biases,
                                                   routes,
                                                   max_nodes=max_nodes,
                                                   name=name,
                                                   ctx=_ctx)
        except _core._NotOkStatusException as e:
            if name is not None:
                message = e.message + " name: " + name
            else:
                message = e.message
            _six.raise_from(_core._status_to_exception(e.code, message), None)
Example No. 56
def adjust_hsv_in_yiq(images, delta_h, scale_s, scale_v, name=None):
  r"""Adjust the YIQ hue of one or more images.

  `images` is a tensor of at least 3 dimensions.  The last dimension is
  interpreted as channels, and must be three.

  We use the linear transformation described in:
   beesbuzz.biz/code/hsv_color_transforms.php
  The input image is considered to be in the RGB colorspace. Conceptually, the
  RGB colors are first mapped into YIQ space, rotated around the Y channel by
  delta_h radians, the chrominance channels (I, Q) are multiplied by scale_s,
  all channels (Y, I, Q) are multiplied by scale_v, and the result is mapped
  back to the RGB colorspace. Each of these operations is a linear
  transformation.

  Args:
    images: A `Tensor`. Must be one of the following types: `uint8`, `int8`, `int16`, `int32`, `int64`, `half`, `float32`, `float64`.
      Images to adjust.  At least 3-D.
    delta_h: A `Tensor` of type `float32`.
      A float scale that represents the hue rotation amount, in radians;
      delta_h can be any float value.
    scale_s: A `Tensor` of type `float32`.
      A float scale that represents the factor to multiply the saturation by.
      scale_s needs to be non-negative.
    scale_v: A `Tensor` of type `float32`.
      A float scale that represents the factor to multiply the value by.
      scale_v needs to be non-negative.
    name: A name for the operation (optional).

  Returns:
    A `Tensor`. Has the same type as `images`.
    The hsv-adjusted image or images. No clipping will be done in this op.
    The client can clip them using additional ops in their graph.
  """
  _ctx = _context._context
  if _ctx is not None and _ctx._eager_context.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._eager_context.device_name,
        "AdjustHsvInYiq", name, _ctx._post_execution_callbacks, images,
        delta_h, scale_s, scale_v)
      return _result
    except _core._FallbackException:
      try:
        return adjust_hsv_in_yiq_eager_fallback(
            images, delta_h, scale_s, scale_v, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
      except (TypeError, ValueError):
        result = _dispatch.dispatch(
              adjust_hsv_in_yiq, images=images, delta_h=delta_h,
                                 scale_s=scale_s, scale_v=scale_v, name=name)
        if result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
          return result
        raise
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  try:
    _, _, _op = _op_def_lib._apply_op_helper(
        "AdjustHsvInYiq", images=images, delta_h=delta_h, scale_s=scale_s,
                          scale_v=scale_v, name=name)
  except (TypeError, ValueError):
    result = _dispatch.dispatch(
          adjust_hsv_in_yiq, images=images, delta_h=delta_h, scale_s=scale_s,
                             scale_v=scale_v, name=name)
    if result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
      return result
    raise
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("T", _op.get_attr("T"))
  _execute.record_gradient(
      "AdjustHsvInYiq", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
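The conceptual pipeline in the docstring (RGB to YIQ, rotate around Y, scale, back to RGB) can be sketched in NumPy. The matrix below is the standard NTSC RGB-to-YIQ transform; this is an illustration of the math, not the fused op:

```python
import numpy as np

RGB2YIQ = np.array([[0.299,  0.587,  0.114],
                    [0.596, -0.274, -0.322],
                    [0.211, -0.523,  0.312]])

def adjust_hsv_in_yiq_ref(rgb, delta_h, scale_s, scale_v):
    yiq = rgb.dot(RGB2YIQ.T)                             # RGB -> YIQ
    c, s = np.cos(delta_h), np.sin(delta_h)
    rot = np.array([[1, 0, 0], [0, c, -s], [0, s, c]])   # rotate around Y
    yiq = yiq.dot(rot.T)
    yiq = yiq * (np.array([1.0, scale_s, scale_s]) * scale_v)  # scale I,Q then all
    return yiq.dot(np.linalg.inv(RGB2YIQ).T)             # YIQ -> RGB
```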
Example No. 57
    def lookup(self, name, visibility=None, scope_limit=None):
        '''Look up a symbol in the symbol table. The lookup can be limited
        by visibility (e.g. just show public methods) or by scope_limit (e.g.
        just show symbols up to a certain scope).

        :param str name: name of the symbol.
        :param visibility: the visibility or list of visibilities that the \
                          symbol must have.
        :type visibility: [list of] :py:class:`psyclone.symbols.Visibility`
        :param scope_limit: optional Node which limits the symbol \
            search space to the symbol tables of the nodes within the \
            given scope. If it is None (the default), the whole \
            scope (all symbol tables in ancestor nodes) is searched \
            otherwise ancestors of the scope_limit node are not \
            searched.
        :type scope_limit: :py:class:`psyclone.psyir.nodes.Node` or \
            `NoneType`

        :returns: the symbol with the given name and, if specified, visibility.
        :rtype: :py:class:`psyclone.psyir.symbols.Symbol`

        :raises TypeError: if the name argument is not a string.
        :raises SymbolError: if the name exists in the Symbol Table but does \
                             not have the specified visibility.
        :raises TypeError: if the visibility argument has the wrong type.
        :raises KeyError: if the given name is not in the Symbol Table.

        '''
        if not isinstance(name, six.string_types):
            raise TypeError(
                "Expected the name argument to the lookup() method to be "
                "a str but found '{0}'."
                "".format(type(name).__name__))

        try:
            symbol = self.get_symbols(scope_limit)[self._normalize(name)]
            if visibility:
                if not isinstance(visibility, list):
                    vis_list = [visibility]
                else:
                    vis_list = visibility
                if symbol.visibility not in vis_list:
                    vis_names = []
                    # Take care here in case the 'visibility' argument
                    # is of the wrong type
                    for vis in vis_list:
                        if not isinstance(vis, Symbol.Visibility):
                            raise TypeError(
                                "the 'visibility' argument to lookup() must be"
                                " an instance (or list of instances) of "
                                "Symbol.Visibility but got '{0}' when "
                                "searching for symbol '{1}'".format(
                                    type(vis).__name__, name))
                        vis_names.append(vis.name)
                    raise SymbolError(
                        "Symbol '{0}' exists in the Symbol Table but has "
                        "visibility '{1}' which does not match with the "
                        "requested visibility: {2}".format(
                            name, symbol.visibility.name, vis_names))
            return symbol
        except KeyError as err:
            six.raise_from(
                KeyError("Could not find '{0}' in the Symbol Table."
                         "".format(name)), err)
Example No. 58
def stochastic_hard_routing_function(input_data,
                                     tree_parameters,
                                     tree_biases,
                                     tree_depth,
                                     random_seed,
                                     name=None):
    r"""  Samples a path for each instance in `input_data` and returns the

    probability of the path and the path taken.

    tree_depth: The depth of the decision tree.
    random_seed: The base random seed.

    input_data: The training batch's features as a 2-d tensor; `input_data[i][j]`
     gives the j-th feature of the i-th input.
    tree_parameters: `tree_parameters[i]` gives the weight of
     the logistic regression model that translates from node features to
     probabilities.
    tree_biases: `tree_biases[i]` gives the bias of the logistic
     regression model that translates from node features to
     probabilities.

    path_probility: `path_probability[i]` gives the probability of reaching each
     node in `path[i]`.
    path: `path[i][j]` gives the jth node in the path taken by the ith data
     instance.

  Args:
    input_data: A `Tensor` of type `float32`.
    tree_parameters: A `Tensor` of type `float32`.
    tree_biases: A `Tensor` of type `float32`.
    tree_depth: An `int`.
    random_seed: An `int`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (path_probability, path).

    path_probability: A `Tensor` of type `float32`.
    path: A `Tensor` of type `int32`.
  """
    _ctx = _context._context
    if _ctx is None or not _ctx._eager_context.is_eager:
        tree_depth = _execute.make_int(tree_depth, "tree_depth")
        random_seed = _execute.make_int(random_seed, "random_seed")
        _, _, _op = _op_def_lib._apply_op_helper(
            "StochasticHardRoutingFunction",
            input_data=input_data,
            tree_parameters=tree_parameters,
            tree_biases=tree_biases,
            tree_depth=tree_depth,
            random_seed=random_seed,
            name=name)
        _result = _op.outputs[:]
        _inputs_flat = _op.inputs
        _attrs = ("tree_depth", _op.get_attr("tree_depth"), "random_seed",
                  _op.get_attr("random_seed"))
        _execute.record_gradient("StochasticHardRoutingFunction", _inputs_flat,
                                 _attrs, _result, name)
        _result = _StochasticHardRoutingFunctionOutput._make(_result)
        return _result

    else:
        try:
            _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
                _ctx._context_handle, _ctx._eager_context.device_name,
                "StochasticHardRoutingFunction", name,
                _ctx._post_execution_callbacks, input_data, tree_parameters,
                tree_biases, "tree_depth", tree_depth, "random_seed",
                random_seed)
            _result = _StochasticHardRoutingFunctionOutput._make(_result)
            return _result
        except _core._FallbackException:
            return stochastic_hard_routing_function_eager_fallback(
                input_data,
                tree_parameters,
                tree_biases,
                tree_depth=tree_depth,
                random_seed=random_seed,
                name=name,
                ctx=_ctx)
        except _core._NotOkStatusException as e:
            if name is not None:
                message = e.message + " name: " + name
            else:
                message = e.message
            _six.raise_from(_core._status_to_exception(e.code, message), None)
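
A hypothetical call sketch for the wrapper above (TF 1.x graph mode; all
names and shapes here are assumptions, not taken from the source):

import tensorflow as tf

batch, num_features, num_nodes = 4, 10, 7   # assumed sizes
input_data = tf.random_uniform([batch, num_features])
tree_parameters = tf.random_uniform([num_nodes, num_features])
tree_biases = tf.random_uniform([num_nodes])

path_probability, path = stochastic_hard_routing_function(
    input_data, tree_parameters, tree_biases, tree_depth=3, random_seed=42)
# path[i][j] is the j-th node sampled for instance i, and
# path_probability[i] the probability of reaching each node on that path.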
Ejemplo n.º 59
0
def resampler_grad(data, warp, grad_output, name=None):
  r"""Resampler Grad op.

  Args:
    data: A `Tensor`. Must be one of the following types: `half`, `bfloat16`, `float32`, `float64`.
    warp: A `Tensor`. Must have the same type as `data`.
    grad_output: A `Tensor`. Must have the same type as `data`.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (grad_data, grad_warp).

    grad_data: A `Tensor`. Has the same type as `data`.
    grad_warp: A `Tensor`. Has the same type as `data`.
  """
  _ctx = _context._context or _context.context()
  if _ctx is not None and _ctx._thread_local_data.is_eager:
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
        _ctx._context_handle, _ctx._thread_local_data.device_name,
        "ResamplerGrad", name, _ctx._post_execution_callbacks, data, warp,
        grad_output)
      _result = _ResamplerGradOutput._make(_result)
      return _result
    except _core._FallbackException:
      try:
        return resampler_grad_eager_fallback(
            data, warp, grad_output, name=name, ctx=_ctx)
      except _core._SymbolicException:
        pass  # Add nodes to the TensorFlow graph.
      except (TypeError, ValueError):
        result = _dispatch.dispatch(
              resampler_grad, data=data, warp=warp, grad_output=grad_output,
                              name=name)
        if result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
          return result
        raise
    except _core._NotOkStatusException as e:
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
  # Add nodes to the TensorFlow graph.
  try:
    _, _, _op = _op_def_lib._apply_op_helper(
        "ResamplerGrad", data=data, warp=warp, grad_output=grad_output,
                         name=name)
  except (TypeError, ValueError):
    result = _dispatch.dispatch(
          resampler_grad, data=data, warp=warp, grad_output=grad_output,
                          name=name)
    if result is not _dispatch.OpDispatcher.NOT_SUPPORTED:
      return result
    raise
  _result = _op.outputs[:]
  _inputs_flat = _op.inputs
  _attrs = ("T", _op.get_attr("T"))
  _execute.record_gradient(
      "ResamplerGrad", _inputs_flat, _attrs, _result, name)
  _result = _ResamplerGradOutput._make(_result)
  return _result
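
A hypothetical usage sketch (the shapes follow the contrib resampler
convention and are assumptions, not taken from the source):

import tensorflow as tf

data = tf.ones([1, 4, 4, 1])            # [batch, height, width, channels]
warp = tf.random_uniform([1, 2, 2])     # [batch, num_points, (x, y)]
grad_output = tf.ones([1, 2, 1])        # gradient w.r.t. the resampler output

grad_data, grad_warp = resampler_grad(data, warp, grad_output)
# grad_data matches data's shape; grad_warp matches warp's shape.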
Ejemplo n.º 60
0
    def parse(self):
        """
        Parse the POST data and break it into a FILES MultiValueDict and a POST
        MultiValueDict.

        Return a tuple containing the POST and FILES dictionary, respectively.
        """
        from request_parser.http.request import QueryDict

        encoding = self._encoding
        handlers = self._upload_handlers

        # The HTTP spec says that Content-Length >= 0 is valid, so
        # handle a content-length of 0 before continuing.
        if self._content_length == 0:
            return QueryDict(self.settings,
                             encoding=self._encoding), MultiValueDict()

        # See if any of the handlers take care of the parsing.
        # This allows overriding everything if need be.
        for handler in handlers:
            result = handler.handle_raw_input(
                self._input_data,
                self._meta,
                self._content_length,
                self._boundary,
                self.settings,
                encoding,
            )
            # Check to see if it was handled
            if result is not None:
                # Returning as soon as the result is not None means only one
                # handler gets to parse the body and files - technically more
                # than one could, but the first one that returns a result wins.
                return result[0], result[1]

        # Create the data structures to be used later.
        self._post = QueryDict(self.settings, mutable=True)
        self._files = MultiValueDict()

        # Instantiate the stream:
        stream = LazyStream(ChunkIter(self._input_data, self._chunk_size))

        # Whether or not to signal a file-completion at the beginning of the loop.
        old_field_name = None
        counters = [0] * len(handlers)

        # Number of bytes that have been read.
        num_bytes_read = 0
        # To count the number of keys in the request.
        num_post_keys = 0
        # To limit the amount of data read from the request.
        read_size = None

        try:
            for item_type, meta_data, field_stream in Parser(
                    stream, self._boundary):
                if old_field_name:
                    # We run this at the beginning of the next loop
                    # since we cannot be sure a file is complete until
                    # we hit the next boundary/part of the multipart content.
                    self.handle_file_complete(old_field_name, counters)
                    old_field_name = None

                try:
                    disposition = meta_data['content-disposition'][1]
                    field_name = disposition['name'].strip()
                except (KeyError, IndexError, AttributeError):
                    continue

                transfer_encoding = meta_data.get('content-transfer-encoding')
                if transfer_encoding is not None:
                    transfer_encoding = transfer_encoding[0].strip()
                    transfer_encoding = force_text(transfer_encoding,
                                                   encoding,
                                                   errors='replace')
                field_name = force_text(field_name, encoding, errors='replace')

                if item_type == FIELD:
                    # Avoid storing more than DATA_UPLOAD_MAX_NUMBER_FIELDS.
                    num_post_keys += 1
                    if (self.settings.DATA_UPLOAD_MAX_NUMBER_FIELDS is not None
                            and self.settings.DATA_UPLOAD_MAX_NUMBER_FIELDS <
                            num_post_keys):
                        raise TooManyFieldsSent(
                            'The number of GET/POST parameters exceeded '
                            'settings.DATA_UPLOAD_MAX_NUMBER_FIELDS.')

                    # Avoid reading more than DATA_UPLOAD_MAX_MEMORY_SIZE.
                    if self.settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None:
                        read_size = self.settings.DATA_UPLOAD_MAX_MEMORY_SIZE - num_bytes_read

                    # This is a post field, we can just set it in the post
                    if transfer_encoding == 'base64':
                        #read only for the remaining size
                        raw_data = field_stream.read(size=read_size)
                        num_bytes_read += len(raw_data)
                        try:
                            #decode the data read
                            data = base64.b64decode(raw_data)
                        except binascii.Error:
                            data = raw_data
                    else:
                        data = field_stream.read(size=read_size)
                        num_bytes_read += len(data)

                    # Add two here to make the check consistent with the
                    # x-www-form-urlencoded check that includes '&='.
                    #QUESTION: We can't implement buffered reading if we
                    #have only read part of a stream. Can we?
                    num_bytes_read += len(field_name) + 2
                    if (self.settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
                            and num_bytes_read >
                            self.settings.DATA_UPLOAD_MAX_MEMORY_SIZE):
                        raise RequestDataTooBig(
                            'Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE.'
                        )

                    #force_text(data, encoding, errors='replace')
                    if transfer_encoding is None:
                        transfer_encoding = ''
                    content_type, content_type_extra = meta_data.get(
                        'content-type', ('', {}))
                    content_type = content_type.strip()
                    #print "data: "+data+"\r\ntype: "+content_type+"\r\nt.encoding: "+transfer_encoding
                    #self._post.appendlist(field_name, force_text(data, encoding, errors='replace'))
                    self._post.appendlist(
                        field_name, {
                            'data':
                            force_text(data, encoding, errors='replace'),
                            'content-type':
                            force_text(
                                content_type, encoding, errors='replace'),
                            'transfer-encoding':
                            transfer_encoding,
                            'content-type-extra':
                            content_type_extra
                        })
                elif item_type == FILE:
                    # This is a file, use the handler...
                    file_name = disposition.get('filename')
                    if file_name:
                        file_name = force_text(file_name,
                                               encoding,
                                               errors='replace')
                        file_name = self.IE_sanitize(
                            unescape_entities(file_name))
                    if not file_name:
                        continue

                    content_type, content_type_extra = meta_data.get(
                        'content-type', ('', {}))
                    content_type = content_type.strip()
                    charset = content_type_extra.get('charset')

                    try:
                        content_length = int(
                            meta_data.get('content-length')[0])
                    except (IndexError, TypeError, ValueError):
                        content_length = None

                    counters = [0] * len(handlers)
                    try:
                        for handler in handlers:
                            try:
                                handler.new_file(field_name, file_name,
                                                 content_type, content_length,
                                                 charset, content_type_extra,
                                                 transfer_encoding)
                            # A handler that takes ownership of the new file
                            # raises StopFutureHandlers to prevent the
                            # remaining handlers from handling it.
                            except StopFutureHandlers:
                                break

                        for chunk in field_stream:
                            if transfer_encoding == 'base64':
                                # We only special-case base64 transfer encoding
                                # We should always decode base64 chunks by multiple of 4,
                                # ignoring whitespace.

                                stripped_chunk = b"".join(chunk.split())

                                remaining = len(stripped_chunk) % 4
                                while remaining != 0:
                                    over_chunk = field_stream.read(4 -
                                                                   remaining)
                                    stripped_chunk += b"".join(
                                        over_chunk.split())
                                    remaining = len(stripped_chunk) % 4

                                try:
                                    chunk = base64.b64decode(stripped_chunk)
                                except Exception as exc:
                                    # Since this is only a chunk, any error is an unfixable error.
                                    raise_from(
                                        MultiPartParserError(
                                            "Could not decode base64 data."),
                                        exc)

                            for i, handler in enumerate(handlers):
                                chunk_length = len(chunk)
                                #stream data into the temp file
                                chunk = handler.receive_data_chunk(
                                    chunk, counters[i])
                                counters[i] += chunk_length

                                # A handler returning None signals that the
                                # chunk was fully consumed; don't pass it on
                                # to the remaining handlers.
                                if chunk is None:
                                    break

                    # SkipFile is raised by an upload handler that wants to
                    # discard the current file.
                    except SkipFile:
                        self._close_files()
                        # Just use up the rest of this file...
                        exhaust(field_stream)
                    else:
                        # Handle file upload completions on next iteration.
                        old_field_name = field_name
                else:
                    # If this is neither a FIELD nor a FILE, just exhaust the stream.
                    exhaust(stream)
        # QUESTION: When does this occur?
        # ANSWER: This is used when one of the file handlers (line 257)
        # signals that all further file handling must stop, meaning any
        # further file upload has to be cut short abruptly.
        # See django's tests/file_uploads/uploadhandler.py for more details.
        # TODO: Repurpose this so that file upload parsing can be stopped
        # whenever needed.
        except StopUpload as e:
            self._close_files()
            if not e.connection_reset:
                exhaust(self._input_data)
        else:
            # Make sure that the request data is all fed
            exhaust(self._input_data)

        # Signal that the upload has completed.
        # any() shortcircuits if a handler's upload_complete() returns a value.

        #Perform any clean up after file upload is complete
        any(handler.upload_complete() for handler in handlers)
        self._post._mutable = False
        return self._post, self._files
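
The base64 branch above only decodes streamed chunks in multiples of four
bytes, pulling extra bytes from the stream until the chunk aligns. A
standalone sketch of that alignment idea (the `read` callable is a
hypothetical stand-in for the parser's field stream, not its actual API):

import base64

def decode_b64_stream(read):
    """`read(n)` is assumed to return up to n bytes, b'' at end of stream."""
    out = b""
    while True:
        chunk = read(1024)
        if not chunk:
            break
        # base64 decodes in 4-byte groups, ignoring whitespace, so keep
        # reading until the stripped chunk length is a multiple of 4.
        stripped = b"".join(chunk.split())
        while len(stripped) % 4 != 0:
            extra = read(4 - len(stripped) % 4)
            if not extra:
                break  # truncated input; b64decode below will raise
            stripped += b"".join(extra.split())
        out += base64.b64decode(stripped)
    return out

For example:

import io
src = io.BytesIO(base64.b64encode(b"hello world") + b"\n")
assert decode_b64_stream(src.read) == b"hello world"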