Example #1
def _get_subfield_superclass():
    # hardcore trick to support django < 1.3 - there was something wrong with
    # inheritance and SubfieldBase before django 1.3
    # see https://github.com/django/django/commit/222c73261650201f5ce99e8dd4b1ce0d30a69eb4
    if django.VERSION < (1,3):
        return models.Field
    return six.with_metaclass(models.SubfieldBase, models.Field)
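A hypothetical usage sketch (the field class and its to_python logic are illustrative, not from the original source); subclassing the returned base yields a field definition that works on both sides of the Django 1.3 boundary:

class CommaSeparatedListField(_get_subfield_superclass()):
    # Illustrative custom field: stores a Python list as a comma-separated string.
    def to_python(self, value):
        if isinstance(value, list):
            return value
        return value.split(',') if value else []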
Example #2
def with_metaclasses(metaclasses, *bases):
    """Make a class inheriting from ``bases`` whose metaclass inherits from
    all of ``metaclasses``.

    Like :func:`six.with_metaclass`, but allows multiple metaclasses.

    Parameters
    ----------
    metaclasses : iterable[type]
        A tuple of types to use as metaclasses.
    *bases : tuple[type]
        A tuple of types to use as bases.

    Returns
    -------
    base : type
        A subtype of ``bases`` whose metaclass is a subtype of ``metaclasses``.

    Notes
    -----
    The metaclasses must be written to support cooperative multiple
    inheritance. This means that they must delegate all calls to ``super()``
    instead of inlining their super class by name.
    """
    return six.with_metaclass(compose_types(*metaclasses), *bases)
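A sketch of intended usage, assuming compose_types merges the given metaclasses into a single subtype; the two toy metaclasses below are hypothetical and cooperate via super() as the notes require:

class TraceMeta(type):
    # Records the names of classes it creates, then delegates to super().
    created = []

    def __new__(mcs, name, bases, dct):
        TraceMeta.created.append(name)
        return super(TraceMeta, mcs).__new__(mcs, name, bases, dct)


class SlotMeta(type):
    # Injects empty __slots__ unless present, then delegates to super().
    def __new__(mcs, name, bases, dct):
        dct.setdefault('__slots__', ())
        return super(SlotMeta, mcs).__new__(mcs, name, bases, dct)


class Combined(with_metaclasses((TraceMeta, SlotMeta))):
    pass


assert 'Combined' in TraceMeta.created
assert Combined.__slots__ == ()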
Example #3
def setup_filterset(filterset_class):
    """ Wrap a provided filterset in Graphene-specific functionality
    """
    return type(
        'Graphene{}'.format(filterset_class.__name__),
        (six.with_metaclass(GrapheneFilterSetMetaclass, GrapheneFilterSetMixin, filterset_class),),
        {},
    )
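Assuming an ordinary django-filter FilterSet subclass, usage is a one-liner (MyFilterSet is hypothetical):

GrapheneMyFilterSet = setup_filterset(MyFilterSet)
assert GrapheneMyFilterSet.__name__ == 'GrapheneMyFilterSet'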
Example #4
def create_deferred_base_class(name, fields=None, meta=None, polymorphic=False):
    # Use None defaults: the mutable default dicts would otherwise be shared
    # (and mutated by setdefault below) across calls.
    fields = dict(fields or {})
    meta = dict(meta or {})
    metaclass = deferred.ForeignKeyBuilder
    model_class = models.Model

    if polymorphic:
        metaclass = deferred.PolymorphicForeignKeyBuilder
        model_class = PolymorphicModel

    meta.setdefault('app_label', 'foo')
    meta.setdefault('abstract', True)
    Meta = type(str('Meta'), (), meta)
    return type(
        str(name),
        (six.with_metaclass(metaclass, model_class),),
        dict(Meta=Meta, __module__=__name__, **fields),
    )
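A hypothetical call to the helper above, building an abstract deferred base model with one extra field:

BaseProduct = create_deferred_base_class(
    'BaseProduct',
    fields={'name': models.CharField(max_length=255)},
    meta={'app_label': 'shop'},
)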
Example #5
def enable_final(base=(), meta_base=()):
    """Returns a base class in which ``final`` decorator is made available.

    Inheriting from the returned value of this function enables
    :meth:`~chainer.utils.final` decorator to be applied to the methods of
    the class.

    Args:
        base (type or tuple of types): Base classes of the returned class.
        meta_base (type or tuple of types): Base metaclasses. If any descendant
            classes can directly or indirectly have any metaclasses, these
            metaclasses should be specified here to avoid the metaclass
            conflict.
    """
    if not isinstance(base, (list, tuple)):
        base = (base,)
    if not isinstance(meta_base, (list, tuple)):
        meta_base = (meta_base,)

    base_metaclass = type('base_metaclass', (_EnableFinal,) + meta_base, {})
    return six.with_metaclass(base_metaclass, *base)
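A usage sketch, assuming the companion chainer.utils.final decorator (referenced in the docstring) marks methods for the _EnableFinal metaclass to protect:

import chainer.utils

class MyBase(enable_final()):
    @chainer.utils.final
    def forward(self, x):
        # A subclass that tries to override forward() should be rejected
        # at class-creation time.
        return x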
Example #6
class Process(six.with_metaclass(abc.ABCMeta, object)):
    def __init__(self, toolpath_object, **kwargs):
        # type: (Dict[Text, Any], **Any) -> None
        """
        kwargs:

        metadata: tool document metadata
        requirements: inherited requirements
        hints: inherited hints
        loader: schema_salad.ref_resolver.Loader used to load tool document
        avsc_names: CWL Avro schema object used to validate document
        strict: flag to determine strict validation (fail on unrecognized fields)
        """

        self.metadata = kwargs.get("metadata", {})  # type: Dict[Text,Any]
        self.names = None  # type: avro.schema.Names

        global SCHEMA_FILE, SCHEMA_DIR, SCHEMA_ANY  # pylint: disable=global-statement
        if SCHEMA_FILE is None:
            get_schema("v1.0")
            SCHEMA_ANY = cast(Dict[Text, Any],
                              SCHEMA_CACHE["v1.0"][3].idx["https://w3id.org/cwl/salad#Any"])
            SCHEMA_FILE = cast(Dict[Text, Any],
                               SCHEMA_CACHE["v1.0"][3].idx["https://w3id.org/cwl/cwl#File"])
            SCHEMA_DIR = cast(Dict[Text, Any],
                              SCHEMA_CACHE["v1.0"][3].idx["https://w3id.org/cwl/cwl#Directory"])

        names = schema_salad.schema.make_avro_schema([SCHEMA_FILE, SCHEMA_DIR, SCHEMA_ANY],
                                                     schema_salad.ref_resolver.Loader({}))[0]
        if isinstance(names, avro.schema.SchemaParseException):
            raise names
        else:
            self.names = names
        self.tool = toolpath_object
        self.requirements = (kwargs.get("requirements", []) +
                             self.tool.get("requirements", []) +
                             get_overrides(kwargs.get("overrides", []), self.tool["id"]).get("requirements", []))
        self.hints = kwargs.get("hints", []) + self.tool.get("hints", [])
        self.formatgraph = None  # type: Graph
        if "loader" in kwargs:
            self.formatgraph = kwargs["loader"].graph

        self.doc_loader = kwargs["loader"]
        self.doc_schema = kwargs["avsc_names"]

        checkRequirements(self.tool, supportedProcessRequirements)
        self.validate_hints(kwargs["avsc_names"], self.tool.get("hints", []),
                            strict=kwargs.get("strict"))

        self.schemaDefs = {}  # type: Dict[Text,Dict[Text, Any]]

        sd, _ = self.get_requirement("SchemaDefRequirement")

        if sd:
            sdtypes = sd["types"]
            av = schema_salad.schema.make_valid_avro(sdtypes, {t["name"]: t for t in avroize_type(sdtypes)}, set())
            for i in av:
                self.schemaDefs[i["name"]] = i  # type: ignore
            AvroSchemaFromJSONData(av, self.names)  # type: ignore

        # Build record schema from inputs
        self.inputs_record_schema = {
            "name": "input_record_schema", "type": "record",
            "fields": []}  # type: Dict[Text, Any]
        self.outputs_record_schema = {
            "name": "outputs_record_schema", "type": "record",
            "fields": []}  # type: Dict[Text, Any]

        for key in ("inputs", "outputs"):
            for i in self.tool[key]:
                c = copy.copy(i)
                c["name"] = shortname(c["id"])
                del c["id"]

                if "type" not in c:
                    raise validate.ValidationException(u"Missing `type` in parameter `%s`" % c["name"])

                if "default" in c and "null" not in aslist(c["type"]):
                    c["type"] = ["null"] + aslist(c["type"])
                else:
                    c["type"] = c["type"]
                c["type"] = avroize_type(c["type"], c["name"])
                if key == "inputs":
                    self.inputs_record_schema["fields"].append(c)
                elif key == "outputs":
                    self.outputs_record_schema["fields"].append(c)

        try:
            self.inputs_record_schema = cast(Dict[six.text_type, Any], schema_salad.schema.make_valid_avro(self.inputs_record_schema, {}, set()))
            AvroSchemaFromJSONData(self.inputs_record_schema, self.names)
        except avro.schema.SchemaParseException as e:
            raise validate.ValidationException(u"Got error `%s` while processing inputs of %s:\n%s" %
                                               (Text(e), self.tool["id"],
                                                json.dumps(self.inputs_record_schema, indent=4)))

        try:
            self.outputs_record_schema = cast(Dict[six.text_type, Any], schema_salad.schema.make_valid_avro(self.outputs_record_schema, {}, set()))
            AvroSchemaFromJSONData(self.outputs_record_schema, self.names)
        except avro.schema.SchemaParseException as e:
            raise validate.ValidationException(u"Got error `%s` while processing outputs of %s:\n%s" %
                                               (Text(e), self.tool["id"],
                                                json.dumps(self.outputs_record_schema, indent=4)))

    def _init_job(self, joborder, **kwargs):
        # type: (Dict[Text, Text], **Any) -> Builder
        """
        kwargs:

        eval_timeout: javascript evaluation timeout
        use_container: do/don't use Docker when DockerRequirement hint provided
        make_fs_access: make an FsAccess() object with given basedir
        basedir: basedir for FsAccess
        docker_outdir: output directory inside docker for this job
        docker_tmpdir: tmpdir inside docker for this job
        docker_stagedir: stagedir inside docker for this job
        outdir: outdir on host for this job
        tmpdir: tmpdir on host for this job
        stagedir: stagedir on host for this job
        select_resources: callback to select compute resources
        debug: enable debugging output
        js_console: enable javascript console output
        """

        builder = Builder()
        builder.job = cast(Dict[Text, Union[Dict[Text, Any], List,
                                            Text]], copy.deepcopy(joborder))

        # Validate job order
        try:
            fillInDefaults(self.tool[u"inputs"], builder.job)
            normalizeFilesDirs(builder.job)
            validate.validate_ex(self.names.get_name("input_record_schema", ""), builder.job,
                                 strict=False, logger=_logger_validation_warnings)
        except (validate.ValidationException, WorkflowException) as e:
            raise WorkflowException("Invalid job input record:\n" + Text(e))

        builder.files = []
        builder.bindings = CommentedSeq()
        builder.schemaDefs = self.schemaDefs
        builder.names = self.names
        builder.requirements = self.requirements
        builder.hints = self.hints
        builder.resources = {}
        builder.timeout = kwargs.get("eval_timeout")
        builder.debug = kwargs.get("debug")
        builder.js_console = kwargs.get("js_console")
        builder.mutation_manager = kwargs.get("mutation_manager")

        builder.make_fs_access = kwargs.get("make_fs_access") or StdFsAccess
        builder.fs_access = builder.make_fs_access(kwargs["basedir"])
        builder.force_docker_pull = kwargs.get("force_docker_pull")

        loadListingReq, _ = self.get_requirement("http://commonwl.org/cwltool#LoadListingRequirement")
        if loadListingReq:
            builder.loadListing = loadListingReq.get("loadListing")

        dockerReq, is_req = self.get_requirement("DockerRequirement")
        defaultDocker = None

        if dockerReq is None and "default_container" in kwargs:
            defaultDocker = kwargs["default_container"]

        if (dockerReq or defaultDocker) and kwargs.get("use_container"):
            if dockerReq:
                # Check if docker output directory is absolute
                if dockerReq.get("dockerOutputDirectory") and dockerReq.get("dockerOutputDirectory").startswith('/'):
                    builder.outdir = dockerReq.get("dockerOutputDirectory")
                else:
                    builder.outdir = builder.fs_access.docker_compatible_realpath(
                        dockerReq.get("dockerOutputDirectory") or kwargs.get("docker_outdir") or "/var/spool/cwl")
            elif defaultDocker:
                builder.outdir = builder.fs_access.docker_compatible_realpath(
                    kwargs.get("docker_outdir") or "/var/spool/cwl")
            builder.tmpdir = builder.fs_access.docker_compatible_realpath(kwargs.get("docker_tmpdir") or "/tmp")
            builder.stagedir = builder.fs_access.docker_compatible_realpath(kwargs.get("docker_stagedir") or "/var/lib/cwl")
        else:
            builder.outdir = builder.fs_access.realpath(kwargs.get("outdir") or tempfile.mkdtemp())
            builder.tmpdir = builder.fs_access.realpath(kwargs.get("tmpdir") or tempfile.mkdtemp())
            builder.stagedir = builder.fs_access.realpath(kwargs.get("stagedir") or tempfile.mkdtemp())

        if self.formatgraph:
            for i in self.tool["inputs"]:
                d = shortname(i["id"])
                if d in builder.job and i.get("format"):
                    checkFormat(builder.job[d], builder.do_eval(i["format"]), self.formatgraph)

        builder.bindings.extend(builder.bind_input(self.inputs_record_schema, builder.job))

        if self.tool.get("baseCommand"):
            for n, b in enumerate(aslist(self.tool["baseCommand"])):
                builder.bindings.append({
                    "position": [-1000000, n],
                    "datum": b
                })

        if self.tool.get("arguments"):
            for i, a in enumerate(self.tool["arguments"]):
                lc = self.tool["arguments"].lc.data[i]
                fn = self.tool["arguments"].lc.filename
                builder.bindings.lc.add_kv_line_col(len(builder.bindings), lc)
                if isinstance(a, dict):
                    a = copy.copy(a)
                    if a.get("position"):
                        a["position"] = [a["position"], i]
                    else:
                        a["position"] = [0, i]
                    builder.bindings.append(a)
                elif ("$(" in a) or ("${" in a):
                    cm = CommentedMap((
                        ("position", [0, i]),
                        ("valueFrom", a)
                    ))
                    cm.lc.add_kv_line_col("valueFrom", lc)
                    cm.lc.filename = fn
                    builder.bindings.append(cm)
                else:
                    cm = CommentedMap((
                        ("position", [0, i]),
                        ("datum", a)
                    ))
                    cm.lc.add_kv_line_col("datum", lc)
                    cm.lc.filename = fn
                    builder.bindings.append(cm)

        # use Python 2 style sorting of heterogeneous lists
        # (containing str and int types);
        # TODO: unify for both runtimes
        if six.PY3:
            key = cmp_to_key(cmp_like_py2)
        else:  # PY2
            key = lambda d: d["position"]
        builder.bindings.sort(key=key)
        builder.resources = self.evalResources(builder, kwargs)
        builder.job_script_provider = kwargs.get("job_script_provider", None)
        return builder

    def evalResources(self, builder, kwargs):
        # type: (Builder, Dict[str, Any]) -> Dict[Text, Union[int, Text]]
        resourceReq, _ = self.get_requirement("ResourceRequirement")
        if resourceReq is None:
            resourceReq = {}
        request = {
            "coresMin": 1,
            "coresMax": 1,
            "ramMin": 1024,
            "ramMax": 1024,
            "tmpdirMin": 1024,
            "tmpdirMax": 1024,
            "outdirMin": 1024,
            "outdirMax": 1024
        }
        for a in ("cores", "ram", "tmpdir", "outdir"):
            mn = None
            mx = None
            if resourceReq.get(a + "Min"):
                mn = builder.do_eval(resourceReq[a + "Min"])
            if resourceReq.get(a + "Max"):
                mx = builder.do_eval(resourceReq[a + "Max"])
            if mn is None:
                mn = mx
            elif mx is None:
                mx = mn

            if mn:
                request[a + "Min"] = mn
                request[a + "Max"] = mx

        if kwargs.get("select_resources"):
            return kwargs["select_resources"](request)
        else:
            return {
                "cores": request["coresMin"],
                "ram": request["ramMin"],
                "tmpdirSize": request["tmpdirMin"],
                "outdirSize": request["outdirMin"],
            }

    def validate_hints(self, avsc_names, hints, strict):
        # type: (Any, List[Dict[Text, Any]], bool) -> None
        for i, r in enumerate(hints):
            sl = SourceLine(hints, i, validate.ValidationException)
            with sl:
                if avsc_names.get_name(r["class"], "") is not None:
                    plain_hint = dict((key, r[key]) for key in r if key not in
                                      self.doc_loader.identifiers)  # strip identifiers
                    validate.validate_ex(
                        avsc_names.get_name(plain_hint["class"], ""),
                        plain_hint, strict=strict)
                else:
                    _logger.info(sl.makeError(u"Unknown hint %s" % (r["class"])))

    def get_requirement(self, feature):  # type: (Any) -> Tuple[Any, bool]
        return get_feature(self, feature)

    def visit(self, op):  # type: (Callable[[Dict[Text, Any]], None]) -> None
        op(self.tool)

    @abc.abstractmethod
    def job(self,
            job_order,  # type: Dict[Text, Text]
            output_callbacks,  # type: Callable[[Any, Any], Any]
            **kwargs  # type: Any
            ):
        # type: (...) -> Generator[Any, None, None]
        return None
Example #7
                    _Map,
                    _List,
            )):
                children = obj.get_children()
                for child_name in children:
                    ret[jp_compose([name, child_name])] = children[child_name]

        self.update_children_cache(ret)
        return ret

    def get_attrs(self, namespace, group_cls=None):
        """ attach an pyopenapi.migration.spec.AttributeGroup

        Args:
         - namespace: different attribute goups are separated/accessed by namespace
         - group_cls: the AttributeGroup to init when None is found
        """

        if namespace in self.attrs:
            return self.attrs[namespace]

        if group_cls is None:
            return None

        group = group_cls({})
        self.attrs[namespace] = group
        return group


Base2 = six.with_metaclass(FieldMeta, Base2Obj)
Example #8
class ImmutabilityPolicyState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The ImmutabilityPolicy state of a blob container, possible values include: Locked and Unlocked.
    """

    LOCKED = "Locked"
    UNLOCKED = "Unlocked"
Example #9
        model_name = 'ClassFields FakeModel'


class ClassFieldFakeRemoteField(object):
    """Make this look a bit like a ForeignKey (but not).
    Workaround for bug in SQLUpdateCompiler.as_sql()
    """
    model = FakeModel
    parent_link = True


if DJANGO_VERSION >= (1, 8):
    Field = models.Field
else:
    from django.db.models import SubfieldBase
    Field = six.with_metaclass(SubfieldBase, models.Field)

class ClassField(Field):
    """A field which can store and return a class.

    This is useful for improving models that have a 'type code smell'.
    Instead of sniffing the type code, the field can provide one of several
    instantiable classes that can have named methods.
    """

    description = _('Class Field')
    
    rel = None

    _south_introspects = True
Example #10
class _Common(six.with_metaclass(abc.ABCMeta, object)):
    """Base class for Command and Group."""
    _cli_generator = None
    _is_hidden = False
    _is_unicode_supported = False
    _release_track = None
    _valid_release_tracks = None
    _notices = None

    def __init__(self, is_group=False):
        self.exit_code = 0
        self.is_group = is_group

    @staticmethod
    def Args(parser):
        """Set up arguments for this command.

    Args:
      parser: An argparse.ArgumentParser.
    """
        pass

    @staticmethod
    def _Flags(parser):
        """Adds subclass flags.

    Args:
      parser: An argparse.ArgumentParser object.
    """
        pass

    @classmethod
    def IsHidden(cls):
        return cls._is_hidden

    @classmethod
    def IsUnicodeSupported(cls):
        if six.PY2:
            return cls._is_unicode_supported
        # We always support unicode on Python 3.
        return True

    @classmethod
    def ReleaseTrack(cls):
        return cls._release_track

    @classmethod
    def ValidReleaseTracks(cls):
        return cls._valid_release_tracks

    @classmethod
    def GetTrackedAttribute(cls, obj, attribute):
        """Gets the attribute value from obj for tracks.

    The values are checked in ReleaseTrack._ALL order.

    Args:
      obj: The object to extract attribute from.
      attribute: The attribute name in object.

    Returns:
      The attribute value from obj for tracks.
    """
        for track in ReleaseTrack._ALL:  # pylint: disable=protected-access
            if track not in cls._valid_release_tracks:
                continue
            names = []
            names.append(attribute + '_' + track.id)
            if track.prefix:
                names.append(attribute + '_' + track.prefix)
            for name in names:
                if hasattr(obj, name):
                    return getattr(obj, name)
        return getattr(obj, attribute, None)

    @classmethod
    def Notices(cls):
        return cls._notices

    @classmethod
    def AddNotice(cls, tag, msg, preserve_existing=False):
        if not cls._notices:
            cls._notices = {}
        if tag in cls._notices and preserve_existing:
            return
        cls._notices[tag] = msg

    @classmethod
    def GetCLIGenerator(cls):
        """Get a generator function that can be used to execute a gcloud command.

    Returns:
      A bound generator function to execute a gcloud command.
    """
        if cls._cli_generator:
            return cls._cli_generator.Generate
        return None
Example #11
class ExtendedLocationTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The type of extendedLocation.
    """

    EDGE_ZONE = "EdgeZone"
Example #12
class BaseSkin(with_metaclass(ABCMeta, object)):
    """
    Abstract class for creating fully customized skins

    .. warning:: This class is meant for subclassing and cannot be instantiated directly!
        A subclass must implement all of the following properties.
    """
    @abstractmethod
    def images(self):
        """
        Get the base directory for image files

        :rtype: str
        """
        return

    @abstractmethod
    def x_margin(self):
        """
        Get horizontal adjustment for the header background
        if the main background has transparent edges.

        :rtype: int
        """
        return

    @abstractmethod
    def y_margin(self):
        """
        Get vertical adjustment for the header background
        if the main background has transparent edges.

        :rtype: int
        """
        return

    @abstractmethod
    def title_bar_x_shift(self):
        """
        Get horizontal adjustment for title bar texture

        :rtype: int
        """
        return

    @abstractmethod
    def title_bar_y_shift(self):
        """
        Get vertical adjustment for title bar texture

        :rtype: int
        """
        return

    @abstractmethod
    def title_back_y_shift(self):
        """
        Get header position adjustment
        if the main background has visible borders.

        :rtype: int
        """
        return

    @abstractmethod
    def header_height(self):
        """
        Get the height of a window header
        (for the title background and the title label).

        :rtype: int
        """
        return

    @abstractmethod
    def close_btn_width(self):
        """
        Get the width of the top-right close button

        :rtype: int
        """
        return

    @abstractmethod
    def close_btn_height(self):
        """
        Get the height of the top-right close button

        :rtype: int
        """
        return

    @abstractmethod
    def close_btn_x_offset(self):
        """
        Get close button horizontal adjustment

        :rtype: int
        """
        return

    @abstractmethod
    def close_btn_y_offset(self):
        """
        Get close button vertical adjustment

        :rtype: int
        """
        return

    @abstractmethod
    def header_align(self):
        """
        Get a numeric value for header text alignment

        For example:

        - ``0``: left
        - ``6``: center

        :rtype: int
        """
        return

    @abstractmethod
    def header_text_color(self):
        """
        Get the color of the header text

        :rtype: str
        """
        return

    @abstractmethod
    def background_img(self):
        """
        Get dialog background texture

        :rtype: str
        """
        return

    @abstractmethod
    def title_background_img(self):
        """
        Get title bar background texture

        :rtype: str
        """
        return

    @abstractmethod
    def close_button_focus(self):
        """
        Get close button focused texture

        :rtype: str
        """
        return

    @abstractmethod
    def close_button_no_focus(self):
        """
        Get close button unfocused texture

        :rtype: str
        """
        return

    @abstractmethod
    def main_bg_img(self):
        """
        Get fullscreen background for
        :class:`AddonFullWindow<pyxbmct.addonwindow.AddonFullWindow>` class

        :rtype: str
        """
        return
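A partial subclass sketch with made-up values; a real skin must implement every abstract method above, or instantiation raises TypeError:

class MySkin(BaseSkin):
    def images(self):
        return '/path/to/skin/media'

    def x_margin(self):
        return 5

    def y_margin(self):
        return 5

    def header_height(self):
        return 35

    # The remaining abstract methods (title_bar_x_shift, background_img,
    # header_align, and so on) must be implemented the same way.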
Example #13
    @staticmethod
    def get_replaced_method(orig_method, new_method):
        def func(self):
            return new_method(self, orig_method)
        return func

    @classmethod
    def replace_class_methods_with_hooks(mcs, bases, dct):
        methods_replacement_map = {'create': _create_hook,
                                   'update': _update_hook,
                                   'to_dict': _to_dict_hook}
        for orig_method_name, new_method in methods_replacement_map.items():
            orig_method = mcs.get_attribute(orig_method_name, bases, dct)
            hook_method = mcs.get_replaced_method(orig_method,
                                                  new_method)
            dct[orig_method_name] = hook_method

    def __new__(mcs, name, bases, dct):
        mcs.validate_existing_attrs(name, dct)
        mcs.update_synthetic_fields(bases, dct)
        mcs.replace_class_methods_with_hooks(bases, dct)
        cls = type(name, (RbacNeutronDbObjectMixin,) + bases, dct)
        cls.add_extra_filter_name('shared')
        mcs.subscribe_to_rbac_events(cls)

        return cls


NeutronRbacObject = with_metaclass(RbacNeutronMetaclass, base.NeutronDbObject)
Example #14
class DescribeCommand(six.with_metaclass(abc.ABCMeta, Command)):
    """A command that prints one resource in the 'default' format."""
Example #15
class CacheCommand(six.with_metaclass(abc.ABCMeta, Command)):
    """A command that affects the resource URI cache."""
    def __init__(self, *args, **kwargs):
        super(CacheCommand, self).__init__(*args, **kwargs)
        self._uri_cache_enabled = True
Example #16
class SilentCommand(six.with_metaclass(abc.ABCMeta, Command)):
    """A command that produces no output."""
    @staticmethod
    def _Flags(parser):
        parser.display_info.AddFormat('none')
Example #17
class TopicCommand(six.with_metaclass(abc.ABCMeta, Command)):
    """A command that displays its own help on execution."""
    def Run(self, args):
        self.ExecuteCommandDoNotUse(args.command_path[1:] +
                                    ['--document=style=topic'])
        return None
Example #18
class Command(six.with_metaclass(abc.ABCMeta, _Common)):
    """Command is a base class for commands to implement.

  Attributes:
    _cli_do_not_use_directly: calliope.cli.CLI, The CLI object representing this
      command line tool. This should *only* be accessed via commands that
      absolutely *need* introspection of the entire CLI.
    context: {str:object}, A set of key-value pairs that can be used for
        common initialization among commands.
    _uri_cache_enabled: bool, The URI cache enabled state.
  """
    def __init__(self, cli, context):
        super(Command, self).__init__(is_group=False)
        self._cli_do_not_use_directly = cli
        self.context = context
        self._uri_cache_enabled = False

    @property
    def _cli_power_users_only(self):
        return self._cli_do_not_use_directly

    def ExecuteCommandDoNotUse(self, args):
        """Execute a command using the given CLI.

    Do not introduce new invocations of this method unless your command
    *requires* it; any such new invocations must be approved by a team lead.

    Args:
      args: list of str, the args to Execute() via the CLI.

    Returns:
      pass-through of the return value from Execute()
    """
        return self._cli_power_users_only.Execute(args,
                                                  call_arg_complete=False)

    @staticmethod
    def _Flags(parser):
        """Sets the default output format.

    Args:
      parser: The argparse parser.
    """
        parser.display_info.AddFormat('default')

    @abc.abstractmethod
    def Run(self, args):
        """Runs the command.

    Args:
      args: argparse.Namespace, An object that contains the values for the
          arguments specified in the .Args() method.

    Returns:
      A resource object dispatched by display.Displayer().
    """
        pass

    def Epilog(self, resources_were_displayed):
        """Called after resources are displayed if the default format was used.

    Args:
      resources_were_displayed: True if resources were displayed.
    """
        _ = resources_were_displayed

    def GetReferencedKeyNames(self, args):
        """Returns the key names referenced by the filter and format expressions."""
        return display.Displayer(self, args, None).GetReferencedKeyNames()

    def GetUriFunc(self):
        """Returns a function that transforms a command resource item to a URI.

    Returns:
      func(resource) that transforms resource into a URI.
    """
        return None
Example #19
    lambda self: self.sourceModel(),
    [
        "_set_context_id",
        "_get_context_id",
        "_set_changeblocked",
        "_get_changeblocked",
        "_about_to_change",
        "_change",
        "_update",
        "_rows_updated",
        "name",
    ],
    base_class=STRIPE_PROXY_BASE,
)

STRIP_PROXY_SIX_BASE = six.with_metaclass(STRIP_PROXY_META_CLASS, STRIPE_PROXY_BASE)


class StripeProxyModel(STRIP_PROXY_SIX_BASE):  # (STRIPE_PROXY_BASE, metaclass=STRIP_PROXY_META_CLASS):
    # __metaclass__ = STRIP_PROXY_META_CLASS

    def __init__(self, parent=None, numduplicates=1):
        STRIPE_PROXY_BASE.__init__(self, parent=parent)
        self._nd = numduplicates

    def rowCount(self, parent=QtCore.QModelIndex()):
        sourceParent = self.mapToSource(parent)
        source_rows = self.sourceModel().rowCount(parent=sourceParent)
        rows = math.ceil(source_rows / self._nd)
        # print('StripeProxyModel.rowCount(): %r %r' % (source_rows, rows))
        return int(rows)
Example #20
class ComplexType(six.with_metaclass(Complex_PythonType, Type)):
    '''
    Parent for XML elements that have sub-elements.
    '''
    INDICATOR = Sequence  # Indicator see: class Indicators. To be defined in sub-type.
    INHERITANCE = None    # Type of inheritance see: class Inheritance, to be defined in sub-type.
    SCHEMA = None
    def __new__(cls, *args, **kwargs):
        instance = super(ComplexType, cls).__new__(cls)

        for field in instance._meta.all:
            try:
                setattr(instance, field._name, field.empty_value())
            except RuntimeError as ex:
                logger.warning("Reccursion exception %s occured on %s for field: %s and was IGNORED"%(str(ex),str(cls),str(field._name)))
        return instance

    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def __setattr__(self, attr, value):
        if attr == '_xmlelement':
            super(ComplexType, self).__setattr__(attr, value)
        else:
            try:
                field = self._find_field(self._meta.all, attr)
                super(ComplexType, self).__setattr__(attr, field.accept(value))
            except IndexError:
                raise AttributeError("Model '%s' doesn't have attribute '%s'." % (self.__class__.__name__, attr))

    def __str__(self):
        fields = {f._name: getattr(self, f._name, '<UNKNOWN FIELD>') for f in self._meta.fields}
        str_fields = ', '.join('%s=%s' % item for item in fields.items())
        return '<{class_name}: {fields}>'.format(class_name=self.__class__.__name__, fields=str_fields)

    def __hash__(self):
        # FIXME: We should do this without the conversion back to XML.
        return hash(etree.tostring(self._xmlelement)) if hasattr(self, '_xmlelement') else id(self)

    def __eq__(self, other):
        # FIXME: We should do this without the conversion back to XML.
        return hasattr(self, '_xmlelement') and hasattr(other, '_xmlelement') \
            and etree.tostring(self._xmlelement) == etree.tostring(other._xmlelement)

    def __lt__(self, other):
        # FIXME: We should do this without the conversion back to XML.
        return hasattr(self, '_xmlelement') and hasattr(other, '_xmlelement') \
            and etree.tostring(self._xmlelement) < etree.tostring(other._xmlelement)

    def __ne__(self, other):
        return not self.__eq__(other)

    def accept(self, value):
        '''
        Instance methods that validate other instances.
        '''
        if value is None:
            return None
        elif isinstance(value, self.__class__):
            return value
        else:
            raise ValueError('Wrong value object type %r for %s.' % (value, self.__class__.__name__))

    def render(self, parent, instance, namespace=None, elementFormDefault=None):
        if instance is None:
            return None
        if self.SCHEMA:
            namespace = self.SCHEMA.targetNamespace
        for field in instance._meta.all:
            field.render(
                parent=parent,
                field_name=field.tagname or field._name,
                value=getattr(instance, field._name),
                namespace=namespace,
                elementFormDefault=elementFormDefault)

    @classmethod
    def _find_field(cls, fields, name):
        try:
            return next(f for f in fields if f._name == name)
        except StopIteration:
            pass
        raise ValueError("%s has no field '%s'" % (cls.__name__, name))

    @classmethod
    def _get_field_by_name(cls, fields, field_name):
        for field in fields:
            if field.tagname == field_name or field._name == field_name:
                return field
        raise ValueError("Field not found '%s', fields: %s" % (field_name, fields))

    @classmethod
    def _is_matching_element(cls, field, xmlelement):
        def gettagns(tag):
            '''
            Translates a tag string in the format '{namespace}tag' to a tuple
            (namespace, tag).
            '''
            if tag[0] == '{':
                return tag[1:].split('}', 1)
            else:
                return (None, tag)
        if isinstance(xmlelement, etree._Comment):
            return False
        ns, tag = gettagns(xmlelement.tag)
        return (tag == field._name) or (tag == field.tagname)

    @classmethod
    def _find_subelement(cls, field, xmlelement):
        subelements = []
        for subelement in xmlelement:
            if cls._is_matching_element(field, subelement):
                subelements.append(subelement)
        return subelements

    @classmethod
    def parse_xmlelement(cls, xmlelement):
        instance = cls()
        instance._xmlelement = xmlelement
        for attribute in instance._meta.attributes:
            attribute.parse(instance, attribute._name, xmlelement)

        is_choice = (instance._meta.cls.INDICATOR == Choice)
        for field in instance._meta.fields:
            if is_choice:
                if not cls._is_matching_element(field, xmlelement):
                    continue
                subelements = [xmlelement]
            else:
                subelements = cls._find_subelement(field, xmlelement)
            for subelement in subelements:
                field.parse(instance, field._name, subelement)
            if is_choice:
                break

        for group in instance._meta.groups:
            group.parse(instance, group._name, xmlelement)

        return instance

    @classmethod
    def __parse_with_validation(cls, xml, schema):
        from .py2xsd import generate_xsd
        schema = generate_xsd(schema)
        schemaelement = etree.XMLSchema(schema)
        if isinstance(xml, six.string_types):
            parser = etree.XMLParser(schema=schemaelement)
            xmlelement = etree.fromstring(xml, parser)
        else:
            schemaelement.assertValid(xml)
            xmlelement = xml
        return xmlelement

    @classmethod
    def parsexml(cls, xml, schema=None):
        if schema is None:
            parser = etree.fromstring
        else:
            if not isinstance(schema, etree.XMLSchema):
                from .py2xsd import generate_xsd
                schema = etree.XMLSchema(generate_xsd(schema))
            xmlparser = etree.XMLParser(schema=schema)
            parser = functools.partial(etree.fromstring, parser=xmlparser)
        xmlelement = parser(xml)
        return cls.parse_xmlelement(xmlelement)

    def xml(self, tagname, namespace=None, elementFormDefault=None, schema=None, pretty_print=True):
        if namespace:
            tagname = '{%s}%s' % (namespace, tagname)
        xmlelement = etree.Element(tagname)
        self.render(xmlelement, self, namespace, elementFormDefault)
        if schema is not None:
            schema.assertValid(xmlelement)
        return etree.tostring(xmlelement, pretty_print=pretty_print)

    @classmethod
    def _force_elements_type_evalution(cls):
        '''
        Allows a schema object to force element type evaluation for XSD
        generation.
        '''
        for element in cls._meta.all:
            element._evaluate_type()
Example #21
	
	def to_python(self, value):
		return _enum_coerce(self, self.enum, value)
	
	def _get_flat_choices(self):
		for k, v in self.choices:
			if isinstance(v, (list, tuple)):
				for k2, v2 in v:
					yield k2, v2
			else:
				yield k, v

if django.VERSION < (1, 8):
	from django.db.models.fields.subclassing import SubfieldBase
	
	ModelFieldBase = six.with_metaclass(SubfieldBase, models.IntegerField)
else:
	ModelFieldBase = models.IntegerField
	

class EnumField(ModelFieldBase):
	
	empty_strings_allowed = False
	validators = []
	
	def __init__(self, enum, *args, **kwargs):
		self.enum = enum
		kwargs.update(
			choices = enum.Choices.items(),
			null = False,
			blank = False
Example #22
class BaseProvider(object, with_metaclass(abc.ABCMeta)):
    @property
    def platform(self):
        raise NotImplementedError("Missing provider platform attribute.")
Example #23
import json
import six

from django.core.exceptions import ValidationError
from django.db import models

try:
    from django.utils.encoding import smart_unicode as smart_text
    smart_text  # placate pyflakes
except ImportError:
    from django.utils.encoding import smart_text


try:
    base_class = six.with_metaclass(models.SubfieldBase, models.TextField)
except AttributeError:  # Django 1.10 removed SubfieldBase
    base_class = models.TextField


class JSONField(base_class):
    """Simple JSON field that stores python structures as JSON strings
    on database.
    """

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('default', {})
        super(JSONField, self).__init__(*args, **kwargs)

    def to_python(self, value):
        """
        Convert the input JSON value into python structures, raises
Example #24
class BaseEndpointStorageProvider(object, with_metaclass(abc.ABCMeta)):
    @abc.abstractmethod
    def get_storage(self, ctxt, connection_info):
        """ Returns all the storage options available"""
        pass
Example #25
class ExpirationAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The SAS expiration action. Can only be Log.
    """

    LOG = "Log"
Example #26
class BaseEndpointNetworksProvider(object, with_metaclass(abc.ABCMeta)):
    """Defines operations for endpoints networks."""
    @abc.abstractmethod
    def get_networks(self, ctxt, connection_info, env):
        """Returns a list of networks """
        raise NotImplementedError()
Example #27
class HttpProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The protocol permitted for a request made with the account SAS.
    """

    HTTPS_HTTP = "https,http"
    HTTPS = "https"
Example #28
class BaseEndpointDestinationOptionsProvider(object,
                                             with_metaclass(abc.ABCMeta)):
    @abc.abstractmethod
    def get_target_environment_options(self,
                                       ctxt,
                                       connection_info,
                                       env=None,
                                       option_names=None):
        """ Returns all possible values for the target environment options, as
        well as any settings the options might have in the configuration files.

        param env: dict: optional target environment options
        param option_names: list(str): optional list of parameter names to show
        values for

        Example returned values for the following options:
        schema = {
            "properties": {
                "migr_network": {
                    "type": "string"
                },
                "security_groups": {
                    "type": "array",
                    "items": { "type": "string" }
                },
                "migr_image": {
                    "type": "object",
                    "properties": {
                        "id": { "type": "string" },
                        "name": { "type": "integer" }
                    }
                }
            }
        }
        The provider should return:
        options = [
            {
                "name": "migr_network",
                "values": ["net1", "net2", "net3"],
                "config_default": "net2"},
            {
                "name": "security_groups",
                "values": ["secgroup1", "secgroup2", "secgroup3"],
                "config_default": ["secgroup2", "secgroup3"]},
            {
                "name": "migr_image",
                "values": [
                    {"name": "testimage1", "id": 101},
                    {"name": "testimg2", "id": 4}],
                "config_default": {"name": "testimg2", "id": 4}}}
        ]
        Observations:
            - base types such as 'integer' or 'string' are preserved
            - 'array' types will return an array with all the options which are
              settable through that parameter (any, all or none may be set)
            - for fields where both a name or ID may be returned, returning the
              name will be preferred. The provider must ensure that, if there
              are objects with the same name, the IDs of those objects are
              offered as an option instead of two identical names.
        """
        pass
Example #29
class InventoryRuleType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The valid value is Inventory
    """

    INVENTORY = "Inventory"
Example #30
class Configuration(six.with_metaclass(TypeWithDefault, object)):
    """NOTE: This class is auto generated by OpenAPI Generator

    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    :param host: Base url
    :param api_key: Dict to store API key(s)
    :param api_key_prefix: Dict to store API prefix (e.g. Bearer)
    :param username: Username for HTTP basic authentication
    :param password: Password for HTTP basic authentication
    """
    def __init__(self,
                 host="http://petstore.swagger.io:80/v2",
                 api_key=None,
                 api_key_prefix=None,
                 username="",
                 password=""):
        """Constructor
        """
        self.host = host
        """Default Base url
        """
        self.temp_folder_path = None
        """Temp file folder for downloading files
        """
        # Authentication Settings
        self.api_key = api_key if api_key is not None else {}
        """dict to store API key(s)
        """
        self.api_key_prefix = api_key_prefix if api_key_prefix is not None else {}
        """dict to store API prefix (e.g. Bearer)
        """
        self.refresh_api_key_hook = None
        """function hook to refresh API key if expired
        """
        self.username = username
        """Username for HTTP basic authentication
        """
        self.password = password
        """Password for HTTP basic authentication
        """
        self.access_token = ""
        """access token for OAuth/Bearer
        """
        self.logger = {}
        """Logging Settings
        """
        self.logger["package_logger"] = logging.getLogger("petstore_api")
        self.logger["urllib3_logger"] = logging.getLogger("urllib3")
        self.logger_format = '%(asctime)s %(levelname)s %(message)s'
        """Log format
        """
        self.logger_stream_handler = None
        """Log stream handler
        """
        self.logger_file_handler = None
        """Log file handler
        """
        self.logger_file = None
        """Debug file location
        """
        self.debug = False
        """Debug switch
        """

        self.verify_ssl = True
        """SSL/TLS verification
           Set this to false to skip verifying SSL certificate when calling API
           from https server.
        """
        self.ssl_ca_cert = None
        """Set this to customize the certificate file to verify the peer.
        """
        self.cert_file = None
        """client certificate file
        """
        self.key_file = None
        """client key file
        """
        self.assert_hostname = None
        """Set this to True/False to enable/disable SSL hostname verification.
        """

        self.connection_pool_maxsize = 100
        """This value is passed to the aiohttp to limit simultaneous connections.
           Default values is 100, None means no-limit.
        """

        self.proxy = None
        """Proxy URL
        """
        self.proxy_headers = None
        """Proxy headers
        """
        self.safe_chars_for_path_param = ''
        """Safe chars for path_param
        """
        self.retries = None
        """Adding retries to override urllib3 default value 3
        """
        # Disable client side validation
        self.client_side_validation = True

    @property
    def logger_file(self):
        """The logger file.

        If the logger_file is None, then add stream handler and remove file
        handler. Otherwise, add file handler and remove stream handler.

        :param value: The logger_file path.
        :type: str
        """
        return self.__logger_file

    @logger_file.setter
    def logger_file(self, value):
        """The logger file.

        If the logger_file is None, then add stream handler and remove file
        handler. Otherwise, add file handler and remove stream handler.

        :param value: The logger_file path.
        :type: str
        """
        self.__logger_file = value
        if self.__logger_file:
            # If set logging file,
            # then add file handler and remove stream handler.
            self.logger_file_handler = logging.FileHandler(self.__logger_file)
            self.logger_file_handler.setFormatter(self.logger_formatter)
            for _, logger in six.iteritems(self.logger):
                logger.addHandler(self.logger_file_handler)

    @property
    def debug(self):
        """Debug status

        :param value: The debug status, True or False.
        :type: bool
        """
        return self.__debug

    @debug.setter
    def debug(self, value):
        """Debug status

        :param value: The debug status, True or False.
        :type: bool
        """
        self.__debug = value
        if self.__debug:
            # if debug status is True, turn on debug logging
            for _, logger in six.iteritems(self.logger):
                logger.setLevel(logging.DEBUG)
            # turn on httplib debug
            httplib.HTTPConnection.debuglevel = 1
        else:
            # if debug status is False, turn off debug logging,
            # setting log level to default `logging.WARNING`
            for _, logger in six.iteritems(self.logger):
                logger.setLevel(logging.WARNING)
            # turn off httplib debug
            httplib.HTTPConnection.debuglevel = 0

    @property
    def logger_format(self):
        """The logger format.

        The logger_formatter will be updated when logger_format is set.

        :param value: The format string.
        :type: str
        """
        return self.__logger_format

    @logger_format.setter
    def logger_format(self, value):
        """The logger format.

        The logger_formatter will be updated when logger_format is set.

        :param value: The format string.
        :type: str
        """
        self.__logger_format = value
        self.logger_formatter = logging.Formatter(self.__logger_format)

    def get_api_key_with_prefix(self, identifier):
        """Gets API key (with prefix if set).

        :param identifier: The identifier of apiKey.
        :return: The token for api key authentication.
        """
        if self.refresh_api_key_hook is not None:
            self.refresh_api_key_hook(self)
        key = self.api_key.get(identifier)
        if key:
            prefix = self.api_key_prefix.get(identifier)
            if prefix:
                return "%s %s" % (prefix, key)
            else:
                return key

    def get_basic_auth_token(self):
        """Gets HTTP basic authentication header (string).

        :return: The token for basic HTTP authentication.
        """
        return urllib3.util.make_headers(basic_auth=self.username + ':' +
                                         self.password).get('authorization')

    def auth_settings(self):
        """Gets Auth Settings dict for api client.

        :return: The Auth Settings information dict.
        """
        return {
            'api_key': {
                'type': 'api_key',
                'in': 'header',
                'key': 'api_key',
                'value': self.get_api_key_with_prefix('api_key')
            },
            'api_key_query': {
                'type': 'api_key',
                'in': 'query',
                'key': 'api_key_query',
                'value': self.get_api_key_with_prefix('api_key_query')
            },
            'http_basic_test': {
                'type': 'basic',
                'in': 'header',
                'key': 'Authorization',
                'value': self.get_basic_auth_token()
            },
            'petstore_auth': {
                'type': 'oauth2',
                'in': 'header',
                'key': 'Authorization',
                'value': 'Bearer ' + self.access_token
            },
        }

    def to_debug_report(self):
        """Gets the essential information for debugging.

        :return: The report for debugging.
        """
        return "Python SDK Debug Report:\n"\
               "OS: {env}\n"\
               "Python Version: {pyversion}\n"\
               "Version of the API: 1.0.0\n"\
               "SDK Package Version: 1.0.0".\
               format(env=sys.platform, pyversion=sys.version)

    def get_host_settings(self):
        """Gets an array of host settings

        :return: An array of host settings
        """
        return [{
            'url': "http://petstore.swagger.io:80/v2",
            'description': "No description provided",
        }]

    def get_host_from_settings(self, index, variables=None):
        """Gets host URL based on the index and variables
        :param index: array index of the host settings
        :param variables: hash of variable and the corresponding value
        :return: URL based on host settings
        """
        variables = variables or {}
        servers = self.get_host_settings()

        # check array index out of bound
        if index < 0 or index >= len(servers):
            raise ValueError(
                "Invalid index {} when selecting the host settings. Must be less than {}"  # noqa: E501
                .format(index, len(servers)))

        server = servers[index]
        url = server['url']

        # go through each variable and assign a value; tolerate host settings
        # that define no variables
        for variable_name in server.get('variables', {}):
            if variable_name in variables:
                if variables[variable_name] in server['variables'][
                        variable_name]['enum_values']:
                    url = url.replace("{" + variable_name + "}",
                                      variables[variable_name])
                else:
                    raise ValueError(
                        "The variable `{}` in the host URL has invalid value {}. Must be {}."  # noqa: E501
                        .format(
                            variable_name, variables[variable_name],
                            server['variables'][variable_name]['enum_values']))
            else:
                # use default value
                url = url.replace(
                    "{" + variable_name + "}",
                    server['variables'][variable_name]['default_value'])

        return url
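A usage sketch; TypeWithDefault is assumed to hand back (a copy of) a lazily created default instance, so the instance is configured after construction:

config = Configuration()
config.api_key['api_key'] = 'my-secret-key'        # hypothetical credentials
config.api_key_prefix['api_key'] = 'Token'
print(config.get_api_key_with_prefix('api_key'))   # -> 'Token my-secret-key'
print(config.get_host_from_settings(0))            # -> the petstore base URL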
Example #31
    def filter_class(self):
        return six.with_metaclass(self.filter_meta)
Example #32
class ServiceResource(six.with_metaclass(CodingStyleMixin,
                                         BaseServiceResource)):
    ORDER = None  # type: int

    def __init__(self, creds_manager):
        super(ServiceResource, self).__init__()
        if self.ORDER is None:
            raise ValueError(
                'Class {}.{} must override the "ORDER" class attribute'.format(
                    self.__module__, self.__class__.__name__)  # type: ignore
            )

        self.cleanup_project_id = creds_manager.project_id
        self.cloud = creds_manager.cloud
        self.options = creds_manager.options

    @classmethod
    def order(cls):
        return cls.ORDER

    def check_prerequisite(self):
        return True

    @abc.abstractmethod
    def list(self):
        raise NotImplementedError

    def should_delete(self, resource):
        project_id = resource.get('project_id', resource.get('tenant_id'))
        if project_id:
            return project_id == self.cleanup_project_id
        else:
            # Uncomment the following line once the SDK and all OpenStack
            # services return the resource owner. In the meantime there is
            # no need to warn.
            # logging.warning("Can't determine owner of resource %s", resource)
            return True

    @abc.abstractmethod
    def delete(self, resource):
        raise NotImplementedError

    def disable(self, resource):
        msg = "The disable feature is not supported for %s, No action will" \
              "be taken against the resource(id=%s, name=%s)."
        logging.warning(
            msg, self.__class__.__name__,
            resource.get('id'), resource.get('name')
        )

    @staticmethod
    @abc.abstractmethod
    def to_str(resource):
        raise NotImplementedError

    def wait_for_check_prerequisite(self, exit):
        timeout = time.time() + 120
        sleep = 2
        while time.time() < timeout:
            if exit.is_set():
                raise RuntimeError(
                    "Resource manager exited because it was interrupted or "
                    "another resource manager failed"
                )
            if self.check_prerequisite():
                break
            logging.info("Waiting for check_prerequisite() in %s",
                         self.__class__.__name__)
            time.sleep(sleep)
            sleep = min(sleep * 2, 8)
        else:
            raise exceptions.TimeoutError(
                "Timeout exceeded waiting for check_prerequisite()")
Example #33
    def __init__(self, *args, **kwargs):
        kwargs.pop('TestCase', None)
        super(FixturesMeta, self).__init__(*args, **kwargs)

    def with_test(cls, func):
        meta = type(cls)
        tc_cls = (cls._TestCase,) if '_test' not in cls.__dict__ else ()
        bases = tuple(b for b in cls.__bases__ if b is not object) + tc_cls
        members = dict(cls.__dict__)
        members['_test'] = func
        return super(FixturesMeta, meta).__new__(
            meta, func.__name__, bases, members)


Fixtures = six.with_metaclass(FixturesMeta)


def WithTestClass(cls):
    class metaclass(type):
        def __new__(cls_, name, this_bases, d):
            return FixturesMeta(name, (), d, TestCase=cls)
    return type.__new__(metaclass, "WithTestClass_"+cls.__name__, (), {})


def _make_testfunc_runner(value, fake_loc,
                          container_loc, cls_name, member_name):
    def _run_test(self):
        try:
            return self._test(*value)
        except Exception as exc:
Exemplo n.º 34
0
class SourceKindType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Source Kind to pull the configuration data from.
    """

    GIT_REPOSITORY = "GitRepository"
    BUCKET = "Bucket"
Exemplo n.º 35
0
    return property(_getter_, _setter_)


class AttributeMeta(type):
    """ metaclass to init attributes
    """

    def __new__(mcs, name, bases, spec):
        attrs = spec.setdefault('__attributes__', {})

        for name_, args in six.iteritems(attrs):
            args = copy.copy(args)
            builder = args.pop('builder', None) or attr
            spec[name_] = builder(args.pop('key', None) or name_, **args)

        return type.__new__(mcs, name, bases, spec)


class _Attrs(object):
    """ You can attach attributes to a Base2Obj. It provides a mechanism to
    keep runtime info to an OpenApi objects. Example usage is 'ref_obj' for
    'Reference' object.
    """

    def __init__(self, attrs=None):
        self.attrs = attrs or {}


AttributeGroup = six.with_metaclass(AttributeMeta, _Attrs)
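
A minimal usage sketch, assuming the attr builder (whose tail is shown above) returns a property backed by the instance's attrs dict:

class RefAttrs(AttributeGroup):
    __attributes__ = {
        'ref_obj': dict(),             # property name doubles as storage key
        'norm_ref': dict(key='$ref'),  # stored under an explicit key
    }

group = RefAttrs()
group.ref_obj = 'runtime info'  # goes through the generated property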
Exemplo n.º 36
0
class AKSIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The identity type.
    """

    SYSTEM_ASSIGNED = "SystemAssigned"
    USER_ASSIGNED = "UserAssigned"
Exemplo n.º 37
0
import json

import six

from django import VERSION
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models

try:
    from django.utils.encoding import smart_unicode as smart_text
    smart_text  # placate pyflakes
except ImportError:
    from django.utils.encoding import smart_text

from social.utils import setting_name


if VERSION >= (1, 8):
    _JSONFieldBase = models.Field
else:
    _JSONFieldBase = six.with_metaclass(models.SubfieldBase, models.TextField)

USE_POSTGRES_NATIVE_JSON_FIELD = getattr(settings, setting_name('USE_POSTGRES_NATIVE_JSON_FIELD'), False)

if USE_POSTGRES_NATIVE_JSON_FIELD:
    from django.contrib.postgres.fields import JSONField
else:
    class JSONField(_JSONFieldBase):
        """Simple JSON field that stores python structures as JSON strings
        on database.
        """

        def __init__(self, *args, **kwargs):
            kwargs.setdefault('default', {})
            super(JSONField, self).__init__(*args, **kwargs)
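
        # The rest of the class is omitted in this example. A minimal sketch
        # (an assumption, not python-social-auth's exact code) of the JSON
        # round-trip such a field needs:
        def to_python(self, value):
            if isinstance(value, six.string_types):
                try:
                    return json.loads(value)
                except ValueError:
                    raise ValidationError('Invalid JSON text')
            return value

        def get_prep_value(self, value):
            return json.dumps(value)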
Exemplo n.º 38
0
class SQLObjectBase(six.with_metaclass(SQLObjectMeta, Storm)):
    """The root class of all SQLObject-emulating classes in your application.

    The general strategy for using Storm's SQLObject emulation layer
    is to create an application-specific subclass of SQLObjectBase
    (probably named "SQLObject") that provides an implementation of
    _get_store to return an instance of L{storm.store.Store}. It may
    even be implemented as returning a global L{Store} instance. Then
    all database classes should subclass that class.
    """

    q = DotQ()
    _SO_creating = False

    def __init__(self, *args, **kwargs):
        store = self._get_store()
        store.add(self)
        try:
            self._create(None, **kwargs)
        except:
            store.remove(self)
            raise

    def __storm_loaded__(self):
        self._init(None)

    def _init(self, id, *args, **kwargs):
        pass

    def _create(self, _id_, **kwargs):
        self._SO_creating = True
        self.set(**kwargs)
        del self._SO_creating
        self._init(None)

    def set(self, **kwargs):
        for attr, value in six.iteritems(kwargs):
            setattr(self, attr, value)

    def destroySelf(self):
        Store.of(self).remove(self)

    @staticmethod
    def _get_store():
        raise NotImplementedError("SQLObjectBase._get_store() "
                                  "must be implemented")

    @classmethod
    def delete(cls, id):
        # destroySelf() should be extended to support cascading, so
        # we'll mimic what SQLObject does here, even if more expensive.
        obj = cls.get(id)
        obj.destroySelf()

    @classmethod
    def get(cls, id):
        id = cls._idType(id)
        store = cls._get_store()
        obj = store.get(cls, id)
        if obj is None:
            raise SQLObjectNotFound("Object not found")
        return obj

    @classmethod
    def _parse_orderBy(cls, orderBy):
        result = []
        if not isinstance(orderBy, (tuple, list)):
            orderBy = (orderBy,)
        for item in orderBy:
            if isinstance(item, six.string_types):
                desc = item.startswith("-")
                if desc:
                    item = item[1:]
                item = cls._attr_to_prop.get(item, item)
                if desc:
                    item = Desc(item)
            result.append(item)
        return tuple(result)

    @classmethod
    def select(cls, *args, **kwargs):
        return SQLObjectResultSet(cls, *args, **kwargs)

    @classmethod
    def selectBy(cls, orderBy=None, **kwargs):
        return SQLObjectResultSet(cls, orderBy=orderBy, by=kwargs)

    @classmethod
    def selectOne(cls, *args, **kwargs):
        return SQLObjectResultSet(cls, *args, **kwargs)._one()

    @classmethod
    def selectOneBy(cls, **kwargs):
        return SQLObjectResultSet(cls, by=kwargs)._one()

    @classmethod
    def selectFirst(cls, *args, **kwargs):
        return SQLObjectResultSet(cls, *args, **kwargs)._first()

    @classmethod
    def selectFirstBy(cls, orderBy=None, **kwargs):
        result = SQLObjectResultSet(cls, orderBy=orderBy, by=kwargs)
        return result._first()

    def syncUpdate(self):
        self._get_store().flush()

    def sync(self):
        store = self._get_store()
        store.flush()
        store.autoreload(self)
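
A minimal application-level sketch of the strategy described in the docstring; the in-memory SQLite URI, the column type and all names are illustrative:

from storm.locals import Store, create_database
from storm.sqlobject import StringCol

database = create_database('sqlite:')
the_store = Store(database)

class SQLObject(SQLObjectBase):
    @staticmethod
    def _get_store():
        return the_store

class Person(SQLObject):
    name = StringCol()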
Exemplo n.º 39
0
def singleton_particle(*bases):
    """Defines a singleton instance immediately when defining the class.  The
    name of the class will refer the instance instead.
    """
    return with_metaclass(SingletonParticleMeta, SingletonParticle, *bases)
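
A usage sketch, assuming SingletonParticleMeta (not shown here) instantiates the class at definition time as the docstring says:

class Flavor(singleton_particle(object)):
    def describe(self):
        return 'strange'

# 'Flavor' is now bound to the singleton instance, not the class:
print(Flavor.describe())  # 'strange'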
Exemplo n.º 40
0
        properties['ramdisk_id'] = data['ramdisk']
    if data.get('architecture'):
        properties['architecture'] = data['architecture']

    if api.glance.VERSIONS.active < 2:
        meta.update({'is_public': is_public, 'properties': properties})
    else:
        meta['visibility'] = 'public' if is_public else 'private'
        meta.update(properties)

    return meta


if api.glance.get_image_upload_mode() == 'direct':
    FileField = forms.ExternalFileField
    CreateParent = six.with_metaclass(forms.ExternalUploadMeta,
                                      forms.SelfHandlingForm)
else:
    FileField = forms.FileField
    CreateParent = forms.SelfHandlingForm


class CreateImageForm(CreateParent):
    name = forms.CharField(max_length=255, label=_("Name"))
    description = forms.CharField(
        max_length=255,
        widget=forms.Textarea(attrs={'rows': 4}),
        label=_("Description"),
        required=False)
    source_type = forms.ChoiceField(
        label=_('Image Source'),
        required=False,
Exemplo n.º 41
0
class EFPBase(with_metaclass(ABCMeta, object)):
    def __init__(self, measure, beta, kappa, normed, coords, check_input):

        if 'efpm' in measure:
            raise ValueError('\'efpm\' no longer supported')
        if 'efm' in measure:
            raise ValueError('\'efm\' no longer supported')

        # store measure object
        self._measure = Measure(measure, beta, kappa, normed, coords,
                                check_input)

    def get_zs_thetas_dict(self, event, zs, thetas):
        if event is not None:
            zs, thetas = self._measure.evaluate(event)
        elif zs is None or thetas is None:
            raise TypeError(
                'if event is None then zs and/or thetas cannot also be None')
        return zs, {w: thetas**w for w in self._weight_set}

    @abstractproperty
    def _weight_set(self):
        pass

    @property
    def measure(self):
        return self._measure.measure

    @property
    def beta(self):
        return self._measure.beta

    @property
    def kappa(self):
        return self._measure.kappa

    @property
    def normed(self):
        return self._measure.normed

    @property
    def check_input(self):
        return self._measure.check_input

    @property
    def subslicing(self):
        return self._measure.subslicing

    def _batch_compute_func(self, event):
        return self.compute(event, batch_call=True)

    @abstractmethod
    def compute(self, *args, **kwargs):
        pass

    def batch_compute(self, events, n_jobs=-1):
        """Computes the value of the EFP on several events.

        **Arguments**

        - **events** : array_like or `fastjet.PseudoJet`
            - The events as an array of arrays of particles in coordinates
            matching those anticipated by `coords`.
        - **n_jobs** : _int_ 
            - The number of worker processes to use. A value of `-1` will attempt
            to use as many processes as there are CPUs on the machine.

        **Returns**

        - _1-d numpy.ndarray_
            - A vector of the EFP value for each event.
        """

        self.n_jobs = n_jobs
        if self.n_jobs == -1:
            try:
                self.n_jobs = multiprocessing.cpu_count()
            except NotImplementedError:
                self.n_jobs = 4  # fall back to a reasonable default

        # setup processor pool
        chunksize = max(len(events) // self.n_jobs, 1)
        if sys.version_info[0] == 3:
            with multiprocessing.Pool(self.n_jobs) as pool:
                results = np.asarray(
                    list(pool.imap(self._batch_compute_func, events,
                                   chunksize)))
        # Pool is not a context manager in python 2
        else:
            pool = multiprocessing.Pool(self.n_jobs)
            results = np.asarray(
                list(pool.imap(self._batch_compute_func, events, chunksize)))
            pool.close()

        return results
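
A hedged usage sketch of batch_compute, assuming the concrete EFP subclass from the energyflow package; the random events are purely illustrative:

import numpy as np
import energyflow as ef

# Each event is an (n_particles, 3) array read as (pT, y, phi) in the
# default hadronic measure.
events = [np.abs(np.random.rand(25, 3)) for _ in range(100)]

efp = ef.EFP([(0, 1), (1, 2)])      # wedge graph on three vertices
values = efp.batch_compute(events, n_jobs=2)
print(values.shape)                  # -> (100,)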
Exemplo n.º 42
0
class GACReader(six.with_metaclass(ABCMeta)):

    scan_freq = 2.0 / 1000.0
    """Scanning frequency (scanlines per millisecond)"""
    def __init__(self,
                 interpolate_coords=True,
                 adjust_clock_drift=True,
                 tle_dir=None,
                 tle_name=None,
                 tle_thresh=7):
        """
        Args:
            interpolate_coords: Interpolate coordinates from every eighth pixel
                to all pixels.
            adjust_clock_drift: Adjust the geolocation to compensate for the
                clock error (POD satellites only).
            tle_dir: Directory holding TLE files
            tle_name: Filename pattern of TLE files.
            tle_thresh: Maximum number of days between observation and nearest
                TLE
        """
        self.interpolate_coords = interpolate_coords
        self.adjust_clock_drift = adjust_clock_drift
        self.tle_dir = tle_dir
        self.tle_name = tle_name
        self.tle_thresh = tle_thresh
        self.head = None
        self.scans = None
        self.spacecraft_name = None
        self.spacecraft_id = None
        self.utcs = None
        self.lats = None
        self.lons = None
        self.times = None
        self.tle_lines = None
        self.filename = None
        self._mask = None

    @abstractmethod
    def read(self, filename):
        """Read GAC data.

        Args:
            filename (str): Specifies the GAC file to be read.
        """
        self.filename = os.path.basename(filename)
        LOG.info('Reading %s', self.filename)

    @abstractmethod
    def get_header_timestamp(self):
        """Read start timestamp from the header.

        Returns:
            datetime.datetime: Start timestamp
        """
        raise NotImplementedError

    def get_counts(self):
        packed_data = self.scans["sensor_data"]
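        # Each 32-bit word packs three 10-bit counts (bits 20-29, 10-19 and
        # 0-9); a scanline carries 409 samples x 5 channels = 2045 counts,
        # so the last packed word is only partially used (hence [:, :-1]).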
        gac_counts = np.zeros((len(self.scans), 409 * 5))
        gac_counts[:, 0::3] = (packed_data >> 20) & 1023
        gac_counts[:, 1::3] = (packed_data >> 10) & 1023
        gac_counts[:, 2::3] = (packed_data & 1023)[:, :-1]
        gac_counts = gac_counts.reshape((-1, 409, 5))
        try:
            switch = self.get_ch3_switch()
        except AttributeError:
            return gac_counts
        else:
            channels = np.zeros((len(self.scans), 409, 6),
                                dtype=gac_counts.dtype)
            channels[:, :, :2] = gac_counts[:, :, :2]
            channels[:, :, -2:] = gac_counts[:, :, -2:]
            channels[:, :, 2][switch == 1] = gac_counts[:, :, 2][switch == 1]
            channels[:, :, 3][switch == 0] = gac_counts[:, :, 2][switch == 0]
            return channels

    @abstractmethod
    def _get_times(self):
        """Specifies how to read scanline timestamps from GAC data.

        Returns:
            int: year
            int: day of year
            int: milliseconds since 00:00
        """
        raise NotImplementedError

    def get_times(self):
        """Read scanline timestamps and try to correct invalid values.

        Note:
            Also sets self.utcs and self.times!

        Returns:
            UTC timestamps
        """
        if self.utcs is None:
            # Read timestamps
            year, jday, msec = self._get_times()

            # Correct invalid values
            year, jday, msec = self.correct_times_median(year=year,
                                                         jday=jday,
                                                         msec=msec)
            self.utcs = self.to_datetime64(year=year, jday=jday, msec=msec)
            self.correct_times_thresh()

            # Convert timestamps to datetime objects
            self.times = self.to_datetime(self.utcs)

        return self.utcs

    @staticmethod
    def to_datetime64(year, jday, msec):
        """Convert timestamps to numpy.datetime64

        Args:
            year: Year
            jday: Day of the year (1-based)
            msec: Milliseconds since 00:00

        Returns:
            numpy.datetime64: Converted timestamps
        """
        return (
            ((year - 1970).astype('datetime64[Y]') +
             (jday - 1).astype('timedelta64[D]')).astype('datetime64[ms]') +
            msec.astype('timedelta64[ms]'))

    @staticmethod
    def to_datetime(datetime64):
        """Convert numpy.datetime64 to datetime.datetime

        Args:
            datetime64 (numpy.datetime64): Numpy timestamp to be converted.

        Returns:
            datetime.datetime: Converted timestamp
        """
        return datetime64.astype(datetime.datetime)

    def lineno2msec(self, scan_line_number):
        """Compute ideal scanline timestamp based on the scanline number.

        Assumes a constant scanning frequency.

        Args:
            scan_line_number: Specifies the scanline number (1-based)

        Returns:
            Corresponding timestamps in milliseconds since 1970-01-01 00:00,
            i.e. the first scanline has timestamp 0.
        """
        return (scan_line_number - 1) / self.scan_freq

    def compute_lonlat(self, width, utcs=None, clock_drift_adjust=True):
        """Compute lat/lon coordinates.

        Args:
            width: Number of coordinates per scanlines
            utcs: Scanline timestamps
            clock_drift_adjust: If True, adjust clock drift.
        """
        tle1, tle2 = self.get_tle_lines()

        scan_points = np.arange(3.5, 2048, 5)

        if utcs is None:
            utcs = self.get_times()

        # adjusting clock for drift
        tic = datetime.datetime.now()
        if clock_drift_adjust:
            from pygac.clock_offsets_converter import get_offsets
            try:
                offset_times, clock_error = get_offsets(self.spacecraft_name)
            except KeyError:
                LOG.info("No clock drift info available for %s",
                         self.spacecraft_name)
            else:
                offset_times = np.array(offset_times, dtype='datetime64[ms]')
                offsets = np.interp(utcs.astype(np.uint64),
                                    offset_times.astype(np.uint64),
                                    clock_error)
                utcs -= (offsets * 1000).astype('timedelta64[ms]')

        t = utcs[0].astype(datetime.datetime)

        if "constant_yaw_attitude_error" in self.head.dtype.fields:
            rpy = np.deg2rad([
                self.head["constant_roll_attitude_error"] / 1e3,
                self.head["constant_pitch_attitude_error"] / 1e3,
                self.head["constant_yaw_attitude_error"] / 1e3
            ])
        else:
            rpy = [0, 0, 0]

        LOG.info("Using rpy: %s", str(rpy))

        from pyorbital.geoloc_instrument_definitions import avhrr_gac
        from pyorbital.geoloc import compute_pixels, get_lonlatalt
        sgeom = avhrr_gac(utcs.astype(datetime.datetime), scan_points, 55.385)
        s_times = sgeom.times(t)

        pixels_pos = compute_pixels((tle1, tle2), sgeom, s_times, rpy)
        pos_time = get_lonlatalt(pixels_pos, s_times)

        toc = datetime.datetime.now()

        LOG.warning("Computation of geolocation: %s", str(toc - tic))

        lons, lats = pos_time[:2]

        return lons.reshape(-1, width), lats.reshape(-1, width)

    def get_calibrated_channels(self):
        channels = self.get_counts()
        self.get_times()
        year = self.times[0].year
        delta = self.times[0].date() - datetime.date(year, 1, 1)
        jday = delta.days + 1

        # Earth-Sun distance correction factor
        corr = 1.0 - 0.0334 * np.cos(2.0 * np.pi * (jday - 2) / 365.25)

        # how many reflective channels are there ?
        tot_ref = channels.shape[2] - 3

        channels[:, :,
                 0:tot_ref] = calibrate_solar(channels[:, :, 0:tot_ref],
                                              np.arange(tot_ref), year, jday,
                                              self.spacecraft_name, corr)
        prt, ict, space = self.get_telemetry()
        for chan in [3, 4, 5]:
            channels[:, :, chan - 6] = calibrate_thermal(
                channels[:, :,
                         chan - 6], prt, ict[:, chan - 3], space[:, chan - 3],
                self.scans["scan_line_number"], chan, self.spacecraft_name)

        # Mask out corrupt values
        channels[self.mask] = np.nan

        # Apply KLM/POD specific postprocessing
        self.postproc(channels)

        # Mask pixels affected by scan motor issue
        if self.is_tsm_affected():
            LOG.info('Correcting for temporary scan motor issue')
            self.mask_tsm_pixels(channels)

        return channels

    def get_lonlat(self):
        """Compute lat/lon coordinates.

        TODO: Switch to faster interpolator?
        """
        if self.lons is None and self.lats is None:
            self.lons, self.lats = self._get_lonlat()

            # Interpolate from every eighth pixel to all pixels.
            if self.interpolate_coords:
                self.lons, self.lats = gtp.Gac_Lat_Lon_Interpolator(
                    self.lons, self.lats)

            # Adjust clock drift
            if self.adjust_clock_drift:
                self._adjust_clock_drift()

            # Mask out corrupt scanlines
            self.lons[self.mask] = np.nan
            self.lats[self.mask] = np.nan

            # Mask values outside the valid range
            self.lats[np.fabs(self.lats) > 90.0] = np.nan
            self.lons[np.fabs(self.lons) > 180.0] = np.nan

        return self.lons, self.lats

    @abstractmethod
    def _get_lonlat(self):
        """KLM/POD specific readout of lat/lon coordinates."""
        raise NotImplementedError

    @property
    def mask(self):
        """Mask for corrupt scanlines."""
        if self._mask is None:
            self._mask = self._get_corrupt_mask()
        return self._mask

    @abstractmethod
    def _get_corrupt_mask(self):
        """KLM/POD specific readout of corrupt scanline mask."""
        raise NotImplementedError

    @abstractmethod
    def postproc(self, channels):
        """Apply KLM/POD specific postprocessing."""
        raise NotImplementedError

    @abstractmethod
    def _adjust_clock_drift(self):
        """Adjust clock drift."""
        raise NotImplementedError

    @staticmethod
    def tle2datetime64(times):
        """Convert TLE timestamps to numpy.datetime64

        Args:
           times (float): TLE timestamps as %y%j.12345, e.g. 18001.25
        """
        # Convert %y%j.12345 to %Y%j.12345 (valid for 1950-2049)
        times = np.where(times > 50000, times + 1900000, times + 2000000)

        # Convert float to datetime64
        doys = (times % 1000).astype('int') - 1
        years = (times // 1000).astype('int')
        msecs = np.rint(24 * 3600 * 1000 * (times % 1))
        times64 = (years -
                   1970).astype('datetime64[Y]').astype('datetime64[ms]')
        times64 += doys.astype('timedelta64[D]')
        times64 += msecs.astype('timedelta64[ms]')

        return times64

    def get_tle_file(self):
        """Find TLE file for the current satellite."""
        tle_dir, tle_name = self.tle_dir, self.tle_name

        # If user didn't specify TLE dir/name, try config file
        if tle_dir is None or tle_name is None:
            conf = ConfigParser.ConfigParser()
            try:
                conf.read(CONFIG_FILE)
            except ConfigParser.NoSectionError:
                LOG.exception('Failed reading configuration file: %s',
                              str(CONFIG_FILE))
                raise

            options = {}
            for option, value in conf.items('tle', raw=True):
                options[option] = value

            tle_dir = options['tledir']
            tle_name = options['tlename']

        values = {
            "satname": self.spacecraft_name,
        }
        tle_filename = os.path.join(tle_dir, tle_name % values)
        LOG.info('TLE filename = ' + str(tle_filename))

        return tle_filename

    def read_tle_file(self, tle_filename):
        """Read TLE file."""
        with open(tle_filename, 'r') as fp_:
            return fp_.readlines()

    def get_tle_lines(self):
        """Find closest two line elements (TLEs) for the current orbit

        Raises:
            IndexError, if the closest TLE is more than :meth:`pygac.GACReader.tle_thresh` days apart
        """
        if self.tle_lines is not None:
            return self.tle_lines

        self.get_times()
        tle_data = self.read_tle_file(self.get_tle_file())
        sdate = np.datetime64(self.times[0], '[ms]')
        dates = self.tle2datetime64(
            np.array([float(line[18:32]) for line in tle_data[::2]]))

        # Find index "iindex" such that dates[iindex-1] < sdate <= dates[iindex]
        # Notes:
        #     1. If sdate < dates[0] then iindex = 0
        #     2. If sdate > dates[-1] then iindex = len(dates), beyond the right boundary!
        iindex = np.searchsorted(dates, sdate)

        if iindex in (0, len(dates)):
            if iindex == len(dates):
                # Reset index if beyond the right boundary (see note 2. above)
                iindex -= 1
        elif abs(sdate - dates[iindex - 1]) < abs(sdate - dates[iindex]):
            # Choose the closest of the two surrounding dates
            iindex -= 1

        # Make sure the TLE we found is within the threshold
        delta_days = abs(sdate - dates[iindex]) / np.timedelta64(1, 'D')
        if delta_days > self.tle_thresh:
            raise IndexError(
                "Can't find tle data for %s within +/- %d days around %s" %
                (self.spacecraft_name, self.tle_thresh, sdate))

        if delta_days > 3:
            LOG.warning("Found TLE data for %s that is %f days apart", sdate,
                        delta_days)
        else:
            LOG.debug("Found TLE data for %s that is %f days apart", sdate,
                      delta_days)

        # Select TLE data
        tle1 = tle_data[iindex * 2]
        tle2 = tle_data[iindex * 2 + 1]
        self.tle_lines = tle1, tle2
        return tle1, tle2

    def get_angles(self):
        """Get azimuth and zenith angles.

        Azimuth angle definition is the same as in pyorbital, but with
        different units (degrees not radians for sun azimuth angles)
        and different ranges.

        Returns:
            sat_azi: satellite azimuth angle
                degree clockwise from north in range ]-180, 180],
            sat_zenith: satellite zenith angles in degrees in range [0, 90],
            sun_azi: sun azimuth angle
                degree clockwise from north in range ]-180, 180],
            sun_zenith: sun zenith angles in degrees in range [0, 90],
            rel_azi: absolute azimuth angle difference in degrees between sun
                and sensor in range [0, 180]

        """
        self.get_times()
        self.get_lonlat()
        tle1, tle2 = self.get_tle_lines()
        orb = Orbital(self.spacecrafts_orbital[self.spacecraft_id],
                      line1=tle1,
                      line2=tle2)

        sat_azi, sat_elev = orb.get_observer_look(self.times[:, np.newaxis],
                                                  self.lons, self.lats, 0)

        sat_zenith = 90 - sat_elev

        sun_zenith = astronomy.sun_zenith_angle(self.times[:, np.newaxis],
                                                self.lons, self.lats)

        alt, sun_azi = astronomy.get_alt_az(self.times[:, np.newaxis],
                                            self.lons, self.lats)
        del alt
        sun_azi = np.rad2deg(sun_azi)
        rel_azi = get_absolute_azimuth_angle_diff(sun_azi, sat_azi)

        # Scale angles range to half open interval ]-180, 180]
        sat_azi = centered_modulus(sat_azi, 360.0)
        sun_azi = centered_modulus(sun_azi, 360.0)

        # Mask corrupt scanlines
        for arr in (sat_azi, sat_zenith, sun_azi, sun_zenith, rel_azi):
            arr[self.mask] = np.nan

        return sat_azi, sat_zenith, sun_azi, sun_zenith, rel_azi

    def correct_times_median(self, year, jday, msec):
        """Replace invalid timestamps with statistical estimates (using median).

        Assumes that the majority of timestamps is ok.

        Args:
            year: Year
            jday: Day of the year
            msec: Milliseconds since 00:00

        Returns:
            Corrected year
            Corrected day of the year
            Corrected milliseconds
        """
        # Estimate ideal timestamps based on the scanline number. Still without
        # offset, e.g. the first scanline has timestamp 1970-01-01 00:00
        msec_lineno = self.lineno2msec(self.scans["scan_line_number"])

        jday = np.where(np.logical_or(jday < 1, jday > 366), np.median(jday),
                        jday)
        if_wrong_jday = np.ediff1d(jday, to_begin=0)
        jday = np.where(if_wrong_jday < 0, max(jday), jday)

        if_wrong_msec = np.where(msec < 1)
        if_wrong_msec = if_wrong_msec[0]
        if len(if_wrong_msec) > 0:
            if if_wrong_msec[0] != 0:
                msec = msec[0] + msec_lineno
            else:
                msec0 = np.median(msec - msec_lineno)
                msec = msec0 + msec_lineno

        if_wrong_msec = np.ediff1d(msec, to_begin=0)
        msec = np.where(
            np.logical_and(
                np.logical_or(if_wrong_msec < -1000, if_wrong_msec > 1000),
                if_wrong_jday != 1), msec[0] + msec_lineno, msec)

        # checking if year value is out of valid range
        if_wrong_year = np.where(
            np.logical_or(year < 1978, year > datetime.datetime.now().year))
        if_wrong_year = if_wrong_year[0]
        if len(if_wrong_year) > 0:
            # if the first scanline has valid time stamp
            if if_wrong_year[0] != 0:
                year = year[0]
                jday = jday[0]
                msec = msec[0] + msec_lineno
            # Otherwise use median time stamp
            else:
                year = np.median(year)
                jday = np.median(jday)
                msec0 = np.median(msec - msec_lineno)
                msec = msec0 + msec_lineno

        return year, jday, msec

    def correct_scan_line_numbers(self):
        """Remove scanlines with corrupted scanline numbers

        This includes:
            - Scanline numbers outside the valid range
            - Scanline numbers deviating more than a certain threshold from the
            ideal case (1,2,3,...N)

        Example files having corrupt scanline numbers:
            - NSS.GHRR.NJ.D96144.S2000.E2148.B0720102.GC
            - NSS.GHRR.NJ.D96064.S0043.E0236.B0606162.WI
            - NSS.GHRR.NJ.D99286.S1818.E2001.B2466869.WI

        Returns:
            Intermediate and final results (for plotting purpose)
        """
        along_track = np.arange(1, len(self.scans["scan_line_number"]) + 1)
        results = {
            'along_track': along_track,
            'n_orig': self.scans['scan_line_number'].copy()
        }

        # Remove scanlines whose scanline number is outside the valid range
        within_range = np.logical_and(self.scans["scan_line_number"] < 15000,
                                      self.scans["scan_line_number"] >= 0)
        self.scans = self.scans[within_range]

        # Remove scanlines deviating more than a certain threshold from the
        # ideal case (1,2,3,...N).
        ideal = np.arange(1, len(self.scans["scan_line_number"]) + 1)

        # ... Estimate possible offset (in case some scanlines are missing in
        # the beginning of the scan)
        offsets = self.scans["scan_line_number"] - ideal
        med_offset = np.median(offsets)

        # ... Compute difference to ideal case (1,2,3,...N) + offset
        diffs = np.abs(self.scans["scan_line_number"] - (ideal + med_offset))

        # ... Remove those scanlines whose difference is larger than a certain
        # threshold. For the threshold computation we only regard nonzero
        # differences.
        nz_diffs = diffs[diffs > 0]
        if len(nz_diffs) < 50:
            # Not enough differences for reliable statistics. Use fixed
            # threshold.
            thresh = 500
        else:
            mean_nz_diffs = np.mean(nz_diffs)
            std_nz_diffs = np.std(nz_diffs)
            med_nz_diffs = np.median(nz_diffs)
            mad_nz_diffs = np.median(np.abs(nz_diffs - med_nz_diffs))
            if mean_nz_diffs / float(med_nz_diffs) < 3:
                # Relatively small variation, keep (almost) everything
                thresh = mean_nz_diffs + 3 * std_nz_diffs
            else:
                # Large variation, filter more aggressively. Use median and
                # median absolute deviation (MAD) as they are less sensitive to
                # outliers. However, allow differences < 500 scanlines as they
                # occur quite often.
                thresh = max(500, med_nz_diffs + 3 * mad_nz_diffs)
        self.scans = self.scans[diffs <= thresh]

        LOG.debug('Removed %s scanline(s) with corrupt scanline numbers',
                  str(len(along_track) - len(self.scans)))

        results.update({
            'n_corr': self.scans['scan_line_number'],
            'within_range': within_range,
            'diffs': diffs,
            'thresh': thresh,
            'nz_diffs': nz_diffs
        })
        return results

    def correct_times_thresh(self,
                             max_diff_from_t0_head=6 * 60 * 1000,
                             min_frac_near_t0_head=0.01,
                             max_diff_from_ideal_t=10 * 1000):
        """Correct corrupted timestamps using a threshold approach.

        The threshold approach is based on the scanline number and the header
        timestamp. It also works if the majority of scanlines has corrupted
        timestamps.

        The header timestamp is used as a guideline to estimate the offset
        between timestamps computed from the scanline number and the actual
        scanline timestamps in the data. If header timestamp and scanline
        timestamps do not match, no correction is applied.

        Once the offset has been estimated, one can calculate the ideal
        timestamps based on the scanline number. Timestamps deviating more than
        a certain threshold from the ideal timestamps are replaced by
        the ideal timestamps.

        Example files having corrupt timestamps:
            - NSS.GHRR.NA.D81193.S2329.E0116.B1061214.WI
            - NSS.GHRR.NL.D01035.S2342.E0135.B0192627.WI

        Args:
            max_diff_from_t0_head (int): Threshold for offset estimation: A
                scanline timestamp matches the header timestamp t0_head if it is
                within the interval

                    [t0_head - max_diff_from_t0_head,
                    t0_head + max_diff_from_t0_head]

                around the header timestamp.
            min_frac_near_t0_head (float): Specifies the minimum fraction of
                scanline timestamps matching the header timestamp required for
                applying the correction.
            max_diff_from_ideal_t (float): Threshold for timestamp correction:
                If a scanline timestamp deviates more than max_diff_from_ideal_t
                from the ideal timestamp, it is regarded as corrupt and will be
                replaced with the ideal timestamp.
        Returns:
            Intermediate and final results (for plotting purpose)
        """
        results = {}
        dt64_msec = ">M8[ms]"

        # Check whether scanline number increases monotonically
        n = self.scans["scan_line_number"]
        results.update({'t': self.utcs.copy(), 'n': n})
        if np.any(np.diff(n) < 0):
            LOG.error("Cannot perform timestamp correction. Scanline number "
                      "does not increase monotonically.")
            results['fail_reason'] = "Scanline number jumps backwards"
            return results

        # Convert time to milliseconds since 1970-01-01
        t = self.utcs.astype("i8")
        try:
            t0_head = np.array([self.get_header_timestamp().isoformat()],
                               dtype="datetime64[ms]").astype("i8")[0]
        except ValueError as err:
            LOG.error("Cannot perform timestamp correction: %s", err)
            results['fail_reason'] = str(err)
            return results

        # Compute ideal timestamps based on the scanline number. Still
        # without offset, i.e. scanline 0 has timestamp 1970-01-01 00:00
        tn = self.lineno2msec(self.scans["scan_line_number"])

        # Try to determine the timestamp t0 of the first scanline. Since none
        # of the actual timestamps is trustworthy, use the header timestamp
        # as a guideline. However, the header timestamp may also be corrupted,
        # so we only apply corrections if there is a minimum fraction of
        # scanlines whose timestamps match the header timestamp.
        #
        # 1) Compute offsets between actual timestamps and idealized timestamps
        offsets = t - tn

        # 2) If the offsets of a certain minimum fraction of scanlines are
        #    within a certain interval around the header timestamp, estimate
        #    t0 by calculating the median offset among these timestamps. If not,
        #    we do not have reliable information and cannot proceed.
        near_t0_head = np.where(
            np.fabs(offsets - t0_head) <= max_diff_from_t0_head)[0]
        results.update({
            'offsets': offsets,
            't0_head': t0_head,
            'max_diff_from_t0_head': max_diff_from_t0_head
        })
        if near_t0_head.size / float(n.size) >= min_frac_near_t0_head:
            t0 = np.median(offsets[near_t0_head])
        else:
            LOG.error("Timestamp mismatch. Cannot perform correction.")
            results['fail_reason'] = "Timestamp mismatch"
            return results

        # Add estimated offset to the ideal timestamps
        tn += t0

        # Replace timestamps deviating more than a certain threshold from the
        # ideal timestamp with the ideal timestamp.
        corrupt_lines = np.where(np.fabs(t - tn) > max_diff_from_ideal_t)
        self.utcs[corrupt_lines] = tn[corrupt_lines].astype(dt64_msec)
        LOG.debug("Corrected %s timestamp(s)", str(len(corrupt_lines[0])))

        results.update({'tn': tn, 'tcorr': self.utcs, 't0': t0})
        return results

    @abstractproperty
    def tsm_affected_intervals(self):
        """Specifies time intervals being affected by the scan motor problem.

        Returns:
            dict: Affected time intervals. A dictionary containing a list of
                (start, end) tuples for each affected platform. Both start and
                end must be datetime.datetime objects.
        """
        raise NotImplementedError

    def is_tsm_affected(self):
        """Determine whether this orbit is affected by the scan motor problem.

        Returns:
            bool: True if the orbit is affected, False otherwise.
        """
        self.get_times()
        ts = self.times[0]
        te = self.times[-1]
        try:
            for interval in self.tsm_affected_intervals[self.spacecraft_id]:
                if ts >= interval[0] and te <= interval[1]:
                    # Found a matching interval
                    return True

            # No matching interval, orbit is not affected
            return False
        except KeyError:
            # Platform is not affected at all
            return False

    def get_midnight_scanline(self):
        """Find the scanline where the UTC date increases by one day.

        Returns:
            int: The midnight scanline if it exists and is unique.
                 None, else.
        """
        self.get_times()
        d0 = np.datetime64(datetime.date(1970, 1, 1), 'D')
        days = (self.utcs.astype('datetime64[D]') - d0).astype(int)
        incr = np.where(np.diff(days) == 1)[0]
        if len(incr) != 1:
            if len(incr) > 1:
                LOG.warning('Unable to determine midnight scanline: '
                            'UTC date increases more than once. ')
            return None
        else:
            return incr[0]

    def get_miss_lines(self):
        """Find missing scanlines, i.e. scanlines which were dropped for some
        reason or were never recorded.

        Returns:
            Indices of missing scanlines
        """
        # Compare scanline number against the ideal case (1, 2, 3, ...) and
        # find the missing line numbers.
        ideal = set(range(1, self.scans['scan_line_number'][-1] + 1))
        missing = sorted(ideal.difference(set(self.scans['scan_line_number'])))
        return np.array(missing, dtype=int)

    def mask_tsm_pixels(self, channels):
        """Mask pixels affected by the scan motor issue."""
        idx = self.get_tsm_pixels(channels)
        channels[idx] = np.nan

    @abstractmethod
    def get_tsm_pixels(self, channels):
        """Determine pixels affected by the scan motor issue.

        Channel selection is POD/KLM specific.
        """
        raise NotImplementedError
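
As a standalone check of the timestamp arithmetic used by to_datetime64 above, with illustrative values:

import numpy as np

year = np.array([1995])
jday = np.array([35])        # 4 February
msec = np.array([43200000])  # 12:00:00
ts = (((year - 1970).astype('datetime64[Y]') +
       (jday - 1).astype('timedelta64[D]')).astype('datetime64[ms]') +
      msec.astype('timedelta64[ms]'))
print(ts)  # ['1995-02-04T12:00:00.000']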
Exemplo n.º 43
0
class OperatorScopeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Scope at which the operator will be installed.
    """

    CLUSTER = "cluster"
    NAMESPACE = "namespace"
Exemplo n.º 44
0
class Configuration(six.with_metaclass(TypeWithDefault, object)):
    """NOTE: This class is auto generated by the swagger code generator program.

    Ref: https://github.com/swagger-api/swagger-codegen
    Do not edit the class manually.
    """
    def __init__(self):
        """Constructor"""
        # Default Base url
        self.host = "https://app.ercdex.com/api/v2"
        # Temp file folder for downloading files
        self.temp_folder_path = None

        # Authentication Settings
        # dict to store API key(s)
        self.api_key = {}
        # dict to store API prefix (e.g. Bearer)
        self.api_key_prefix = {}
        # Username for HTTP basic authentication
        self.username = ""
        # Password for HTTP basic authentication
        self.password = ""

        # Logging Settings
        self.logger = {}
        self.logger["package_logger"] = logging.getLogger("erc_dex")
        self.logger["urllib3_logger"] = logging.getLogger("urllib3")
        # Log format
        self.logger_format = '%(asctime)s %(levelname)s %(message)s'
        # Log stream handler
        self.logger_stream_handler = None
        # Log file handler
        self.logger_file_handler = None
        # Debug file location
        self.logger_file = None
        # Debug switch
        self.debug = False

        # SSL/TLS verification
        # Set this to false to skip verifying SSL certificate when calling API
        # from https server.
        self.verify_ssl = True
        # Set this to customize the certificate file to verify the peer.
        self.ssl_ca_cert = None
        # client certificate file
        self.cert_file = None
        # client key file
        self.key_file = None
        # Set this to True/False to enable/disable SSL hostname verification.
        self.assert_hostname = None

        # urllib3 connection pool's maximum number of connections saved
        # per pool. urllib3 uses 1 connection as default value, but this is
        # not the best value when you are making a lot of possibly parallel
        # requests to the same host, which is often the case here.
        # cpu_count * 5 is used as default value to increase performance.
        self.connection_pool_maxsize = multiprocessing.cpu_count() * 5

        # Proxy URL
        self.proxy = None
        # Safe chars for path_param
        self.safe_chars_for_path_param = ''

    @property
    def logger_file(self):
        """The logger file.

        If the logger_file is None, then add stream handler and remove file
        handler. Otherwise, add file handler and remove stream handler.

        :param value: The logger_file path.
        :type: str
        """
        return self.__logger_file

    @logger_file.setter
    def logger_file(self, value):
        """The logger file.

        If the logger_file is None, then add stream handler and remove file
        handler. Otherwise, add file handler and remove stream handler.

        :param value: The logger_file path.
        :type: str
        """
        self.__logger_file = value
        if self.__logger_file:
            # If set logging file,
            # then add file handler and remove stream handler.
            self.logger_file_handler = logging.FileHandler(self.__logger_file)
            self.logger_file_handler.setFormatter(self.logger_formatter)
            for _, logger in six.iteritems(self.logger):
                logger.addHandler(self.logger_file_handler)
                if self.logger_stream_handler:
                    logger.removeHandler(self.logger_stream_handler)
        else:
            # If not set logging file,
            # then add stream handler and remove file handler.
            self.logger_stream_handler = logging.StreamHandler()
            self.logger_stream_handler.setFormatter(self.logger_formatter)
            for _, logger in six.iteritems(self.logger):
                logger.addHandler(self.logger_stream_handler)
                if self.logger_file_handler:
                    logger.removeHandler(self.logger_file_handler)

    @property
    def debug(self):
        """Debug status

        :param value: The debug status, True or False.
        :type: bool
        """
        return self.__debug

    @debug.setter
    def debug(self, value):
        """Debug status

        :param value: The debug status, True or False.
        :type: bool
        """
        self.__debug = value
        if self.__debug:
            # if debug status is True, turn on debug logging
            for _, logger in six.iteritems(self.logger):
                logger.setLevel(logging.DEBUG)
            # turn on httplib debug
            httplib.HTTPConnection.debuglevel = 1
        else:
            # if debug status is False, turn off debug logging,
            # setting log level to default `logging.WARNING`
            for _, logger in six.iteritems(self.logger):
                logger.setLevel(logging.WARNING)
            # turn off httplib debug
            httplib.HTTPConnection.debuglevel = 0

    @property
    def logger_format(self):
        """The logger format.

        The logger_formatter will be updated when sets logger_format.

        :param value: The format string.
        :type: str
        """
        return self.__logger_format

    @logger_format.setter
    def logger_format(self, value):
        """The logger format.

        The logger_formatter will be updated when sets logger_format.

        :param value: The format string.
        :type: str
        """
        self.__logger_format = value
        self.logger_formatter = logging.Formatter(self.__logger_format)

    def get_api_key_with_prefix(self, identifier):
        """Gets API key (with prefix if set).

        :param identifier: The identifier of apiKey.
        :return: The token for api key authentication.
        """
        if (self.api_key.get(identifier)
                and self.api_key_prefix.get(identifier)):
            return self.api_key_prefix[identifier] + ' ' + self.api_key[
                identifier]  # noqa: E501
        elif self.api_key.get(identifier):
            return self.api_key[identifier]

    def get_basic_auth_token(self):
        """Gets HTTP basic authentication header (string).

        :return: The token for basic HTTP authentication.
        """
        return urllib3.util.make_headers(basic_auth=self.username + ':' +
                                         self.password).get('authorization')

    def auth_settings(self):
        """Gets Auth Settings dict for api client.

        :return: The Auth Settings information dict.
        """
        return {}

    def to_debug_report(self):
        """Gets the essential information for debugging.

        :return: The report for debugging.
        """
        return "Python SDK Debug Report:\n"\
               "OS: {env}\n"\
               "Python Version: {pyversion}\n"\
               "Version of the API: 0.0.1-alpha\n"\
               "SDK Package Version: 0.2.0".\
               format(env=sys.platform, pyversion=sys.version)
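
A short usage sketch of this generated configuration; the API key name and file path are illustrative:

config = Configuration()
config.api_key['Authorization'] = 'my-token'
config.api_key_prefix['Authorization'] = 'Bearer'
config.debug = True                      # setters flip logger levels too
config.logger_file = '/tmp/erc_dex.log'  # swaps stream handler for a file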
Exemplo n.º 45
0
class OperatorType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Type of the operator
    """

    FLUX = "Flux"
Exemplo n.º 46
0
class EncryptionScopeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """The state of the encryption scope. Possible values (case-insensitive):  Enabled, Disabled.
    """

    ENABLED = "Enabled"
    DISABLED = "Disabled"
Exemplo n.º 47
0
    def test_case(self, predicate=None):
        predicate = predicate or self.is_test_method
        return six.with_metaclass(auto_decorate(self.use_cassette, predicate))
Exemplo n.º 48
0
class BaseErrorCode(six.with_metaclass(ErrorCodeMetaClass)):
    CODE_MESSAGE_MAP = NotImplemented