Example #1
 def register_type_replacement(cls, object_type, replacement_function):
     def copyreg_function(obj):
         return replacement_function(obj).__reduce__()
     try:
         copyreg.pickle(object_type, copyreg_function)
     except TypeError:
         pass
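Almost every example on this page follows the same contract: copyreg.pickle(type, reduce_fn) tells pickle (and copy) to call reduce_fn(obj) for instances of type, and reduce_fn must return a (callable, args) tuple that rebuilds the object. A minimal, self-contained sketch of that contract (the Point class is hypothetical, purely for illustration):

import copyreg
import pickle

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

def _reduce_point(p):
    # Rebuild by calling Point(x, y) at unpickling time.
    return Point, (p.x, p.y)

copyreg.pickle(Point, _reduce_point)

p2 = pickle.loads(pickle.dumps(Point(1, 2)))
assert (p2.x, p2.y) == (1, 2)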
Example #2
 def test_dynamic_class(self):
     a = create_dynamic_class("my_dynamic_class", (object,))
     copyreg.pickle(pickling_metaclass, pickling_metaclass.__reduce__)
     for proto in protocols:
         s = self.dumps(a, proto)
         b = self.loads(s)
         self.assertEqual(a, b)
Example #3
    def run(self, cell_model, param_values, sim=None, isolate=None):
        """Instantiate protocol"""

        if isolate is None:
            isolate = True

        if isolate:
            def _reduce_method(meth):
                """Overwrite reduce"""
                return (getattr, (meth.__self__, meth.__func__.__name__))

            import copyreg
            import types
            copyreg.pickle(types.MethodType, _reduce_method)

            import multiprocessing

            pool = multiprocessing.Pool(1, maxtasksperchild=1)
            responses = pool.apply(
                self._run_func,
                kwds={
                    'cell_model': cell_model,
                    'param_values': param_values,
                    'sim': sim})

            pool.terminate()
            pool.join()
            del pool
        else:
            responses = self._run_func(
                cell_model=cell_model,
                param_values=param_values,
                sim=sim)

        return responses
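The _reduce_method registration above is the standard workaround for bound methods not being picklable by default: the method is reduced to (getattr, (instance, name)), so only the instance is serialized and the method is looked up again after unpickling. A standalone sketch of the same idea (the Cell class is hypothetical; the instance itself must still be picklable):

import copyreg
import pickle
import types

def _reduce_method(meth):
    # Pickle a bound method as "fetch attribute <name> from <instance>"
    return getattr, (meth.__self__, meth.__func__.__name__)

copyreg.pickle(types.MethodType, _reduce_method)

class Cell:
    def __init__(self, name):
        self.name = name
    def run(self):
        return 'ran %s' % self.name

bound = pickle.loads(pickle.dumps(Cell('c1').run))
assert bound() == 'ran c1'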
Example #4
 def _register_identifiers_pickling(self):
     """
     Register identifiers pickling.
     """
     TextBackend.__build_identifier = self.build_identifier
     for cls in (TextPerson, TextOccupant, TextRoom):
         copyreg.pickle(cls, TextBackend._pickle_identifier, TextBackend._unpickle_identifier)
Example #5
 def Flags(cls):
     if cls.__flags_class__ is not None:
         return cls.__flags_class__
     name = cls.__name__ + 'Flags'
     flags_class = type(name, (cls.IntEnumFlags,), {})
     flags_class.__enum_class__ = cls
     cls.__flags_class__ = flags_class
     copy_reg.pickle(flags_class, _int_enum_flags_pickler)
     return flags_class
Example #6
File: util.py Project: irmen/Pyro4
    def register_type_replacement(cls, object_type, replacement_function):
        def copyreg_function(obj):
            return replacement_function(obj).__reduce__()

        if object_type is type or not inspect.isclass(object_type):
            raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
        try:
            copyreg.pickle(object_type, copyreg_function)
        except TypeError:
            pass
Example #7
    def _register_identifiers_pickling(self):
        """
        Register identifiers pickling.

        As Slack needs live objects in its identifiers, we need to override their pickling behavior.
        But for the unpickling to work we need to use bot.build_identifier, hence the bot parameter here.
        But then we also need bot for the unpickling so we save it here at module level.
        """
        SlackBackend.__build_identifier = self.build_identifier
        for cls in (SlackPerson, SlackRoomOccupant, SlackRoom):
            copyreg.pickle(cls, SlackBackend._pickle_identifier, SlackBackend._unpickle_identifier)
Example #8
def _setup_for_distributed():
    CurrentMPIComm._stack[-1] = MPI.COMM_SELF

    try:
        import copyreg
    except ImportError:  # Python 2
        import copy_reg as copyreg

    copyreg.pickle(MPI.Comm, _comm_pickle, _unpickle)
    copyreg.pickle(MPI.Intracomm, _comm_pickle, _unpickle)

    set_options(dask_chunk_size=1024 * 1024 * 2)
Example #9
def install_cacheops():
    """
    Installs cacheops by numerous monkey patches
    """
    monkey_mix(Manager, ManagerMixin)
    monkey_mix(QuerySet, QuerySetMixin)
    QuerySet._cacheprofile = QuerySetMixin._cacheprofile
    QuerySet._cloning = QuerySetMixin._cloning

    # DateQuerySet existed in Django 1.7 and earlier
    # Values*QuerySet existed in Django 1.8 and earlier
    from django.db.models import query
    for cls_name in ('ValuesQuerySet', 'ValuesListQuerySet', 'DateQuerySet'):
        if hasattr(query, cls_name):
            cls = getattr(query, cls_name)
            monkey_mix(cls, QuerySetMixin, ['iterator'])

    try:
        # Use app registry in Django 1.7
        from django.apps import apps
        admin_used = apps.is_installed('django.contrib.admin')
        get_models = apps.get_models
    except ImportError:
        # Introspect INSTALLED_APPS in older djangos
        from django.conf import settings
        admin_used = 'django.contrib.admin' in settings.INSTALLED_APPS
        from django.db.models import get_models

    # Install profile and signal handlers for any earlier created models
    for model in get_models(include_auto_created=True):
        model._default_manager._install_cacheops(model)

    # Turn off caching in admin
    if admin_used:
        from django.contrib.admin.options import ModelAdmin

        # Renamed queryset to get_queryset in Django 1.6
        method_name = 'get_queryset' if hasattr(ModelAdmin, 'get_queryset') else 'queryset'

        @monkey(ModelAdmin, name=method_name)
        def get_queryset(self, request):
            return get_queryset.original(self, request).nocache()

    # Bind m2m changed handler
    m2m_changed.connect(invalidate_m2m)

    # Make buffers/memoryviews pickleable to serialize binary field data
    if six.PY2:
        import copy_reg
        copy_reg.pickle(buffer, lambda b: (buffer, (bytes(b),)))
    if six.PY3:
        import copyreg
        copyreg.pickle(memoryview, lambda b: (memoryview, (bytes(b),)))
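The last two registrations exist because buffer (Python 2) and memoryview (Python 3) have no default pickle support; reducing them to (memoryview, (bytes(b),)) copies the underlying bytes. A quick standard-library-only check of the Python 3 branch:

import copyreg
import pickle

copyreg.pickle(memoryview, lambda b: (memoryview, (bytes(b),)))

view = pickle.loads(pickle.dumps(memoryview(b'binary field data')))
assert bytes(view) == b'binary field data'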
Example #10
 def test_copy_registry(self):
     class C(object):
         def __new__(cls, foo):
             obj = object.__new__(cls)
             obj.foo = foo
             return obj
     def pickle_C(obj):
         return (C, (obj.foo,))
     x = C(42)
     self.assertRaises(TypeError, copy.copy, x)
     copyreg.pickle(C, pickle_C, C)
     y = copy.copy(x)
Example #11
def install_cacheops():
    """
    Installs cacheops by numerous monkey patches
    """
    monkey_mix(Manager, ManagerMixin)
    monkey_mix(QuerySet, QuerySetMixin)

    # Use app registry to introspect used apps
    from django.apps import apps

    # Install profile and signal handlers for any earlier created models
    for model in apps.get_models(include_auto_created=True):
        if family_has_profile(model):
            if not isinstance(model._default_manager, Manager):
                raise ImproperlyConfigured("Can't install cacheops for %s.%s model:"
                                           " non-django model class or manager is used."
                                           % (model._meta.app_label, model._meta.model_name))
            model._default_manager._install_cacheops(model)

            # Bind m2m changed handlers
            rel_attr = 'remote_field' if django.VERSION >= (1, 9) else 'rel'
            m2ms = (f for f in model._meta.get_fields(include_hidden=True) if f.many_to_many)
            for m2m in m2ms:
                rel = m2m if hasattr(m2m, 'through') else getattr(m2m, rel_attr, m2m)
                opts = rel.through._meta
                m2m_changed.connect(invalidate_m2m, sender=rel.through,
                                    dispatch_uid=(opts.app_label, opts.model_name))

    # Turn off caching in admin
    if apps.is_installed('django.contrib.admin'):
        from django.contrib.admin.options import ModelAdmin

        @monkey(ModelAdmin)
        def get_queryset(self, request):
            return get_queryset.original(self, request).nocache()

    # Make buffers/memoryviews pickleable to serialize binary field data
    if six.PY2:
        import copy_reg
        copy_reg.pickle(buffer, lambda b: (buffer, (bytes(b),)))  # noqa
    if six.PY3:
        import copyreg
        copyreg.pickle(memoryview, lambda b: (memoryview, (bytes(b),)))

    # Fix random ordered dict keys producing different SQL for same QuerySet
    if (3, 3) <= sys.version_info < (3, 6):
        from django.db.models.query_utils import Q

        def Q__init__(self, *args, **kwargs):  # noqa
            super(Q, self).__init__(children=list(args) + list(sorted(kwargs.items())))
        Q.__init__ = Q__init__
Example #12
def copy_reg_pickle():
    # python 2 and 3 support
    # first try to import copyreg (python 3)
    # if the import fail we import copy_reg (python 2)
    try:
        import copyreg
    except ImportError:
        import copy_reg as copyreg

    copyreg.pickle(MotionVecd, motionvecPickle)
    copyreg.pickle(ForceVecd, forcevecPickle)
    copyreg.pickle(PTransformd, ptransformPickle)
    copyreg.pickle(RBInertiad, rbinertiaPickle)
    copyreg.pickle(ABInertiad, abinertiaPickle)
Example #13
    def save_game(self):
        self.levels[self.level_i] = self.pack_level()
        save = {
                "levels": self.levels,
                "level_i": self.level_i,
                "p": self.p
            }
        fn = os.path.join(dirs.get_save_dir("cosarara", "pokerl"),
                          "save.pickle")
        import copyreg
        from types import FunctionType

        copyreg.pickle(FunctionType, lambda x: (str, ("STUB",)))
        copyreg.pickle(type(self.stdscr), lambda x: (str, ("STUB",)))
        with open(fn, "wb") as f:
            pickle.dump(save, f)
Example #14
def InitMessage(descriptor, cls):
  cls._decoders_by_tag = {}
  cls._extensions_by_name = {}
  cls._extensions_by_number = {}
  if (descriptor.has_options and
      descriptor.GetOptions().message_set_wire_format):
    cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
        decoder.MessageSetItemDecoder(cls._extensions_by_number), None)


  for field in descriptor.fields:
    _AttachFieldHelpers(cls, field)

  _AddEnumValues(descriptor, cls)
  _AddInitMethod(descriptor, cls)
  _AddPropertiesForFields(descriptor, cls)
  _AddPropertiesForExtensions(descriptor, cls)
  _AddStaticMethods(cls)
  _AddMessageMethods(descriptor, cls)
  _AddPrivateHelperMethods(descriptor, cls)
  copyreg.pickle(cls, lambda obj: (cls, (), obj.__getstate__()))
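Unlike the two-element reduce tuples above, this registration returns a three-element tuple (constructor, args, state); pickle calls the constructor with args and then passes state to the new object's __setstate__ (or, absent that, merges it into __dict__). A small sketch of that form, with a hypothetical stand-in class rather than the generated protobuf message:

import copyreg
import pickle

class Message:
    def __getstate__(self):
        return self.__dict__.copy()
    def __setstate__(self, state):
        self.__dict__.update(state)

copyreg.pickle(Message, lambda obj: (Message, (), obj.__getstate__()))

m = Message()
m.payload = 'hello'
m2 = pickle.loads(pickle.dumps(m))
assert m2.payload == 'hello'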
Example #15
def copy_reg_pickle():
  # python 2 and 3 support
  # first try to import copyreg (python 3)
  # if the import fail we import copy_reg (python 2)
  try:
    import copyreg
  except ImportError:
    import copy_reg as copyreg

  # register sva pickle needed by some rbdyn type
  import spacevecalg as sva
  sva.copy_reg_pickle()

  copyreg.pickle(Body, bodyPickle)
  copyreg.pickle(Joint, jointPickle)
  copyreg.pickle(MultiBody, multiBodyPickle)
Example #16
""" Allow pickling an lsst.afw.image.VisitInfo"""
import copyreg
from lsst.afw.image import VisitInfo
from lsst.afw.coord import Observatory, Weather


def pickleVisitInfo(info):
    return (VisitInfo,
            tuple(getattr(info, "get" + prop)() for
                  prop in ("ExposureId", "ExposureTime", "DarkTime", "Date", "Ut1", "Era", "BoresightRaDec",
                           "BoresightAzAlt", "BoresightAirmass", "BoresightRotAngle", "RotType",
                           "Observatory", "Weather")))


def pickleObservatory(obs):
    return (Observatory, (obs.getLongitude(), obs.getLatitude(), obs.getElevation()))


def pickleWeather(weather):
    return (Weather, (weather.getAirTemperature(), weather.getAirPressure(), weather.getHumidity()))


copyreg.pickle(VisitInfo, pickleVisitInfo)
copyreg.pickle(Observatory, pickleObservatory)
copyreg.pickle(Weather, pickleWeather)
Example #17
# keep namespace clean
__version__ = version
__author__  = author
__license__ = license
del version, author, license


# support functions for pickling and unpickling
def __createPolygon(contour, hole):
    """rebuild Polygon from pickled data"""
    p = Polygon()
    for c, h in zip(contour, hole):
        p.addContour(c, h)
    return p


def __tuples(a):
    """map an array or list of lists to a tuple of tuples"""
    return tuple(tuple(i) for i in a)


def __reducePolygon(p):
    """return pickle data for Polygon """
    return (__createPolygon, (tuple([__tuples(x) for x in p]), p.isHole()))


import copyreg
copyreg.constructor(__createPolygon)
copyreg.pickle(Polygon, __reducePolygon, __createPolygon)
del copyreg
Example #18
    def __coerce__(self, other):
        while isinstance(other, SafeStringWrapper):
            other = other.unsanitized
        return coerce(self.unsanitized, other)

    def __enter__(self):
        return self.unsanitized.__enter__()

    def __exit__(self, *args):
        return self.unsanitized.__exit__(*args)


class CallableSafeStringWrapper(SafeStringWrapper):

    def __call__(self, *args, **kwds):
        return self.__safe_string_wrapper_function__(self.unsanitized(*args, **kwds))


# Enable pickling/deepcopy
def pickle_SafeStringWrapper(safe_object):
    args = (safe_object.unsanitized, )
    cls = SafeStringWrapper
    if isinstance(safe_object, CallableSafeStringWrapper):
        cls = CallableSafeStringWrapper
    return (cls, args)


copyreg.pickle(SafeStringWrapper, pickle_SafeStringWrapper, wrap_with_safe_string)
copyreg.pickle(CallableSafeStringWrapper, pickle_SafeStringWrapper, wrap_with_safe_string)
Example #19
    template = _compile_repl(template, pattern)
    if not template[0] and len(template[1]) == 1:
        # literal replacement
        return template[1][0]
    def filter(match, template=template):
        return sre_parse.expand_template(template, match)
    return filter

# register myself for pickling

import copyreg

def _pickle(p):
    return _compile, (p.pattern, p.flags)

copyreg.pickle(Pattern, _pickle, _compile)

# --------------------------------------------------------------------
# experimental stuff (see python-dev discussions for details)

class Scanner:
    def __init__(self, lexicon, flags=0):
        from sre_constants import BRANCH, SUBPATTERN
        if isinstance(flags, RegexFlag):
            flags = flags.value
        self.lexicon = lexicon
        # combine phrases into a compound pattern
        p = []
        s = sre_parse.Pattern()
        s.flags = flags
        for phrase, action in lexicon:
Example #20
    template = _compile_repl(template, pattern)
    if not template[0] and len(template[1]) == 1:
        # literal replacement
        return template[1][0]
    def filter(match, template=template):
        return sre_parse.expand_template(template, match)
    return filter

# register myself for pickling

import copyreg

def _pickle(p):
    return _compile, (p.pattern, p.flags)

copyreg.pickle(_pattern_type, _pickle, _compile)

# --------------------------------------------------------------------
# experimental stuff (see python-dev discussions for details)

class Scanner:
    def __init__(self, lexicon, flags=0):
        from sre_constants import BRANCH, SUBPATTERN
        self.lexicon = lexicon
        # combine phrases into a compound pattern
        p = []
        s = sre_parse.Pattern()
        s.flags = flags
        for phrase, action in lexicon:
            p.append(sre_parse.SubPattern(s, [
                (SUBPATTERN, (len(p)+1, sre_parse.parse(phrase, flags))),
Example #21
                                     nu,
                                     tf,
                                     dt=dt_adj,
                                     rho=rho,
                                     theta=theta,
                                     m=m,
                                     num_pops=num_pops,
                                     selfing=selfing,
                                     frozen=frozen)


# Allow LDstats objects to be pickled.
try:
    import copy_reg
except ImportError:
    import copyreg


def LDstats_pickler(y):
    return LDstats_unpickler, (y, y.num_pops, y.pop_ids)


def LDstats_unpickler(data, num_pops, pop_ids):
    return LDstats(data, num_pops=num_pops, pop_ids=pop_ids)


try:
    copy_reg.pickle(LDstats, LDstats_pickler, LDstats_unpickler)
except NameError:
    copyreg.pickle(LDstats, LDstats_pickler, LDstats_unpickler)
Example #22
import cloudpickle

from dask import config
from dask.local import MultiprocessingPoolExecutor, get_async, reraise
from dask.optimization import cull, fuse
from dask.system import CPU_COUNT
from dask.utils import ensure_dict


def _reduce_method_descriptor(m):
    return getattr, (m.__objclass__, m.__name__)


# type(set.union) is used as a proxy to <class 'method_descriptor'>
copyreg.pickle(type(set.union), _reduce_method_descriptor)

_dumps = partial(cloudpickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
_loads = cloudpickle.loads


def _process_get_id():
    return multiprocessing.current_process().ident


# -- Remote Exception Handling --
# By default, tracebacks can't be serialized using pickle. However, the
# `tblib` library can enable support for this. Since we don't mandate
# that tblib is installed, we do the following:
#
# - If tblib is installed, use it to serialize the traceback and reraise
Example #23
    return co

def pickle_code(co):
    assert isinstance(co, types.CodeType)
    ms = marshal.dumps(co)
    return unpickle_code, (ms,)

# XXX KBK 24Aug02 function pickling capability not used in Idle
#  def unpickle_function(ms):
#      return ms

#  def pickle_function(fn):
#      assert isinstance(fn, type.FunctionType)
#      return repr(fn)

copyreg.pickle(types.CodeType, pickle_code, unpickle_code)
# copyreg.pickle(types.FunctionType, pickle_function, unpickle_function)

BUFSIZE = 8*1024
LOCALHOST = '127.0.0.1'

class RPCServer(socketserver.TCPServer):

    def __init__(self, addr, handlerclass=None):
        if handlerclass is None:
            handlerclass = RPCHandler
        socketserver.TCPServer.__init__(self, addr, handlerclass)

    def server_bind(self):
        "Override TCPServer method, no bind() phase for connecting entity"
        pass
Example #24
# Register pickling support for layout instances such as
# torch.sparse_coo, etc
def _get_layout(name):
    """Get layout extension object from its string representation.
    """
    cache = _get_layout.cache  # type: ignore[attr-defined]
    if not cache:
        for v in torch.__dict__.values():
            if isinstance(v, torch.layout):
                cache[str(v)] = v
    return cache[name]


# There is not yet a good way to type annotate function attributes https://github.com/python/mypy/issues/2087
_get_layout.cache = {}  # type: ignore[attr-defined]
copyreg.pickle(torch.layout, lambda obj: (_get_layout, (str(obj), )))


def _legacy_load(f, map_location, pickle_module, **pickle_load_args):
    deserialized_objects: Dict[int, Any] = {}

    restore_location = _get_restore_location(map_location)

    def _check_container_source(container_type, source_file, original_source):
        try:
            current_source = ''.join(
                get_source_lines_and_file(container_type)[0])
        except Exception:  # saving the source is optional, so we can ignore any errors
            warnings.warn("Couldn't retrieve source code for container of "
                          "type " + container_type.__name__ +
                          ". It won't be checked "
Example #25
"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

import sys
import types
try:
    import copyreg  # Py 3
except ImportError:
    import copy_reg as copyreg  # Py 2


def code_ctor(*args):
    return types.CodeType(*args)


def reduce_code(co):
    args = [
        co.co_argcount, co.co_nlocals, co.co_stacksize, co.co_flags,
        co.co_code, co.co_consts, co.co_names, co.co_varnames, co.co_filename,
        co.co_name, co.co_firstlineno, co.co_lnotab, co.co_freevars,
        co.co_cellvars
    ]
    if sys.version_info[0] >= 3:
        args.insert(1, co.co_kwonlyargcount)
    return code_ctor, tuple(args)


copyreg.pickle(types.CodeType, reduce_code)
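A caveat: reduce_code spells out the CodeType constructor arguments as they existed through early Python 3; later interpreters added fields (co_kwonlyargcount is handled above, but co_posonlyargcount arrived in 3.8 and more followed), so this exact argument list would raise a TypeError on a current CPython. A version-independent sketch, used by several other examples on this page, round-trips the code object through marshal instead (marshal itself is only stable within one interpreter version):

import copyreg
import marshal
import pickle
import types

def unpickle_code(ms):
    return marshal.loads(ms)

def pickle_code(co):
    # marshal serializes code objects natively on any CPython version
    return unpickle_code, (marshal.dumps(co),)

copyreg.pickle(types.CodeType, pickle_code, unpickle_code)

co = compile('x + 1', '<demo>', 'eval')
co2 = pickle.loads(pickle.dumps(co))
assert eval(co2, {'x': 41}) == 42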
Example #26
def PopVars(l):
   for e in l:
      skill.variables[e] = skill.varstack[e][-1]
      skill.varstack[e] = skill.varstack[e][:-1]

def unpickle_code(ms):
    co = marshal.loads(ms)
    assert isinstance(co, types.CodeType)
    return co

def pickle_code(co):
    assert isinstance(co, types.CodeType)
    ms = marshal.dumps(co)
    return unpickle_code, (ms,)

copyreg.pickle(types.CodeType, pickle_code, unpickle_code)

import runtime as runtime

cell_lib = {}
def layout(cell,extra_params=None):
   context.push()
   loadcell(cell)
   global pcell_updates
   for name,value in pcell_updates:
      context.bag[name]['value'] = value
      run(context.props['cbs'][name])
   pcell_updates = []

   #Must be called after pcell updates
   apply_params()
Example #27
class DynamicClasscallMetaclass(DynamicMetaclass, ClasscallMetaclass):
    pass


class DynamicInheritComparisonMetaclass(DynamicMetaclass, InheritComparisonMetaclass):
    pass


class DynamicInheritComparisonClasscallMetaclass(DynamicMetaclass, InheritComparisonClasscallMetaclass):
    pass


# This registers the appropriate reduction methods (see Trac #5985)
for M in [DynamicMetaclass,
          DynamicClasscallMetaclass,
          DynamicInheritComparisonMetaclass,
          DynamicInheritComparisonClasscallMetaclass]:
    copyreg.pickle(M, M.__reduce__)


class TestClass:
    """
    A class used for checking that introspection works
    """
    def bla():
        """
        bla ...
        """
        pass
Example #28
import os
import sys
import copyreg
import warnings
import types
import pandas as pd
import numpy as np
import iris
import meteocalc
from skewt import SkewT as sk
from StormScriptsPy3.Pfuncts import *
import StormScriptsPy3.Pfuncts as Pf

# Override pickling of bound methods (allows the parallelization)
if not sys.warnoptions:
    warnings.simplefilter("ignore")
copyreg.pickle(types.MethodType, Pf._pickle_method)


# performance related functions: slicer allows vectorized constraints
def slicer(ps, val):
    smslice = iris.Constraint(pressure=lambda cell: cell == ps)
    return val.extract(smslice).data


# dataise and nodata allow for a zipped loop
def dataise(var):
    return var.data


def nodata(var):
    return var
Example #29
    template = _compile_repl(template, pattern)
    if not template[0] and len(template[1]) == 1:
        # literal replacement
        return template[1][0]
    def filter(match, template=template):
        return sre_parse.expand_template(template, match)
    return filter

# register myself for pickling

import copyreg

def _pickle(p):
    return _compile, (p.pattern, p.flags)

copyreg.pickle(_pattern_type, _pickle, _compile)

# --------------------------------------------------------------------
# experimental stuff (see python-dev discussions for details)

class Scanner:
    def __init__(self, lexicon, flags=0):
        from sre_constants import BRANCH, SUBPATTERN
        self.lexicon = lexicon
        # combine phrases into a compound pattern
        p = []
        s = sre_parse.Pattern()
        s.flags = flags
        for phrase, action in lexicon:
            gid = s.opengroup()
            p.append(sre_parse.SubPattern(s, [
Example #30

ForkingPickler.register(array.array, reduce_array)

view_types = [
    type(getattr({}, name)()) for name in ('items', 'keys', 'values')
]
if view_types[0] is not list:  # only needed in Py3.0

    def rebuild_as_list(obj):
        return list, (list(obj), )

    for view_type in view_types:
        ForkingPickler.register(view_type, rebuild_as_list)
        import copyreg
        copyreg.pickle(view_type, rebuild_as_list)

#
# Type for identifying shared objects
#


class Token(object):
    '''
    Type to uniquely identify a shared object
    '''
    __slots__ = ('typeid', 'address', 'id')

    def __init__(self, typeid, address, id):
        (self.typeid, self.address, self.id) = (typeid, address, id)
Example #31

# Example 10
def pickle_game_state(game_state):
    kwargs = game_state.__dict__
    return unpickle_game_state, (kwargs,)


# Example 11
def unpickle_game_state(kwargs):
    return GameState(**kwargs)


# Example 12
import copyreg
copyreg.pickle(GameState, pickle_game_state)


# Example 13
state = GameState()
state.points += 1000
serialized = pickle.dumps(state)
state_after = pickle.loads(serialized)
print(state_after.__dict__)


# Example 14
class GameState(object):
    def __init__(self, level=0, lives=4, points=0, magic=5):
        self.level = level
        self.lives = lives
        self.points = points
        self.magic = magic
Example #32
def _pickle_method(method):
    func_name = method.__func__.__name__
    obj = method.__self__
    cls = method.__self__.__class__
    if func_name.startswith('__') and not func_name.endswith('__'):
        # private methods are name-mangled; mirror that for the lookup
        cls_name = cls.__name__.lstrip('_')
        func_name = '_' + cls_name + func_name
    return _unpickle_method, (func_name, obj, cls)


def _unpickle_method(func_name, obj, cls):
    for cls in cls.__mro__:
        try:
            func = cls.__dict__[func_name]
        except KeyError:
            pass
        else:
            break
    return func.__get__(obj, cls)


copyreg.pickle(types.MethodType, _pickle_method, _unpickle_method)


#===============================================================================
#
#===============================================================================
class Documents(object):
    '''
    classdocs
    '''
    def __init__(self, corenlp=None):
        '''
        Constructor
        '''

        self.corenlp = corenlp
Example #33
# The name numpy.core._ufunc_reconstruct must be
#   available for unpickling to work.
def _ufunc_reconstruct(module, name):
    # The `fromlist` kwarg is required to ensure that `mod` points to the
    # inner-most module rather than the parent package when module name is
    # nested. This makes it possible to pickle non-toplevel ufuncs such as
    # scipy.special.expit for instance.
    mod = __import__(module, fromlist=[name])
    return getattr(mod, name)

def _ufunc_reduce(func):
    from pickle import whichmodule
    name = func.__name__
    return _ufunc_reconstruct, (whichmodule(func, name), name)


import sys
if sys.version_info[0] >= 3:
    import copyreg
else:
    import copy_reg as copyreg

copyreg.pickle(ufunc, _ufunc_reduce, _ufunc_reconstruct)
# Unclutter namespace (must keep _ufunc_reconstruct for unpickling)
del copyreg
del sys
del _ufunc_reduce

from numpy._pytesttester import PytestTester
test = PytestTester(__name__)
del PytestTester
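Because NumPy runs this registration at import time, ufuncs pickle by qualified name out of the box, and unpickling re-imports the defining module and returns the same singleton object. A one-line check, assuming NumPy is installed:

import pickle
import numpy as np

assert pickle.loads(pickle.dumps(np.add)) is np.add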
Example #34
# make Rects pickleable
if PY_MAJOR_VERSION >= 3:
    import copyreg as copy_reg
else:
    import copy_reg


def __rect_constructor(x, y, w, h):
    return Rect(x, y, w, h)


def __rect_reduce(r):
    assert type(r) == Rect
    return __rect_constructor, (r.x, r.y, r.w, r.h)
copy_reg.pickle(Rect, __rect_reduce, __rect_constructor)


# make Colors pickleable
def __color_constructor(r, g, b, a):
    return Color(r, g, b, a)


def __color_reduce(c):
    assert type(c) == Color
    return __color_constructor, (c.r, c.g, c.b, c.a)
copy_reg.pickle(Color, __color_reduce, __color_constructor)


# cleanup namespace
del pygame, os, sys, surflock, MissingModule, copy_reg, geterror, PY_MAJOR_VERSION
Example #35
def pickle_torch_dtype(torch_dtype: torch.dtype):
    return reconstruct_torch_dtype, (str(torch_dtype), )


if __name__ == "__main__":
    arg = arg_parse()
    #arg.save_dir = "net_0912_absloss_F_HE_pw10_threshold1_erode3"
    arg.save_dir = "nets_1003_absloss_FRE_pw10_erode2"
    arg.save_dir = "%s/outs/%s" % (os.getcwd(), arg.save_dir)
    if os.path.exists(arg.save_dir) is False:
        os.mkdir(arg.save_dir)

    logger = Logger(arg.save_dir)

    copyreg.pickle(torch.dtype, pickle_torch_dtype)

    os.environ["CUDA_VISIBLE_DEVICES"] = arg.gpus
    torch_device = torch.device("cuda")
    #torch_device = torch.device("cpu")

    # manually change paths of training data and test data
    # filename example :
    f_path_train = "/home/jysong/PyCharmProjects_JY/180907_3DcellSegmentation_regressionVer/data/train_"
    f_path_valid = "/home/jysong/PyCharmProjects_JY/180907_3DcellSegmentation_regressionVer/data/valid_"
    #f_path_test = "/home/jysong/PyCharmProjects_JY/180907_3DcellSegmentation_regressionVer/data/test_"
    f_path_test = "/data1/Moosung_CART/For_analysis"
    #f_path_test = "/data1/Moosung_CART/For_analysis/dataset1/exp0_fullsequence"

    preprocess = preprocess.get_preprocess(arg.augment)
Example #36
    def __str__(self):
        return '<Element %s>' % self.name

    def __repr__(self):
        return '<Element %s>' % self.name

# This is for backward compatibility.
def get_by_symbol(symbol):
    """ Get the element with a particular chemical symbol. """
    s = symbol.strip().upper()
    return Element._elements_by_symbol[s]

def _pickle_element(element):
    return (get_by_symbol, (element.symbol,))

copyreg.pickle(Element, _pickle_element)

# NOTE: getElementByMass assumes all masses are Quantity instances with unit
# "daltons". All elements need to obey this assumption, or that method will
# fail. No checking is done in getElementByMass for performance reasons
hydrogen =       Element(  1, "hydrogen", "H", 1.007947*daltons)
deuterium =      Element(  1, "deuterium", "D", 2.01355321270*daltons)
helium =         Element(  2, "helium", "He", 4.003*daltons)
lithium =        Element(  3, "lithium", "Li", 6.9412*daltons)
beryllium =      Element(  4, "beryllium", "Be", 9.0121823*daltons)
boron =          Element(  5, "boron", "B", 10.8117*daltons)
carbon =         Element(  6, "carbon", "C", 12.01078*daltons)
nitrogen =       Element(  7, "nitrogen", "N", 14.00672*daltons)
oxygen =         Element(  8, "oxygen", "O", 15.99943*daltons)
fluorine =       Element(  9, "fluorine", "F", 18.99840325*daltons)
neon =           Element( 10, "neon", "Ne", 20.17976*daltons)
Example #37
                literal.extend(items)
        else:
            literal.append(ord(ch))

    # Flush the literal.
    if literal:
        compiled.append(make_string(literal))

    _replacement_cache[key] = compiled

    return compiled


# We define Pattern here after all the support objects have been defined.
Pattern = type(_compile('', 0, {}))
Match = type(_compile('', 0).match(''))

# We'll define an alias for the 'compile' function so that the repr of a
# pattern object is eval-able.
Regex = compile

# Register myself for pickling.
import copyreg as _copy_reg


def _pickle(pattern):
    return _regex.compile, pattern._pickled_data


_copy_reg.pickle(Pattern, _pickle)
Example #38
    return unpickle_function, (mod_name, qname, self_)


def pickle_state(state):
    return _uarray._BackendState._unpickle, state._pickle()


def pickle_set_backend_context(ctx):
    return _SetBackendContext, ctx._pickle()


def pickle_skip_backend_context(ctx):
    return _SkipBackendContext, ctx._pickle()


copyreg.pickle(_Function, pickle_function)
copyreg.pickle(_uarray._BackendState, pickle_state)
copyreg.pickle(_SetBackendContext, pickle_set_backend_context)
copyreg.pickle(_SkipBackendContext, pickle_skip_backend_context)
atexit.register(_uarray.clear_all_globals)


def get_state():
    """
    Returns an opaque object containing the current state of all the backends.

    Can be used for synchronization between threads/processes.

    See Also
    --------
    set_state
Example #39
    except AttributeError:
        log.msg("Method", im_name, "not on class", im_class)
        assert im_self is not None, "No recourse: no instance to guess from."
        # Attempt a last-ditch fix before giving up. If classes have changed
        # around since we pickled this method, we may still be able to get it
        # by looking on the instance's current class.
        if im_self.__class__ is im_class:
            raise
        return unpickleMethod(im_name, im_self, im_self.__class__)
    else:
        maybeClass = ()
        bound = types.MethodType(methodFunction, im_self, *maybeClass)
        return bound


copy_reg.pickle(types.MethodType, pickleMethod, unpickleMethod)


def _pickleFunction(f):
    """
    Reduce, in the sense of L{pickle}'s C{object.__reduce__} special method, a
    function object into its constituent parts.

    @param f: The function to reduce.
    @type f: L{types.FunctionType}

    @return: a 2-tuple of a reference to L{_unpickleFunction} and a tuple of
        its arguments, a 1-tuple of the function's fully qualified name.
    @rtype: 2-tuple of C{callable, native string}
    """
    if f.__name__ == '<lambda>':
Example #40
# make Rects pickleable
if PY_MAJOR_VERSION >= 3:
    import copyreg as copy_reg
else:
    import copy_reg


def __rect_constructor(x, y, w, h):
    return Rect(x, y, w, h)


def __rect_reduce(r):
    assert type(r) == Rect
    return __rect_constructor, (r.x, r.y, r.w, r.h)
copy_reg.pickle(Rect, __rect_reduce, __rect_constructor)


# make Colors pickleable
def __color_constructor(r, g, b, a):
    return Color(r, g, b, a)


def __color_reduce(c):
    assert type(c) == Color
    return __color_constructor, (c.r, c.g, c.b, c.a)
copy_reg.pickle(Color, __color_reduce, __color_constructor)


# cleanup namespace
del pygame, os, sys, surflock, MissingModule, copy_reg, geterror, PY_MAJOR_VERSION, _import_failed
Example #41
import numpy
from .lmfit.parameter import Parameter as LMFitParameter
import copyreg, types
import platform


# Use copyreg to allow pickling of bound methods
def make_instancemethod(inst, methodname):
    return getattr(inst, methodname)


def pickle_instancemethod(method):
    return make_instancemethod, (method.__self__, method.__func__.__name__)


copyreg.pickle(types.MethodType, pickle_instancemethod, make_instancemethod)


class Parameter(LMFitParameter):
    """ MATK parameter class
    """
    def __init__(self,
                 name,
                 value=None,
                 vary=True,
                 min=None,
                 max=None,
                 expr=None,
                 nominal=None,
                 discrete_vals=[],
                 **kwargs):
Example #42
from pyspark import since
from pyspark.ml import linalg as newlinalg
from pyspark.sql.types import UserDefinedType, StructField, StructType, ArrayType, DoubleType, \
    IntegerType, ByteType, BooleanType

__all__ = [
    'Vector', 'DenseVector', 'SparseVector', 'Vectors', 'Matrix',
    'DenseMatrix', 'SparseMatrix', 'Matrices', 'QRDecomposition'
]

if sys.version_info[:2] == (2, 7):
    # speed up pickling array in Python 2.7
    def fast_pickle_array(ar):
        return array.array, (ar.typecode, ar.tostring())

    copy_reg.pickle(array.array, fast_pickle_array)

# Check whether we have SciPy. MLlib works without it too, but if we have it, some methods,
# such as _dot and _serialize_double_vector, start to support scipy.sparse matrices.

try:
    import scipy.sparse
    _have_scipy = True
except ImportError:
    # No SciPy in environment, but that's okay
    _have_scipy = False


def _convert_to_vector(l):
    if isinstance(l, Vector):
        return l
Example #43
import datetime
import torch
import copy
import csv
import copyreg
import shutil
import matplotlib.pyplot as plt
from util import crop_video


def _pickle_keypoints(point):
    return cv2.KeyPoint, (*point.pt, point.size, point.angle, point.response,
                          point.octave, point.class_id)


copyreg.pickle(cv2.KeyPoint().__class__, _pickle_keypoints)
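# A round-trip sanity check for the registration above (a sketch: it assumes
# OpenCV's KeyPoint(x, y, size) constructor and imports pickle locally):
import pickle
_kp = cv2.KeyPoint(10.0, 20.0, 3.0)
assert pickle.loads(pickle.dumps(_kp)).pt == _kp.pt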

os.environ["CUDA_VISIBLE_DEVICES"] = "0"

h_size = 480
w_size = 640


def crop_metric(M):
    points = np.array([[0, 0, 1], [0, h_size, 1], [w_size, 0, 1],
                       [w_size, h_size, 1]]).T
    result = np.matmul(M, points).T
    result = result[:, :2] / result[:, 2:]
    w_out = 1 - max(result[0, 0], result[1, 0], w_size - result[2, 0],
                    w_size - result[3, 0], 0) / w_size
    h_out = 1 - max(result[0, 1], result[2, 1], h_size - result[1, 1],
Example #44
#
# Register some things for pickling
#

def reduce_array(a):
    return array.array, (a.typecode, a.tobytes())
ForkingPickler.register(array.array, reduce_array)

view_types = [type(getattr({}, name)()) for name in ('items','keys','values')]
if view_types[0] is not list:       # only needed in Py3.0
    def rebuild_as_list(obj):
        return list, (list(obj),)
    for view_type in view_types:
        ForkingPickler.register(view_type, rebuild_as_list)
        import copyreg
        copyreg.pickle(view_type, rebuild_as_list)

#
# Type for identifying shared objects
#

class Token(object):
    '''
    Type to uniquely identify a shared object
    '''
    __slots__ = ('typeid', 'address', 'id')

    def __init__(self, typeid, address, id):
        (self.typeid, self.address, self.id) = (typeid, address, id)

    def __getstate__(self):
Example #45
we need to automate all of this so that functions themselves can be pickled.

Reference: A. Tremols, P Cogolo, "Python Cookbook," p 302-305
"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

import sys
import types
try:
    import copyreg  # Py 3
except ImportError:
    import copy_reg as copyreg  # Py 2


def code_ctor(*args):
    return types.CodeType(*args)


def reduce_code(co):
    args = [co.co_argcount, co.co_nlocals, co.co_stacksize,
            co.co_flags, co.co_code, co.co_consts, co.co_names,
            co.co_varnames, co.co_filename, co.co_name, co.co_firstlineno,
            co.co_lnotab, co.co_freevars, co.co_cellvars]
    if sys.version_info[0] >= 3:
        args.insert(1, co.co_kwonlyargcount)
    return code_ctor, tuple(args)

copyreg.pickle(types.CodeType, reduce_code)
Example #46
                else:
                    # String format replace
                    try:
                        obj = m.captures(g_index)
                    except IndexError:  # pragma: no cover
                        raise IndexError("'%d' is out of range!" % g_index)
                    l = _util.format_string(m, obj, capture, self._bytes)
                if span_case is not None:
                    if span_case == _LOWER:
                        l = l.lower()
                    else:
                        l = l.upper()
                if single_case is not None:
                    if single_case == _LOWER:
                        l = l[0:1].lower() + l[1:]
                    else:
                        l = l[0:1].upper() + l[1:]
            text.append(l)

        return sep.join(text)


def _pickle(r):
    """Pickle."""

    return ReplaceTemplate, (r.groups, r.group_slots, r.literals,
                             r.pattern_hash, r.use_format, r._bytes)


_copyreg.pickle(ReplaceTemplate, _pickle)
Example #47
def reduce_method(method):
    '''Reducer for methods.'''
    return (
        getattr,
        (
            method.__self__ or method.__self__.__class__,
            # `im_self` for bound methods, `im_class` for unbound methods.
            method.__func__.__name__
        )
    )

copyreg.pickle(types.MethodType, reduce_method)


###############################################################################


def reduce_module(module):
    '''Reducer for modules.'''
    return (import_tools.normal_import, (module.__name__,))

copyreg.pickle(types.ModuleType, reduce_module)


###############################################################################

Example #48
    def filter(match, template=template):
        return sre_parse.expand_template(template, match)

    return filter


# register myself for pickling

import copyreg


def _pickle(p):
    return _compile, (p.pattern, p.flags)


copyreg.pickle(Pattern, _pickle, _compile)

# --------------------------------------------------------------------
# experimental stuff (see python-dev discussions for details)


class Scanner:
    def __init__(self, lexicon, flags=0):
        from sre_constants import BRANCH, SUBPATTERN
        if isinstance(flags, RegexFlag):
            flags = flags.value
        self.lexicon = lexicon
        # combine phrases into a compound pattern
        p = []
        s = sre_parse.State()
        s.flags = flags
Example #49
    def decode(self, *a, **b):
        return str(self).decode(*a, **b)

    def read(self):
        return str(self)

    def __mod__(self, symbols):
        if self.is_copy:
            return lazyT(self)
        return lazyT(self.m, symbols, self.T, self.f, self.t, self.M)

def pickle_lazyT(c):
    return str, (c.xml(),)

copy_reg.pickle(lazyT, pickle_lazyT)

class translator(object):
    """
    this class is instantiated by gluon.compileapp.build_environment
    as the T object
    ::
        T.force(None) # turns off translation
        T.force('fr, it') # forces web2py to translate using fr.py or it.py

        T("Hello World") # translates "Hello World" using the selected file

    notice 1: there is no need to force since, by default, T uses
       http_accept_language to determine a translation file.
    notice 2:
       en and en-en are considered different languages!
Example #50
            else:
                literal.extend(items)
        else:
            literal.append(ord(ch))

    # Flush the literal.
    if literal:
        compiled.append(make_string(literal))

    _replacement_cache[key] = compiled

    return compiled


# We define _pattern_type here after all the support objects have been defined.
_pattern_type = type(_compile("", 0, {}))

# We'll define an alias for the 'compile' function so that the repr of a
# pattern object is eval-able.
Regex = compile

# Register myself for pickling.
import copyreg as _copy_reg


def _pickle(pattern):
    return _regex.compile, pattern._pickled_data


_copy_reg.pickle(_pattern_type, _pickle)
Example #51
#try: import _pygame_fastevent
#except (ImportError,IOError), msg:fastevent=MissingModule("fastevent", msg, 0)

#there's also a couple "internal" modules not needed
#by users, but putting them here helps "dependency finder"
#programs get everything they need (like py2exe)
try: import _pygame_imageext; del _pygame_imageext
except (ImportError,IOError):pass

try: import _pygame_mixer_music; del _pygame_mixer_music
except (ImportError,IOError):pass

def packager_imports():
    """
    Some additional things that py2app/py2exe will want to see
    """
    import OpenGL.GL
'''
#make Rects pickleable
import copyreg
def __rect_constructor(x, y, w, h):
    return Rect(x, y, w, h)
def __rect_reduce(r):
    assert type(r) == Rect
    return __rect_constructor, (r.x, r.y, r.w, r.h)
copyreg.pickle(Rect, __rect_reduce, __rect_constructor)

#cleanup namespace
del os, sys, #TODO rwobject, surflock, MissingModule, copy_reg
Example #52
    if not '!langname!' in sentences:
        sentences['!langname!'] = (DEFAULT_LANGUAGE_NAME if language in (
            'default', DEFAULT_LANGUAGE) else sentences['!langcode!'])
    write_dict(lang_file, sentences)


### important to allow safe session.flash=T(....)


def lazyT_unpickle(data):
    return marshal.loads(data)


def lazyT_pickle(data):
    return lazyT_unpickle, (marshal.dumps(str(data)), )


copy_reg.pickle(lazyT, lazyT_pickle, lazyT_unpickle)


def update_all_languages(application_path):
    path = pjoin(application_path, 'languages/')
    for language in oslistdir(path):
        if regex_langfile.match(language):
            findT(application_path, language[:-3])


if __name__ == '__main__':
    import doctest
    doctest.testmod()
Example #53
            finite_genome=finite_genome,
            u=u,
            v=v,
            alternate_fg=alternate_fg)

        #return self # comment out (returned for testing earlier)


# Allow TLSpectrum objects to be pickled.
# See http://effbot.org/librarybook/copy-reg.htm
try:
    import copy_reg
except ImportError:
    import copyreg


def TLSpectrum_pickler(fs):
    # Collect all the info necessary to save the state of a TLSpectrum
    return TLSpectrum_unpickler, (fs.data, fs.mask, fs.folded)


def TLSpectrum_unpickler(data, mask, folded):
    # Use that info to recreate the TLSpectrum
    return TLSpectrum(data, mask, mask_infeasible=False, data_folded=folded)


try:
    copy_reg.pickle(TLSpectrum, TLSpectrum_pickler, TLSpectrum_unpickler)
except NameError:
    copyreg.pickle(TLSpectrum, TLSpectrum_pickler, TLSpectrum_unpickler)
Example #54
#  Here are the loading and unloading functions
# The name numpy.core._ufunc_reconstruct must be
#   available for unpickling to work.
def _ufunc_reconstruct(module, name):
    # The `fromlist` kwarg is required to ensure that `mod` points to the
    # inner-most module rather than the parent package when module name is
    # nested. This makes it possible to pickle non-toplevel ufuncs such as
    # scipy.special.expit for instance.
    mod = __import__(module, fromlist=[name])
    return getattr(mod, name)


def _ufunc_reduce(func):
    from pickle import whichmodule
    name = func.__name__
    return _ufunc_reconstruct, (whichmodule(func, name), name)


import sys

if sys.version_info[0] >= 3:
    import copyreg
else:
    import copy_reg as copyreg

copyreg.pickle(ufunc, _ufunc_reduce, _ufunc_reconstruct)
# Unclutter namespace (must keep _ufunc_reconstruct for unpickling)
del copyreg
del sys
del _ufunc_reduce
Example #55
        """Equal."""

        return (
            not isinstance(other, WcRegexp) or
            self._include != other._include or
            self._exclude != other._exclude or
            self._real != other._real or
            self._path != other._path or
            self._follow != other._follow
        )

    def match(self, filename, root_dir=None):
        """Match filename."""

        return _Match(
            filename,
            self._include,
            self._exclude,
            self._real,
            self._path,
            self._follow,
            root_dir=root_dir
        ).match()


def _pickle(p):
    return WcRegexp, (p._include, p._exclude, p._real, p._path, p._follow)


copyreg.pickle(WcRegexp, _pickle)
Example #56
def iterate_deduplicated(iterable: Iterable[Hashable], seen: Iterable[Hashable] = ()) \
                                                                              -> Iterator[Hashable]:
    seen = set(seen)
    for item in iterable:
        if item in seen:
            continue
        else:
            yield item
            seen.add(item)


def pickle_lock(lock):
    return (threading.Lock, ())


copyreg.pickle(type(threading.Lock()), pickle_lock)


def pickle_r_lock(r_lock):
    return (threading.RLock, ())


copyreg.pickle(type(threading.RLock()), pickle_r_lock)


def pickle_stack_summary(stack_summary):
    return (tensorflow.compat.v1.flags.tf_decorator.tf_stack.StackSummary, ())


copyreg.pickle(tensorflow.compat.v1.flags.tf_decorator.tf_stack.StackSummary,
               pickle_stack_summary)
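Lock objects can never be pickled directly, so these reducers deliberately discard the lock's state and produce a fresh, unlocked lock on the receiving side; that makes objects which merely hold a lock survive pickling and deepcopy (copy consults the same copyreg registry). A minimal sketch of the Lock/RLock part:

import copy
import copyreg
import threading

copyreg.pickle(type(threading.Lock()), lambda lock: (threading.Lock, ()))
copyreg.pickle(type(threading.RLock()), lambda r_lock: (threading.RLock, ()))

class Resource:
    def __init__(self):
        self.lock = threading.Lock()

clone = copy.deepcopy(Resource())  # the clone gets a brand-new, unlocked lock
assert not clone.lock.locked()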
Example #57
        return spawnvpe(mode, file, args[:-1], env)


    __all__.extend(["spawnvp", "spawnvpe", "spawnlp", "spawnlpe",])

import copyreg as _copyreg

def _make_stat_result(tup, dict):
    return stat_result(tup, dict)

def _pickle_stat_result(sr):
    (type, args) = sr.__reduce__()
    return (_make_stat_result, args)

try:
    _copyreg.pickle(stat_result, _pickle_stat_result, _make_stat_result)
except NameError: # stat_result may not exist
    pass

def _make_statvfs_result(tup, dict):
    return statvfs_result(tup, dict)

def _pickle_statvfs_result(sr):
    (type, args) = sr.__reduce__()
    return (_make_statvfs_result, args)

try:
    _copyreg.pickle(statvfs_result, _pickle_statvfs_result,
                     _make_statvfs_result)
except NameError: # statvfs_result may not exist
    pass
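CPython's own os module ships this registration, which is why os.stat() results pickle cleanly everywhere; the NameError guards cover builds where stat_result or statvfs_result is not compiled in. Round trip, standard library only:

import os
import pickle

st = os.stat('.')
assert pickle.loads(pickle.dumps(st)).st_mode == st.st_mode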
Example #58
    def __init__(self,
                 search_space,
                 obj_func,
                 surrogate,
                 ftarget=None,
                 minimize=True,
                 noisy=False,
                 max_eval=None,
                 max_iter=None,
                 infill='EI',
                 t0=2,
                 tf=1e-1,
                 schedule=None,
                 n_init_sample=None,
                 n_point=1,
                 n_job=1,
                 backend='multiprocessing',
                 n_restart=None,
                 max_infill_eval=None,
                 wait_iter=3,
                 optimizer='MIES',
                 log_file=None,
                 data_file=None,
                 verbose=False,
                 random_seed=None,
                 available_gpus=[]):
        """
        parameter
        ---------
            search_space : instance of SearchSpace type
            obj_func : callable,
                the objective function to optimize
            surrogate: surrogate model, currently support either GPR or random forest
            minimize : bool,
                minimize or maximize
            noisy : bool,
                is the objective stochastic or not?
            max_eval : int,
                maximal number of evaluations on the objective function
            max_iter : int,
                maximal iteration
            n_init_sample : int,
                the size of the initial Design of Experiment (DoE),
                default: 20 * dim
            n_point : int,
                the number of candidate solutions proposed using infill-criteria,
                default : 1
            n_job : int,
                the number of jobs scheduled for parallelizing the evaluation.
                Only effective when n_point > 1
            backend : str, 
                the parallelization backend, supporting: 'multiprocessing', 'MPI', 'SPARC'
            optimizer: str,
                the optimization algorithm for infill-criteria,
                supported options: 'MIES' (Mixed-Integer Evolution Strategy), 
                                   'BFGS' (quasi-Newton for GPR)
            available_gpus: array:
                one dimensional array of GPU numbers to use for running on GPUs in parallel. Defaults to no gpus.

        """
        self.verbose = verbose
        self.log_file = log_file
        self.data_file = data_file
        self._space = search_space
        self.var_names = self._space.var_name.tolist()
        self.obj_func = obj_func
        self.noisy = noisy
        self.surrogate = surrogate
        self.n_point = n_point
        self.n_jobs = min(self.n_point, n_job)
        self.available_gpus = available_gpus
        self._parallel_backend = backend
        self.ftarget = ftarget
        self.infill = infill
        self.minimize = minimize
        self.dim = len(self._space)
        self._best = min if self.minimize else max

        self.r_index = self._space.id_C  # index of continuous variable
        self.i_index = self._space.id_O  # index of integer variable
        self.d_index = self._space.id_N  # index of categorical variable

        self.param_type = self._space.var_type
        self.N_r = len(self.r_index)
        self.N_i = len(self.i_index)
        self.N_d = len(self.d_index)

        # parameter: objective evaluation
        # TODO: for noisy objective function, maybe increase the initial evaluations
        self.init_n_eval = 1
        self.max_eval = int(max_eval) if max_eval else np.inf
        self.max_iter = int(max_iter) if max_iter else np.inf
        self.n_init_sample = self.dim * 20 if n_init_sample is None else int(
            n_init_sample)
        self.eval_hist = []
        self.eval_hist_id = []
        self.iter_count = 0
        self.eval_count = 0

        # setting up cooling schedule
        if self.infill == 'MGFI':
            self.t0 = t0
            self.tf = tf
            self.t = t0
            self.schedule = schedule

            # TODO: find a nicer way to integrate this part
            # cooling down to 1e-1
            max_iter = self.max_eval - self.n_init_sample
            if self.schedule == 'exp':  # exponential
                self.alpha = (self.tf / t0)**(1. / max_iter)
            elif self.schedule == 'linear':
                self.eta = (t0 - self.tf) / max_iter  # linear
            elif self.schedule == 'log':
                self.c = self.tf * np.log(max_iter + 1)  # logarithmic
            elif self.schedule == 'self-adaptive':
                raise NotImplementedError

        # parameter: acquisition function optimization
        mask = np.nonzero(self._space.C_mask | self._space.O_mask)[0]
        self._bounds = np.array(
            [self._space.bounds[i]
             for i in mask])  # bounds for continuous and integer variable
        # self._levels = list(self._space.levels.values())
        self._levels = np.array([
            self._space.bounds[i] for i in self._space.id_N
        ])  # levels for discrete variable
        self._optimizer = optimizer
        # TODO: set this number smaller when using L-BFGS and larger for MIES
        self._max_eval = int(
            5e2 * self.dim) if max_infill_eval is None else max_infill_eval
        self._random_start = int(5 *
                                 self.dim) if n_restart is None else n_restart
        self._wait_iter = int(
            wait_iter)  # maximal restarts when optimal value does not change

        # Intensify: the number of potential configurations compared against the current best
        # self.mu = int(np.ceil(self.n_init_sample / 3))
        self.mu = 3

        # stop criteria
        self.stop_dict = {}
        self.hist_f = []
        self._check_params()

        # set the random seed
        self.random_seed = random_seed
        if self.random_seed:
            np.random.seed(self.random_seed)

        self._get_logger(self.log_file)

        # allows for pickling the objective function
        copyreg.pickle(self._eval_one, dill.pickles)
        copyreg.pickle(self.obj_func, dill.pickles)

        # paralellize gpus
        self.init_gpus = True
        self.evaluation_queue = queue.Queue()
Example #59
import numpy as np

from pyspark.sql.types import UserDefinedType, StructField, StructType, ArrayType, DoubleType, \
    IntegerType, ByteType, BooleanType


__all__ = ['Vector', 'DenseVector', 'SparseVector', 'Vectors',
           'Matrix', 'DenseMatrix', 'SparseMatrix', 'Matrices']


if sys.version_info[:2] == (2, 7):
    # speed up pickling array in Python 2.7
    def fast_pickle_array(ar):
        return array.array, (ar.typecode, ar.tostring())
    copy_reg.pickle(array.array, fast_pickle_array)


# Check whether we have SciPy. MLlib works without it too, but if we have it, some methods,
# such as _dot and _serialize_double_vector, start to support scipy.sparse matrices.

try:
    import scipy.sparse
    _have_scipy = True
except ImportError:
    # No SciPy in environment, but that's okay
    _have_scipy = False


def _convert_to_vector(l):
    if isinstance(l, Vector):
Example #60
    def get_description(self):
        return self.__description

    def get_name(self):
        return self.__name

    def __str__(self):
        return 'PermissionSetting: %s' % self.__name

    __repr__ = __str__


# register PermissionSettings to be symbolic constants by identity,
# even when pickled and unpickled.
copyreg.constructor(PermissionSetting)
copyreg.pickle(PermissionSetting, PermissionSetting.get_name,
               PermissionSetting)

Allow = PermissionSetting('Allow', 'Explicit allow setting for permissions')

Deny = PermissionSetting('Deny', 'Explicit deny setting for permissions')

AllowSingle = PermissionSetting('AllowSingle',
                                'Explicit allow and not inherit permission')

Unset = PermissionSetting(
    'Unset', 'Unset constant that denotes no setting for permission')


class IGroups(Interface):  # pylint: disable=E0239
    """A group Utility search."""