Example #1
    def run_pixel(self, ind, value_dict):
        self.reset()
        self.ind = ind
        self.value_dict = value_dict

        self.fitting_kwargs = self.value_dict.pop('fitting_kwargs', {})
        if 'min_function' in self.fitting_kwargs:
            self.fitting_kwargs['min_function'] = dill.loads(
                self.fitting_kwargs['min_function'])
        if 'min_function_grad' in self.fitting_kwargs and isinstance(
                self.fitting_kwargs['min_function_grad'], bytes):
            self.fitting_kwargs['min_function_grad'] = dill.loads(
                self.fitting_kwargs['min_function_grad'])
        self.model.signal.data[:] = self.value_dict.pop('signal.data')

        if self.model.signal.metadata.has_item(
                'Signal.Noise_properties.variance'):
            var = self.model.signal.metadata.Signal.Noise_properties.variance
            if isinstance(var, BaseSignal):
                var.data[:] = self.value_dict.pop('variance.data')

        if 'low_loss.data' in self.value_dict:
            self.model.low_loss.data[:] = self.value_dict.pop('low_loss.data')

        for component_comb in self.generate_component_combinations():
            good_fit = self.fit(component_comb)

            if good_fit:
                if len(self.optional_names) == 0:
                    return self.send_results(current=True)
                else:
                    self.compare_models()
        return self.send_results()
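For context: run_pixel expects any callables inside fitting_kwargs to arrive already dill-serialized, since plain pickle cannot ship closures or lambdas between processes. A minimal sketch of the sending side, assuming a hypothetical build_value_dict helper and a user-supplied my_min_function:

import dill

def build_value_dict(signal_data, my_min_function):
    # Hypothetical helper: dill-encode the callable so that run_pixel can
    # restore it with dill.loads on the worker side.
    return {
        'signal.data': signal_data,
        'fitting_kwargs': {'min_function': dill.dumps(my_min_function)},
    }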
Example #2
def execute_computation(next_computation):

	if next_computation.type == ComputationQueue.OPTIM:
		optimization = Optimization.objects.get(id=next_computation.computation_id)
		optim = loads(next_computation.object.encode('Latin-1'))

		if not optim.isInterrupted():
			optim.run_async(
				success=lambda executed_optim: optim_success(optimization, executed_optim),
				failure=lambda executed_optim, error=None: optim_error(optimization, executed_optim, error),
				nb_procs=optimization.cores,
				timeout=next_computation.timeout
			)

		else:
			optim.restart_async(
				success=lambda executed_optim: optim_success(optimization, executed_optim),
				failure=lambda executed_optim, error=None: optim_error(optimization, executed_optim, error),
				nb_procs=optimization.cores,
				timeout=next_computation.timeout
			)
		optimization.status = Optimization.BUSY
		optimization.save()
		next_computation.delete()

	else:
		continuation = Continuation.objects.get(id=next_computation.computation_id)
		cont = loads(next_computation.object.encode('Latin-1'))
		cont.run_async(
			lambda res: cont_success(continuation, cont, res),
			lambda error=None: cont_error(continuation, cont, error)
		)
		continuation.status = Continuation.BUSY
		continuation.save()
		next_computation.delete()
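The .encode('Latin-1') before loads suggests the pickled object is stored in the queue row as a Latin-1 string of the raw pickle bytes. A sketch of the enqueueing side under that assumption (the field names are taken from the snippet above; the exact model API and this helper are assumptions):

from dill import dumps

def enqueue_optimization(optim, optimization, timeout):
    # Assumption: pickle bytes are stored as a Latin-1 decoded string,
    # which is why execute_computation re-encodes them before loads().
    ComputationQueue.objects.create(
        type=ComputationQueue.OPTIM,
        computation_id=optimization.id,
        object=dumps(optim).decode('Latin-1'),
        timeout=timeout,
    )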
Example #3
    def __init__(self, algo, domain, trials, rstate, asynchronous=None,
                 max_queue_len=1,
                 poll_interval_secs=1.0,
                 max_evals=sys.maxsize,
                 verbose=0,
                 show_progressbar=True
                 ):
        self.algo = algo
        self.domain = domain
        self.trials = trials
        self.show_progressbar = show_progressbar
        if asynchronous is None:
            self.asynchronous = trials.asynchronous
        else:
            self.asynchronous = asynchronous
        self.poll_interval_secs = poll_interval_secs
        self.max_queue_len = max_queue_len
        self.max_evals = max_evals
        self.rstate = rstate

        if self.asynchronous:
            if 'FMinIter_Domain' in trials.attachments:
                logger.warn('over-writing old domain trials attachment')
            msg = pickler.dumps(domain)
            # -- sanity check for unpickling
            pickler.loads(msg)
            trials.attachments['FMinIter_Domain'] = msg
Example #4
 def handle(self):
     import sys
     data = self.request.recv(4096)
     cur_thread = threading.current_thread()
     response = "{}: {}".format(cur_thread.name, data)
     rec = dill.loads(data)
     modulesNames = rec[1]
     h = hashlib.sha1()
     h.update(rec[0])
     hexdigestkey = h.hexdigest()
     if hexdigestkey != rec[4]:
         sys.exit(1)
     remfunc = dill.loads(rec[0])
     for mod_name in modulesNames:
         remfunc.__globals__[mod_name] = import_module(mod_name)
     try:
         res = remfunc(*rec[2], **rec[3])
     except:
         msgerr= sys.exc_info()[0]
         res_pack = dill.dumps(msgerr)
         self.request.sendall(res_pack)             
         self.server.shutdown()            
     res_pack = dill.dumps(res)
     self.request.sendall(res_pack)
     self.server.shutdown()
Example #5
def loads(s):
  try:
    return dill.loads(base64.b64decode(s))
  except Exception:          # pylint: disable=broad-except
    dill.dill._trace(True)   # pylint: disable=protected-access
    return dill.loads(base64.b64decode(s))
  finally:
    dill.dill._trace(False)  # pylint: disable=protected-access
Example #6
 def check_coder(self, coder, *values):
   self._observe(coder)
   for v in values:
     self.assertEqual(v, coder.decode(coder.encode(v)))
   copy1 = dill.loads(dill.dumps(coder))
   copy2 = dill.loads(dill.dumps(coder))
   for v in values:
     self.assertEqual(v, copy1.decode(copy2.encode(v)))
Example #7
def test_picklemap():
    encode = picklemap(typed=False, flat=True, serializer='dill')
    assert encode(*args, **kwds) == dumps((1, 2, 'a', 3, 'b', 4))
    encode = picklemap(typed=False, flat=False, serializer='dill')
    assert loads(encode(*args, **kwds)) == loads(dumps((args, kwds)))
    encode = picklemap(typed=True, flat=True, serializer='dill')
    assert encode(*args, **kwds) == dumps( (1, 2, 'a', 3, 'b', 4, type(1), type(2), type(3), type(4)) )
    encode = picklemap(typed=True, flat=False, serializer='dill')
    assert loads(encode(*args, **kwds)) == loads(dumps( (args, kwds, (type(1), type(2)), (type(3), type(4))) ))
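This test relies on module-level args and kwds that the snippet does not show; judging from the flattened tuples in the assertions, they are presumably:

args = (1, 2)
kwds = {'a': 3, 'b': 4}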
Example #8
 def _get_job_from_queue(self, queue_name, worker_id, route, tr):
     rqueue_name = queue_name + '-' + route
     route_queue = self._get_queue(queue_name + '-' + route)
     worker_queue = self._get_queue('__WORKERQUEUE-'+worker_id)
     rret = route_queue.pop(self.conn)
     if rret:
         return dill.loads(rret)
     wret = worker_queue.pop(self.conn)
     if wret:
         return dill.loads(wret)
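The pop/dill.loads pairing implies the submitting side pushes dill-serialized jobs onto the same per-route queue. A rough counterpart, assuming the queue object exposes a push mirroring pop (both the method name and this helper are assumptions):

 def _put_job_on_queue(self, queue_name, route, job):
     # Serialize with dill so _get_job_from_queue can restore the job.
     route_queue = self._get_queue(queue_name + '-' + route)
     route_queue.push(self.conn, dill.dumps(job))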
Example #9
def test_basic():
    a = [0, 1, 2]
    pa = pickle.dumps(a)
    pmath = pickle.dumps(math) #XXX: FAILS in pickle
    pmap = pickle.dumps(map)
    # ...
    la = pickle.loads(pa)
    lmath = pickle.loads(pmath)
    lmap = pickle.loads(pmap)
    assert list(map(math.sin, a)) == list(lmap(lmath.sin, la))
Example #10
def _sample_multi(what):
    graphlearner = dill.loads(what[0])
    graphlist = dill.loads(what[1])
    # if jobsize % batchsize != 0, sample will not give me a tuple,
    # here i filter for these
    multisampletime=time.time()
    #result = [graphlearner.transform_single(g) for g in graphlist]
    result = map(graphlearner.transform_single, graphlist)
    logger.log(5,'sampled a batch in %.2fs' % (time.time()-multisampletime))
    # print result
    return [e for e in result if type(e) == type(())]
Example #11
 def is_damaged(self):
     mem = self.stored()
     if mem and 'obj' in mem:
         if self._obj is None:
             self._memory['obj'] = mem['obj']
             self._obj = dill.loads(base64.b64decode(mem['obj']))
             return self._obj is None
         else:
             return joblib.hash(self._obj) != \
                    joblib.hash(dill.loads(base64.b64decode(mem['obj'])))
     else:
         return self._obj is None
Example #12
    def test_lambdas_pickle(self):
        NONLOCAL_CONST = 5

        lambda_func = lambda x, LOCAL_CONST=7: \
            x * LOCAL_CONST * NONLOCAL_CONST * self.CLASS_CONST * GLOBAL_CONST

        def nested_func(x, LOCAL_CONST=7):
            return x * LOCAL_CONST * NONLOCAL_CONST * self.CLASS_CONST * GLOBAL_CONST

        self.assertEqual(lambda_func(11),
                         pickle.loads(pickle.dumps(lambda_func))(11))
        self.assertEqual(nested_func(11),
                         pickle.loads(pickle.dumps(nested_func))(11))
Example #13
    def test_job_properties_set_data_property(self):
        """Data property gets derived from the job tuple."""
        job = Job()
        def foo(a,b,c,bar=''):
            pass
        job.func = foo
        func, instance, args, kwargs = loads(job.data)
        fname = "tests.test_job."+loads(func).__name__

        self.assertEquals(fname, job.func_name)
        self.assertEquals(instance, None)
        self.assertEquals(args, ())
        self.assertEquals(kwargs, {})
Example #14
def loads(encoded):
  c = base64.b64decode(encoded)

  s = zlib.decompress(c)
  del c  # Free up some possibly large and no-longer-needed memory.

  try:
    return dill.loads(s)
  except Exception:          # pylint: disable=broad-except
    dill.dill._trace(True)   # pylint: disable=protected-access
    return dill.loads(s)
  finally:
    dill.dill._trace(False)  # pylint: disable=protected-access
Example #15
 def check_coder(self, coder, *values):
   self._observe(coder)
   for v in values:
     self.assertEqual(v, coder.decode(coder.encode(v)))
     self.assertEqual(coder.estimate_size(v),
                      len(coder.encode(v)))
     self.assertEqual(coder.estimate_size(v),
                      coder.get_impl().estimate_size(v))
     self.assertEqual(coder.get_impl().get_estimated_size_and_observables(v),
                      (coder.get_impl().estimate_size(v), []))
   copy1 = dill.loads(dill.dumps(coder))
   copy2 = dill.loads(dill.dumps(coder))
   for v in values:
     self.assertEqual(v, copy1.decode(copy2.encode(v)))
     if coder.is_deterministic():
       self.assertEqual(copy1.encode(v), copy2.encode(v))
Example #16
    def do_job(self, job):
        # depickle
        body = pickle.loads(job['body'])
        directory = body['dir']
        # FIXME a horrible hack where we add ourselves to the pythonpath
        sys.path.append(directory)
        mod = __import__(body['mod'])
        self.log.debug("successfully imported module "+str(mod))

        if job['fn_type'] == 'method':
            parent = dill.loads(body['parent'])
            fn = getattr(parent, body['fn'])
        else:
            fn = getattr(mod, body['fn'])
        args = body['args']
        kwargs = body['kwargs']

        # call it
        self.persister.set_working(self.worker_id)
        try:
            ret = fn(*args, **kwargs)
            self.persister.save_result(job['job_id'], ret, 'complete')
            self.log.info(ret)
        except Exception as e:
            self.persister.save_result(job['job_id'], None, 'error')
            self.log.error(str(e))
            exc_t, exc_v, exc_tr = sys.exc_info()
            self.log.error(str(
                '\n'.join(traceback.format_exception(exc_t, exc_v, exc_tr))))
            self.log.debug("python path is %s" % str(sys.path))
        finally:
            a = sys.path.pop()
        self.persister.unset_working(self.worker_id)
Example #17
File: pmap.py  Project: RONNCC/bumps
    def _process_work(msg):
        # Check for sentinel
        if msg.reply_to == "": channel.basic_cancel(consumer)

        body = pickle.loads(msg.body)
        mapid = body['mapid']
        if mapid not in _cache:
            _fetch_function(msg.reply_to, mapid)
        function = _cache[mapid]
        if function == None:
            channel.basic_ack(msg.delivery_tag)
            return

        # Acknowledge delivery of message
        #print "processing...",body['index'],body['value']
        try:
            result = function(body['value'])
        except:
            result = None
        #print "done"
        channel.basic_ack(msg.delivery_tag)
        reply = dict(index=body['index'], result=result, mapid=mapid)
        replymsg = amqp.Message(pickle.dumps(reply))
        channel.basic_publish(replymsg, exchange=exchange,
                              routing_key=msg.reply_to)
Example #18
def test_serializability():
    state = da.random.RandomState(5)
    x = state.normal(10, 1, size=10, chunks=5)

    y = dill.loads(dill.dumps(x))

    assert (x.compute() == y.compute()).all()
Example #19
def load_model(model_name):
    print('Loading model {0}'.format(model_name))

    # open in binary mode: dill.loads expects the raw pickle bytes
    with open(os.path.join('../models', model_name), 'rb') as f:
        encoded_str = f.read()

    return dill.loads(encoded_str)
Example #20
 def unserialize(cls, value):
     unserialized = json.loads(value)
     return cls(
         f_code=dill.loads(base64.b64decode(unserialized['f_code'])),
         f_args=unserialized['f_args'],
         f_kwargs=unserialized['f_kwargs']
     )
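A matching serialize would have to base64-encode the dill payload so it survives inside the JSON document. A sketch, assuming the instance carries f_code, f_args and f_kwargs and that the json/base64/dill imports used above are available:

 def serialize(self):
     # Inverse of unserialize: dill-dump the callable, base64 it for JSON.
     return json.dumps({
         'f_code': base64.b64encode(dill.dumps(self.f_code)).decode('ascii'),
         'f_args': self.f_args,
         'f_kwargs': self.f_kwargs,
     })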
Example #21
def load_pickle(h5f, safe=True):
    """ Deserialize and load a pickled object within a hickle file

    WARNING: Unpickling can execute arbitrary code, so only disable the
    safety check for files that come from a trusted source.

    Parameters
    ----------
    h5f: h5py.File object

    safe (bool): Disable automatic depickling of arbitrary python objects.
    DO NOT set this to False unless the file is from a trusted source.
    (see http://www.cs.jhu.edu/~s/musings/pickle.html for an explanation)
    """

    if not safe:
        try:
            import dill as cPickle
        except ImportError:
            import cPickle

        data = h5f["data"][:]
        data = cPickle.loads(data[0])
        return data
    else:
        print("\nWarning: Object is of an unknown type, and has not been loaded")
        print("         for security reasons (it could be malicious code). If")
        print("         you wish to continue, manually set safe=False\n")
Example #22
    def test_pickle(self):
        import sys
        if sys.version_info < (3, 4):
            import dill as pickle
        else:
            import pickle

        # go to non initial state B
        self.stuff.to_B()
        # pickle Stuff model
        dump = pickle.dumps(self.stuff)
        self.assertIsNotNone(dump)
        stuff2 = pickle.loads(dump)
        self.assertTrue(stuff2.machine.is_state("B"))
        # check if machines of stuff and stuff2 are truly separated
        stuff2.to_A()
        self.stuff.to_C()
        self.assertTrue(stuff2.machine.is_state("A"))
        thread = Thread(target=stuff2.process)
        thread.start()
        # give thread some time to start
        time.sleep(0.01)
        # both objects should be in different states
        # and also not share locks
        begin = time.time()
        # stuff should not be locked and execute fast
        self.assertTrue(self.stuff.machine.is_state("C"))
        fast = time.time()
        # stuff2 should be locked and take about 1 second
        # to be executed
        self.assertTrue(stuff2.machine.is_state("B"))
        blocked = time.time()
        self.assertAlmostEqual(fast-begin, 0, delta=0.1)
        self.assertAlmostEqual(blocked-begin, 1, delta=0.1)
Example #23
def load_compressed(compressed_data):
  """Returns a decompressed and deserialized python object."""
  # Note: SDK uses ``pickler.dumps`` to serialize certain python objects
  # (like sources), which involves serialization, compression and base64
  # encoding. We cannot directly use ``pickler.loads`` for
  # deserialization, as the runner would have already base64 decoded the
  # data. So we only need to decompress and deserialize.

  data = zlib.decompress(compressed_data)
  try:
    return dill.loads(data)
  except Exception:          # pylint: disable=broad-except
    dill.dill._trace(True)   # pylint: disable=protected-access
    return dill.loads(data)
  finally:
    dill.dill._trace(False)  # pylint: disable=protected-access
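The comment above spells out the complementary path; a minimal sketch of what the dump side would look like under those assumptions (dill plus zlib only, with the base64 step handled elsewhere by the runner; this helper is hypothetical):

import zlib
import dill

def dump_compressed(obj):
  """Hypothetical inverse of load_compressed: serialize, then compress."""
  return zlib.compress(dill.dumps(obj))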
Example #24
def from_pickle(file_path, obj_name=None):
    """
    Restore pickled object from disk

    * Workaround taken from [1] for deserializing larger objects

    [1] - http://stackoverflow.com/questions/31468117/python-3-can-pickle-handle-byte-objects-larger-than-4gb

    :param file_path: Absolute path of serialized (pickle) object (can be gzip compressed if path ends with .gz)
    :param obj_name: Optional name to associate with object for logging message
    :return: Deserialized object
    """
    logger.debug('Restoring serialized object {}from location "{}"'.format(_obj_name(obj_name), file_path))

    # If given gzip file, read in one chunk (this limits source gzip files to ~2GB since
    # there is no way to get the uncompressed size of gzip data over 2GB directly)
    if file_path.endswith('.gz'):
        import gzip
        with gzip.open(file_path, 'rb') as f_in:
            return dill.load(f_in)

    # Otherwise, read arbitrarily large uncompressed file
    bytes_in = bytearray(0)
    max_bytes = 2**31 - 1
    input_size = os.path.getsize(file_path)

    with open(file_path, 'rb') as fd:
        for _ in range(0, input_size, max_bytes):
            bytes_in += fd.read(max_bytes)
    return dill.loads(bytes_in)
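The same Stack Overflow workaround writes the pickle in chunks too; a sketch of the matching write side under the same 2**31 - 1 chunk limit (to_pickle, logger and _obj_name are carried over as assumptions, not shown in the snippet):

def to_pickle(obj, file_path, obj_name=None):
    """Sketch of the write-side workaround for pickles larger than ~4GB."""
    logger.debug('Serializing object {}to location "{}"'.format(_obj_name(obj_name), file_path))
    bytes_out = dill.dumps(obj)
    max_bytes = 2**31 - 1
    with open(file_path, 'wb') as fd:
        for idx in range(0, len(bytes_out), max_bytes):
            fd.write(bytes_out[idx:idx + max_bytes])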
Example #25
def reconstruct_object(flags, value):
    """ Reconstructs the value (if necessary) after having saved it in a
    dictionary
    """
    if not isinstance(flags, list):
        flags = parse_flag_string(flags)
    if 'sig' in flags:
        if isinstance(value, dict):
            from hyperspy.signal import Signal
            value = Signal(**value)
            value._assign_subclass()
        return value
    if 'fn' in flags:
        ifdill, thing = value
        if ifdill is None:
            return thing
        if ifdill in [False, 'False']:
            return types.FunctionType(marshal.loads(thing), globals())
        if ifdill in [True, 'True']:
            if not dill_avail:
                raise ValueError("the dictionary was constructed using "
                                 "\"dill\" package, which is not available on the system")
            else:
                return dill.loads(thing)
        # should not be reached
        raise ValueError("The object format is not recognized")
    return value
Example #26
def get_queryset_from_uuid(qs_uuid, model):
    # return queryset_uuid_map[qs_uuid]

    query = pickle.loads(cache.get('aristotle_mdr_cache_qs__%s' % qs_uuid))
    qs = model.objects.none()
    qs.query = query
    return qs
Example #27
  def connect_api(self, __config_parser):
    # Create base client object from config
    __base_client = papi_client.PapiClientFactory.client_from_config(
      __config_parser,
      'papi',
      logger=self.logger.instance
    )
    self.client = loader.PapiClientCollection(
      base_client=__base_client,
      conf=__config_parser,
      logger=self.logger.instance
    )

    # Load old session (Auth cookies) - Looked up by login URI from db
    # Alternatively: session = self.client._client._PapiClient__url
    self.login_uri = __config_parser.get('papi', 'url')
    try:
      __session = APISession.objects.get(login_uri=self.login_uri)
    except APISession.DoesNotExist:
      self.client._client._PapiClient__session = None
      self.cookie = None
    except: raise
    else:
      self.client._client._PapiClient__session = pickle.loads(__session.serialized) 
      self.cookie = self.client._client._PapiClient__session.cookies['PHPSESSID']

    return self.client
Example #28
 def wrapper(*args, **kwds):
     pickledfunc = dill.dumps(func)            
     try:
         HOST, USER, PASSWORD = kwds['remote'][0], kwds['remote'][1], kwds['remote'][2] 
     except:
         return func(*args, **kwds)
     PORT = random.randrange(10000, 20000)
     threads = []
     t = Process(target=start_remote, args=(HOST,USER,PASSWORD,PORT))
     t.start()
     time.sleep(2)
     sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     sock.connect((HOST, PORT))
     modules_list = [itm for itm in func.__globals__.keys()
                     if itm.count("_") == 0 and itm != func.__name__ and itm != 'remoteFunction']
     h = hashlib.sha1()
     h.update(pickledfunc)
     hexdigestkey = h.hexdigest()
     del kwds['remote']
     lis = [pickledfunc, modules_list, args, kwds, hexdigestkey]
     lis = dill.dumps(lis)
     sock.sendall(lis)
     response = sock.recv(4096)
     response = dill.loads(response)
     sock.close()       # these cleanup calls were unreachable after the return
     t.terminate()
     return response
Example #29
def loadDict(filename="featuredict_es.dat"):
  with open(filename, 'rb') as file:
    s = file.read()

  fd = pickle.loads(s)
  return fd
Example #30
 def init(self):
     con = pymongo.MongoClient()
     db = con["reminders"]
     for reminder_ in db["reminders"].find():
         self.reminders.append(dill.loads(reminder_["reminder"]))
     self.reminders.sort()
     db.reminders.remove({})
Example #31
import os
from os import listdir
from os.path import isfile, join, exists
import shutil
import dill as pickle
from collections import defaultdict

DATA_DIR = './data'
AUDIO_SCRAPING_DIR = "./audio-scraping/"
FINAL_LABEL_FILE = DATA_DIR + '/labels.p'
SEASONS = [1, 2, 3, 4, 5, 6, 7, 8]

labels = dict()
for season in SEASONS:
    try:
        label_file = "%sseason%i-labelled-new.p" % (AUDIO_SCRAPING_DIR, season)
        with open(label_file, 'rb') as of:  # binary mode: the pickle payload is bytes
            to_add = pickle.loads(of.read())
        labels.update(to_add)
    except OSError:
        print('\n-------\nERROR: Season %i not found!\n--------\n' % season)

if exists(FINAL_LABEL_FILE):  # DELETES ALL PRE-EXISTING FEATURE DATA FIRST! NB
    print('Deleting previous labels')
    os.remove(FINAL_LABEL_FILE)

# pickle.dump(correct_label_mappings, open(pickle_filename, "wb"))
pickle.dump(labels, open(FINAL_LABEL_FILE, "wb"))
print("\n----\nLabels compiled into %s\n----\n" % FINAL_LABEL_FILE)
Example #32
esc_default = re.escape(default.format(r'\d')).replace(r'\\d', r'\d')
esc_samedefault = re.escape(samedefault.format(r'\d')).replace(r'\\d', r'\d')
esc_newdefault = re.escape(newdefault.format(r'\d')).replace(r'\\d', r'\d')

blocked = "**You've been blocked and reported for spamming.**"
blocklog = "{} **has been blocked, unblock them to see their messages.**"
autoapprove = "**Successfully auto-approved** {}"

DEFAULT_MUTE_SETTINGS = types.InputPeerNotifySettings(
    silent=True, mute_until=datetime.timedelta(days=365))
DEFAULT_UNMUTE_SETTINGS = types.InputPeerNotifySettings(show_previews=True,
                                                        silent=False)

if redis:
    if redis.exists('approved:users'):
        approvedUsers = dill.loads(redis.get('approved:users'))


@client.onMessage(incoming=True, edited=False)
async def pm_incoming(event: NewMessage.Event) -> None:
    """Filter incoming messages for blocking."""
    if not PM_PERMIT or not redis or not event.is_private:
        return
    out = None
    new_pm = False
    entity = await event.get_sender()
    input_entity = await event.get_input_sender()
    sender = getattr(event, 'from_id', entity.id)

    if (entity.verified or entity.support or entity.bot
            or sender in approvedUsers):
Example #33
def bench(strictio, fmode, skippypy):
    import platform
    if skippypy and platform.python_implementation() == 'PyPy':
        # Skip for PyPy...
        return

    # file exists, with same contents
    # read

    write_randomness()

    f = open(fname, "r")
    _f = dill.loads(dill.dumps(f, fmode=fmode))  #, strictio=strictio))
    assert _f.mode == f.mode
    assert _f.tell() == f.tell()
    assert _f.read() == f.read()
    f.close()
    _f.close()

    # write

    f = open(fname, "w")
    f.write("hello")
    f_dumped = dill.dumps(f, fmode=fmode)  #, strictio=strictio)
    f1mode = f.mode
    ftell = f.tell()
    f.close()
    f2 = dill.loads(f_dumped)  #FIXME: fails due to pypy/issues/1233
    # TypeError: expected py_object instance instead of str
    f2mode = f2.mode
    f2tell = f2.tell()
    f2name = f2.name
    f2.write(" world!")
    f2.close()

    if fmode == dill.HANDLE_FMODE:
        assert open(fname).read() == " world!"
        assert f2mode == f1mode
        assert f2tell == 0
    elif fmode == dill.CONTENTS_FMODE:
        assert open(fname).read() == "hello world!"
        assert f2mode == f1mode
        assert f2tell == ftell
        assert f2name == fname
    elif fmode == dill.FILE_FMODE:
        assert open(fname).read() == "hello world!"
        assert f2mode == f1mode
        assert f2tell == ftell
    else:
        raise RuntimeError("Unknown file mode '%s'" % fmode)

    # append

    trunc_file()

    f = open(fname, "a")
    f.write("hello")
    f_dumped = dill.dumps(f, fmode=fmode)  #, strictio=strictio)
    f1mode = f.mode
    ftell = f.tell()
    f.close()
    f2 = dill.loads(f_dumped)
    f2mode = f2.mode
    f2tell = f2.tell()
    f2.write(" world!")
    f2.close()

    assert f2mode == f1mode
    if fmode == dill.CONTENTS_FMODE:
        assert open(fname).read() == "hello world!"
        assert f2tell == ftell
    elif fmode == dill.HANDLE_FMODE:
        assert open(fname).read() == "hello world!"
        assert f2tell == ftell
    elif fmode == dill.FILE_FMODE:
        assert open(fname).read() == "hello world!"
        assert f2tell == ftell
    else:
        raise RuntimeError("Unknown file mode '%s'" % fmode)

    # file exists, with different contents (smaller size)
    # read

    write_randomness()

    f = open(fname, "r")
    fstr = f.read()
    f_dumped = dill.dumps(f, fmode=fmode)  #, strictio=strictio)
    f1mode = f.mode
    ftell = f.tell()
    f.close()
    _flen = 150
    _fstr = write_randomness(number=_flen)

    if strictio:  # throw error if ftell > EOF
        assert throws(dill.loads, (f_dumped, ), buffer_error)
    else:
        f2 = dill.loads(f_dumped)
        assert f2.mode == f1mode
        if fmode == dill.CONTENTS_FMODE:
            assert f2.tell() == _flen
            assert f2.read() == ""
            f2.seek(0)
            assert f2.read() == _fstr
            assert f2.tell() == _flen  # 150
        elif fmode == dill.HANDLE_FMODE:
            assert f2.tell() == 0
            assert f2.read() == _fstr
            assert f2.tell() == _flen  # 150
        elif fmode == dill.FILE_FMODE:
            assert f2.tell() == ftell  # 200
            assert f2.read() == ""
            f2.seek(0)
            assert f2.read() == fstr
            assert f2.tell() == ftell  # 200
        else:
            raise RuntimeError("Unknown file mode '%s'" % fmode)
        f2.close()

    # write

    write_randomness()

    f = open(fname, "w")
    f.write("hello")
    f_dumped = dill.dumps(f, fmode=fmode)  #, strictio=strictio)
    f1mode = f.mode
    ftell = f.tell()
    f.close()
    fstr = open(fname).read()

    f = open(fname, "w")
    f.write("h")
    _ftell = f.tell()
    f.close()

    if strictio:  # throw error if ftell > EOF
        assert throws(dill.loads, (f_dumped, ), buffer_error)
    else:
        f2 = dill.loads(f_dumped)
        f2mode = f2.mode
        f2tell = f2.tell()
        f2.write(" world!")
        f2.close()
        if fmode == dill.CONTENTS_FMODE:
            assert open(fname).read() == "h world!"
            assert f2mode == f1mode
            assert f2tell == _ftell
        elif fmode == dill.HANDLE_FMODE:
            assert open(fname).read() == " world!"
            assert f2mode == f1mode
            assert f2tell == 0
        elif fmode == dill.FILE_FMODE:
            assert open(fname).read() == "hello world!"
            assert f2mode == f1mode
            assert f2tell == ftell
        else:
            raise RuntimeError("Unknown file mode '%s'" % fmode)
        f2.close()

    # append

    trunc_file()

    f = open(fname, "a")
    f.write("hello")
    f_dumped = dill.dumps(f, fmode=fmode)  #, strictio=strictio)
    f1mode = f.mode
    ftell = f.tell()
    f.close()
    fstr = open(fname).read()

    f = open(fname, "w")
    f.write("h")
    _ftell = f.tell()
    f.close()

    if strictio:  # throw error if ftell > EOF
        assert throws(dill.loads, (f_dumped, ), buffer_error)
    else:
        f2 = dill.loads(f_dumped)
        f2mode = f2.mode
        f2tell = f2.tell()
        f2.write(" world!")
        f2.close()
        assert f2mode == f1mode
        if fmode == dill.CONTENTS_FMODE:
            # position of writes cannot be changed on some OSs
            assert open(fname).read() == "h world!"
            assert f2tell == _ftell
        elif fmode == dill.HANDLE_FMODE:
            assert open(fname).read() == "h world!"
            assert f2tell == _ftell
        elif fmode == dill.FILE_FMODE:
            assert open(fname).read() == "hello world!"
            assert f2tell == ftell
        else:
            raise RuntimeError("Unknown file mode '%s'" % fmode)
        f2.close()

    # file does not exist
    # read

    write_randomness()

    f = open(fname, "r")
    fstr = f.read()
    f_dumped = dill.dumps(f, fmode=fmode)  #, strictio=strictio)
    f1mode = f.mode
    ftell = f.tell()
    f.close()

    os.remove(fname)

    if strictio:  # throw error if file DNE
        assert throws(dill.loads, (f_dumped, ), dne_error)
    else:
        f2 = dill.loads(f_dumped)
        assert f2.mode == f1mode
        if fmode == dill.CONTENTS_FMODE:
            # FIXME: this fails on systems where f2.tell() always returns 0
            # assert f2.tell() == ftell # 200
            assert f2.read() == ""
            f2.seek(0)
            assert f2.read() == ""
            assert f2.tell() == 0
        elif fmode == dill.FILE_FMODE:
            assert f2.tell() == ftell  # 200
            assert f2.read() == ""
            f2.seek(0)
            assert f2.read() == fstr
            assert f2.tell() == ftell  # 200
        elif fmode == dill.HANDLE_FMODE:
            assert f2.tell() == 0
            assert f2.read() == ""
            assert f2.tell() == 0
        else:
            raise RuntimeError("Unknown file mode '%s'" % fmode)
        f2.close()

    # write

    write_randomness()

    f = open(fname, "w+")
    f.write("hello")
    f_dumped = dill.dumps(f, fmode=fmode)  #, strictio=strictio)
    ftell = f.tell()
    f1mode = f.mode
    f.close()

    os.remove(fname)

    if strictio:  # throw error if file DNE
        assert throws(dill.loads, (f_dumped, ), dne_error)
    else:
        f2 = dill.loads(f_dumped)
        f2mode = f2.mode
        f2tell = f2.tell()
        f2.write(" world!")
        f2.close()
        if fmode == dill.CONTENTS_FMODE:
            assert open(fname).read() == " world!"
            assert f2mode == 'w+'
            assert f2tell == 0
        elif fmode == dill.HANDLE_FMODE:
            assert open(fname).read() == " world!"
            assert f2mode == f1mode
            assert f2tell == 0
        elif fmode == dill.FILE_FMODE:
            assert open(fname).read() == "hello world!"
            assert f2mode == f1mode
            assert f2tell == ftell
        else:
            raise RuntimeError("Unknown file mode '%s'" % fmode)

    # append

    trunc_file()

    f = open(fname, "a")
    f.write("hello")
    f_dumped = dill.dumps(f, fmode=fmode)  #, strictio=strictio)
    ftell = f.tell()
    f1mode = f.mode
    f.close()

    os.remove(fname)

    if strictio:  # throw error if file DNE
        assert throws(dill.loads, (f_dumped, ), dne_error)
    else:
        f2 = dill.loads(f_dumped)
        f2mode = f2.mode
        f2tell = f2.tell()
        f2.write(" world!")
        f2.close()
        assert f2mode == f1mode
        if fmode == dill.CONTENTS_FMODE:
            assert open(fname).read() == " world!"
            assert f2tell == 0
        elif fmode == dill.HANDLE_FMODE:
            assert open(fname).read() == " world!"
            assert f2tell == 0
        elif fmode == dill.FILE_FMODE:
            assert open(fname).read() == "hello world!"
            assert f2tell == ftell
        else:
            raise RuntimeError("Unknown file mode '%s'" % fmode)

    # file exists, with different contents (larger size)
    # read

    write_randomness()

    f = open(fname, "r")
    fstr = f.read()
    f_dumped = dill.dumps(f, fmode=fmode)  #, strictio=strictio)
    f1mode = f.mode
    ftell = f.tell()
    f.close()
    _flen = 250
    _fstr = write_randomness(number=_flen)

    # XXX: no safe_file: no way to be 'safe'?

    f2 = dill.loads(f_dumped)
    assert f2.mode == f1mode
    if fmode == dill.CONTENTS_FMODE:
        assert f2.tell() == ftell  # 200
        assert f2.read() == _fstr[ftell:]
        f2.seek(0)
        assert f2.read() == _fstr
        assert f2.tell() == _flen  # 250
    elif fmode == dill.HANDLE_FMODE:
        assert f2.tell() == 0
        assert f2.read() == _fstr
        assert f2.tell() == _flen  # 250
    elif fmode == dill.FILE_FMODE:
        assert f2.tell() == ftell  # 200
        assert f2.read() == ""
        f2.seek(0)
        assert f2.read() == fstr
        assert f2.tell() == ftell  # 200
    else:
        raise RuntimeError("Unknown file mode '%s'" % fmode)
    f2.close()  # XXX: other alternatives?

    # write

    f = open(fname, "w")
    f.write("hello")
    f_dumped = dill.dumps(f, fmode=fmode)  #, strictio=strictio)
    f1mode = f.mode
    ftell = f.tell()

    fstr = open(fname).read()

    f.write(" and goodbye!")
    _ftell = f.tell()
    f.close()

    # XXX: no safe_file: no way to be 'safe'?

    f2 = dill.loads(f_dumped)
    f2mode = f2.mode
    f2tell = f2.tell()
    f2.write(" world!")
    f2.close()
    if fmode == dill.CONTENTS_FMODE:
        assert open(fname).read() == "hello world!odbye!"
        assert f2mode == f1mode
        assert f2tell == ftell
    elif fmode == dill.HANDLE_FMODE:
        assert open(fname).read() == " world!"
        assert f2mode == f1mode
        assert f2tell == 0
    elif fmode == dill.FILE_FMODE:
        assert open(fname).read() == "hello world!"
        assert f2mode == f1mode
        assert f2tell == ftell
    else:
        raise RuntimeError("Unknown file mode '%s'" % fmode)
    f2.close()

    # append

    trunc_file()

    f = open(fname, "a")
    f.write("hello")
    f_dumped = dill.dumps(f, fmode=fmode)  #, strictio=strictio)
    f1mode = f.mode
    ftell = f.tell()
    fstr = open(fname).read()

    f.write(" and goodbye!")
    _ftell = f.tell()
    f.close()

    # XXX: no safe_file: no way to be 'safe'?

    f2 = dill.loads(f_dumped)
    f2mode = f2.mode
    f2tell = f2.tell()
    f2.write(" world!")
    f2.close()
    assert f2mode == f1mode
    if fmode == dill.CONTENTS_FMODE:
        assert open(fname).read() == "hello and goodbye! world!"
        assert f2tell == ftell
    elif fmode == dill.HANDLE_FMODE:
        assert open(fname).read() == "hello and goodbye! world!"
        assert f2tell == _ftell
    elif fmode == dill.FILE_FMODE:
        assert open(fname).read() == "hello world!"
        assert f2tell == ftell
    else:
        raise RuntimeError("Unknown file mode '%s'" % fmode)
    f2.close()
Example #34
 def test_load_dump(self):
     with testing.tmp_dir(self.get_temp_dir()) as tmp_dir:
         builder = testing.DummyMnist(data_dir=tmp_dir)
     builder2 = dill.loads(dill.dumps(builder))
     self.assertEqual(builder.name, builder2.name)
     self.assertEqual(builder.version, builder2.version)
Example #35
    def run(self):
        while True:
            try:
                msgtype, typ, obj0 = self.socket.recv_pyobj()
                newmsg = None
                # GUCK Handler
                if msgtype == "guck":
                    pass
                # NEST handler
                elif msgtype == "nest":
                    # prepare text
                    if typ == "send":
                        status_changed, device_changed, nestlist = dill.loads(
                            obj0)
                        newmsg = "*** NEST ***\n\n"
                        if status_changed != []:
                            newmsg += "STRUCTURE STATUS UPDATES:\n"
                            for ss0 in status_changed:
                                nm, it, st = ss0
                                newmsg += nm
                                if it == "new_structure":
                                    newmsg += ": NEW\n"
                                if it == "away":
                                    newmsg += ": Away (" + st.upper() + ")\n"
                            if device_changed != []:
                                newmsg += "\n"
                        if device_changed != []:
                            newmsg += "DEVICE STATUS UPDATES:\n"
                            for dd0 in device_changed:
                                nm, lc, it, st = dd0
                                newmsg += nm + " - " + lc + ": "
                                if it == "co_alarm_state":
                                    newmsg += "CO2 (" + st.upper() + ")\n"
                                if it == "smoke_alarm_state":
                                    newmsg += "smoke (" + st.upper() + ")\n"
                                if it == "battery_health":
                                    newmsg += "energy (" + st.upper() + ")\n"
                        self.DATALIST.append(
                            (msgtype,
                             dill.dumps(
                                 (status_changed, device_changed, nestlist))))
                        # send text to telegram, mail, ftp, sms etc

                        if self.do_telegram and newmsg:
                            self.send_tgrm(newmsg)
                        # server answer depends of msgtype
                        self.socket.send_string("OK")
                    # return nest info for processing by wastl
                    elif typ == "wastlinfo":
                        stat, nestlist = self.get_data("nest")
                        newmsg = "OIS OKAY"
                        self.socket.send_string(newmsg)
                        logger.info("String sent to WASTL!")
                    # return fulltext status
                    elif typ == "getstatus":
                        stat, nestlist = self.get_data("nest")
                        newmsg = "*** NEST ***\n"
                        if stat == 0:
                            newmsg += "\nSTRUCTURE STATUS:"
                            for structure in nestlist:
                                newmsg += "\nName: " + structure["name"] + "\n"
                                newmsg += "Away: " + structure["away"].upper(
                                ) + "\n"
                                newmsg += "CO2:  " + structure[
                                    "co_alarm_state"].upper() + "\n"
                                newmsg += "Smoke:" + structure[
                                    "smoke_alarm_state"].upper() + "\n"
                                newmsg += "\nDEVICE STATUS:\n"
                                for item in structure["locations"]:
                                    newmsg += item["name"] + "\n"
                                    newmsg += "   CO2:    " + item[
                                        "co_alarm_state"].upper() + "\n"
                                    newmsg += "   Smoke:  " + item[
                                        "smoke_alarm_state"].upper() + "\n"
                                    newmsg += "   Energy: " + item[
                                        "battery_health"].upper() + "\n"
                        self.socket.send_string(newmsg)
            except Exception as e:
                logger.error(str(e))
                try:
                    self.socket.send_string("NOOK")
                except Exception as e:
                    logger.error(str(e))
            if len(self.DATALIST) > 200:
                del self.DATALIST[0]
            time.sleep(0.2)
Example #36
def loaded_wc(wc):
    return dill.loads(dill.dumps(wc))
Example #37
if __name__ == "__channelexec__":

    GUCK_HOME = os.environ["GUCK_HOME"]
    ln = "nestthread"
    logger = logging.getLogger(ln)
    logger.setLevel(logging.INFO)
    fh = logging.FileHandler(GUCK_HOME + "log/" + ln + ".log", mode="w")
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    CONNECTOR_AUX = zenzlib.Connector()

    token, api_url, msg = dill.loads(channel.receive())
    logger.info("Received NEST data")
    NESTSS = Nest_sse(token, api_url, msg)
    NESTSS.start()
    if NESTSS.STATUS == -2:
        channel.send(dill.dumps("NOOK"))
    else:
        channel.send(dill.dumps("OK"))
    while NESTSS.STATUS != -2:
        if time.time() - NESTSS.LASTKEEPALIVE > 120:
            ret0 = dill.dumps("NOOK")
        else:
            ret0 = dill.dumps("OK")
        channel.send(ret0)
        time.sleep(1)
    channel.send("NOOK")
Example #38
def showlog():
    import graphlearn01.utils.draw as draw
    with open("pareto_arg_log", "r") as f:
        for arg in [dill.loads(e) for e in f.read().split("#####")[:-1]]:
            draw.graphlearn(arg[1]['pareto_set'])
Example #39
def pytest_configure(config):
    """
    Configure RPReportListener for send logs.

    :param config: Config file
    :return:  None
    """
    if config.getoption('--collect-only', default=False) or \
            config.getoption('--setup-plan', default=False) or \
            not config.option.rp_enabled:
        config._reportportal_configured = False
        return

    project = config.getini('rp_project')
    endpoint = config.getini('rp_endpoint')
    uuid = getenv('RP_UUID') or config.getini('rp_uuid')
    ignore_errors = config.getini('rp_ignore_errors')
    config._reportportal_configured = all([project, endpoint, uuid])

    if config._reportportal_configured and ignore_errors:
        try:
            verify_ssl = config.getini('rp_verify_ssl')
            r = requests.get(
                '{0}/api/v1/project/{1}'.format(endpoint, project),
                headers={'Authorization': 'bearer {0}'.format(uuid)},
                verify=verify_ssl)
            r.raise_for_status()
        except requests.exceptions.RequestException as exc:
            log.exception(exc)
            config._reportportal_configured = False

    if config._reportportal_configured is False:
        return

    if not config.option.rp_launch:
        config.option.rp_launch = config.getini('rp_launch')
    if not config.option.rp_launch_description:
        config.option.rp_launch_description = config.\
            getini('rp_launch_description')

    if is_master(config):
        config.py_test_service = PyTestServiceClass()
    else:
        config.py_test_service = pickle.loads(
            config.slaveinput['py_test_service'])

    # set Pytest_Reporter and configure it
    if PYTEST_HAS_LOGGING_PLUGIN:
        # This check can go away once we support pytest >= 3.3
        log_level = get_actual_log_level(config, 'rp_log_level')
        if log_level is None:
            log_level = logging.NOTSET
    else:
        log_level = logging.NOTSET

    config._reporter = RPReportListener(config.py_test_service,
                                        log_level=log_level,
                                        endpoint=endpoint)

    if hasattr(config, '_reporter'):
        config.pluginmanager.register(config._reporter)
Example #40
def test_function_with_restricted_object():
    deserialized = dill.loads(dill.dumps(restricted_func, recurse=True))
Example #41
def run_dill_encode(payload):
    fun,args=dill.loads(payload)
    return fun(*args)
Example #42
    # plt.show()
    pred = net.predict(x)[0]
    return x[0], pred


if __name__ == '__main__':
    inp_path = '/home/vidooly/ml/projects/user/vikas/kiss_classifier/data/kiss_data_new_aug/'
    out_dir = '/home/vidooly/ml/projects/user/vikas/kiss_classifier/data/kiss_data_new_aug1/'
    cls_list = ['adult', 'normal']

    for i in range(11):
        os.makedirs(out_dir + 'safe/' + str(i), exist_ok=True)
        os.makedirs(out_dir + 'unsafe/' + str(i), exist_ok=True)
        custom_object = {
            'binary_focal_loss_fixed':
            dill.loads(dill.dumps(binary_focal_loss(gamma=2., alpha=.25))),
            'categorical_focal_loss_fixed':
            dill.loads(dill.dumps(categorical_focal_loss(gamma=2.,
                                                         alpha=.25))),
            'categorical_focal_loss':
            categorical_focal_loss,
            'binary_focal_loss':
            binary_focal_loss
        }

    # load the trained model
    net = load_model(
        '/home/vidooly/ml/projects/user/vikas/kiss_classifier/model/best_path_merge_foc2.hdf5',
        custom_objects=custom_object)

    j = 0
Example #43
    def test_pickle(self):
        import sys
        if sys.version_info < (3, 4):
            import dill as pickle
        else:
            import pickle

        states = [
            'A', 'B', {
                'name': 'C',
                'children':
                ['1', '2', {
                    'name': '3',
                    'children': ['a', 'b', 'c']
                }]
            }, 'D', 'E', 'F'
        ]
        transitions = [{
            'trigger': 'walk',
            'source': 'A',
            'dest': 'B'
        }, {
            'trigger': 'run',
            'source': 'B',
            'dest': 'C'
        }, {
            'trigger': 'sprint',
            'source': 'C',
            'dest': 'D'
        }]
        m = self.stuff.machine_cls(states=states,
                                   transitions=transitions,
                                   initial='A')
        m.heavy_processing = heavy_processing
        m.add_transition('forward', 'A', 'B', before='heavy_processing')

        # # go to non initial state B
        m.to_B()

        # pickle Stuff model
        dump = pickle.dumps(m)
        self.assertIsNotNone(dump)
        m2 = pickle.loads(dump)
        self.assertTrue(m2.is_B())
        m2.to_C_3_a()
        m2.to_C_3_b()
        # check if machines of stuff and stuff2 are truly separated
        m2.to_A()
        m.to_C()
        self.assertTrue(m2.is_A())
        thread = Thread(target=m2.forward)
        thread.start()
        # give thread some time to start
        time.sleep(0.01)
        # both objects should be in different states
        # and also not share locks
        begin = time.time()
        # stuff should not be locked and execute fast
        self.assertTrue(m.is_C())
        fast = time.time()
        # stuff2 should be locked and take about 1 second
        # to be executed
        self.assertTrue(m2.is_B())
        blocked = time.time()
        self.assertAlmostEqual(fast - begin, 0, delta=0.1)
        self.assertAlmostEqual(blocked - begin, 1, delta=0.1)
Example #44
    signal.signal(signal.SIGTERM, sighandler)
    signal.signal(signal.SIGABRT, sighandler)
    signal.signal(signal.SIGALRM, sighandler)

    # Start Telegram
    logger.info("Starting Telegram bot ...")
    UPDATER.start_polling()

    # Loop for threading
    # while ZENZ_RUNNING and NESTSS.STATUS != -2:
    #    time.sleep(1)
    logger.info("Entering ZENZ/NEST threading loop")
    while ZENZ_RUNNING:
        if neststatus == 1:
            try:
                nestok = dill.loads(channel.receive())
            except Exception as e:
                # something awful has happened, kill nestsse
                nestok = "OK"
                neststatus = -1  # never try again
            if nestok == "NOOK":
                i = 1
                neststatus = -1
                while i < 5 and neststatus == -1:
                    gateway.exit()
                    logger.info(
                        "Nest connection down, restarting via execnet ...")
                    time.sleep(3)
                    gateway, channel, neststatus = start_nest_execnet(
                        benv, bstr)
                    i += 1
Example #45
        self.trials = trials
        if async is None:
            self.async = trials.async
        else:
            self.async = async
        self.poll_interval_secs = poll_interval_secs
        self.max_queue_len = max_queue_len
        self.max_evals = max_evals
        self.rstate = rstate

        if self.async:
            if 'FMinIter_Domain' in trials.attachments:
                logger.warn('over-writing old domain trials attachment')
            msg = pickle.dumps(domain, protocol=self.cPickle_protocol)
            # -- sanity check for unpickling
            pickle.loads(msg)
            trials.attachments['FMinIter_Domain'] = msg

    def serial_evaluate(self, N=-1):
        for trial in self.trials._dynamic_trials:
            if trial['state'] == base.JOB_STATE_NEW:
                trial['state'] = base.JOB_STATE_RUNNING
                now = coarse_utcnow()
                trial['book_time'] = now
                trial['refresh_time'] = now
                spec = base.spec_from_misc(trial['misc'])
                ctrl = base.Ctrl(self.trials, current_trial=trial)
                try:
                    result = self.domain.evaluate(spec, ctrl)
                except Exception as e:
                    logger.info('job exception: %s' % str(e))
Example #46
def run_dill_encoded(what):
    """Use dill as replacement for pickle to enable multiprocessing on instance methods"""

    fun, args = dill.loads(what)
    return fun(*args)
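A typical way this helper is used is to pre-encode the callable and its arguments before handing them to a worker pool; a sketch with a standard multiprocessing.Pool (the wrapper function is hypothetical):

import dill
from multiprocessing import Pool

def apply_async_dill(pool, fun, args):
    # Encode the callable plus arguments with dill so the payload survives
    # the pickle-based transport that multiprocessing uses internally.
    payload = dill.dumps((fun, args))
    return pool.apply_async(run_dill_encoded, (payload,))

# usage sketch: with Pool() as pool: apply_async_dill(pool, obj.method, (1, 2)).get()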
Example #47
    def assertFeatureTest(self, fdict, test, feature, shape, dtype):
        """Test that encode=>decoding of a value works correctly."""
        # test feature.encode_example can be pickled and unpickled for beam.
        dill.loads(dill.dumps(feature.encode_example))

        input_value = {'inner': test.value}

        if test.raise_cls is not None:
            with self._subTest('raise'):
                if not test.raise_msg:
                    raise ValueError(
                        'test.raise_msg should be set with {} for test {}'.
                        format(test.raise_cls, type(feature)))
                with self.assertRaisesWithPredicateMatch(
                        test.raise_cls, test.raise_msg):
                    features_encode_decode(fdict,
                                           input_value,
                                           decoders=test.decoders)
        else:
            # Test the serialization only
            if test.expected_serialized is not None:
                with self._subTest('out_serialize'):
                    self.assertEqual(
                        test.expected_serialized,
                        feature.encode_example(test.value),
                    )

            # Test serialization + decoding from disk
            with self._subTest('out'):
                out_tensor, out_numpy = features_encode_decode(
                    fdict,
                    input_value,
                    decoders={'inner': test.decoders},
                )
                out_tensor = out_tensor['inner']
                out_numpy = out_numpy['inner']

                # Assert the returned type match the expected one
                with self._subTest('dtype'):

                    def _get_dtype(s):
                        if isinstance(s, tf.data.Dataset):
                            return tf.nest.map_structure(
                                _get_dtype, s.element_spec)
                        else:
                            return s.dtype

                    out_dtypes = tf.nest.map_structure(_get_dtype, out_tensor)
                    self.assertEqual(out_dtypes, test.dtype or feature.dtype)
                with self._subTest('shape'):
                    # For shape, because (None, 3) match with (5, 3), we use
                    # tf.TensorShape.assert_is_compatible_with on each of the elements
                    expected_shape = feature.shape if test.shape is None else test.shape

                    def _get_shape(s):
                        if isinstance(s, tf.data.Dataset):
                            return utils.map_nested(_get_shape, s.element_spec)
                        else:
                            return s.shape

                    out_shapes = utils.map_nested(_get_shape, out_tensor)

                    shapes_tuple = utils.zip_nested(out_shapes, expected_shape)
                    utils.map_nested(
                        lambda x: x[0].assert_is_compatible_with(x[1]),
                        shapes_tuple)

                # Assert value
                with self._subTest('out_value'):
                    # Eventually construct the tf.RaggedTensor
                    expected = tf.nest.map_structure(
                        lambda t: t.build()
                        if isinstance(t, RaggedConstant) else t,
                        test.expected,
                    )
                    self.assertAllEqualNested(out_numpy,
                                              expected,
                                              atol=test.atol)

                # Assert the HTML representation works
                if not test.decoders:
                    with self._subTest('repr'):
                        self._test_repr(feature, out_numpy)
Example #48
    def __getstate__(self):
        bar[0] = bar[0] + 1
        return {}

    def __setstate__(self, data):
        pass


f = Foo()
from dill import dumps, loads
dumps(f)
dumps(lambda: f, recurse=False)  # doesn't call __getstate__
dumps(lambda: f, recurse=True)  # calls __getstate__
assert bar[0] == 2

#97 serialize lambdas in test files
from math import sin, pi


def sinc(x):
    return sin(x) / x


settings['recurse'] = True
_sinc = dumps(sinc)
del sin
sinc_ = loads(_sinc)  # no NameError... pickling preserves 'sin'
res = sinc_(1)
from math import sin
assert sinc(1) == res
Example #49
 def __setstate__(self, vals):
     vals["logp"] = dill.loads(vals["logp"])
     self.__dict__ = vals
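The dill round-trip only works if __getstate__ stored logp the same way; a sketch of that counterpart, assuming the rest of __dict__ pickles normally:

 def __getstate__(self):
     # Matching dump side: dill-encode the callable so the default pickle
     # machinery never has to serialize it directly.
     vals = self.__dict__.copy()
     vals["logp"] = dill.dumps(vals["logp"])
     return vals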
Example #50
    def _onDepDetectorFinished(self):
        logging.debug(
            'dependency detection in progress (helper-process finished)')

        self._depDetectorData.flush()

        base64payload = self._depDetectorData.getvalue()
        base64payloadSize = len(base64payload)
        base64payloadType = type(base64payload)

        logging.debug('base64payload type: %s', base64payloadType)
        logging.debug('base64payload size: %d', base64payloadSize)

        if base64payloadSize == 0:
            logging.debug('no dependency data received')
            return

        if six.PY2:
            if not Any.isInstance(base64payload, unicode):
                logging.debug('received dependency data of unexpected type')
                logging.debug(
                    '(this could come from a ~/.bashrc which prints text)')

                return

        else:
            if not Any.isInstance(base64payload, bytes):
                logging.debug('received dependency data of unexpected type')
                logging.debug(
                    '(this could come from a ~/.bashrc which prints text)')

                return

        dillPayload = base64.b64decode(base64payload)
        dillPayloadSize = len(dillPayload)
        dillPayloadType = type(dillPayload)

        logging.debug('dillPayload type: %s', dillPayloadType)
        logging.debug('dillPayload size: %d', dillPayloadSize)

        data = dill.loads(dillPayload)
        Any.requireIsDictNonEmpty(data)

        Any.requireIsInstance(data['bstpkg_src'], BSTPackage.BSTSourcePackage)
        Any.requireIsInstance(data['bstpkg_global'],
                              BSTPackage.BSTGloballyInstalledPackage)
        Any.requireIsDict(data['installStatus'])
        Any.requireIsDict(data['installStatusLocal'])
        Any.requireIsDict(data['installStatusProxy'])
        Any.requireIsDict(data['installStatusGlobal'])

        self._bstpkg_src.depSet = data['bstpkg_src'].depSet
        self._bstpkg_src.depTree = data['bstpkg_src'].depTree
        self._bstpkg_global = data['bstpkg_global']

        try:
            self._bstpkg_global.open(self.getCanonicalPath())
        except AssertionError as details:
            logging.debug(details)

        self._installStatus = data['installStatus']
        self._installStatusLocal = data['installStatusLocal']
        self._installStatusProxy = data['installStatusProxy']
        self._installStatusGlobal = data['installStatusGlobal']

        logging.debug('depSet:     %s', self._bstpkg_src.depSet)
        logging.debug('depTree:    %s', self._bstpkg_src.depTree)
        logging.debug('revDepSet:  %s', self._bstpkg_global.revDepSet)
        logging.debug('revDepTree: %s', self._bstpkg_global.revDepTree)

        self.depsDetected.emit(True)

        # retrieving direct dependencies should work, consider an error if not

        try:
            Any.requireIsSet(self._bstpkg_src.depSet)
            Any.requireIsList(self._bstpkg_src.depTree)
        except AssertionError:
            self.depsDetected.emit(False)
            logging.error('unable to retrieve dependencies')

        # for reverse dependencies, on the other hand, it matters whether the
        # package is already installed globally

        if self._bstpkg_global.isInstalled():
            try:
                Any.requireIsSet(self._bstpkg_global.revDepSet)
                Any.requireIsList(self._bstpkg_global.revDepTree)
            except AssertionError:
                logging.error('unable to retrieve reverse dependencies')
        else:
            logging.debug('not globally installed --> no reverse dependencies')

        logging.debug('dependency detection finished')
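For context, the helper process feeding this slot presumably produces the payload by dill-pickling the result dict and base64-encoding it before streaming it back; a hedged sketch of that producer side (the function name and dict layout are assumptions based on the checks above):

import base64
import dill

def encode_dependency_data(data):
    # 'data' is expected to contain 'bstpkg_src', 'bstpkg_global',
    # 'installStatus', ... as required by _onDepDetectorFinished()
    dill_payload = dill.dumps(data)
    return base64.b64encode(dill_payload)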
Exemplo n.º 51
0
    def _in_parallel(circuit, pm_dill=None):
        """Used by _run_several_circuits."""
        running_passmanager = dill.loads(pm_dill)._create_running_passmanager()
        result = running_passmanager.run(circuit)
        return result
Exemplo n.º 52
0
def submit(user):
    """Process user request to submit feature.

    Extracts 'database', 'problem_id', 'feature_dill', 'code', and
    'description' from the POST body.
    """

    try:
        database    = request.form["database"]
        problem_id  = request.form["problem_id"]
        feature_dill= request.form["feature_dill"]
        code        = request.form["code"]
        description = request.form["description"]
    except Exception:
        app.logger.exception("Couldn't read parameters from form.")
        return EvaluationResponse(
            status_code = EvaluationResponse.STATUS_CODE_BAD_REQUEST
        )
    app.logger.debug("Read parameters from form.")

    # preprocessing
    # - look up the problem in the database
    # - look up the user in the database
    # - compute the md5 hash of the feature code
    # - convert the feature code into a function
    orm = ORMManager(database, admin=True)
    with orm.session_scope() as session:
        try:
            problem_obj = session.query(Problem)\
                    .filter(Problem.id == problem_id).one()
        except (NoResultFound, MultipleResultsFound) as e:
            app.logger.exception("Couldn't access problem (id '{}') from db"
                    .format(problem_id))
            return EvaluationResponse(
                status_code = EvaluationResponse.STATUS_CODE_BAD_REQUEST
            )
        except Exception:
            app.logger.exception(
                    "Unexpected issue accessing problem (id '{}') from db"
                    .format(problem_id))
            return EvaluationResponse(
                status_code = EvaluationResponse.STATUS_CODE_SERVER_ERROR
            )

        app.logger.debug("Accessed problem (id '{}') from db"
                .format(problem_id))

        user_name = user["name"]
        try:
            user_obj = session.query(User).filter(User.name == user_name).one()
        except (NoResultFound, MultipleResultsFound) as e:
            app.logger.exception("Couldn't access user (name '{}') from db"
                    .format(user_name))
            return EvaluationResponse(
                status_code = EvaluationResponse.STATUS_CODE_BAD_REQUEST
            )
        app.logger.debug("Accessed user (name '{}') from db".format(user_name))

        md5 = myhash(code)
        app.logger.debug("Computed feature hash.")

        evaluator = EvaluatorServer(problem_id, user_name, orm)
        try:
            is_registered = evaluator.check_if_registered(code)
            if is_registered:
                app.logger.debug("feature already registered.")
                return EvaluationResponse(
                    status_code = EvaluationResponse.STATUS_CODE_DUPLICATE_FEATURE
                )
        except Exception:
            app.logger.exception("Unexpected error checking if feature is "
                                 "registered")
            return EvaluationResponse(
                status_code = EvaluationResponse.STATUS_CODE_SERVER_ERROR
            )
        app.logger.debug("Confirmed that feature is not already registered")

        try:
            feature = dill.loads(unquote_to_bytes(feature_dill))
        except Exception:
            app.logger.exception("Couldn't extract function (code '{}')"
                    .format(code))
            return EvaluationResponse(
                    status_code = EvaluationResponse.STATUS_CODE_BAD_FEATURE
            )
        app.logger.debug("Extracted function.")

        # processing
        # - compute the CV score
        # - compute any other metrics
        try:
            metrics = evaluator.evaluate(feature)
            # TODO expand schema
        except ValueError:
            app.logger.exception("Couldn't evaluate feature (code '{}')"
                    .format(code))
            # feature is invalid
            return EvaluationResponse(
                status_code = EvaluationResponse.STATUS_CODE_BAD_FEATURE
            )
        except Exception:
            app.logger.exception(
                    "Unexpected error evaluating feature (code '{}')"
                    .format(code))
            return EvaluationResponse(
                status_code = EvaluationResponse.STATUS_CODE_SERVER_ERROR
            )
        app.logger.debug("Evaluated feature.")

        try:
            # write to db
            feature_obj = Feature(
                description         = description,
                feature_dill_quoted = feature_dill,
                code                = code,
                md5                 = md5,
                user                = user_obj,
                problem             = problem_obj
            )
            session.add(feature_obj)
            for metric in metrics:
                metric_db = metric.convert(kind="db")
                metric_obj = Metric(
                    feature = feature_obj,
                    name    = metric_db["name"],
                    scoring = metric_db["scoring"],
                    value   = metric_db["value"]
                )
                session.add(metric_obj)
        except Exception:
            app.logger.exception("Unexpected error inserting into db")
            return EvaluationResponse(
                status_code = EvaluationResponse.STATUS_CODE_DB_ERROR
            )
        app.logger.debug("Inserted into db.")

        # post to forum
        problem_name = problem_obj.name
        if is_positive_env(os.environ.get("USE_DISCOURSE")) and \
            problem_name != DEMO_PROBLEM_NAME:
            try:
                topic_obj = DiscourseFeatureTopic(feature_obj, metrics)
                topic_url = topic_obj.post_feature()
                app.logger.debug("Posted to forum")
            except Exception:
                topic_url = ""
                app.logger.exception("Unexpected error posting to forum")
        else:
            topic_url = ""

    # return
    # - status code
    # - metrics dict
    return EvaluationResponse(
        status_code=EvaluationResponse.STATUS_CODE_OKAY,
        metrics=metrics,
        topic_url=topic_url,
    )
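A sketch of how a client might build the 'feature_dill' field that this endpoint consumes: dill-dump the feature function and percent-quote the bytes so they survive a form-encoded POST. The URL and helper name are assumptions, not part of the project:

import dill
import requests
from urllib.parse import quote_from_bytes

def submit_feature(base_url, feature_fn, code, description, database, problem_id):
    form = {
        "database": database,
        "problem_id": problem_id,
        # mirror of unquote_to_bytes(...) on the server side
        "feature_dill": quote_from_bytes(dill.dumps(feature_fn)),
        "code": code,
        "description": description,
    }
    return requests.post(base_url + "/submit", data=form)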
Exemplo n.º 53
0
def run_dill_encoded(what):
    fun, args = dill.loads(what)
    return fun(*args)
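The submitting side of this pattern usually dill-dumps the callable together with its arguments into a single payload, so that an ordinary multiprocessing pool can ship it to run_dill_encoded. A sketch of that common companion, not necessarily this project's wrapper:

import dill
from multiprocessing import Pool

def apply_async_with_dill(pool, fun, args):
    payload = dill.dumps((fun, args))  # one blob carries both the callable and its arguments
    return pool.apply_async(run_dill_encoded, (payload,))

# usage sketch:
# with Pool(4) as pool:
#     result = apply_async_with_dill(pool, some_function, (1, 2)).get()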
Exemplo n.º 54
0
    def __setstate__(self, state):
        (self.datapipe, dill_function, self.args, self.kwargs) = state
        if DILL_AVAILABLE:
            self.fn = dill.loads(dill_function)  # type: ignore[assignment]
        else:
            self.fn = dill_function  # type: ignore[assignment]
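The matching __getstate__ presumably mirrors this branch, dill-dumping self.fn when dill is available and falling back to the raw callable otherwise; a sketch under that assumption:

    def __getstate__(self):
        if DILL_AVAILABLE:
            dill_function = dill.dumps(self.fn)
        else:
            dill_function = self.fn
        return (self.datapipe, dill_function, self.args, self.kwargs)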
Exemplo n.º 55
0
    def _in_parallel(circuit, pm_dill=None) -> QuantumCircuit:
        """Task used by the parallel map tools from ``_run_several_circuits``."""
        running_passmanager = dill.loads(pm_dill)._create_running_passmanager()
        result = running_passmanager.run(circuit)
        return result
Exemplo n.º 56
0
def emulate(conn_mat, scaling, n_gpus, steps, use_mps, cache_file='cache.db'):
    """
    Benchmark inter-LPU communication throughput.

    Each LPU is configured to use a different local GPU.

    Parameters
    ----------
    conn_mat : numpy.ndarray
        Square array containing numbers of directed spiking port connections 
        between LPUs (which correspond to the row and column indices). 
    scaling : int
        Scaling factor; multiply all connection numbers by this value.
    n_gpus : int
        Number of GPUs over which to partition the emulation.
    steps : int
        Number of steps to execute.
    use_mps : bool
        Use Multi-Process Service if True.
    cache_file : str
        Name of the LMDB database file used to cache the generated routing table.

    Returns
    -------
    average_throughput, total_throughput : float
        Average per-step and total received data throughput in bytes/second.
    exec_time : float
        Execution time in seconds.
    """

    # Time everything starting with manager initialization:
    start_all = time.time()

    # Set up manager:
    man = MyManager(use_mps)

    # Generate selectors for configuring modules and patterns:
    mod_sels, pat_sels = gen_sels(conn_mat, scaling)

    # Partition nodes in connectivity matrix:
    part_map = partition(conn_mat, n_gpus)

    # Set up modules such that those in each partition use that partition's GPU:
    ranks = set(
        [rank for rank in itertools.chain.from_iterable(part_map.values())])
    rank_to_gpu_map = {rank: gpu for gpu in part_map for rank in part_map[gpu]}
    for i in ranks:
        lpu_i = 'lpu%s' % i
        sel, sel_in, sel_out, sel_gpot, sel_spike = mod_sels[lpu_i]
        man.add(MyModule,
                lpu_i,
                sel,
                sel_in,
                sel_out,
                sel_gpot,
                sel_spike,
                None,
                None, ['interface', 'io', 'type'],
                CTRL_TAG,
                GPOT_TAG,
                SPIKE_TAG,
                device=rank_to_gpu_map[i],
                time_sync=True)

    # Set up connections between module pairs:
    env = lmdb.open(cache_file, map_size=10**10)
    with env.begin() as txn:
        data = txn.get('routing_table')
    if data is not None:
        man.log_info('loading cached routing table')
        routing_table = dill.loads(data)

        # Don't replace man.routing_table outright because its reference is
        # already in the dict of named args to transmit to the child MPI process:
        for c in routing_table.connections:
            man.routing_table[c] = routing_table[c]
    else:
        man.log_info('no cached routing table found - generating')
        for lpu_i, lpu_j in pat_sels.keys():
            sel_from, sel_to, sel_in_i, sel_out_i, sel_gpot_i, sel_spike_i, \
                sel_in_j, sel_out_j, sel_gpot_j, sel_spike_j = pat_sels[(lpu_i, lpu_j)]
            pat = Pattern.from_concat(sel_from,
                                      sel_to,
                                      from_sel=sel_from,
                                      to_sel=sel_to,
                                      data=1,
                                      validate=False)
            pat.interface[sel_in_i, 'interface', 'io'] = [0, 'in']
            pat.interface[sel_out_i, 'interface', 'io'] = [0, 'out']
            pat.interface[sel_gpot_i, 'interface', 'type'] = [0, 'gpot']
            pat.interface[sel_spike_i, 'interface', 'type'] = [0, 'spike']
            pat.interface[sel_in_j, 'interface', 'io'] = [1, 'in']
            pat.interface[sel_out_j, 'interface', 'io'] = [1, 'out']
            pat.interface[sel_gpot_j, 'interface', 'type'] = [1, 'gpot']
            pat.interface[sel_spike_j, 'interface', 'type'] = [1, 'spike']
            man.connect(lpu_i, lpu_j, pat, 0, 1, compat_check=False)
        with env.begin(write=True) as txn:
            txn.put('routing_table', dill.dumps(man.routing_table))

    man.spawn(part_map)
    start_main = time.time()
    man.start(steps)
    man.wait()
    stop_main = time.time()
    return man.average_step_sync_time, (time.time()-start_all), (stop_main-start_main), \
        (man.stop_time-man.start_time)
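Stripped of the benchmark specifics, the routing-table handling above is a read-through cache over LMDB keyed by a dill blob; a minimal sketch of that idiom (the key, map size, and builder callback are placeholders):

import dill
import lmdb

def cached_build(cache_file, key, build_fn):
    # 'key' should be bytes, e.g. b'routing_table'
    env = lmdb.open(cache_file, map_size=10**9)
    with env.begin() as txn:
        raw = txn.get(key)
    if raw is not None:
        return dill.loads(raw)      # cache hit: rebuild the object from its dill bytes
    obj = build_fn()                # cache miss: build once, then store for next time
    with env.begin(write=True) as txn:
        txn.put(key, dill.dumps(obj))
    return obj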
Exemplo n.º 57
0
#TODO: Bind to the machine's actual IP instead of localhost; localhost only binds to the loopback interface.
ADDR = ('192.168.1.19', PORT)
wait_sock.bind(ADDR)
#TODO: Find proper way to write to file AND DELETE FROM FILE
#with open('clients.txt','a+') as f:
#    f.write(pickle.dumps(ADDR) + '\r\n')
print 'Client waiting on', ADDR
wait_sock.listen(1)
s, server_addr = wait_sock.accept()
print 'Server connected from:', server_addr

#BUFFSIZE = 1024*1000 #TODO: Fix receiving of large data
BUFFSIZE = 10000000
# Receive the function ('Count' in the case of this POC) from the server
func_str = s.recv(BUFFSIZE)
func = dill.loads(func_str)
#print func_str
#print 'Testing func:',func('120253482')
s.send('Received function')
# Receive the actual data the function should run on
print 'Waiting for data'
str_data = s.recv(BUFFSIZE)
print str_data
data_len = int(str_data[:10])
print 'Data len:', data_len
str_data = str_data[10:]
while len(str_data) < data_len:
    data = s.recv(BUFFSIZE)
    str_data += data
    print 'recv loop'
#print 'Data received. Length:', len(str_data)
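The server side of this proof of concept presumably mirrors the client: dill-dump the function, send it, wait for the acknowledgement, then send a length-prefixed data blob. A Python 3 sketch of that flow (host, framing, and sizes are assumptions inferred from the client code above):

import socket
import dill

def send_work(client_addr, func, payload_str):
    s = socket.create_connection(client_addr)
    s.sendall(dill.dumps(func))                  # ship the callable itself
    s.recv(1024)                                 # wait for the 'Received function' ack
    data = payload_str.encode()
    header = str(len(data)).rjust(10).encode()   # 10-character length prefix, as parsed by the client
    s.sendall(header + data)
    s.close()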
Exemplo n.º 58
0
def dill_apply_func(sfunc, sargs, skwds):
    func = dill.loads(sfunc)
    args = dill.loads(sargs)
    kwds = dill.loads(skwds)
    return func(*args, **kwds)
Exemplo n.º 59
0

class basic2:
    pass


if __name__ == '__main__':
    x = 5
    y = 1

    # pickled basic stuff
    pa = pickle.dumps(a)
    pmath = pickle.dumps(math)  #XXX: FAILS in pickle
    pmap = pickle.dumps(map)
    # ...
    la = pickle.loads(pa)
    lmath = pickle.loads(pmath)
    lmap = pickle.loads(pmap)
    assert list(map(math.sin, a)) == list(lmap(lmath.sin, la))

    # pickled basic class stuff
    pbasic2 = pickle.dumps(basic2)
    _pbasic2 = pickle.loads(pbasic2)()
    pbasic = pickle.dumps(basic)
    _pbasic = pickle.loads(pbasic)()

    # pickled c2adder
    pc2adder = pickle.dumps(c2adder)
    pc2add5 = pickle.loads(pc2adder)(x)
    assert pc2add5(y) == x + y
Exemplo n.º 60
0
worker_socket.connect((ip,port))

worker_length=f"{len(WORKER_INDEN):<{HEADER_LENGTH}}".encode("utf-8")
worker_socket.send(worker_length+WORKER_INDEN)
Entries={}

while True:
    try:
        print("Not Received")
        message_length=int(worker_socket.recv(HEADER_LENGTH).strip().decode("utf-8"))
        message=worker_socket.recv(message_length).decode("utf-8")
        if(message=="ADD"):
            message_length=int(worker_socket.recv(HEADER_LENGTH).strip().decode("utf-8"))
            message=worker_socket.recv(message_length)
            
            message=pickle.loads(message)
            print("Here",type(message))
            Entries[message.return_Social_Security]=message
            print("Here1")
            message="Data added Successfully".encode("utf-8")
            message_length=f"{len(message):<{HEADER_LENGTH}}".encode("utf-8")
            worker_socket.send(message_length+message)
            pass
        elif(message=="RETRE"):
            print(message)
            messagex=pickle.dumps(Entries)
            messagex_length=f"{len(messagex):<{HEADER_LENGTH}}".encode("utf-8")
            worker_socket.send(messagex_length+messagex)
            pass
        else:
            print(message)