Example No. 1
 def commit(self):
     """Commit transaction
     """
     if not self.txn_active:
         return
     if self.files is not None:
         ttl = int(os.environ.get('BORG_FILES_CACHE_TTL', 20))
         with SaveFile(os.path.join(self.path, 'files'), binary=True) as fd:
             for path_hash, item in self.files.items():
                 # Only keep files seen in this backup that are older than newest mtime seen in this backup -
                 # this is to avoid issues with filesystem snapshots and mtime granularity.
                 # Also keep files from older backups that have not reached BORG_FILES_CACHE_TTL yet.
                 entry = FileCacheEntry(*msgpack.unpackb(item))
                 if entry.age == 0 and bigint_to_int(entry.mtime) < self._newest_mtime or \
                    entry.age > 0 and entry.age < ttl:
                     msgpack.pack((path_hash, entry), fd)
     self.config.set('cache', 'manifest', self.manifest.id_str)
     self.config.set('cache', 'timestamp', self.manifest.timestamp)
     self.config.set('cache', 'key_type', str(self.key.TYPE))
     self.config.set('cache', 'previous_location', self.repository._location.canonical_path())
     with SaveFile(os.path.join(self.path, 'config')) as fd:
         self.config.write(fd)
     self.chunks.write(os.path.join(self.path, 'chunks').encode('utf-8'))
     os.rename(os.path.join(self.path, 'txn.active'),
               os.path.join(self.path, 'txn.tmp'))
     shutil.rmtree(os.path.join(self.path, 'txn.tmp'))
     self.txn_active = False
Example No. 2
    def loop(self, csrcn):
        self.csrcn = csrcn
        self.setup_screen()

        sslctx = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS)
        sslctx.load_cert_chain('certificates/client.crt', 'certificates/client.key')
        sslctx.load_verify_locations('certificates/ca.crt')

        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        ssl_sock = sslctx.wrap_socket(sock)
        ssl_sock.connect(('127.0.0.1', 6161))
        msgpack.pack(['CONNECT', 'brokermon'], ssl_sock)
        msgpack.pack(['LOGMON'], ssl_sock)
        unpacker = msgpack.Unpacker(raw=False)

        while True:
            data = ssl_sock.read(10000)
            if not data:
                return
            unpacker.feed(data)
            for pkt in unpacker:
                if pkt[0] == 'PING':
                    self.csrcn.insstr(8, 7, '{:15}'.format(pkt[1]))
                elif pkt[0] == 'PONG':
                    self.csrcn.insstr(9, 7, '{:15}'.format(pkt[1]))
                elif pkt[0] == 'TICKER':
                    self.csrcn.insstr(7, 7, pkt[1])
                elif pkt[0] == 'LOG':
                    self.log_print(pkt[1], pkt[2])
            self.csrcn.refresh()
Example No. 3
 def write_index(self):
     hints = {b'version': 2,
              b'segments': self.segments,
              b'compact': self.compact}
     transaction_id = self.io.get_segments_transaction_id()
     hints_file = os.path.join(self.path, 'hints.%d' % transaction_id)
     with open(hints_file + '.tmp', 'wb') as fd:
         msgpack.pack(hints, fd)
         fd.flush()
         os.fsync(fd.fileno())
     os.rename(hints_file + '.tmp', hints_file)
     self.index.write(os.path.join(self.path, 'index.tmp'))
     os.rename(os.path.join(self.path, 'index.tmp'),
               os.path.join(self.path, 'index.%d' % transaction_id))
     if self.append_only:
         with open(os.path.join(self.path, 'transactions'), 'a') as log:
             print('transaction %d, UTC time %s' % (transaction_id, datetime.utcnow().isoformat()), file=log)
     # Remove old auxiliary files
     current = '.%d' % transaction_id
     for name in os.listdir(self.path):
         if not name.startswith(('index.', 'hints.')):
             continue
         if name.endswith(current):
             continue
         os.unlink(os.path.join(self.path, name))
     self.index = None
Example No. 4
File: cache.py Project: caitp/inbox
def set_cache(key, val):
    path = _path_from_key(key)
    dirname = os.path.dirname(path)
    mkdirp(dirname)
    log.info("Saving cache to {0}".format(dirname))
    with open(path, 'w') as f:
        msgpack.pack(val, f)
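A read-side counterpart is not part of this snippet; the sketch below is a hypothetical inverse, assuming the same _path_from_key helper and that os and msgpack are imported as above (the name get_cache is illustrative):

def get_cache(key):
    # Hypothetical inverse of set_cache: return None if nothing has been cached for this key.
    path = _path_from_key(key)
    if not os.path.exists(path):
        return None
    with open(path, 'rb') as f:
        return msgpack.unpack(f)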
Example No. 5
 def commit(self):
     """Commit transaction
     """
     if not self.txn_active:
         return
     self.security_manager.save(self.manifest, self.key)
     pi = ProgressIndicatorMessage(msgid='cache.commit')
     if self.files is not None:
         if self._newest_cmtime is None:
             # was never set because no files were modified/added
             self._newest_cmtime = 2 ** 63 - 1  # nanoseconds, good until y2262
         ttl = int(os.environ.get('BORG_FILES_CACHE_TTL', 20))
         pi.output('Saving files cache')
         with IntegrityCheckedFile(path=os.path.join(self.path, 'files'), write=True) as fd:
             for path_hash, item in self.files.items():
                 # Only keep files seen in this backup that are older than newest cmtime seen in this backup -
                 # this is to avoid issues with filesystem snapshots and cmtime granularity.
                 # Also keep files from older backups that have not reached BORG_FILES_CACHE_TTL yet.
                 entry = FileCacheEntry(*msgpack.unpackb(item))
                 if entry.age == 0 and bigint_to_int(entry.cmtime) < self._newest_cmtime or \
                    entry.age > 0 and entry.age < ttl:
                     msgpack.pack((path_hash, entry), fd)
         self.cache_config.integrity['files'] = fd.integrity_data
     pi.output('Saving chunks cache')
     with IntegrityCheckedFile(path=os.path.join(self.path, 'chunks'), write=True) as fd:
         self.chunks.write(fd)
     self.cache_config.integrity['chunks'] = fd.integrity_data
     pi.output('Saving cache config')
     self.cache_config.save(self.manifest, self.key)
     os.rename(os.path.join(self.path, 'txn.active'),
               os.path.join(self.path, 'txn.tmp'))
     shutil.rmtree(os.path.join(self.path, 'txn.tmp'))
     self.txn_active = False
     pi.finish()
Example No. 6
def QueueSender(reader: asyncio.StreamReader, writer: asyncio.StreamWriter, queue_name: str):
    """
    A coroutine for pulling items from the Queue to the streams.
    """
    client = writer.get_extra_info("peername")
    sclient = ':'.join(str(_) for _ in client)
    while True:
        try:
            data = yield from reader.read(65536)
        except ConnectionResetError:
            rlogger.info("Client {} closed connection".format(sclient))
            return
        if not data:
            slogger.info("Client {} closed connection".format(sclient))
            return
        # Unpack data
        try:
            sub_data = msgpack.unpackb(data, encoding='utf-8')
        except (msgpack.UnpackException, ValueError) as e:
            slogger.error("Received non-msgpack pull from {}".format(sclient))
            continue
        action = sub_data.get("action", -1)
        if not action == 1:
            slogger.error("Received non-pull action on pull channel from client {} (action: {})"
                          .format(sclient, action))
            continue
        queue = queues[queue_name][1]
        assert isinstance(queue, asyncio.Queue)
        data = yield from queue.get()
        slogger.debug("Packing data {} for queue {}".format(data[1], queue_name))
        response = {"status": 0, "data": data[1], "msgnum": data[0]}
        msgpack.pack(response, writer)
Example No. 7
File: safe.py Project: bwesterb/pol
    def store_to_stream(self, stream):
        """ Stores the Safe to `stream'.

            This is done automatically if opened with `open'. """
        start_time = time.time()
        l.debug("Packing ...")
        stream.write(SAFE_MAGIC)
        msgpack.pack(self.data, stream)
        l.debug(" packed in %.2fs", time.time() - start_time)
Example No. 8
def pack(response, io):
    if isinstance(response, types.StringTypes):
        io.write(response)
    elif isinstance(response, dict):
        msgpack.pack(response, io)
    elif isinstance(response, types.GeneratorType) or isinstance(response, Iterable):
        [pack(chunk, io) for chunk in response]
    elif response is not None:
        msgpack.pack(response, io)
Example No. 9
def create_random_data(dataset, n_rep=300):
    import shazoo_exps as se
    data = {}
    g_adj, g_ew, gold_signs, phi = se.load_real_graph(dataset)
    g_adj = {int(u): {int(v) for v in adj} for u, adj in g_adj.items()}
    n = len(g_adj)
    nodes = list((range(n)))
    gold = np.array([gold_signs[u] for u in nodes])
    inv_ew = {(int(e[0]), int(e[1])): 1/w for e, w in se.sz.iteritems(g_ew)}
    g_ew = {(int(e[0]), int(e[1])): w for e, w in se.sz.iteritems(g_ew)}

    rst = []
    for _ in range(n_rep):
        se.sz.GRAPH, se.sz.EWEIGHTS = g_adj, g_ew
        se.sz.get_rst(None)
        adj, ew = se.sz.TREE_ADJ, se.sz.TWEIGHTS
        rst.append(tuple(set(ew)))
    data['rst'] = tuple(rst)

    trees = []
    for i in range(n_rep):
        adj, ew = se.get_mst(inv_ew)
        trees.append(tuple(set(ew)))
        if i == 2:
            res = []
            for j, s in enumerate(trees):
                for t in trees[j+1:]:
                    res.append(set(s) == set(t))
            if any(res):
                break
    data['mst'] = tuple(trees)

    nodes_order = []
    for _ in range(n_rep):
        random.shuffle(nodes)
        nodes_order.append(tuple(nodes))
    data['nodes_order'] = tuple(nodes_order)

    batch_order = []
    for ts in train_size:
        level = []
        max_index = int(ts*n)
        indices = list(range(max_index))
        for _ in range(n_rep):
            random.shuffle(indices)
            level.append(tuple(indices))
        batch_order.append(tuple(level))
    data['batch_order'] = tuple(batch_order)

    ones = np.ones(n, dtype=int)
    changed_signs = []
    for ts in pertubations:
        changed_signs.append(create_random_perturbations(ts, ones, nodes, gold, n_rep))
    data['changed_signs'] = tuple(changed_signs)

    with open('{}.random'.format(dataset), 'w+b') as outfile:
        msgpack.pack(data, outfile)
Example No. 10
def generate_msgpack():
    """
    Generate 'postcodes_X.mp' msgpack files; this is a utility and shouldn't be required, as the files are
    included in the repo.

    To use it you need to download full lists of UK postcodes as CSV files
    from http://www.freemaptools.com/download-uk-postcode-lat-lng.htm
    and http://www.doogal.co.uk/UKPostcodes.php
    """
    # loaded locally as only required here
    from math import radians, sin, cos, sqrt, asin
    import csv

    def haversine(lat1, lon1, lat2, lon2):
        R = 6372.8 * 1000 # Earth radius in meters
        dLat = radians(lat2 - lat1)
        dLon = radians(lon2 - lon1)
        lat1 = radians(lat1)
        lat2 = radians(lat2)
        a = sin(dLat/2)**2 + cos(lat1)*cos(lat2)*sin(dLon/2)**2
        c = 2*asin(sqrt(a))
        return R * c

    all_pcs = []
    with open('freemaptools_postcodes.csv', 'rb') as f:
        csv_reader = csv.reader(f)
        next(csv_reader)  # heading
        for i, row in enumerate(csv_reader):
            pc = row[1]
            pc = pc.lower().replace(' ', '')
            lat = float(row[2])
            lng = float(row[3])
            all_pcs.append((pc, lat, lng))
    with open('doogle_postcodes.csv', 'rb') as f:
        csv_reader = csv.DictReader(f)
        for i, row in enumerate(csv_reader):
            if row['Terminated']:
                continue
            pc = row['Postcode'].lower().replace(' ', '')
            lat = float(row['Latitude'])
            lng = float(row['Longitude'])
            all_pcs.append((pc, lat, lng))

    pcs1 = {}
    pcs2 = {}
    for pc, lat, lng in all_pcs:
        error = haversine(lat, lng, round(lat, 3), round(lng, 3))
        assert error < 100
        if pc[0] in FILE_1_PREF:
            pcs1[pc] = '%0.3f %0.3f' % (lat - 49.5, lng + 8.5)
        else:
            pcs2[pc] = '%0.3f %0.3f' % (lat - 49.5, lng + 8.5)
    msgpack.pack(pcs1, open(PC_FILE1, 'wb'))
    msgpack.pack(pcs2, open(PC_FILE2, 'wb'))
    print 'saved %d and %d postcodes to %s and %s respectively' % (len(pcs1), len(pcs2), PC_FILE1, PC_FILE2)
Example No. 11
    def dump(self, file_path, obj, encoding="utf-8"):
        path = self.cache_path / file_path
        path.parent.mkdir(parents=True, exist_ok=True)

        try:
            with open(path.with_suffix(".msgpack"), "wb") as out_file:
                msgpack.pack(obj, out_file, encoding=encoding)
        except TypeError:
            os.remove(path.with_suffix(".msgpack"))
            with open(path.with_suffix(".pkl"), "wb") as out_file:
                pickle.dump(obj, out_file, protocol=pickle.HIGHEST_PROTOCOL)
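The matching loader is not shown in the original; a minimal sketch under the same assumptions (self.cache_path, msgpack and pickle imported as above; the method name load is hypothetical):

    def load(self, file_path, encoding="utf-8"):
        # Hypothetical inverse of dump(): prefer the msgpack file, fall back to the pickle.
        path = self.cache_path / file_path
        mp_path = path.with_suffix(".msgpack")
        if mp_path.exists():
            with open(mp_path, "rb") as in_file:
                return msgpack.unpack(in_file, encoding=encoding)
        with open(path.with_suffix(".pkl"), "rb") as in_file:
            return pickle.load(in_file)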
Example No. 12
 def send_multipart(self, target, *args):
     if type(target) is str:
         target = self.servers[target]
     #safe_args = [(a.encode('utf-8') if type(a) is str else a) for a in args]
     self.set_indent('{:12} <- {:6} '.format(target.ident, args[0]))
     if (args[0] == 'PING'):
         if self.mon:
             msgpack.pack(['PING', target.ident], self.mon.writer)
     else:
         self.log(target.ident, repr(args))
     msgpack.pack(args, target.writer)
Example No. 13
 def write(self, out):
   if self.version != 1:
     msgpack.pack(self.version, out)
   msgpack.pack(self.klasses, out)
   msgpack.pack(self.stores, out)
   doc = self.doc_packed
   msgpack.pack(len(doc), out)
   out.write(doc)
   for insts in self.instances_packed:
     msgpack.pack(len(insts), out)
     out.write(insts)
Example No. 14
 def _handle_cache(self, cache_dir, minresults, overwrite, scores_file):
     cache_name = "{}.min{}.msg".format(Path(scores_file).name, minresults)
     cache_file = Path(cache_dir) / cache_name
     if cache_file.exists() and not overwrite:
         self.log.info(str(cache_file) + " cached - loading cache...")
         self.rankings = msgpack.load(open(cache_file, "rb"))
     else:
         self.log.info(
             str(cache_file) + " not cached - loading from scratch")
         self.rankings = self.load_rankings(scores_file)
         msgpack.pack(self.rankings, open(cache_file, "wb"))
Example No. 15
 def send_multipart(self, target, *args):
     if type(target) is str:
         target = self.servers[target]
     #safe_args = [(a.encode('utf-8') if type(a) is str else a) for a in args]
     self.set_indent('{:12} <- {:6} '.format(target.ident, args[0]))
     if (args[0] == 'PING'):
         if self.mon:
             msgpack.pack(['PING', target.ident], self.mon.writer)
     else:
         self.log(target.ident, repr(args))
     msgpack.pack(args, target.writer)
Example No. 16
def save_composite_scores(scores, path):
    """
    Serialize composite scores as returned by composite_scores to a msgpack file
    at the given path.
    """
    canonical_scores = {
        source: (float(s0), float(s1))
        for source, (s0, s1) in scores.items()
    }
    with open(path, 'wb') as fp:
        msgpack.pack(canonical_scores, fp)
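As an illustration only (file name and sample data are made up), the scores round-trip like this; note that msgpack has no tuple type, so the pairs come back as lists:

import msgpack

scores = {"sourceA": (0.91, 0.12), "sourceB": (0.45, 0.30)}
save_composite_scores(scores, "composite_scores.msgpack")

with open("composite_scores.msgpack", "rb") as fp:
    restored = msgpack.unpack(fp, raw=False)
    # e.g. {"sourceA": [0.91, 0.12], "sourceB": [0.45, 0.3]}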
Example No. 17
    def to_kdxa(self, file_path):
        """
        Write the document to the kdxa format (msgpack) which can be
        used with the Kodexa platform

            >>> document.to_kdxa('my-document.kdxa')

        :param file_path: the path to the kdxa file you wish to create
        """
        with open(file_path, 'wb') as outfile:
            msgpack.pack(self.to_dict(), outfile, use_bin_type=True)
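Because the .kdxa file is plain msgpack, it can be inspected without the Kodexa platform; the read-back below is a hypothetical sketch, not part of the Kodexa API:

import msgpack

with open('my-document.kdxa', 'rb') as infile:
    document_dict = msgpack.unpack(infile, raw=False)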
Example No. 18
    def save_route(self):
        def json_encode(obj):
            if isinstance(obj, Point):
                return (obj.lat, obj.lng)
            if isinstance(obj, Route):
                return attr.asdict(
                    obj,
                    recurse=False,
                    filter=lambda a, v: a.name in route_route_attrs)

        with open(os.path.join(self.dir_route, 'route.pack'), 'wb') as f:
            msgpack.pack(self, f, default=json_encode)
Example No. 19
File: loader.py Project: cmpute/d3d
    def _load_metadata(self):
        meta_path = self.base_path / "metadata.msg"
        if not meta_path.exists():
            _logger.info("Creating metadata of Nuscenes dataset (%s)...",
                         self.phase)
            metadata = {}

            if self.inzip:
                for archive in self.base_path.iterdir():
                    if archive.is_dir() or archive.suffix != ".zip":
                        continue

                    with PatchedZipFile(archive,
                                        to_extract="scene/stats.json") as ar:
                        metadata[archive.stem] = json.loads(
                            ar.read("scene/stats.json"))
            else:
                for folder in self.base_path.iterdir():
                    if not folder.is_dir() or folder.name == "maps":
                        continue

                    metadata[folder.name] = json.loads(
                        (folder / "scene/stats.json").read_text())

            assert len(metadata) > 0, "The dataset folder contains no valid frame, "\
                "please check path or parameters!"
            with open(meta_path, "wb") as fout:
                msgpack.pack(metadata, fout)

        with open(meta_path, "rb") as fin:
            self._metadata = SortedDict()
            meta_json = msgpack.unpack(fin)
            for k, v in meta_json.items():
                self._metadata[k] = edict(v)

        # load category mapping for segmentation
        with open(self.base_path / "category.json") as fin:
            cat_json = json.load(fin)
        cat_dict = {}
        for item in cat_json:
            if 'index' in item:
                cat_dict[item['index']] = NuscenesObjectClass.parse(
                    item['name'])

        builtin_table = NuscenesObjectClass._get_nuscenes_id_table()
        self._rawmapping = np.empty(len(builtin_table) + 1, dtype='u4')
        self._segmapping = np.empty(len(builtin_table) + 1, dtype='u1')
        for idx, clsobj in enumerate(builtin_table):
            if idx in cat_dict:  # test against official definition
                assert cat_dict[
                    idx] == clsobj, "Builtin Nuscenes-lidarseg table is incorrect! Please report this bug."
            self._rawmapping[idx] = clsobj.value
            self._segmapping[idx] = clsobj.to_segmentation().value
Example No. 20
def save_station_areas(save_path='./data/updated_kazerne_set.msg'):
    """Save the set of areas in which there is a fire station.

    Parameters
    ----------
    save_path: str
        The path to save the msgpack file.
    """
    new_kazerne_set = list(STATION_NAME_TO_AREA.values())
    with open(save_path, 'wb') as f:
        msgpack.pack(new_kazerne_set, f)
    return new_kazerne_set
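A hypothetical loader for the same file (the function name is illustrative; msgpack imported as above):

def load_station_areas(path='./data/updated_kazerne_set.msg'):
    """Load the list of station areas written by save_station_areas."""
    with open(path, 'rb') as f:
        return msgpack.unpack(f, raw=False)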
Example No. 21
def pack(o, stream, **kwargs):
    """
    .. versionadded:: 2018.3.4

    Wraps msgpack.pack and ensures that the passed object is unwrapped if it is
    a proxy.

    By default, this function uses the msgpack module and falls back to
    msgpack_pure, if the msgpack is not available.
    """
    # Writes to a stream, there is no return
    msgpack.pack(o, stream, **_sanitize_msgpack_kwargs(kwargs))
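A minimal usage sketch of this wrapper; the payload and the in-memory stream are made up, and the keyword arguments are passed through the wrapper's sanitizer:

import io

buf = io.BytesIO()
pack({"id": "minion01", "fun": "test.ping"}, buf, use_bin_type=True)
payload = buf.getvalue()  # msgpack-encoded bytes, ready to send or store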
Example No. 22
 def write(self, kind: FileType) -> None:
     fn = self._path(kind)
     if kind == FileType.PICKLE:
         # serialize as TreeNode
         with open(fn, "wb") as f:
             pickle.dump(self.treenode, f, protocol=-1)
     elif kind == FileType.CSV:
         # serialize as id_dict
         with open(fn, "w") as f:
             w = csv.DictWriter(f, Node._fields)
             w.writeheader()
             for item in self.treenode.node_iter():
                 w.writerow(item._asdict())
     elif kind == FileType.MSGPACK:
         # https://msgpack-python.readthedocs.io/en/latest/api.html
         with open(fn, "wb") as f:
             # Doesn't improve speed
             # msgpack.pack(self._to_dict(), f, use_bin_type=True)
             msgpack.pack(self.to_dict_list(), f)
     elif kind == FileType.JSON:
         self._json_dump(fn, json.dump)
     elif kind == FileType.UJSON:
         self._json_dump(fn, ujson.dump)
     elif kind == FileType.SIMPLEJSON:
         # NOTE: simplejson includes key names when serializing NamedTuples
         with open(fn, "w") as f:
             if self.json_dict_list:
                 simplejson.dump(list(self.id_dict.values()), f, ensure_ascii=True)
             else:
                 simplejson.dump(self.id_dict, f, ensure_ascii=True)
     elif kind == FileType.CBOR2:
         with open(fn, "wb") as f:
             cbor2.dump(self.to_dict_list(), f)
     elif kind == FileType.CBOR:
         with open(fn, "wb") as f:
             cbor.dump(self.to_dict_list(), f)
     elif kind == FileType.RAPIDJSON:
         # https://python-rapidjson.readthedocs.io/en/latest/benchmarks.html
         # TODO: See this example for possible speed improvement - deeper integration with Node
         #  https://python-rapidjson.readthedocs.io/en/latest/encoder.html
         # NOTE: can't use id_dict - keys must be strings
         #       can't use self.id_dict.values() - not serializable
         #       list(self.id_dict.values()) produces a list of lists - no keys - very fragile
         with open(fn, "w") as f:
             if self.json_dict_list:
                 rapidjson.Encoder(number_mode=rapidjson.NM_NATIVE, ensure_ascii=False)(self.to_dict_list(), f)
             else:
                 rapidjson.Encoder(number_mode=rapidjson.NM_NATIVE, ensure_ascii=False)(list(self.id_dict.values()), f)
     elif kind == FileType.BSON:
         with open(fn, "wb") as f:
             co = CodecOptions(document_class=RawBSONDocument)
             for node in self.treenode.node_iter():
                 f.write(BSON.encode(node._asdict(), codec_options=co))
Example No. 23
 def Write_MSGPACK(self, data, filePath=None, fileName=None):
     if fileName == None: fileName = "randoMSGPACK"
     
     if filePath == None: filePath = self.pathObj.CurrentUserDesktopPath()
     elif filePath == 'current': filePath = self.pathObj.CurrentFileDirectory()
     elif filePath == 'Lib': self.filePath = None
     
     completeFilePath = r"{0}\{1}.msgpack".format(filePath, fileName)
     
     # Write msgpack file
     with open(completeFilePath, 'wb') as writePath:  # binary mode: msgpack writes bytes
         msgpack.pack(data, writePath)        
Example No. 24
def save_scores(scores, path):
    """
    Serialize scores as returned by evaluate_sources to a msgpack file at 
    the given path.  
    """
    canonical_scores = {
        source: {edge: float(score)
                 for edge, score in source_scores.items()}
        for source, source_scores in scores.items()
    }
    with open(path, 'wb') as fp:
        msgpack.pack(canonical_scores, fp)
Example No. 25
def save_object(object_, file_path):

    def ndarrray_to_list(o, _warned=[False]): # Use a mutable default arg to hold a fn internal temp var.
        if isinstance(o, np.ndarray):
            if not _warned[0]:
                _warned[0] = True
            return o.tolist()
        return o

    file_path = os.path.expanduser(file_path)
    with open(file_path, 'wb') as fh:
        msgpack.pack(object_, fh, use_bin_type=True, default=ndarrray_to_list)
Example No. 26
    def get_all_images(self):
        with timing("get_all_images"):
            if not self.all_images:
                print("filtering images...")
                with timing("filtering"):
                    self.all_images = [x for x in self.get_allfiles() if x.get("magic", "error") in files_opts.image_magics]
                print("saving hashes...")
                with timing("by_hash"):
                    self.by_hash = {hashlib.sha256(x["path"].encode("utf-8", ue)).hexdigest(): x for x in self.all_images}
                with timing("check errors"):
                    for i in self.all_images:
                        if "error" not in i:
                            print(i)
                            break
                with timing("make sure hashes are up to date"):
                    for h, v in self.by_hash.items():
                        assert v["hash"] == h, f"hashes mismatch: {v}, {h}"
                        v["hash"] = h
                with timing("merge dupes"):
                    try:
                        a = list(csv.reader(open("duplicates.csv", "r")))[1:]
                    except FileNotFoundError:
                        a = []
                    groups = {}
                    for group, filename, folder, size, match in a:
                        g = groups.setdefault(group, [])
                        g.append(os.path.join(folder, filename))
                    for g in groups.values():
                        if any("/fun/app/" in q for q in g): continue
                        q = [os.path.exists(j) for j in g]
                        assert len([x for x in q if x]) == 1, f"err: {g}, {q}"
                        hashes = [hashlib.sha256(("/"+os.path.relpath(j, self.base)).encode("utf-8", ue)).hexdigest() for j in g]
                        r = [h for h, m in zip(hashes, q) if m][0]
                        f = [h for h, m in zip(hashes, q) if not m]
                        hh = [x in self.by_hash for x in hashes]
                        hhc = len([x for x in hh if x])
                        if not hhc: continue

                        img = self.by_hash[r]
                        img["other_hashes"] = f
                        for h in f:
                            self.by_hash[h] = img
                print("done")
                with timing("all_magics"):
                    all_magics = collections.Counter([x.get("magic", x.get("error", "error unknown")) for x in self.get_allfiles()])
                print(all_magics)
                print("image count:")
                print(len(self.all_images))
                with timing("write all_images.msgpack"):
                    with open(f"all_images{self.vcode}.msgpack", "wb") as writer:
                        msgpack.pack(self.all_images, writer, use_bin_type=True, unicode_errors="surrogatepass")
            return self.all_images, self.by_hash
Example No. 27
 def save(s,dir_path):
     """ save replay buffer to directory path dir_path"""
     dir_path = os.path.abspath(dir_path) # convert to absolute path
     try:os.mkdir(dir_path)
     except OSError:pass
     print(f"saving replay buffer to {dir_path}")
     s.obses.save(os.path.join(dir_path,"obses"))
     s.next_obses.save(os.path.join(dir_path,"next_obses"))
     s.actions.save(os.path.join(dir_path,"actions"))
     s.rewards.save(os.path.join(dir_path,"rewards"))
     s.not_dones.save(os.path.join(dir_path,"not_dones"))
     with open(os.path.join(dir_path,"header.msgpack"), 'wb') as file:
         msgpack.pack((s.capacity,s.idx,s.full), file)
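The header can be read back with the symmetric call; a short sketch assuming the same dir_path (msgpack returns the packed tuple as a list, which unpacks into three values):

with open(os.path.join(dir_path, "header.msgpack"), 'rb') as file:
    capacity, idx, full = msgpack.unpack(file)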
Example No. 28
 def _do_serialize(self):
     if self.__magic__ is None or self.__fields__ is None:
         raise RuntimeError(
             "Serialization can only be performed on classes implementing "
             "__fields__ and __magic__")
     buf = io.BytesIO()
     msgpack.pack(PROTOCOL_VERSION, buf)
     msgpack.pack(self.__magic__, buf)
     for name, type_info in self.__fields__:
         value = getattr(self, name)
         type_info.validate(value, name)
         type_info.pack(value, buf)
     return buf
Example No. 29
    def save(self, filename):
        tfn = '%s.inprog-%d' % (filename, random.randint(1, 10000000))
        fh = open(tfn, 'wb')

        try:
            me_as_dict = self.todict()
            msgpack.pack(me_as_dict, encoding='utf-8', stream=fh)
        finally:
            fh.close()

            if os.path.exists(filename):
                os.rename(filename, '%s.bak' % filename)
            os.rename(tfn, filename)
Example No. 30
 def test_unknown_integrity_version(self):
     # For now an unknown integrity data version is ignored and not an error.
     integrity_path = os.path.join(self.repository.path, 'integrity.1')
     with open(integrity_path, 'r+b') as fd:
         msgpack.pack({
             # Borg only understands version 2
             b'version': 4.7,
         }, fd)
         fd.truncate()
     with self.repository:
         # No issues accessing the repository
         assert len(self.repository) == 1
         assert self.repository.get(H(0)) == b'foo'
Example No. 31
def _parse_mirteFile(path):
    """ Open and parses the mirteFile at <path>. """
    cache_path = os.path.join(os.path.dirname(path),
                CACHE_FILENAME_TEMPLATE % os.path.basename(path))
    if (os.path.exists(cache_path) and
                os.path.getmtime(cache_path) >= os.path.getmtime(path)):
        with open(cache_path) as f:
            return msgpack.unpack(f)
    with open(path) as f:
        ret = yaml.load(f)
    with open(cache_path, 'w') as f:
        msgpack.pack(ret, f)
    return ret
Example No. 32
def saveTrainingData(dataName, trainingInputs, trainingOutputs,
                     validationInputs, validationOutputs):
    if len(trainingInputs) != len(trainingOutputs):
        raise Exception("Input length mismatch with output length!")

    outFile = open(dataName, 'wb')
    valOutFile = open(dataName + ".val", 'wb')
    msgpack.pack((trainingInputs, trainingOutputs), outFile, use_bin_type=True)
    msgpack.pack((validationInputs, validationOutputs),
                 valOutFile,
                 use_bin_type=True)
    valOutFile.close()
    outFile.close()
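A hypothetical loading counterpart (tuples are stored as msgpack arrays, so they come back as lists; msgpack imported as above):

def loadTrainingData(dataName):
    with open(dataName, 'rb') as inFile:
        trainingInputs, trainingOutputs = msgpack.unpack(inFile, raw=False)
    with open(dataName + ".val", 'rb') as valFile:
        validationInputs, validationOutputs = msgpack.unpack(valFile, raw=False)
    return trainingInputs, trainingOutputs, validationInputs, validationOutputs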
Example No. 33
def send_packet(packet, dev_id):
    (length, data, timestamp) = packet
    if length > 20000:
        print "WARNING: Large packet: {0} (sniffing loop?)".format(length)
    message = {"ln": length, "ts": timestamp, "if": devs[dev_id]["eth"], "id": dev_id, "dt": data}
    clients_copy = clients
    for client in clients_copy:
        try:
            msgpack.pack(message, client)
            client.flush()
        except eventlet.green.socket.error:
            print "client disconnected (fd: {0})".format(client.fileno())
            clients.remove(client)
Example No. 34
def save_object(object_, file_path):

    def ndarrray_to_list(o, _warned=[False]): # Use a mutable default arg to hold a fn internal temp var.
        if isinstance(o, np.ndarray):
            if not _warned[0]:
                logger.warning("numpy array will be serialized as list. Invoked at:\n"+''.join(tb.format_stack()))
                _warned[0] = True
            return o.tolist()
        return o

    file_path = os.path.expanduser(file_path)
    with open(file_path, 'wb') as fh:
        msgpack.pack(object_, fh, use_bin_type=True, default=ndarrray_to_list)
Example No. 35
 def test_unknown_integrity_version(self):
     # For now an unknown integrity data version is ignored and not an error.
     integrity_path = os.path.join(self.repository.path, 'integrity.1')
     with open(integrity_path, 'r+b') as fd:
         msgpack.pack({
             # Borg only understands version 2
             b'version': 4.7,
         }, fd)
         fd.truncate()
     with self.repository:
         # No issues accessing the repository
         assert len(self.repository) == 1
         assert self.repository.get(H(0)) == b'foo'
Example No. 36
def save_object(object_, file_path):

    def ndarrray_to_list(o, _warned=[False]): # Use a mutable default arg to hold a fn internal temp var.
        if isinstance(o, np.ndarray):
            if not _warned[0]:
                logger.warning("numpy array will be serialized as list. Invoked at:\n"+''.join(tb.format_stack()))
                _warned[0] = True
            return o.tolist()
        return o

    file_path = os.path.expanduser(file_path)
    with open(file_path, 'wb') as fh:
        msgpack.pack(object_, fh, use_bin_type=True, default=ndarrray_to_list)
Example No. 37
 def _do_serialize(self):
     if self.__magic__ is None or self.__fields__ is None:
         raise RuntimeError(
             "Serialization can only be performed on classes implementing "
             "__fields__ and __magic__")
     buf = io.BytesIO()
     msgpack.pack(PROTOCOL_VERSION, buf)
     msgpack.pack(self.__magic__, buf)
     for name, type_info in self.__fields__:
         value = getattr(self, name)
         type_info.validate(value, name)
         type_info.pack(value, buf)
     return buf
Example No. 38
def compute_kb_metadata(kb_directory=Cnst.DEFAULT_KB_MULTI_DIR):
    knowledge_bases = [
        f for f in os.listdir(kb_directory) if not f.startswith('.')
    ]
    metadata_dict = {}
    for kb in knowledge_bases:
        e2id_dict, r2id_dict = load_kb_dicts(kb)
        metadata_dict[kb] = [
            len(e2id_dict) - 1,
            len(r2id_dict), MAX_ARITIES[kb]
        ]
    with open(Cnst.KB_META_MULTI_FILE_NAME, "wb") as f:
        msgpack.pack(metadata_dict, f)
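Reading the metadata back is the symmetric call; a short sketch assuming the same Cnst constant and msgpack import:

with open(Cnst.KB_META_MULTI_FILE_NAME, "rb") as f:
    metadata_dict = msgpack.unpack(f, raw=False)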
Example No. 39
def fetch_mem_profile(request, pk):
    profile = get_object_or_404(ProfileRun, pk=pk)

    full_profile = msgpack.unpack(
        gzip.GzipFile(fileobj=profile.memory_profile))
    resampled_profile = resample_memory_profile(
        full_profile, float(request.GET.get('x0', 0)),
        float(request.GET.get('x1', 'inf')))
    resampled_profile['ntotal'] = len(full_profile)
    buf = io.BytesIO()
    msgpack.pack(resampled_profile, buf)
    buf.seek(0)
    return MsgpackResponse(buf)
Example No. 40
    def save(self, io):
        msgpack.pack(
            {
                "forward": self.data,
                "backward": self.back,
                "weights": [self.fw_weights, self.bw_weights]
            },
            io,
            use_bin_type=True)

        logging.info(
            "Saved {} forward and {} backward Markov nodes into the Markov database."
            .format(len(self.data), len(self.back)))
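A hypothetical load counterpart for the same layout (not part of the original snippet):

    def load(self, io):
        # Hypothetical inverse of save(): restore the forward/backward tables and weights.
        payload = msgpack.unpack(io, raw=False)
        self.data = payload["forward"]
        self.back = payload["backward"]
        self.fw_weights, self.bw_weights = payload["weights"]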
Example No. 41
    def hashFile(self, dir_inner_path, file_relative_path, optional=False):
        inner_path = dir_inner_path + file_relative_path

        file_size = self.site.storage.getSize(inner_path)
        # Only care about optional files >1MB
        if not optional or file_size < 1 * 1024 * 1024:
            return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional)

        back = {}
        content = self.contents.get(dir_inner_path + "content.json")

        hash = None
        piecemap_relative_path = None
        piece_size = None

        # Don't re-hash if it's already in content.json
        if content and file_relative_path in content.get("files_optional", {}):
            file_node = content["files_optional"][file_relative_path]
            if file_node["size"] == file_size:
                self.log.info("- [SAME SIZE] %s" % file_relative_path)
                hash = file_node.get("sha512")
                piecemap_relative_path = file_node.get("piecemap")
                piece_size = file_node.get("piece_size")

        if not hash or not piecemap_relative_path:  # Not in content.json yet
            if file_size < 5 * 1024 * 1024:  # Don't create piecemap automatically for files smaller than 5MB
                return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional)

            self.log.info("- [HASHING] %s" % file_relative_path)
            merkle_root, piece_size, piecemap_info = self.hashBigfile(self.site.storage.open(inner_path, "rb"), file_size)
            if not hash:
                hash = merkle_root

            if not piecemap_relative_path:
                file_name = helper.getFilename(file_relative_path)
                piecemap_relative_path = file_relative_path + ".piecemap.msgpack"
                piecemap_inner_path = inner_path + ".piecemap.msgpack"

                msgpack.pack({file_name: piecemap_info}, self.site.storage.open(piecemap_inner_path, "wb"))

                back.update(super(ContentManagerPlugin, self).hashFile(dir_inner_path, piecemap_relative_path, optional=True))

        piece_num = int(math.ceil(float(file_size) / piece_size))

        # Add the merkle root to hashfield
        hash_id = self.site.content_manager.hashfield.getHashId(hash)
        self.optionalDownloaded(inner_path, hash_id, file_size, own=True)
        self.site.storage.piecefields[hash].fromstring("1" * piece_num)

        back[file_relative_path] = {"sha512": hash, "size": file_size, "piecemap": piecemap_relative_path, "piece_size": piece_size}
        return back
Example No. 42
    def hashFile(self, dir_inner_path, file_relative_path, optional=False):
        inner_path = dir_inner_path + file_relative_path

        file_size = self.site.storage.getSize(inner_path)
        # Only care about optional files >1MB
        if not optional or file_size < 1 * 1024 * 1024:
            return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional)

        back = {}
        content = self.contents.get(dir_inner_path + "content.json")

        hash = None
        piecemap_relative_path = None
        piece_size = None

        # Don't re-hash if it's already in content.json
        if content and file_relative_path in content.get("files_optional", {}):
            file_node = content["files_optional"][file_relative_path]
            if file_node["size"] == file_size:
                self.log.info("- [SAME SIZE] %s" % file_relative_path)
                hash = file_node.get("sha512")
                piecemap_relative_path = file_node.get("piecemap")
                piece_size = file_node.get("piece_size")

        if not hash or not piecemap_relative_path:  # Not in content.json yet
            if file_size < 5 * 1024 * 1024:  # Don't create piecemap automatically for files smaller than 5MB
                return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional)

            self.log.info("- [HASHING] %s" % file_relative_path)
            merkle_root, piece_size, piecemap_info = self.hashBigfile(self.site.storage.open(inner_path, "rb"), file_size)
            if not hash:
                hash = merkle_root

            if not piecemap_relative_path:
                file_name = helper.getFilename(file_relative_path)
                piecemap_relative_path = file_relative_path + ".piecemap.msgpack"
                piecemap_inner_path = inner_path + ".piecemap.msgpack"

                msgpack.pack({file_name: piecemap_info}, self.site.storage.open(piecemap_inner_path, "wb"))

                back.update(super(ContentManagerPlugin, self).hashFile(dir_inner_path, piecemap_relative_path, optional=True))

        piece_num = int(math.ceil(float(file_size) / piece_size))

        # Add the merkle root to hashfield
        hash_id = self.site.content_manager.hashfield.getHashId(hash)
        self.optionalDownloaded(inner_path, hash_id, file_size, own=True)
        self.site.storage.piecefields[hash].fromstring("1" * piece_num)

        back[file_relative_path] = {"sha512": hash, "size": file_size, "piecemap": piecemap_relative_path, "piece_size": piece_size}
        return back
Example No. 43
def ri(args):
    """obj.readinto(buffer)"""
    global _stream
    sz = args[1]
    if sz > len(_buffer): sz = len(_buffer)
    mv = memoryview(_buffer[:sz])
    # read & update sz to actual number of bytes read
    sz = args[0].readinto(mv)
    # send actual number of bytes read
    msgpack.pack(sz, _stream)
    # send raw bytes (easier to decode by client)
    _stream.write(mv[:sz])
    # result sent already
    return None
Example No. 44
def spak(filepath: str, data: JASM) -> str:
    """

    :param filepath: destination path; a ".pak" extension is appended if the name has no extension
    :param data: the data to serialize with msgpack
    :return: the path the data was written to
    """
    try:
        filepath = filepath if "." in filepath else "{}.pak".format(filepath)
        with open(filepath, "wb") as outfile:
            pack(data, outfile)
        return filepath
    except NameError:
        raise EnvironmentError("'pip install msgpack' if you wanna use this!")
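An illustrative call, assuming pack was imported from msgpack as the function expects (file name and payload are made up):

written = spak("settings", {"theme": "dark", "retries": 3})
# writes "settings.pak" and returns that path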
Example No. 45
    def save(obj, stream):
        if not isinstance(obj, dict):
            raise ValueError()

        output = {}
        for key, value in obj.items():
            if hasattr(value, '_hg_tweak_descriptor'):
                value = getattr(value, '_hg_tweak_descriptor')
            output[key] = value

        msgpack.pack(output,
                     stream,
                     use_bin_type=True,
                     default=MsgPackEncoders.encode)
Example No. 46
def write_ruleset(ruleset,
                  ofname,
                  imported_from=None,
                  force=False,
                  msgpacked=True,
                  pickled=False):
    basename = ofname.rsplit('.', 1)[0]
    ofname = basename + ".py"

    # don't overwrite the module we just imported by default - slows down the next import
    if (imported_from is not None and not force
            and os.path.abspath(imported_from) == os.path.abspath(ofname)):
        print("not overwriting {}".format(imported_from))

    else:
        with open(ofname, "w") as f:
            f.write("ruleset = {\n ")
            f.write(pprint.pformat(ruleset,
                                   width=144)[1:])  # [1:] skips the opening {
            f.write(
                textwrap.dedent("""
                def get_trans(lang="{lang}", fallback = False):
                    def find_lang(lname):
                        for ess in ruleset["extraStrings"]:
                            if ess['type'] == lname:
                                return ess['strings']
                        return {{}}

                    trans = find_lang(lang)
                    falltrans = find_lang("{fblang}")

                    if fallback:
                        return lambda k : trans.get(k, falltrans.get(k, k))
                    else:
                        return lambda k : trans.get(k, k)

                """.format(fblang=FALLBACK_LANG,
                           lang=ruleset['_config']['options'].get(
                               'language', FALLBACK_LANG))))
        print("\nwrote {}".format(ofname))

    if pickled:
        ofname = basename + '.pickle'
        pickle.dump(ruleset, open(ofname, "wb"))
        print("wrote {}".format(ofname))

    if msgpacked:
        ofname = basename + '.msgp'
        msgpack.pack(ruleset, open(ofname, "wb"))
        print("wrote {}".format(ofname))
Example No. 47
 def readinto(self, buffer):
     global _stream
     _clear_rx_buffer()
     msgpack.pack(("ri", self._ext_type, len(buffer)), _stream)
     _ready_to_read()
     # get actual number of bytes read OR handle error (if any)
     sz = msgpack.unpack(_stream, ext_hook=_ext_handler, use_list=False)
     # read data; not all versions of readinto support length argument
     mv = memoryview(buffer)
     # print("urpc client.readinto [sz]")
     _stream.readinto(mv[:sz])
     # server sends an extra None
     assert msgpack.unpack(_stream) == None, "readinto expects terminating 'None'"
     return sz
Example No. 48
    async def main_task(self):
        # take IMU readings
        readings = {
            'accel': self.cubesat.acceleration,
            'mag': self.cubesat.magnetic,
            'gyro': self.cubesat.gyro,
        }

        # store them in our cubesat data_cache object
        self.cubesat.data_cache.update({'imu': readings})

        # print the readings with some fancy formatting
        self.debug('IMU readings (x,y,z)')
        for imu_type in self.cubesat.data_cache['imu']:
            self.debug(
                '{:>5} {}'.format(imu_type,
                                  self.cubesat.data_cache['imu'][imu_type]), 2)

        # save data to the sd card, but only if we have a proper data file
        if self.data_file is not None:
            # save our readings using msgpack
            with open(self.data_file, 'ab') as f:
                msgpack.pack(readings, f)
            # check if the file is getting bigger than we'd like
            if stat(self.data_file)[6] >= 256:  # bytes
                if SEND_DATA:
                    print('\nSend IMU data file: {}'.format(self.data_file))
                    with open(self.data_file, 'rb') as f:
                        chunk = f.read(
                            64)  # each IMU readings is 64 bytes when encoded
                        while chunk:
                            # we could send bigger chunks, radio packet can take 252 bytes
                            self.cubesat.radio1.send(chunk)
                            print(chunk)
                            chunk = f.read(64)
                    print('finished\n')
                else:
                    # print the unpacked data from the file
                    print('\nPrinting IMU data file: {}'.format(
                        self.data_file))
                    with open(self.data_file, 'rb') as f:
                        while True:
                            try:
                                print('\t', msgpack.unpack(f))
                            except:
                                break
                    print('finished\n')
                # increment our data file number
                self.data_file = self.cubesat.new_file('/data/imu')
Example No. 49
def main():
    # Logging counters
    game_counter = 0
    skipped_games = 0
    skipped_computer = 0
    skipped_ply = 0
    games_since_last_log = 0
    total_moves = 0

    ensure_dir_exists(out_directory.replace("%s", ""))
    with open(pgn_path, 'r') as pgn_file, open(board_states_path, 'ab+') as boards_out_file, open(moves_path, 'ab+') as moves_out_file:
        # Count games and reset seek position
        input_game_count = count_games_in_pgn_file(pgn_file)
        pgn_file.seek(0)
        print("Found %s games to process" % input_game_count)

        for game in pgn_iterate(pgn_file):
            games_since_last_log += 1

            # Don't process games which meet filter criteria
            ply = int(game.headers["PlyCount"]) # Get number of half-moves in the current game
            # FICS uses these headers for player type instead of the standard
            has_computer_player = game.headers.get("WhiteIsComp") == "Yes" or game.headers.get("BlackIsComp") == "Yes"
            if has_computer_player:
                skipped_games += 1
                skipped_computer += 1
            elif ply <= min_length:
                skipped_games += 1
                skipped_ply += 1
            else: # Convert the game and pack it
                processed_boards = game_to_board_list(game)
                processed_moves = game_to_az_indices(game)

                total_moves += len(processed_boards)
                msgpack.pack(processed_boards, boards_out_file)
                msgpack.pack(processed_moves, moves_out_file)

            # Log current status to console every log_freq games
            if games_since_last_log >= log_freq:
                print("Processing game %s of %s" % (game_counter + 1, input_game_count))
                print("\tSkipped %s games with computer players\n\tSkipped %s games with %s or fewer half-moves." % (skipped_computer, skipped_ply, str(min_length - 1)))

                # Reset reporting counters
                games_since_last_log = 0
                skipped_computer = 0
                skipped_ply = 0

            game_counter += 1
    print("Wrote %s moves in %s games of %s input games" % (total_moves, game_counter - skipped_games, input_game_count))
Example No. 50
    def dump(self, fp):
        if self.expiration_date:
            expiration_date = self.expiration_date.isoformat()
        else:
            expiration_date = None

        data = {'value': self.value, 'expiration_date': expiration_date}

        dconfig = self.config.asdict()
        keys = ('default', 'unicode_errors', 'use_single_float', 'autoreset',
                'use_bin_type')
        assert set(keys) <= set(dconfig)
        kwargs = {k: v for k, v in six.iteritems(dconfig) if k in keys}
        kwargs['encoding'] = dconfig['pack_encoding']

        msgpack.pack(data, fp, **kwargs)
Example No. 51
def dump(obj, fp, registry=None):
    """Serialize ``obj`` as a messagepack formatted stream to ``fp``."""
    if registry is None:
        registry = default_registry
    return msgpack.pack(obj, fp,
                        default=functools.partial(_serializer, registry),
                        use_bin_type=True)
Example No. 52
 def pack(stream):
     """Wraps pack of msgpack."""
     return msgpack.pack(
         stream,
         encoding="UTF-8",
         use_bin_type=True
     )
Example No. 53
def test_unpack_tell():
    stream = io.BytesIO()
    messages = [2**i-1 for i in range(65)]
    messages += [-(2**i) for i in range(1, 64)]
    messages += [b'hello', b'hello'*1000, list(range(20)),
                 {i: bytes(i)*i for i in range(10)},
                 {i: bytes(i)*i for i in range(32)}]
    offsets = []
    for m in messages:
        pack(m, stream)
        offsets.append(stream.tell())
    stream.seek(0)
    unpacker = Unpacker(stream)
    for m, o in zip(messages, offsets):
        m2 = next(unpacker)
        assert m == m2
        assert o == unpacker.tell()
Example No. 54
  def process_doc(self, messages, out):
    try:
      version = next(messages)
    except StopIteration:
      return False
    if not isinstance(version, int):
      # Put the first message back on:
      messages = itertools.chain((version,), messages)
      version = 1

    for version in range(version, self.args.target_version):
      messages = getattr(self, 'update_to_v{0}'.format(version + 1))(messages)

    msgpack.pack(self.args.target_version, out, use_bin_type=True)  # update functions do not output version
    for msg in messages:
      msgpack.pack(msg, out, use_bin_type=True)

    return True
Example No. 55
def single_tree(filename, treekind, tid):
    prefix = os.path.splitext(filename)[0]
    tree_filename = '{}_{}_{}.pack'.format(prefix, treekind, tid)
    if os.path.exists(tree_filename):
        return
    logging.info('building {} {} on {}'.format(treekind, tid, prefix))
    _, _, _, G, E = load_graph(filename)
    if treekind == 'bfs':
        root = max(G.items(), key=lambda x: len(x[1]))[0]
        t = perturbed_bfs(G, root)
    elif treekind == 'gtx':
        t, _ = galaxy_maker(G, 150, short=True, output_name=None)
    elif treekind == 'rst':
        t = list(get_rst(G, {e: 1 for e in E})[1])
    logging.info('computing stretch of {} on {}'.format(treekind, prefix))
    stretch = average_strech(set(E), t)
    with open(tree_filename, 'w+b') as outfile:
        msgpack.pack((stretch, tuple((x for u, v in t for x in (u, v)))), outfile)
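The packed tuple can be turned back into an edge list with a short read-back sketch (variable names are illustrative; msgpack imported as above):

with open(tree_filename, 'rb') as infile:
    stretch, flat_endpoints = msgpack.unpack(infile)
tree_edges = list(zip(flat_endpoints[::2], flat_endpoints[1::2]))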
Example No. 56
def QueueWaiter(reader: asyncio.StreamReader, writer: asyncio.StreamWriter, queue_name: str):
    """
    A coroutine for waiting upon new items to be placed into the Queue.
    """
    client = writer.get_extra_info("peername")
    sclient = ':'.join(str(_) for _ in client)
    while True:
        try:
            data = yield from reader.read(65536)
        except ConnectionResetError:
            rlogger.info("Client {} closed connection".format(sclient))
            return
        if not data:
            rlogger.info("Client {} closed connection".format(sclient))
            return
        # Unpack
        try:
            sub_data = msgpack.unpackb(data, encoding="utf-8")
        except (msgpack.UnpackException, ValueError) as e:
            rlogger.error("Received non-msgpack push from {}".format(sclient))
            continue
        rlogger.debug("Received data from client {}: {}".format(sclient, sub_data))
        assert isinstance(sub_data, dict)
        action = sub_data.get("action", -1)
        if not action == 0:
            rlogger.error("Received non-push action on push channel from client {} (action: {})"
                          .format(sclient, action))
            continue
        # Get data to place
        data = sub_data.get("data", None)
        if not data:
            rlogger.error("Received no data on push channel from client {}".format(sclient))
            continue
        # Increment and get message number
        queues[queue_name][0] += 1
        msgnum = queues[queue_name][0]
        queue = queues[queue_name][1]
        # Put it on the queue
        assert isinstance(queue, asyncio.Queue)
        yield from queue.put([msgnum, data])
        # Respond to the client
        response = {"msgnum": msgnum, "status": 0}
        rlogger.debug("Sending response with message number {}".format(msgnum))
        msgpack.pack(response, writer)
Example No. 57
def get_graph(topology, real, name, size=None):
    if real and topology == 'PA':
        size = {'aut': 4773, 'wik': 7065, 'sla': 82052,
                'epi': 119070, 'kiw': 137713, 'gplus': 74917}[name]
    filename = 'nantes/{}_{}_{}_{}.pack'.format(topology, 'yes' if real else 'no', name, size)
    try:
        return load_graph(filename)
    except FileNotFoundError:
        start = clock()
        print('creating {}'.format(filename))
    if real:
        assert topology == 'grid' and name in {'nips_logo', 'nips_poster', 'space',
                                               'waterfall', 'zmonastery', 'zworld'}
        import convert_pbm_images as pbm
        nodes_sign, G, E = pbm.build_graph(*pbm.read_img('nantes/{}_{}.pbm'.format(name, size)))
        psi, phi = irregularities(G, nodes_sign, E)
        nodes_sign = [nodes_sign[i] for i in range(len(G))]
    else:
        if topology == 'PA':
            cexp.fast_preferential_attachment(size, 3, .13)
        if topology == 'grid':
            G, E_keys = make_grid(int(sqrt(size)))
            cexp.redensify.G = G
            cexp.redensify.N = len(G)
            cexp.redensify.EDGES_SIGN = {e: True for e in E_keys}
        if topology == 'triangle':
            import graph_tool.generation as gen
            import numpy as np
            points = np.random.random((size, 2)) * (size // 50 + 1)
            g, _ = gen.triangulation(points, type="delaunay")
            cexp.redensify.G, cexp.redensify.EDGES_SIGN = to_python_graph(g)
            cexp.redensify.N = size
        n = cexp.redensify.N
        nb_cluster = int(2 * sqrt(n))
        ci = cexp.turn_into_signed_graph_by_propagation(nb_cluster,
                                                        infected_fraction=.9)
        G, E = dict(cexp.redensify.G), dict(cexp.redensify.EDGES_SIGN)
        _, nodes_sign = merge_into_2_clusters(E, ci)
        psi, phi = irregularities(G, dict(enumerate(nodes_sign)), E)
    with open(filename, 'w+b') as outfile:
        msgpack.pack((psi, tuple(map(int, nodes_sign)), phi,
                      tuple((x for (u, v), s in E.items() for x in (u, v, int(s))))), outfile)
    print('save a {} nodes, {} edges in {:.3f} seconds'.format(len(G), len(E), clock() - start))
Example No. 58
def connected_cb(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """
    A callback for connected clients.
    """
    client = writer.get_extra_info("peername")
    sclient = ':'.join(str(_) for _ in client)
    logger.info("Received connection from {}:{}".format(*client))
    # Read a subscription message.
    try:
        sub = yield from reader.read(65536)
    except ConnectionResetError:
        rlogger.info("Client {} closed connection".format(sclient))
        return
    if not sub:
        logger.error("Client {} terminated connection abnormally".format(sclient))
        return
    try:
        sub_data = msgpack.unpackb(sub)
    except (msgpack.UnpackException, ValueError) as e:
        logger.error("Received unknown subscription message from {}:{}".format(*client))
        yield from writer.drain()
        writer.close()
        return
    # Get the data from the subscription message.
    if not b'queue' in sub_data:
        logger.error("Received null queue from {}".format(sclient))
        yield from writer.drain()
        writer.close()
        return
    queue_to_sub = sub_data[b"queue"]
    action = sub_data.get(b"action", 0)
    queue_created = False
    if queue_to_sub not in queues:
        queues[queue_to_sub] = [0, asyncio.Queue()]
        logger.debug("Created queue {}".format(queue_to_sub))
        queue_created = True
    logger.debug("Client {} subscribed to queue {} in mode {} ({})".format(sclient, queue_to_sub,
                                                                           action, "push" if not action else "pull"))
    if action == 0:
        loop.create_task(QueueWaiter(reader, writer, queue_to_sub))
    else:
        loop.create_task(QueueSender(reader, writer, queue_to_sub))
    msgpack.pack({"created": queue_created}, writer)
Example No. 59
	def s3store(self, host, request):
		start = datetime.utcnow()
		try:
			conn = self.get_connection()
			b = conn.get_bucket(request[1])
			k = Key(b)
			k.key = request[2]
			k.content_type = "application/x-msgpack"
			fp = NamedTemporaryFile(mode="r+b")
			msgpack.pack(self.format_to_store(), fp)
			fp.seek(0)
			k.set_contents_from_file(fp)
			fp.close()
			diff = datetime.utcnow() - start
			self.socket.send(msgpack.dumps(["restored", diff.total_seconds()]))
		except Exception as e:
			import traceback
			traceback.print_exc()
			self.socket.send(msgpack.dumps(["failed", str(e)]))
Example No. 60
def _parse_mirteFile(path, logger=None):
    """ Open and parses the mirteFile at <path>. """
    l = logging.getLogger('_parse_mirteFile') if logger is None else logger
    cache_path = os.path.join(os.path.dirname(path),
                CACHE_FILENAME_TEMPLATE % os.path.basename(path))
    if (os.path.exists(cache_path) and
                os.path.getmtime(cache_path) >= os.path.getmtime(path)):
        with open(cache_path) as f:
            return msgpack.unpack(f)
    with open(path) as f:
        ret = yaml.load(f)
    try:
        with open(cache_path, 'w') as f:
            msgpack.pack(ret, f)
    except IOError as e:
        if e.errno == errno.EACCES:
            l.warn('Not allowed to write %s', path)
        else:
            raise
    return ret