Example #1
    def test_pers_load(self):
        # persistent_id / persistent_load are module-level helpers,
        # defined the same way as in Example #4 below.
        for binary in [True, False]:
            src = StringIO()
            p = cPickle.Pickler(src)
            p.persistent_id = persistent_id
            p.binary = binary

            value = MyData('abc')
            p.dump(value)

            up = cPickle.Unpickler(StringIO(src.getvalue()))
            up.persistent_load = persistent_load
            res = up.load()

            self.assertEqual(res.value, value.value)

            # errors
            src = StringIO()
            p = cPickle.Pickler(src)
            p.persistent_id = persistent_id
            p.binary = binary

            value = MyData('abc')
            p.dump(value)

            up = cPickle.Unpickler(StringIO(src.getvalue()))

            # exceptions vary between cPickle & Pickle
            try:
                up.load()
                self.assertUnreachable()
            except Exception, e:
                pass
Example #2
def _saveCameraCalibration(mtx, dist):

    mtxfile = "data/camera_calibration/mtx.pck"
    distfile = "data/camera_calibration/dist.pck"

    with open(mtxfile, 'wb') as f:
        p = cPickle.Pickler(f)
        p.fast = True
        p.dump(mtx)

    with open(distfile, 'wb') as f:
        p = cPickle.Pickler(f)
        p.fast = True
        p.dump(dist)

    print('Camera calibration matrix has been saved to a file')
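The page shows only the save side; a possible loader, assuming the same hard-coded paths (the loader name is hypothetical, and pickle stands in for cPickle since the stream format is compatible):

import pickle

def _loadCameraCalibration():
    # Hypothetical counterpart to _saveCameraCalibration: read both
    # matrices back from the paths used above.
    with open("data/camera_calibration/mtx.pck", 'rb') as f:
        mtx = pickle.Unpickler(f).load()
    with open("data/camera_calibration/dist.pck", 'rb') as f:
        dist = pickle.Unpickler(f).load()
    return mtx, dist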
Example #3
 def pickleData(self, data, fname=None):
     if fname is None:
         fname = self.model_name + '_history.pkl.gz'
     with gzip.open(fname, 'wb') as f:
         p = pickle.Pickler(f)
         p.dump(data)
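A matching reader for such gzip-compressed pickles might look like this (unpickleData is a hypothetical name, not from the original source):

import gzip
import pickle

def unpickleData(fname):
    # gzip.open returns a binary file object that Unpickler reads directly
    with gzip.open(fname, 'rb') as f:
        return pickle.Unpickler(f).load()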
Example #4
    def test_persistent_load(self):
        class MyData(object):
            def __init__(self, value):
                self.value = value

        def persistent_id(obj):
            if hasattr(obj, 'value'):
                return 'MyData: %s' % obj.value
            return None

        def persistent_load(id):
            return MyData(id[8:])


        for binary in [True, False]:
            src = StringIO()
            p = cPickle.Pickler(src)
            p.persistent_id = persistent_id
            p.binary = binary

            value = MyData('abc')
            p.dump(value)

            up = cPickle.Unpickler(StringIO(src.getvalue()))
            up.persistent_load = persistent_load
            res = up.load()

            self.assertEqual(res.value, value.value)

            # errors
            src = StringIO()
            p = cPickle.Pickler(src)
            p.persistent_id = persistent_id
            p.binary = binary

            value = MyData('abc')
            p.dump(value)

            up = cPickle.Unpickler(StringIO(src.getvalue()))

            # exceptions vary between cPickle & Pickle
            try:
                up.load()
                AssertUnreachable()
            except Exception, e:
                pass
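The attribute-assignment style above (p.persistent_id = ..., up.persistent_load = ...) is the Python 2 / IronPython API. Under Python 3 the documented route is subclassing Pickler and Unpickler; a minimal sketch of the same round trip:

import io
import pickle

class MyData:
    def __init__(self, value):
        self.value = value

class MyPickler(pickle.Pickler):
    def persistent_id(self, obj):
        # Replace MyData instances in the stream with a string ID
        if isinstance(obj, MyData):
            return 'MyData: %s' % obj.value
        return None

class MyUnpickler(pickle.Unpickler):
    def persistent_load(self, pid):
        # Turn the string ID back into a live object
        return MyData(pid[len('MyData: '):])

buf = io.BytesIO()
MyPickler(buf).dump(MyData('abc'))
restored = MyUnpickler(io.BytesIO(buf.getvalue())).load()
assert restored.value == 'abc'

As in the test above, unpickling a stream that contains persistent IDs fails unless the unpickler defines persistent_load.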
Example #5
    def assign_reads(self, sample):
        logger.info('Assigning reads to isoforms')
        chrom_clusters = self.get_chromosome_gene_clusters()
        pool = Pool(self.args.threads)
        job_args = [(sample, chr_id, c, self.args, self.read_grouper,
                     self.reference_record_dict[chr_id] if self.reference_record_dict else None)
                    for (chr_id, c) in chrom_clusters]
        processed_reads = pool.starmap(assign_reads_in_parallel, job_args)
        pool.close()
        pool.join()

        logger.info("Resolving multimappers")
        self.multimapped_reads = defaultdict(list)
        for storage in processed_reads:
            for read_assignment in storage:
                self.multimapped_reads[read_assignment.read_id].append(read_assignment)

        multimap_resolver = MultimapResolver(self.args.multimap_strategy)
        multimap_file = open(sample.out_raw_file + "_multimappers", "wb")
        multimap_pickler = pickle.Pickler(multimap_file, -1)
        multimap_pickler.fast = True
        total_assignments = 0
        polya_assignments = 0
        for read_id in list(self.multimapped_reads.keys()):
            assignment_list = self.multimapped_reads[read_id]
            if len(assignment_list) == 1:
                total_assignments += 1
                polya_assignments += 1 if assignment_list[0].polyA_found else 0
                del self.multimapped_reads[read_id]
                continue
            multimap_resolver.resolve(assignment_list)
            multimap_pickler.dump(assignment_list)
            for a in assignment_list:
                if a.assignment_type != ReadAssignmentType.noninformative:
                    total_assignments += 1
                    if a.polyA_found:
                        polya_assignments += 1

        multimap_file.close()
        with open(sample.out_raw_file + "_info", "wb") as info_file:
            info_pickler = pickle.Pickler(info_file, -1)
            info_pickler.dump(total_assignments)
            info_pickler.dump(polya_assignments)
        if total_assignments == 0:
            logger.warning("No reads were assigned to isoforms, check your input files")
        else:
            logger.info('Finishing read assignment, total assignments %d, polyA percentage %.1f' %
                        (total_assignments, 100 * polya_assignments / total_assignments))
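The two dump() calls on the info pickler write consecutive pickles into one stream, so reading them back takes two load() calls in the same order; a hedged sketch (load_assignment_info is not part of the original project):

import pickle

def load_assignment_info(out_raw_file):
    # Read back total_assignments and polya_assignments, in dump order
    with open(out_raw_file + "_info", "rb") as f:
        up = pickle.Unpickler(f)
        return up.load(), up.load()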
Example #6
def pickleData(data, fname, unbunch=False):
    with gzip.open(fname, 'wb') as f:
        p = pickle.Pickler(f)
        if unbunch:
            # Write each item as its own pickle record in the stream
            for item in data:
                p.dump(item)
        else:
            p.dump(data)
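When unbunch=True the items become consecutive pickle records, so the reader must call load() repeatedly until the stream is exhausted; a sketch (unpickleStream is hypothetical):

import gzip
import pickle

def unpickleStream(fname):
    # Yield each pickle record in turn; Unpickler raises EOFError
    # when the underlying stream runs out of data.
    with gzip.open(fname, 'rb') as f:
        up = pickle.Unpickler(f)
        while True:
            try:
                yield up.load()
            except EOFError:
                break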
Example #7
def save_model(filePath, obj, data=None):
    # Write the metadata dictionary as JSON next to the pickled model
    with open(filePath, 'w') as outfile:
        json.dump(data, outfile)
    # model_path is a module-level global in the original source
    with open(model_path + '/' + filePath.replace(' ', '_') + '.pkl',
              'wb') as outfile:
        fastPickler = cPickle.Pickler(outfile, -1)
        fastPickler.fast = 1
        fastPickler.dump(obj)
Example #8
    def serial(result, fname="temp.bin"):
        if charade.detect(fname)['encoding'] == 'utf-8':
            fname = convert(fname)

        root_dir = os.path.dirname(__file__)
        fname = root_dir + "\\" + fname
        f = open(fname, "wb")
        p = cPickle.Pickler(f)
        p.clear_memo()
        p.fast = True
        p.dump(result)
        f.close()
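A read-side counterpart might be (deserial is hypothetical; clear_memo() and fast only matter on the write side):

import pickle

def deserial(fname="temp.bin"):
    # Inverse of serial(): a plain load, since fast mode and memo
    # clearing leave the on-disk format unchanged.
    with open(fname, "rb") as f:
        return pickle.Unpickler(f).load()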
Example #9
 def test_pickler(self):
     basesize = support.calcobjsize('5P2n3i2n3iP')
     p = _pickle.Pickler(io.BytesIO())
     self.assertEqual(object.__sizeof__(p), basesize)
     MT_size = struct.calcsize('3nP0n')
     ME_size = struct.calcsize('Pn0P')
     check = self.check_sizeof
     check(
         p,
         basesize + MT_size + 8 * ME_size + sys.getsizeof(b'x' * 4096))
     for i in range(6):
         p.dump(chr(i))
     # Memo table grows to 32 entries to hold the dumped objects; the
     # write buffer is cleared after every dump(), hence the trailing + 0.
     check(p, basesize + MT_size + 32 * ME_size + 0)
Example #10
 def test_pickler(self):
     basesize = support.calcobjsize('7P2n3i2n3i2P')
     p = _pickle.Pickler(io.BytesIO())
     self.assertEqual(object.__sizeof__(p), basesize)
     MT_size = struct.calcsize('3nP0n')
     ME_size = struct.calcsize('Pn0P')
     check = self.check_sizeof
     check(p, basesize +
         MT_size + 8 * ME_size +  # Minimal memo table size.
         sys.getsizeof(b'x'*4096))  # Minimal write buffer size.
     for i in range(6):
         p.dump(chr(i))
     check(p, basesize +
         MT_size + 32 * ME_size +  # Size of memo table required to
                                   # save references to 6 objects.
         0)  # Write buffer is cleared after every dump().
Example #11
    def serial(result, fname="temp.bin"):
        if isinstance(result, pd.DataFrame):
            fname = str(fname).replace('.searial', '.df')
        elif isinstance(result, np.ndarray):
            fname = str(fname).replace('.searial', '.csv')

        if isinstance(result, pd.DataFrame):
            result.to_pickle(fname)
        elif isinstance(result, np.ndarray):
            np.savetxt(fname, result, delimiter=',', fmt='%.3f')
        else:
            with open(fname, "wb") as f:
                p = cPickle.Pickler(f)
                p.clear_memo()
                p.fast = True
                p.dump(result)
Example #12
def test_cpickle(_cache={}):
    # Recursion stress test: pickle ever more deeply nested lists; the
    # mutable default _cache remembers depths that already succeeded.
    import io
    try:
        import _pickle
    except ImportError:
        print("cannot import _pickle, skipped!")
        return
    l = None
    for n in itertools.count():
        try:
            l = _cache[n]
            continue  # Already tried and it works, let's save some time
        except KeyError:
            for i in range(100):
                l = [l]
        _pickle.Pickler(io.BytesIO(), protocol=-1).dump(l)
        _cache[n] = l
Example #13
    def serial(result, fname="temp.bin"):
        if isinstance(result, pd.DataFrame) or isinstance(result, pd.Panel):
            fname = str(fname).replace('.searial', '.df')
        elif isinstance(result, np.ndarray):
            fname = str(fname).replace('.searial', '.csv')

        if charade.detect(fname)['encoding'] == 'utf-8':
            fname = convert(fname)
        if isinstance(result, pd.DataFrame) or isinstance(result, pd.Panel):
            result.to_pickle(fname)
            #result.to_csv(fname)
        elif isinstance(result, np.ndarray):
            np.savetxt(fname, result, delimiter=',', fmt='%.3f')
        else:
            with open(fname, "wb") as f:
                p = cPickle.Pickler(f)
                p.clear_memo()
                p.fast = True
                p.dump(result)
Example #14
    def save(self, path, name=""):
        """Save the memory in case of crash

        Parameters
        ----------
        path: str
            Directory where the memory file is written (the model's save
            path from its yaml file)
        name: str
            Optional name for the saved file; empty by default
        """
        info = {
            "storage": self._storage,
            "maxsize": self._maxsize,
            "next_idx": self._next_idx
        }
        with open(path + "/memory.info", "wb") as file:
#             pickle.dump(info, file, protocol=pickle.HIGHEST_PROTOCOL)
            p = pickle.Pickler(file)
            p.fast = True
            p.dump(info)
            p.memo.clear()
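A corresponding load is not shown on this page; a minimal sketch of a companion method under the same attribute names (hypothetical, not from the original class):

import pickle

def load(self, path):
    # Restore the attributes that save() dumped into memory.info
    with open(path + "/memory.info", "rb") as file:
        info = pickle.Unpickler(file).load()
    self._storage = info["storage"]
    self._maxsize = info["maxsize"]
    self._next_idx = info["next_idx"]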
Example #15
 def save(self, path):
     """Save the priority memory in case of crash
     Parameters
     ----------
     path: str
          Directory where the priority memory file is written (the model's
          save path from its yaml file)
     """
     info = {
         "alpha": self._alpha,
         "it_sum": self._it_sum,
         "it_min": self._it_min,
         "max_priority": self._max_priority,
         "next_idx": self._next_idx,
         "storage": self._storage,
         "maxsize": self._maxsize
     }
     with open(path + "/adaptive_memory.info", "wb") as file:
         p = pickle.Pickler(file) 
         p.fast = True 
         p.dump(info)
         p.memo.clear()
Example #16
 def test_bound_builtin_method(self):
     s = StringIO()
     p = _pickle.Pickler(s)
     self.assertEqual(self._get_summary_line(p.dump),
                      "dump(obj, /) method of _pickle.Pickler instance")
Example #17
def predict():
    # print(known_face_names)
    people_count = len(known_face_encodings)
    # video_capture = cv2.VideoCapture(0)
    # Open in append mode so new faces extend the existing pickle streams;
    # each dump() call below appends one more record to its file.
    f = open('face_encodings.pkl', 'ab')
    f_ = open('face_names.pkl', 'ab')
    pickler = cPickle.Pickler(f)
    pickler_ = cPickle.Pickler(f_)
    # Initialize some variables

    # print(known_face_names)
    face_locations = []
    face_encodings = []
    face_names = []
    process_this_frame = True
    time_to_predict = 1000  # seconds to keep scanning before giving up
    NI_count = 0
    ucnt = 0
    ncnt = 0
    fin_name = ''
    start = time.time()
    while True:
        try:
            # print(people_count)
            # print("Number of people : ", len(known_face_encodings))
            faceT = {"len": len(known_face_encodings)}
            import json  # better hoisted to module scope; kept here as in the original
            import codecs
            # Use a separate handle so the pickle file object f is not shadowed
            with open('4forces3.json', 'wb') as json_file:
                json.dump(faceT,
                          codecs.getwriter('utf-8')(json_file),
                          ensure_ascii=False)
            testp = path + '../FINAL/src/3.txt'
            print(os.path.exists(testp))
            if (os.path.exists(testp)):
                frame = cv2.imread(path + '../FINAL/src/1.jpg')
            else:
                frame = cv2.imread(path + '../FINAL/src/2.jpg')
            # frame = cv2.imread('images.jpg')
            small_frame = cv2.resize(frame, (0, 0), fx=0.25, fy=0.25)
            rgb_small_frame = small_frame[:, :, ::-1]
            # cv2.imshow("temp",rgb_small_frame)
            if process_this_frame:
                # Find all the faces and face encodings in the current frame of video
                face_locations = face_recognition.face_locations(
                    rgb_small_frame)
                face_encodings = face_recognition.face_encodings(
                    rgb_small_frame, face_locations)

                # print(face_locations)

                face_names = []
                for face_encoding in face_encodings:
                    matches = face_recognition.compare_faces(
                        known_face_encodings, face_encoding, tolerance=0.6)
                    name = "Unknown"

                    if True in matches:
                        first_match_index = matches.index(True)
                        name = known_face_names[first_match_index]
                    else:
                        people_count += 1
                        # print(known_face_names)
                        # add_to_known(frame)
                        known_face_encodings.append(face_encoding)
                        pickler.dump(face_encoding)
                        known_face_names.append(len(known_face_names))
                        pickler_.dump(len(known_face_names))

            process_this_frame = not process_this_frame

            # Display the results
            for (top, right, bottom,
                 left), name in zip(face_locations, face_names):
                # Scale back up face locations since the frame we detected in was scaled to 1/4 size
                top *= 4
                right *= 4
                bottom *= 4
                left *= 4

                # Draw a box around the face
                cv2.rectangle(frame, (left, top), (right, bottom), (0, 0, 0),
                              2)

                # Draw a label with a name below the face
                # cv2.rectangle(frame, (left, bottom - 35), (right, bottom), (0, 0, 255))
                font = cv2.FONT_HERSHEY_DUPLEX
                cv2.putText(frame, name, (left + int(
                    (right - left) / 2), bottom + 16), font, 0.5,
                            (255, 255, 255), 1)

            # Display the resulting image
            # cv2.imshow('Video', frame)
            if len(face_names) == 1:
                if name == 'Unknown':
                    ucnt += 1
                else:
                    ncnt += 1
                # print("Predicted user : ", face_names[0])
                fin_name = face_names[0]
            elif len(face_names) > 1:
                ucnt = 0
                ncnt = 0
            # print(time.time() - start)
            if time.time() - start >= time_to_predict:
                break
            # Hit 'q' on the keyboard to quit!
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
        except Exception as e:
            # Keep scanning on any per-frame failure (e.g. a missing image file)
            print('except:', e)
            continue
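Since both .pkl files are opened in append mode and receive one dump() per new face, reading them back means repeated load() calls until the stream ends, using the same read-until-EOFError loop as the sketch after Example #6 (load_known_faces and _load_all are hypothetical names):

import pickle

def _load_all(fname):
    # Yield every pickle record appended to fname, stopping at end of stream
    with open(fname, 'rb') as f:
        up = pickle.Unpickler(f)
        while True:
            try:
                yield up.load()
            except EOFError:
                break

def load_known_faces():
    return (list(_load_all('face_encodings.pkl')),
            list(_load_all('face_names.pkl')))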
Example #18
def pickle(name, objects):
    with open(name, 'wb') as file:
        pickler = cpickle.Pickler(file)
        pickler.dump(objects)
Example #19
 def __init__(self, output_file_name, params):
     AbstractAssignmentPrinter.__init__(self, output_file_name, params)
     self.pickler = pickle.Pickler(open(self.output_file_name, "wb"), -1)
     self.pickler.fast = True
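Setting fast = True disables memoization for speed, which also means self-referential data can no longer be pickled. A small illustration of the trade-off (behavior of CPython's C pickler; the exact error message may vary):

import io
import pickle

cyclic = []
cyclic.append(cyclic)  # a self-referential list

p = pickle.Pickler(io.BytesIO(), -1)
p.fast = True
try:
    p.dump(cyclic)
except ValueError as err:
    print(err)  # e.g. "fast mode: can't pickle cyclic objects ..."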
Example #20
def pickle(name, objects):
    timestamp('pickling...')
    with open(name, 'wb') as file:
        pickler = cpickle.Pickler(file)
        pickler.dump(objects)
Example #21
 def saveSelf(self):
     filename = self.instance_name + 'g' + str(
         self.last_generation) + '.pck'
     with open(filename, 'wb') as output:
         pickler = pck.Pickler(output, -1)
         pickler.dump(self)
Example #22
 def save_pickle(self, obj):
     # Highest protocol plus fast mode for speed; note that fast mode
     # cannot handle self-referential objects (see the sketch after
     # Example #19).
     with open(self.filePath, 'wb') as outfile:
         fastPickler = cPickle.Pickler(outfile, cPickle.HIGHEST_PROTOCOL)
         fastPickler.fast = 1
         fastPickler.dump(obj)
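A matching reader needs no protocol argument, since the protocol is recorded in the stream itself; a minimal sketch (load_pickle is a hypothetical companion method, written in the same cPickle style as the example):

 def load_pickle(self):
     # The protocol used by save_pickle is detected from the stream
     with open(self.filePath, 'rb') as infile:
         return cPickle.Unpickler(infile).load()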