Example #1
def dest_size(path):
    """
    Retrieves the size (number of documents) of the backed-up data.
    """
    try:
        with bz2.open(path, 'rt') as infile:
            # The backup is a bz2-compressed JSON list of documents.
            return len(json_loads(infile.read()))
    except Exception:
        return 0
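For context, a minimal usage sketch under stated assumptions: json_loads/json_dumps are taken to be aliases of the standard-library JSON functions, the sample documents and file names are made up, and dest_size from the example above is in scope.

import bz2
from json import dumps as json_dumps, loads as json_loads  # assumed aliases

docs = [{'_id': 1, 'name': 'a'}, {'_id': 2, 'name': 'b'}]

# Write a bz2-compressed JSON list, then measure it with dest_size().
with bz2.open('db_dest.bz2', 'wt') as outfile:
    outfile.write(json_dumps(docs))

print(dest_size('db_dest.bz2'))   # -> 2
print(dest_size('missing.bz2'))   # -> 0 (any error falls back to 0)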
Example #2
    def test_freshbz2(self):
        # Run a fresh backup into a bz2 file, then check that the dataset
        # file, the source collection, and the compressed destination agree.
        print_msg('Running {} test'.format(self.test_name))
        os.system('../../src/mongob --config config.yaml' +
                  ' --progress-file current_progress.yaml' +
                  ' --log backup.log')

        print_msg('Checking result for {}'.format(self.test_name))
        with open(self.test_info['dataset_file'], 'rt') as infile:
            data_from_file = json_loads(infile.read())
            data_from_file.sort(key=lambda x: x['_id'])

        data_from_src = list(self.coll_src.find().sort('_id', 1))

        with bz2.open(self.db_dest, 'rt') as infile:
            data_from_dest = json_loads(infile.read())
        data_from_dest.sort(key=lambda x: x['_id'])

        self.assertEqual(data_from_file, data_from_src)
        self.assertEqual(data_from_file, data_from_dest)
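This test method, like the variants further down, depends on attributes prepared elsewhere in the test class (test_name, test_info, coll_src, coll_dest, db_dest) and on a print_msg helper. The following is a hypothetical minimal fixture sketch; every class name, path, and connection detail in it is an assumption rather than part of the original suite.

import unittest

import pymongo
from json import loads as json_loads  # assumed alias


def print_msg(msg):
    # Assumed helper: the tests only use it for progress output.
    print('==> {}'.format(msg))


class BackupTest(unittest.TestCase):  # hypothetical class name
    def setUp(self):
        self.test_name = 'fresh_backup'
        self.test_info = {'dataset_file': 'dataset.json'}            # assumed layout
        self.db_dest = 'db_dest.bz2'                                  # bz2 backup written by mongob
        client = pymongo.MongoClient('localhost', 27017)
        self.coll_src = client['test_db']['test_collection']          # assumed names
        self.coll_dest = client['test_db_backup']['test_collection']  # used by the collection-to-collection test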
Example #3
def setup_dataset(coll, dataset_file):
    """
    Set up the data set for a test by dropping the existing collection and
    then loading data from a JSON file.
    """
    print_msg("Loading dataset for {}".format(coll.name))

    coll.drop()

    with open(dataset_file, 'r') as infile:
        coll.insert_many(json_loads(infile.read()))
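A short usage sketch for setup_dataset, assuming a local MongoDB instance and a small JSON dataset file; the connection details, names, and the print_msg/json_loads helpers are assumptions, and setup_dataset from the example above is in scope.

import pymongo
from json import loads as json_loads  # assumed alias


def print_msg(msg):
    print('==> {}'.format(msg))  # assumed helper called by setup_dataset()


client = pymongo.MongoClient('localhost', 27017)
coll = client['test_db']['test_collection']  # hypothetical names

# dataset.json is expected to hold a JSON list of documents, e.g. [{"_id": 1}, {"_id": 2}].
setup_dataset(coll, 'dataset.json')
print(coll.count_documents({}))  # number of documents just loaded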
Example #4
def setup_dataset(coll, dataset_file):
    """
    Set up the data set for a test by dropping the existing collection and
    then loading data from a JSON file.
    """
    print_msg("Loading dataset for {}".format(coll.name))

    coll.drop()

    with open(dataset_file, "r") as infile:
        coll.insert_many(json_loads(infile.read()))
Example #5
    def test_freshbz2(self):
        print_msg('Running {} test'.format(self.test_name))
        os.system(
            '../../src/mongob --config config.yaml'
            + ' --progress-file current_progress.yaml'
            + ' --log backup.log'
        )

        print_msg('Checking result for {}'.format(self.test_name))
        with open(self.test_info['dataset_file'], 'rt') as infile:
            data_from_file = json_loads(infile.read())
            data_from_file.sort(key=lambda x: x['_id'])

        data_from_src = list(self.coll_src.find().sort('_id', 1))

        with bz2.open(self.db_dest, 'rt') as infile:
            data_from_dest = json_loads(infile.read())
        data_from_dest.sort(key=lambda x: x['_id'])

        self.assertEqual(data_from_file, data_from_src)
        self.assertEqual(data_from_file, data_from_dest)
Example #6
    def test_freshrun(self):
        # Back up into an empty destination collection, then check that the
        # dataset file, the source collection, and the destination agree.
        self.coll_dest.drop()

        print_msg("Running {} test".format(self.test_name))
        os.system(
            "../../src/mongob --config config.yaml"
            + " --progress-file current_progress.yaml"
            + " --log backup.log"
        )

        print_msg("Checking result for {}".format(self.test_name))
        with open(self.test_info["dataset_file"], "r") as infile:
            data_from_file = json_loads(infile.read())
            data_from_file.sort(key=lambda x: x["_id"])

        data_from_src = list(self.coll_src.find().sort("_id", 1))
        data_from_dest = list(self.coll_dest.find().sort("_id", 1))

        self.assertEqual(data_from_file, data_from_src)
        self.assertEqual(data_from_file, data_from_dest)
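The tests launch the backup with os.system, which discards a non-zero exit status. Below is a hedged sketch of the same invocation via subprocess.run; the command-line flags are copied from the examples above, and the check=True failure handling is the only addition.

import subprocess

# Raises CalledProcessError if the backup run exits with a non-zero status.
subprocess.run(
    ['../../src/mongob',
     '--config', 'config.yaml',
     '--progress-file', 'current_progress.yaml',
     '--log', 'backup.log'],
    check=True,
)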
Example #7
def insert_docs(path, docs):
    """
    Inserts all docs into a destination file and returns the IDs of the
    inserted docs.  This function makes no attempt to avoid duplication.
    """
    # Not used:
    # Since we write one list each time we open the destination, while the
    # desired output should be a single list, we would have to do the list
    # concatenation manually by removing the closing square bracket ("]") and
    # adding a comma before the newly inserted items.

    # Destination file is created for the first time
    if not os.path.isfile(path):
        with bz2.open(path, 'wt') as output:
            output.write(json_dumps(docs))
    else:
        # with bz2.open(path, 'r') as output:
        #     # Remove last ]
        #     output.seek(-1, os.SEEK_END)
        #     while output.peek(1)[:1] != b']' and output.tell() > 0:
        #         print(output.peek(1)[:1])
        #         output.seek(-1, os.SEEK_CUR)
        #     output.truncate()

        #     # Write , as list separator
        #     output.write(b', ')

        #     # Now, write docs without the beginning [
        #     output.write(json_dumps(docs)[1:].encode('utf-8'))

        with bz2.open(path, 'rt') as infile:
            current_docs = json_loads(infile.read())
        current_docs.extend(docs)

        with bz2.open(path, 'wt') as output:
            output.write(json_dumps(current_docs))

    # IDs of the docs inserted by this call.
    return [doc['_id'] for doc in docs]
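A usage sketch for insert_docs, with the imports and aliases it assumes spelled out; the file name and documents are made up, and insert_docs from the example above is in scope.

import bz2
import os
from json import dumps as json_dumps, loads as json_loads  # assumed aliases

path = 'backup.bz2'  # hypothetical destination file

# The first call creates the compressed file; the second appends by
# re-reading the whole list, extending it, and writing it back out.
print(insert_docs(path, [{'_id': 1}, {'_id': 2}]))  # -> [1, 2]
print(insert_docs(path, [{'_id': 3}]))              # -> [3]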
Example #8
def fixmeta(meta):
    # Normalize the metadata by serializing it to JSON and parsing it back.
    return json_loads(json.dumps(meta))
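For context, a sketch of what the round trip does, assuming json_loads aliases the standard-library json.loads (if it aliases bson.json_util.loads instead, the behaviour differs for BSON types); fixmeta from the example above is in scope.

import json
from json import loads as json_loads  # assumed alias

meta = {'shape': (2, 3), 1: 'one'}
print(fixmeta(meta))
# -> {'shape': [2, 3], '1': 'one'}: tuples become lists and keys become strings.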