Example #1
    def test_simple_pipeline_encrypt(self):
        config = {
            'crypto': {
                'encrypt_opts': {},
                'stream_crypt_key':
                pysodium.crypto_secretstream_xchacha20poly1305_keygen()
            }
        }

        meta_pl_format = pipeline.get_default_pipeline_format()
        meta_pl_format['format'].update(
            {'encrypt': config['crypto']['encrypt_opts']})
        data_in = b'some data input'

        #-------
        pl_out = pipeline.build_pipeline(write_helper, 'out')
        meta = {
            'path': 'test',
            'header': pipeline.serialise_pipeline_format(meta_pl_format)
        }
        meta2 = pl_out(data_in, meta, config)

        self.assertNotEqual(data_in, meta2['data'])

        #-------
        pl_in = pipeline.build_pipeline(read_helper, 'in')
        data_out, meta3 = pl_in(meta2, config)

        self.assertEqual(data_in, data_out)
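The test passes write_helper and read_helper into pipeline.build_pipeline, but neither helper is part of the listing. Going by how the built pipelines are called (pl_out(data, meta, config) returns a meta dict whose 'data' key holds the processed bytes, while pl_in(meta, config) returns a (data, meta) pair), a minimal in-memory pair could look like the sketch below; the exact signatures are inferred from that usage, not taken from the original test module.

# Minimal sketch of the terminal stages assumed by the tests; signatures are
# inferred from how pl_out and pl_in are invoked above.
def write_helper(data, meta, config):
    # End of the 'out' pipeline: stash the processed bytes on the meta dict
    meta['data'] = data
    return meta

def read_helper(meta, config):
    # Start of the 'in' pipeline: hand the stored bytes back for decoding
    return meta['data'], meta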
Example #2
def compress(child, data, meta, config):
    # Compress the chunk with bz2 and note the chosen algorithm in the
    # pipeline header before handing the data on to the next stage
    pl_format = pipeline.parse_pipeline_format(meta['header'])
    if 'compress' in pl_format['format']:
        pl_format['format']['compress'] = {'A': 'bz2'}
        data = bz2.compress(data)

    meta['header'] = pipeline.serialise_pipeline_format(pl_format)
    return child(data, meta, config)
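The snippet above is an 'out' direction stage: it receives (child, data, meta, config) and forwards to child(data, meta, config). A matching 'in' direction stage would, by the same pattern, take (child, meta, config) and return a (data, meta) pair; the sketch below follows that assumed convention and is not taken from the original module.

import bz2

# Assumed companion stage for the 'in' direction; the (child, meta, config)
# signature is inferred from how the 'in' pipeline is called in the tests.
def decompress(child, meta, config):
    data, meta = child(meta, config)
    pl_format = pipeline.parse_pipeline_format(meta['header'])
    # Undo bz2 compression if the header recorded it on the way out
    compress_opts = pl_format['format'].get('compress') or {}
    if compress_opts.get('A') == 'bz2':
        data = bz2.decompress(data)
    return data, meta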
Example #3
def get_remote_manifest_diff(config, version_id = None):
    meta = {'path'       : config['remote_manifest_diff_file'],
            'version_id' : version_id,
            'header'     : pipeline.serialise_pipeline_format(meta_pl_format)}
    data, meta2 = pl_in(meta, config)
    return { 'version_id'    : version_id,
             'last_modified' : meta2['last_modified'],
             'body'          : json.loads(data)}
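This snippet, and several of the later ones, lean on module-level meta_pl_format, pl_in and pl_out objects defined outside the excerpt. Judging by the test code in Examples 1 and 6, the setup presumably resembles the sketch below; the terminal read/write helpers and the chosen format entries are placeholders rather than the module's actual definitions.

# Rough reconstruction of the module-level pipeline setup these snippets rely on.
# The terminal helpers and the 'compress' entry are assumptions for illustration.
meta_pl_format = pipeline.get_default_pipeline_format()
meta_pl_format['format'].update({'compress': None})

pl_in  = pipeline.build_pipeline(read_helper,  'in')
pl_out = pipeline.build_pipeline(write_helper, 'out')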
Example #4
def read_json_from_remote(config, path : str, version_id = None):

    meta = {'path'       : path,
            'version_id' : version_id,
            'header'     : pipeline.serialise_pipeline_format(meta_pl_format)}
    try: data, object_meta = pl_in(meta, config)
    except ValueError: return None, None

    return json.loads(data), object_meta
Example #5
def get_remote_manifest_diffs(interface, conn, config):
    """ Get and sort the progression of change differences from the remote """

    diffs = []
    for v in get_remote_manifest_versions(interface, conn, config):
        meta = {'path'       : config['remote_manifest_diff_file'],
                'version_id' : v['VersionId'],
                'header'     : pipeline.serialise_pipeline_format(meta_pl_format)}
        data, meta2 = pl_in(meta, config)
        diffs.append({ 'version_id' : v['VersionId'],
                       'body' : data,
                       'meta' : meta2})

    return diffs
Example #6
    def test_simple_pipeline_compress(self):
        config = {}

        meta_pl_format = pipeline.get_default_pipeline_format()
        meta_pl_format['format'].update({'compress': None})
        data_in = b'some data input'

        #-------
        pl_out = pipeline.build_pipeline(write_helper, 'out')
        meta = {
            'path': 'test',
            'header': pipeline.serialise_pipeline_format(meta_pl_format)
        }
        meta2 = pl_out(data_in, meta, config)

        self.assertNotEqual(data_in, meta2['data'])

        #-------
        pl_in = pipeline.build_pipeline(read_helper, 'in')
        data_out, meta3 = pl_in(meta2, config)

        self.assertEqual(data_in, data_out)
Example #7
def streaming_file_upload(interface, conn, config, local_file_path, system_path):

    # Determine the correct pipeline format to use for this file from the configuration
    try: pipeline_format = next((plf for wildcard, plf in config['file_pipeline']
                                 if fnmatch.fnmatch(system_path, wildcard)))
    except StopIteration: raise SystemExit('No pipeline format matches ' + system_path)

    # Get remote file path
    remote_file_path = sfs.cpjoin(config['remote_base_path'], system_path)

    #----
    pipeline_configuration = pipeline.get_default_pipeline_format()
    pipeline_configuration['chunk_size'] = config['chunk_size']
    pipeline_configuration['format'] = {i : None for i in pipeline_format}
    if 'encrypt' in pipeline_configuration['format']:
        pipeline_configuration['format']['encrypt'] = config['crypto']['encrypt_opts']

    #-----
    upload = interface.streaming_upload()
    pl     = pipeline.build_pipeline_streaming(upload, 'out')
    pl.pass_config(config, pipeline.serialise_pipeline_format(pipeline_configuration))

    upload.begin(conn, remote_file_path)

    try:
        with open(local_file_path, 'rb') as fle:
            while True:
                print('.', end=' ')

                chunk = fle.read(config['chunk_size'])
                if chunk == b'': break
                pl.next_chunk(chunk)
            print()
        return upload.finish()

    # If the file can no longer be read at this stage (e.g. it was deleted
    # mid-upload), abort the streaming upload and re-raise for the caller to handle
    except IOError:
        upload.abort()
        raise
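The wildcard lookup at the top of this function expects config['file_pipeline'] to be an ordered list of (pattern, stage-names) pairs, with the first matching pattern winning. A configuration of that shape might look like the sketch below; the patterns, stage names and paths are purely illustrative placeholders.

import pysodium

# Illustrative shape of the configuration consumed by streaming_file_upload;
# every concrete value here is a placeholder.
config = {
    'chunk_size'       : 1048576,
    'remote_base_path' : 'remote/base',
    'file_pipeline'    : [('*.log', ['compress']),             # logs: compress only
                          ('*',     ['compress', 'encrypt'])], # everything else
    'crypto'           : {'encrypt_opts'     : {},
                          'stream_crypt_key' : pysodium.crypto_secretstream_xchacha20poly1305_keygen()},
}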
Example #8
def write_json_to_remote(config, path : str, data_to_write):
    meta = {'path' : path, 'header' : pipeline.serialise_pipeline_format(meta_pl_format)}
    return pl_out(json.dumps(data_to_write).encode('utf-8'), meta, config)
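For completeness, a minimal sketch of how the two JSON helpers above might be combined; the path and the config dict are placeholders, since the real values depend on the configured pipeline stages and storage backend.

# Hypothetical round trip through the JSON helpers; path and config are placeholders
write_json_to_remote(config, 'manifests/current.json', {'files': []})
loaded, object_meta = read_json_from_remote(config, 'manifests/current.json')
assert loaded == {'files': []}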