Example #1
    def test_simple_pipeline_encrypt(self):
        config = {
            'crypto': {
                'encrypt_opts': {},
                'stream_crypt_key':
                pysodium.crypto_secretstream_xchacha20poly1305_keygen()
            }
        }

        meta_pl_format = pipeline.get_default_pipeline_format()
        meta_pl_format['format'].update(
            {'encrypt': config['crypto']['encrypt_opts']})
        data_in = b'some data input'

        #-------
        pl_out = pipeline.build_pipeline(write_helper, 'out')
        meta = {
            'path': 'test',
            'header': pipeline.serialise_pipeline_format(meta_pl_format)
        }
        meta2 = pl_out(data_in, meta, config)

        self.assertNotEqual(data_in, meta2['data'])

        #-------
        pl_in = pipeline.build_pipeline(read_helper, 'in')
        data_out, meta3 = pl_in(meta2, config)

        self.assertEqual(data_in, data_out)
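
The test above depends on two module-level helpers that are not shown. A minimal sketch of what they might look like, assuming build_pipeline calls the 'out' endpoint with (data, meta) and the 'in' endpoint with (meta); the names write_helper and read_helper come from the tests, everything else here is an assumption:

def write_helper(data, meta):
    # Stash the pipeline's processed bytes on the metadata dict (hypothetical)
    meta['data'] = data
    return meta

def read_helper(meta):
    # Hand the stored bytes back to the inbound pipeline (hypothetical)
    return meta['data'], meta
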
Example #2
def init(interface, conn, config):
    """ Set up format of the pipeline used for storing meta-data like manifest diffs """
    global meta_pl_format, pl_in, pl_out
    meta_pl_format = pipeline.get_default_pipeline_format()
    meta_pl_format['format'].update({i : None for i in config['meta_pipeline']})
    if 'encrypt' in meta_pl_format['format']:
        meta_pl_format['format']['encrypt'] = config['crypto']['encrypt_opts']

    # ----
    pl_in  = pipeline.build_pipeline(functools.partial(interface.read_file, conn), 'in')
    pl_out = pipeline.build_pipeline(functools.partial(interface.write_file, conn), 'out')

    # Check for previous failed uploads and delete them
    if 'read_only' in config and not config['read_only']:
        interface.delete_failed_uploads(conn)
        garbage_collect(interface, conn, config, 'simple')
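
init() reads several configuration fields. A sketch of the config shape it expects, with key names taken from the code above and purely illustrative values:

config = {
    'meta_pipeline': ['compress', 'encrypt'],  # stages applied to meta-data
    'crypto': {
        'encrypt_opts': {},
        'stream_crypt_key': pysodium.crypto_secretstream_xchacha20poly1305_keygen(),
    },
    'read_only': False,  # False permits cleanup of failed uploads
}
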
Example #3
    def test_simple_pipeline_compress(self):
        config = {}

        meta_pl_format = pipeline.get_default_pipeline_format()
        meta_pl_format['format'].update({'compress': None})
        data_in = b'some data input'

        #-------
        pl_out = pipeline.build_pipeline(write_helper, 'out')
        meta = {
            'path': 'test',
            'header': pipeline.serialise_pipeline_format(meta_pl_format)
        }
        meta2 = pl_out(data_in, meta, config)

        self.assertNotEqual(data_in, meta2['data'])

        #-------
        pl_in = pipeline.build_pipeline(read_helper, 'in')
        data_out, meta3 = pl_in(meta2, config)

        self.assertEqual(data_in, data_out)
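
Examples 1 and 3 each exercise a single pipeline stage. A sketch of a header combining both, assuming 'format' accepts multiple stages at once, as init() in Example 2 suggests when it populates it from config['meta_pipeline']:

meta_pl_format = pipeline.get_default_pipeline_format()
meta_pl_format['format'].update({'compress': None,
                                 'encrypt': config['crypto']['encrypt_opts']})
header = pipeline.serialise_pipeline_format(meta_pl_format)
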
Example #4
def streaming_file_upload(interface, conn, config, local_file_path, system_path):
    """ Stream a local file through the pipeline and upload it to remote storage """

    # Determine the correct pipeline format to use for this file from the configuration
    try:
        pipeline_format = next(plf for wildcard, plf in config['file_pipeline']
                               if fnmatch.fnmatch(system_path, wildcard))
    except StopIteration:
        raise SystemExit('No pipeline format matches ' + system_path)

    # Get remote file path
    remote_file_path = sfs.cpjoin(config['remote_base_path'], system_path)

    #----
    pipeline_configuration = pipeline.get_default_pipeline_format()
    pipeline_configuration['chunk_size'] = config['chunk_size']
    pipeline_configuration['format'] = {i : None for i in pipeline_format}
    if 'encrypt' in pipeline_configuration['format']:
        pipeline_configuration['format']['encrypt'] = config['crypto']['encrypt_opts']

    #-----
    upload = interface.streaming_upload()
    pl     = pipeline.build_pipeline_streaming(upload, 'out')
    pl.pass_config(config, pipeline.serialise_pipeline_format(pipeline_configuration))

    upload.begin(conn, remote_file_path)

    try:
        with open(local_file_path, 'rb') as fle:
            while True:
                print('.', end=' ', flush=True)  # one dot per chunk as a progress indicator

                chunk = fle.read(config['chunk_size'])
                if not chunk:
                    break
                pl.next_chunk(chunk)
            print()
        return upload.finish()

    # If the file no longer exists at this stage assume it has been deleted:
    # abort the partial upload and re-raise so the caller can skip the file
    except IOError:
        upload.abort()
        raise
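
The wildcard lookup at the top of streaming_file_upload expects config['file_pipeline'] to be an ordered iterable of (wildcard, stage-list) pairs matched against system_path with fnmatch; the first match wins. An illustrative shape (patterns and stage lists are examples only):

config['file_pipeline'] = [
    ('*.jpg', ['encrypt']),              # already compressed: skip the compress stage
    ('*',     ['compress', 'encrypt']),  # catch-all for everything else
]
config['chunk_size'] = 1 << 20  # bytes read per streamed chunk (example value)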