Example #1
class TestStreamingIteratorWithLargeChunks(unittest.TestCase):
    def setUp(self):
        self.letters = [b'a', b'b', b'c', b'd', b'e']  # bytes, so len() counts bytes like self.size does
        self.chunks = (letter * 2000 for letter in self.letters)
        self.size = 5 * 2000
        self.uploader = StreamingIterator(self.size, self.chunks)

    def test_returns_the_amount_requested(self):
        chunk_size = 1000
        bytes_read = 0
        while True:
            b = self.uploader.read(chunk_size)
            if not b:
                break
            assert len(b) == chunk_size
            bytes_read += len(b)

        assert bytes_read == self.size

    def test_returns_all_of_the_bytes(self):
        chunk_size = 8192
        bytes_read = 0
        while True:
            b = self.uploader.read(chunk_size)
            if not b:
                break
            bytes_read += len(b)

        assert bytes_read == self.size
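
What these tests pin down is the file-like contract: read(n) keeps returning exactly n bytes until the data runs out, regardless of how the underlying iterator chunks it. A standalone sketch of that behavior (requires pip install requests-toolbelt):

from requests_toolbelt.streaming_iterator import StreamingIterator

chunks = (letter * 2000 for letter in [b'a', b'b', b'c', b'd', b'e'])
uploader = StreamingIterator(5 * 2000, chunks)

# read sizes need not line up with the iterator's own 2000-byte chunks
assert len(uploader.read(1000)) == 1000   # half of one chunk
assert len(uploader.read(3000)) == 3000   # spans a chunk boundary
assert len(uploader.read()) == 6000       # everything that is left
assert uploader.read() == b''             # exhausted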
Example #2
class TestStreamingIteratorWithLargeChunks(object):
    @pytest.fixture(autouse=True)
    def setup(self, get_iterable):
        self.letters = [b'a', b'b', b'c', b'd', b'e']
        self.chunks = (letter * 2000 for letter in self.letters)
        self.size = 5 * 2000
        self.uploader = StreamingIterator(self.size, get_iterable(self.chunks))

    def test_returns_the_amount_requested(self):
        chunk_size = 1000
        bytes_read = 0
        while True:
            b = self.uploader.read(chunk_size)
            if not b:
                break
            assert len(b) == chunk_size
            bytes_read += len(b)

        assert bytes_read == self.size

    def test_returns_all_of_the_bytes(self):
        chunk_size = 8192
        bytes_read = 0
        while True:
            b = self.uploader.read(chunk_size)
            if not b:
                break
            bytes_read += len(b)

        assert bytes_read == self.size
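
The pytest variants, here and below, depend on a get_iterable fixture that the snippets do not include. A plausible minimal conftest.py version, parametrized so each test runs once with an iterator and once with the bare iterable:

import pytest


@pytest.fixture(params=[True, False])
def get_iterable(request):
    def _wrap(chunks):
        # exercise StreamingIterator with both an iterator and a plain iterable
        return iter(chunks) if request.param else chunks
    return _wrap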
Example #4
def handle_uploaded_file(file, callback):
    """
    :param file: Of type UploadedFile
    """

    generator = file.chunks()  # Create generator
    size = file.size  # Get size
    # content_type = file.content_type  # Get the content-type of the data

    # def my_callback(monitor):
    #     bar.show(monitor.bytes_read)

    streamer = StreamingIterator(size, generator, callback=callback)
    # encoder_len = streamer.size
    # bar = ProgressBar(expected_size=encoder_len, filled_char='=')

    # r = requests.put('http://localhost:4444/', data=streamer,
    #                   headers={'Content-Type': content_type})
    http_url = 'http://localhost:4444/'
    params = {'dsName': 'BigDisk', 'dcPath': 'Datacenter'}
    headers = {'Content-Type': 'application/octet-stream'}
    cookie = {
        'chocolate_chip':
        ' "cd9c342ce3df6d2944d45132c6feca1e161f39bc"; $Path=/'
    }

    # verify=False skips TLS verification; acceptable for localhost testing only
    response = requests.put(http_url,
                            params=params,
                            data=streamer,
                            headers=headers,
                            cookies=cookie,
                            verify=False)
    return response
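
A hypothetical Django view wired to handle_uploaded_file; the form field name 'file' is a placeholder, and the no-op callback simply satisfies this snippet's signature:

from django.http import HttpResponse


def upload_view(request):
    # request.FILES['file'] is the UploadedFile the docstring refers to
    if request.method == 'POST':
        handle_uploaded_file(request.FILES['file'], callback=lambda m: None)
        return HttpResponse('uploaded')
    return HttpResponse(status=405)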
Example #6
class TestStreamingIterator(unittest.TestCase):
    def setUp(self):
        self.chunks = [b'here', b'are', b'some', b'chunks']
        self.iterator = iter(self.chunks)
        self.size = 17  # == len(b''.join(self.chunks)) == 4 + 3 + 4 + 6
        self.uploader = StreamingIterator(self.size, self.iterator)

    def test_read_returns_all_chunks_in_one(self):
        assert self.uploader.read() == b''.join(self.chunks)

    def test_read_returns_empty_string_after_exhausting_the_iterator(self):
        for i in range(0, 4):
            self.uploader.read(8192)

        assert self.uploader.read() == b''
        assert self.uploader.read(8192) == b''
Example #7
class TestStreamingIterator(object):
    @pytest.fixture(autouse=True)
    def setup(self, get_iterable):
        self.chunks = [b'here', b'are', b'some', b'chunks']
        self.size = 17
        self.uploader = StreamingIterator(self.size, get_iterable(self.chunks))

    def test_read_returns_all_chunks_in_one(self):
        assert self.uploader.read() == b''.join(self.chunks)

    def test_read_returns_empty_string_after_exhausting_the_iterator(self):
        for i in range(0, 4):
            self.uploader.read(8192)

        assert self.uploader.read() == b''
        assert self.uploader.read(8192) == b''
Example #8
 def upload(self, local_file_path, rest_resource_path):
     """
     Upload a local file to the given REST resource path.
     @type       local_file_path: string
     @param      local_file_path: Local file path
     @type       rest_resource_path: string
     @param      rest_resource_path: Resource file path (without leading slash!)
     @rtype:     ResponseWrapper
     @return:    Wrapped HTTP response; success is False if no file path is given
     """
     file_resource_uri = self.rest_endpoint.get_resource_uri(
         rest_resource_path)
     if local_file_path is not None:
         # strip the directory part of the path to get the filename
         filename = local_file_path.rpartition('/')[2]
         chunks = FileBinaryDataChunks(local_file_path, 65536,
                                       self.progress_reporter).chunks()
         file_size = os.path.getsize(local_file_path)
         streamer = StreamingIterator(file_size, chunks)
         content_type = 'multipart/form-data'
         s = requests.Session()
         s.auth = ('admin', 'admin')
         r = s.put(file_resource_uri.format(filename),
                   data=streamer,
                   headers={'Content-Type': content_type})
         return ResponseWrapper(success=True, response=r)
     else:
         return ResponseWrapper(success=False)
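
FileBinaryDataChunks and the progress reporter are specific to this project and are not shown. A minimal stand-in with the same role (progress reporting omitted) could look like this, paired as StreamingIterator(os.path.getsize(path), file_chunks(path)):

def file_chunks(path, chunk_size=65536):
    # yield the file as successive chunk_size blocks of bytes
    with open(path, 'rb') as f:
        while True:
            chunk = f.read(chunk_size)
            if not chunk:
                return
            yield chunk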
Example #9
 def stream_serializer(self, data, request_schema=None):
     """
     Stream Serializer will compress and stream
     :param data:
     :return:
     """
     zipped_payload = self._compress(data)
     logging.info('compressed payload size is {} bytes'.format(
         self.get_size(zipped_payload)))
     return (rest_config.MIME_JSON,
             StreamingIterator(size=len(zipped_payload),
                               iterator=self._gen(zipped_payload)))
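
_compress and _gen are helpers on the same class and are not part of the snippet. Plausible minimal versions, consistent with the call sites above (gzip and an 8 KB slice size are assumptions):

import gzip


def _compress(self, data):
    # gzip the serialized payload; accept str or bytes
    if isinstance(data, str):
        data = data.encode('utf-8')
    return gzip.compress(data)


def _gen(self, payload, chunk_size=8192):
    # yield the compressed payload in fixed-size slices for streaming
    for i in range(0, len(payload), chunk_size):
        yield payload[i:i + chunk_size]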
Example #10
    def post_to_bbg_transport(self, req):
        """
        Call BT api [POST] and keep polling until response has complete status
        or times out
        :param req:
        :return:
        """
        logging.info('Submitting to BT...')

        json_payload = self.get_request_item(req).to_json()
        logging.info('payload size is {} bytes for {} requests'.format(
            self.get_size(json_payload), len(req)))

        buf = io.BytesIO()
        with gzip.GzipFile(fileobj=buf, mode='wb') as f:
            # todo: fix this to use pypimco serialize.py
            # json_payload is already a JSON string; gzip needs bytes, and
            # json.dump here would both double-encode it and fail on a
            # binary file object
            f.write(json_payload.encode('utf-8'))

        zipped_payload = buf.getvalue()

        logging.info(
            'compressed payload size is {} bytes for {} requests'.format(
                self.get_size(zipped_payload), len(req)))

        payload = zipped_payload

        def gen(data):
            for i in range(0, len(data), 1000):
                yield data[i:i + 1000]

        streamer = StreamingIterator(size=len(payload), iterator=gen(payload))

        try:
            response = self._post(data=streamer,
                                  headers={
                                      'Content-Type': 'application/json',
                                      'Content-Encoding': 'gzip'
                                  },
                                  is_stream=True)
        except ClientException as ex:
            status = ex.detail_json.get('status')
            logging.exception('BBG Transport API call failed: ' + str(ex))
            if status == 403:
                raise BbgTransportForbidden('Forbidden: ' +
                                            ex.detail_json.get('msg'))
        except Exception as ex:
            logging.exception('BBG Transport Server Error: ' + str(ex))
            raise
        else:
            return self.handle_response(response)
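
Stripped of the project plumbing, the wire-level pattern above is: compress once, then stream the compressed bytes with a Content-Encoding: gzip header so the server knows to decompress. A self-contained sketch, with httpbin.org standing in for the real endpoint:

import gzip
import json

import requests
from requests_toolbelt.streaming_iterator import StreamingIterator

payload = gzip.compress(json.dumps({'hello': 'world'}).encode('utf-8'))


def gen(data, n=1000):
    # same 1000-byte slicing as the gen() closure above
    for i in range(0, len(data), n):
        yield data[i:i + n]


resp = requests.post('https://httpbin.org/post',
                     data=StreamingIterator(len(payload), gen(payload)),
                     headers={'Content-Type': 'application/json',
                              'Content-Encoding': 'gzip'})
print(resp.status_code)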
Example #11
 def setUp(self):
     self.chunks = [b'here', b'are', b'some', b'chunks']
     self.iterator = iter(self.chunks)
     self.size = 17
     self.uploader = StreamingIterator(self.size, self.iterator)
Example #12
def main():
    # parse arguments
    args = parse_args(sys.argv[1:])

    # parse config file gns3_proxy_config.ini
    config = configparser.ConfigParser()
    with open(args.config_file) as f:
        config.read_file(f)

    logging.basicConfig(
        level=getattr(logging, args.log_level),
        format=
        '%(asctime)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s')

    # get backend_user
    #
    # description: Username to use to access backend GNS3 server
    # default: admin
    if config.get('proxy', 'backend_user'):
        backend_user = config.get('proxy', 'backend_user')
    else:
        backend_user = "******"

    # get backend_password
    #
    # description: Password to use to access backend GNS3 server
    # default: password
    if config.get('proxy', 'backend_password'):
        backend_password = config.get('proxy', 'backend_password')
    else:
        backend_password = "******"

    # get backend_port
    #
    # description: TCP port to use to access backend GNS3 server
    # default: 3080
    if config.get('proxy', 'backend_port'):
        backend_port = config.getint('proxy', 'backend_port')
    else:
        backend_port = 3080

    # Compute Image Backend
    image_backend_url = '/compute/' + args.image_type + '/images'

    # Alternate location for image access, used for upload by the GNS3 client, but download (GET) throws an error
    alt_image_backend_url = '/computes/local/' + args.image_type + '/images'

    # read servers from config
    if config.items('servers'):
        config_servers = dict()
        server_items = config.items('servers')
        for server, value in server_items:
            try:
                ip_address(value)
            except ValueError:
                logger.fatal(
                    "server config %s is not a valid IP address (e.g., 1.2.3.4)"
                    % value)
                raise ProxyError()
            config_servers[server] = value
    else:
        # empty dict keeps the later membership and len() checks valid
        config_servers = dict()

    logger.debug("Config backend_user: %s" % backend_user)
    logger.debug("Config backend_password: %s" % backend_password)
    logger.debug("Config backend_port: %s" % backend_port)

    logger.debug("Config servers: %s" % config_servers)
    try:
        username = backend_user
        password = backend_password

        # get source server IP
        if args.source_server in config_servers:
            src_server = config_servers[args.source_server]
            logger.debug("Source server will be %s:%s" %
                         (src_server, backend_port))
        else:
            logger.fatal("Source server not found in config.")
            raise ProxyError()

        base_src_api_url = "http://" + src_server + ":" + str(
            backend_port) + "/v2"
        logger.debug("Searching source images")
        images = list()
        url = base_src_api_url + image_backend_url

        r = requests.get(url, auth=(username, password))
        if not r.status_code == 200:
            logger.fatal("Could not list images.")
            logger.debug("Status code: " + str(r.status_code) + " Text:" +
                         r.text)
            raise ProxyError()
        else:
            image_results = json.loads(r.text)
            for image in image_results:
                if re.fullmatch(args.image_filename, image['filename']):
                    logger.debug('matched image: %s' % image['filename'])
                    images.append(image)

        if len(images) == 0:
            logger.fatal("Specified image not found.")
            raise ProxyError()

        for image in images:
            image_filename = image['filename']
            print("#### Replicating image: %s from server: %s (%s)" %
                  (image_filename, args.source_server, src_server))

            # target handling

            # Try to find match for target server in config
            target_servers = list()
            if len(config_servers) > 0:
                for key in config_servers:
                    if re.fullmatch(args.target_server, key):
                        logger.debug(
                            "Target server found: %s (%s) using provided match: %s"
                            % (key, config_servers[key], args.target_server))
                        if key == args.source_server:
                            logger.debug(
                                "Target server %s is the same as the source server %s . Filtered out."
                                % (key, args.source_server))
                        else:
                            target_servers.append({
                                'name': key,
                                'address': config_servers[key]
                            })
            else:
                logger.fatal(
                    "No servers defined in config. Could not select target server."
                )
                raise ProxyError()

            if len(target_servers) == 0:
                logger.fatal(
                    "No target servers found using match: %s. Could not select target server."
                    % args.target_server)
                raise ProxyError()

            for target_server in target_servers:
                target_server_name = target_server['name']
                target_server_address = target_server['address']
                logger.debug("    Replicating image: %s to server: %s" %
                             (image_filename, target_server_name))
                base_dst_api_url = "http://" + target_server_address + ":" + str(
                    backend_port) + "/v2"

                logger.debug("Checking if target image exists...")
                url = base_dst_api_url + image_backend_url
                r = requests.get(url, auth=(username, password))
                if r.status_code == 200:
                    target_image_exists = False
                    target_image_md5sum = ''
                    target_image_to_delete = ''
                    target_image_results = json.loads(r.text)
                    for target_image in target_image_results:
                        if re.fullmatch(image_filename,
                                        target_image['filename']):
                            logger.debug(
                                "image: %s already exists on server %s" %
                                (target_image['filename'], target_server_name))
                            if target_image_exists:
                                logger.fatal(
                                    "Multiple images matched %s on server %s. "
                                    "Import can only be used for single image."
                                    % (image_filename, target_server_name))
                                raise ProxyError()
                            else:
                                target_image_exists = True
                                target_image_md5sum = target_image['md5sum']
                                target_image_to_delete = target_image['filename']

                    if target_image_exists:
                        if args.force:
                            # deleting image
                            # print("Deleting existing image %s on server: %s"
                            #      % (image_to_delete, config_servers[server]))
                            # url = base_dst_api_url + image_backend_url + '/' + image_to_delete
                            # r = requests.delete(url, auth=(username, password))
                            # if not r.status_code == 204:
                            #    if r.status_code == 404:
                            #        logger.debug("Image did not exist before, not deleted")
                            #    else:
                            #        logger.fatal("unable to delete image")
                            #        raise ProxyError()
                            logger.debug(
                                "image: %s (%s) already exists on server %s. Overwriting it."
                                % (image_filename, target_image_to_delete,
                                   target_server_name))
                        elif image['md5sum'] == target_image_md5sum:
                            logger.debug(
                                "image: %s (%s) already exists on server %s, skipping transfer. "
                                "Use --force to overwrite it during import." %
                                (image_filename, target_image_to_delete,
                                 target_server_name))
                            continue
                        else:
                            logger.fatal(
                                "image: %s (%s) already exists on target server %s, "
                                "but the md5sum does not match. "
                                "Use --force to overwrite it during import."
                                % (image_filename, target_image_to_delete,
                                   target_server_name))
                            raise ProxyError()

                    # export source image
                    logger.debug("Opening source image")
                    url = base_src_api_url + image_backend_url + '/' + image_filename
                    r_export = requests.get(url,
                                            stream=True,
                                            auth=(username, password))
                    if not r_export.status_code == 200:
                        logger.fatal(
                            "Unable to export image from source server.")
                        logger.debug("Status code: " + str(r.status_code) +
                                     " Text:" + r.text)
                        raise ProxyError()

                    start_timestamp = int(round(time.time()))

                    def generate_chunk():
                        transferred_length_upload = 0
                        prev_transferred_length_upload = 0
                        next_percentage_to_print_upload = 0
                        prev_timestamp_upload = int(round(time.time() * 1000))
                        for in_chunk in r_export.iter_content(
                                chunk_size=args.buffer):
                            if in_chunk:
                                yield in_chunk
                                transferred_length_upload += len(in_chunk)
                                if total_length > 0:
                                    transferred_percentage_upload = int(
                                        (transferred_length_upload /
                                         total_length) * 100)
                                else:
                                    transferred_percentage_upload = 0
                                if transferred_percentage_upload >= next_percentage_to_print_upload:
                                    curr_timestamp_upload = int(
                                        round(time.time() * 1000))
                                    duration_upload = curr_timestamp_upload - prev_timestamp_upload
                                    delta_length_upload = \
                                        transferred_length_upload - prev_transferred_length_upload
                                    if duration_upload > 0:
                                        rate_upload = delta_length_upload / (
                                            duration_upload / 1000)
                                    else:
                                        rate_upload = 0
                                    prev_timestamp_upload = curr_timestamp_upload
                                    prev_transferred_length_upload = transferred_length_upload
                                    print(
                                        "Replicating to %s (%s) ... %d%% (%.3f MB/s)"
                                        % (target_server_name,
                                           target_server_address,
                                           transferred_percentage_upload,
                                           (rate_upload / 1000000)))
                                    next_percentage_to_print_upload = next_percentage_to_print_upload + 5

                    # import target image
                    logger.debug("Opening target image")
                    url = base_dst_api_url + alt_image_backend_url + '/' + image_filename
                    total_length = int(r_export.headers.get('content-length'))
                    # r_import = requests.post(url, auth=(username, password), data=generate_chunk())
                    streamer = StreamingIterator(total_length,
                                                 generate_chunk())
                    r_import = requests.post(url,
                                             auth=(username, password),
                                             data=streamer)
                    if not r_import.status_code == 200:
                        if r_import.status_code == 403:
                            logger.fatal(
                                "Forbidden to import image on target server.")
                            logger.debug("Status code: " + str(r.status_code) +
                                         " Text:" + r.text)
                            raise ProxyError()
                        else:
                            logger.fatal(
                                "Unable to import image on target server.")
                            logger.debug("Status code: " + str(r.status_code) +
                                         " Text:" + r.text)
                            raise ProxyError()
                    else:
                        end_timestamp = int(round(time.time()))

                        print(
                            "#### image %s (%s bytes) replicated from server: %s to server: %s (in %i secs)"
                            % (image_filename, total_length,
                               args.source_server, target_server_name,
                               (end_timestamp - start_timestamp)))

                else:
                    logger.fatal(
                        "Could not get status of images from server %s." %
                        target_server_name)
                    logger.debug("Status code: " + str(r.status_code) +
                                 " Text:" + r.text)
                    raise ProxyError()

        print("Done.")

    except KeyboardInterrupt:
        pass
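
Most of this example is server matching, logging and progress arithmetic; the StreamingIterator part reduces to relaying a streamed GET straight into a POST without buffering the whole image in memory. A sketch with placeholder URLs and credentials, assuming the source sends a Content-Length header as it does above:

import requests
from requests_toolbelt.streaming_iterator import StreamingIterator

auth = ('admin', 'password')                                        # placeholder
src_url = 'http://src:3080/v2/compute/qemu/images/x.qcow2'          # placeholder
dst_url = 'http://dst:3080/v2/computes/local/qemu/images/x.qcow2'   # placeholder

src = requests.get(src_url, stream=True, auth=auth)
size = int(src.headers['content-length'])
streamer = StreamingIterator(size, src.iter_content(chunk_size=65536))
resp = requests.post(dst_url, auth=auth, data=streamer)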
Example #13
 def setup(self, get_iterable):
     self.chunks = [b'here', b'are', b'some', b'chunks']
     self.size = 17
     self.uploader = StreamingIterator(self.size, get_iterable(self.chunks))
Example #14
 def setUp(self):
     self.letters = [b'a', b'b', b'c', b'd', b'e']  # bytes, to match the byte-count in self.size
     self.chunks = (letter * 2000 for letter in self.letters)
     self.size = 5 * 2000
     self.uploader = StreamingIterator(self.size, self.chunks)
Example #15
 def setup(self, get_iterable):
     self.letters = [b'a', b'b', b'c', b'd', b'e']
     self.chunks = (letter * 2000 for letter in self.letters)
     self.size = 5 * 2000
     self.uploader = StreamingIterator(self.size, get_iterable(self.chunks))
Example #17
import os
from os import path

import requests

# pip install requests-toolbelt
from requests_toolbelt.streaming_iterator import StreamingIterator


# To upload a file, just change this line:
filePath = "C:\\Uploads\\Source\\4M.dat"
# This is the URL of the file-upload webservice
serverURL = "http://localhost:8080/upload-webservice/bigfileupload"

# retrieve fileName from the path; it is sent as a header
fileName = path.split(filePath)[1]
size = os.path.getsize(filePath)
headers = {"File-Name": fileName, "Content-Type": "application/octet-stream"}

# open in plain binary mode ('rb', not 'r+b') and stream the file
with open(filePath, 'rb') as f:
    resp = requests.post(serverURL, headers=headers,
                         data=StreamingIterator(size, f))

# print result
print(resp.status_code)
print(resp.text)
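
Since requests streams an open file object natively and takes the length from the file itself, the toolbelt wrapper is only strictly needed when all you have is a generator. The same upload without it, reusing filePath, serverURL and headers from above:

with open(filePath, 'rb') as f:
    resp = requests.post(serverURL, headers=headers, data=f)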
Example #18
auth = HTTPBasicAuth(config['Server']['user'], config['Server']['password'])

url = "http://{}/v2/compute/qemu/images".format(gns3_server)
result = requests.get(url, auth=auth)
result.raise_for_status()
images = result.json()
image_filenames = [j['filename'] for j in images]

if args.ls:
    for image in images:
        print(image['filename'])
    exit(0)

if args.filename in image_filenames and not args.overwrite:
    print("Won't overwrite existing image")
    exit(1)

if args.filename:
    with open(args.filename, 'rb') as f:
        url = "http://{}/v2/compute/qemu/images/{}".format(
            gns3_server, args.filename)
        size = os.stat(args.filename).st_size
        streamer = StreamingIterator(size, f)
        result = requests.post(
            url,
            auth=auth,
            data=streamer,
            headers={'Content-Type': 'application/octet-stream'})
        result.raise_for_status()