Example #1
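Fetches the latest Jenkins build's artifact list and streams each artifact into a local dist/ directory, advancing the progress bar by the number of bytes buffered so far.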
# Assumed imports for this snippet: io/os from the standard library and
# clint's Bar aliased as ProgressBar; JENKINS_URL is a module-level
# constant defined elsewhere in the host project.
import io
import os

from clint.textui.progress import Bar as ProgressBar


def download_artifacts(session):
    response = session.get("{0}/lastBuild/api/json/".format(JENKINS_URL),
                           headers={"Accept": "application/json"})
    response.raise_for_status()
    json_response = response.json()
    assert not json_response["building"]
    assert json_response["result"] == "SUCCESS"

    paths = []

    for artifact in json_response["artifacts"]:
        response = session.get("{0}artifact/{1}".format(
            json_response["url"], artifact["relativePath"]),
                               stream=True)
        assert response.headers["content-length"]
        print("Downloading {0}".format(artifact["fileName"]))
        bar = ProgressBar(expected_size=int(
            response.headers["content-length"]),
                          filled_char="=")
        content = io.BytesIO()
        for data in response.iter_content(chunk_size=8192):
            content.write(data)
            bar.show(content.tell())
        assert bar.expected_size == content.tell()
        bar.done()
        out_path = os.path.join(
            os.path.dirname(__file__),
            "dist",
            artifact["fileName"],
        )
        with open(out_path, "wb") as f:
            f.write(content.getvalue())
        paths.append(out_path)
    return paths
Example #2
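Downloads a Firefox Send share, decrypting the AES-GCM stream on the fly and verifying the authentication tag before the temporary file is renamed into place.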
# Assumed imports: pycryptodome provides AES, and clint's Bar is aliased
# as ProgressBar.
import base64
import binascii
import json
import os
import re
from hashlib import sha256

import requests
from Crypto.Cipher import AES
from clint.textui.progress import Bar as ProgressBar


def download(url, dest):
    m = re.match(r'^https://send\.firefox\.com/download/(\w+)/#([\w_-]+)$', url)
    if not m:
        raise ValueError("URL format appears to be incorrect")

    fid = m.group(1)
    key = base64.urlsafe_b64decode(m.group(2) + '==')

    print("Downloading %s..." % url)
    url = "https://send.firefox.com/api/download/" + fid
    resp = requests.get(url, stream=True)
    resp.raise_for_status()
    flen = int(resp.headers.get('Content-Length'))

    metadata = json.loads(resp.headers.get('X-File-Metadata'))
    filename = metadata['filename']

    if os.path.isdir(dest):
        filename = os.path.join(dest, filename)
    else:
        filename = dest

    iv = binascii.unhexlify(metadata['id'])
    cipher = AES.new(key, AES.MODE_GCM, iv)

    ho = sha256()

    print("Downloading to %s..." % filename)

    try:
        with open(filename + '.tmp', 'wb') as outf:
            bar = ProgressBar(expected_size=flen, filled_char='=')

            dl = 0
            tag = b''
            taglen = 16
            for data in resp.iter_content(chunk_size=8192):
                dl += len(data)
                bar.show(dl)

                if dl > flen - taglen:
                    dend = max(len(data) - (dl - (flen - taglen)), 0)
                    tag += data[dend:]
                    data = data[:dend]

                chunk = cipher.decrypt(data)
                ho.update(chunk)
                outf.write(chunk)
                if len(tag) == taglen:
                    break

            print()
            cipher.verify(tag)
    except Exception as e:
        print("File download failed:", e)
        os.unlink(filename + '.tmp')
    else:
        os.rename(filename + '.tmp', filename)
        print("Done, file verified!")
Example #3
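A Firefox Send downloader variant that authenticates the request with an HMAC signature derived from the shared secret; get_metadata, deriveFileKey, deriveAuthKey and the b64encode/b64decode wrappers come from the host project.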
def download(fid, secret, dest, password=None, url=None):
    metadata, nonce = get_metadata(fid, secret, password, url)

    encryptKey = deriveFileKey(secret)
    authKey = deriveAuthKey(secret, password, url)

    sig = hmac.new(authKey, nonce, sha256).digest()
    url = "https://send.firefox.com/api/download/" + fid
    resp = requests.get(url,
                        headers={'Authorization': 'send-v1 ' + b64encode(sig)},
                        stream=True)
    resp.raise_for_status()

    flen = int(resp.headers.get('Content-Length'))
    filename = metadata['metadata']['name']

    if os.path.isdir(dest):
        filename = os.path.join(dest, filename)
    else:
        filename = dest

    iv = b64decode(metadata['metadata']['iv'])
    cipher = AES.new(encryptKey, AES.MODE_GCM, iv, mac_len=16)

    ho = sha256()

    print("Downloading to %s..." % filename)

    try:
        with open(filename + '.tmp', 'wb') as outf:
            bar = ProgressBar(expected_size=flen, filled_char='=')

            dl = 0
            tag = b''
            taglen = 16
            for data in resp.iter_content(chunk_size=8192):
                dl += len(data)
                bar.show(dl)

                if dl > flen - taglen:
                    dend = max(len(data) - (dl - (flen - taglen)), 0)
                    tag += data[dend:]
                    data = data[:dend]

                chunk = cipher.decrypt(data)
                ho.update(chunk)
                outf.write(chunk)
                if len(tag) == taglen:
                    break

            print()
            cipher.verify(tag)
    except Exception as e:
        print("File download failed:", e)
        os.unlink(filename + '.tmp')
    else:
        os.rename(filename + '.tmp', filename)
        print("Done, file verified!")
Example #4
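Resumes a chunked upload: it asks the server for the current offset, then seeks and re-sends from there, reporting progress as the offset advances (_get_offset and _upload_chunk are methods of the host client).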
    def resume_upload(self,
                      file_path,
                      file_endpoint,
                      chunk_size=None,
                      headers=None,
                      auth=None,
                      offset=None):
        chunk_size = chunk_size or self.chunk_size

        # honor a caller-supplied offset; otherwise ask the upload server
        try:
            if offset is None:
                offset = self._get_offset(file_endpoint,
                                          headers=headers,
                                          auth=auth)
        except FloydException as e:
            floyd_logger.error(
                "Failed to fetch offset data from upload server! %s",
                e.message)
            return False
        except requests.exceptions.ConnectionError as e:
            floyd_logger.error(
                "Cannot connect to the Floyd data upload server for offset. "
                "Check your internet connection.")
            return False

        total_sent = 0
        file_size = os.path.getsize(file_path)

        with open(file_path, 'rb') as f:

            pb = ProgressBar(filled_char="=", expected_size=file_size)
            while offset < file_size:
                pb.show(offset)
                f.seek(offset)
                data = f.read(chunk_size)
                try:
                    offset = self._upload_chunk(data,
                                                offset,
                                                file_endpoint,
                                                headers=headers,
                                                auth=auth)
                    total_sent += len(data)
                    floyd_logger.debug("%s bytes sent", total_sent)
                except FloydException as e:
                    floyd_logger.error(
                        "Failed to fetch offset data from upload server! %s",
                        e.message)
                    return False
                except requests.exceptions.ConnectionError as e:
                    floyd_logger.error(
                        "Cannot connect to the Floyd data upload server. "
                        "Check your internet connection.")
                    return False

            # Complete the progress bar with one more call to show()
            pb.show(offset)
            pb.done()
        return True
Example #5
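A minimal progress callback that lazily constructs the bar the first time it learns the total size.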
        class CallBack:
            def __init__(self):
                self.bar = None

            def __call__(self, size_rd, total):
                if self.bar is None:
                    self.bar = ProgressBar(expected_size=total,
                                           filled_char='=')
                self.bar.show(size_rd)
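A callable like this fits any API that reports progress as (bytes_so_far, total_bytes); paramiko's SFTPClient.get(), for example, invokes its callback with exactly that pair. A usage sketch, assuming an already-connected client named sftp:

# 'sftp' is assumed to be a connected paramiko.SFTPClient
sftp.get('/remote/backup.tar.gz', 'backup.tar.gz', callback=CallBack())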
Example #6
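Like Example #1, but for a multi-configuration Jenkins job: it walks the runs of the latest build and downloads each run's artifacts.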
def download_artifacts(session):
    response = session.get(
        "{0}/lastBuild/api/json/".format(JENKINS_URL),
        headers={
            "Accept": "application/json"
        }
    )
    response.raise_for_status()
    assert not response.json()["building"]
    assert response.json()["result"] == "SUCCESS"

    paths = []

    last_build_number = response.json()["number"]
    for run in response.json()["runs"]:
        if run["number"] != last_build_number:
            print(
                "Skipping {0} as it is not from the latest build ({1})".format(
                    run["url"], last_build_number
                )
            )
            continue

        response = session.get(
            run["url"] + "api/json/",
            headers={
                "Accept": "application/json",
            }
        )
        response.raise_for_status()
        for artifact in response.json()["artifacts"]:
            response = session.get(
                "{0}artifact/{1}".format(run["url"], artifact["relativePath"]),
                stream=True
            )
            assert response.headers["content-length"]
            print("Downloading {0}".format(artifact["fileName"]))
            bar = ProgressBar(
                expected_size=int(response.headers["content-length"]),
                filled_char="="
            )
            content = io.BytesIO()
            for data in response.iter_content(chunk_size=8192):
                content.write(data)
                bar.show(content.tell())
            assert bar.expected_size == content.tell()
            bar.done()
            out_path = os.path.join(
                os.path.dirname(__file__),
                "dist",
                artifact["fileName"],
            )
            with open(out_path, "wb") as f:
                f.write(content.getvalue())
            paths.append(out_path)
    return paths
Example #7
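A thin wrapper around clint's Bar that keeps committed work (increment) separate from in-flight progress (progress).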
class ProgressBar():
    def __init__(self, length):
        self.bar = Bar(expected_size=length, filled_char='=')
        self.status = 0

    def increment(self, value):
        self.status += value

    def progress(self, value):
        self.bar.show(self.status + value)
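A usage sketch of the wrapper: progress() draws the bar at the committed offset plus a partial amount, and increment() then commits that amount, which makes retried chunks easy to re-report (total_size, chunks and send() are hypothetical):

bar = ProgressBar(total_size)  # hypothetical total in bytes
for chunk in chunks:           # hypothetical iterable of byte chunks
    sent = send(chunk)         # hypothetical transfer call returning bytes sent
    bar.progress(sent)         # draw committed + in-flight bytes
    bar.increment(sent)        # commit them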
Example #8
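Uploads a package as multipart/form-data, wrapping the encoder in a MultipartEncoderMonitor so the progress bar advances as bytes are read off the stream.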
    def _upload(self, package):
        data = package.metadata_dictionary()
        data.update({
            # action
            ":action": "file_upload",
            "protcol_version": "1",
        })

        data_to_send = self._convert_data_to_list_of_tuples(data)

        print("Uploading {0}".format(package.basefilename))

        with open(package.filename, "rb") as fp:
            data_to_send.append((
                "content",
                (package.basefilename, fp, "application/octet-stream"),
            ))
            encoder = MultipartEncoder(data_to_send)
            bar = ProgressBar(expected_size=encoder.len, filled_char='=')
            monitor = MultipartEncoderMonitor(
                encoder, lambda monitor: bar.show(monitor.bytes_read))

            resp = self.session.post(
                self.url,
                data=monitor,
                allow_redirects=False,
                headers={'Content-Type': monitor.content_type},
            )
            bar.done()

        return resp
Example #9
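Wraps a streaming requests response so it can be written to disk chunk by chunk while a progress bar tracks bytes received.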
class ProgressDownloadStream(object):
    """
  Progress bar buffer class that can monitor a file download by displaying
  progress to the user while also writing to the desired file.

  Basic usage:

    download_stream = ProgressDownloadStream(stream=api_response_obj,
                                             expected_size=total_file_bytes)

    download_stream.stream_to_file(desired_file_path)

  """
    def __init__(self, stream=None, expected_size=None, chunk_size=512):
        """
    :param stream:
      Streaming API response object returned by the 'requests' library
      :type: requests.Response
    :param int expected_size:
      Total size of file being downloaded (in bytes)
    :param int chunk_size:
      Chunk size to use when iterating over streamed response content
    """
        self.stream = stream
        self.prog_bar = ProgressBar(expected_size=expected_size,
                                    filled_char='=')
        self.progress = 0
        self.chunk_size = chunk_size

    def stream_to_file(self, path):
        """
    Download file to given path and display progress to user

    :param str path: Desired file path
    """
        # Using default state of files being overwritten for now
        if os.path.exists(path):
            os.remove(path)

        # Stream downloaded contents to file and show progress
        with open(path, 'wb') as f:
            for chunk in self.stream.iter_content(chunk_size=self.chunk_size):
                f.write(chunk)
                self.progress += int(len(chunk))
                self.prog_bar.show(self.progress)
Example #10
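A chunked-file generator for streaming uploads; progress is reported through a user-supplied hook, a clint progress bar, or plain prints, whichever is available.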
class FileChunks(object):
    def __init__(self,
                 filename,
                 chunk_size=2 * 1024 * 1024,
                 progress_hook=None):
        self.chunk_size = chunk_size
        self.amount_seen = 0
        self.filename = filename
        self.file_obj = open(filename, 'rb')
        self.file_size = os.fstat(self.file_obj.fileno()).st_size
        self.progress_hook = progress_hook
        if self.progress_hook is None and ProgressBar is not None:
            self.progress_bar = ProgressBar(label=filename,
                                            expected_size=self.file_size,
                                            filled_char='=')
            #self.progress_bar.show(self.amount_seen)
        else:
            self.progress_bar = None

    def get(self):
        try:
            data = self.file_obj.read(self.chunk_size)
            while len(data) > 0:
                self.amount_seen += len(data)
                if self.progress_hook is not None:
                    self.progress_hook(self.filename, self.amount_seen,
                                       self.file_size)
                if self.progress_bar is not None:
                    self.progress_bar.show(self.amount_seen)
                if self.progress_bar is None and self.progress_hook is None:
                    print('Uploading %s: %s / %s\r' %
                          (self.filename, self.amount_seen, self.file_size))
                yield data
                data = self.file_obj.read(self.chunk_size)
        finally:
            if self.progress_bar is not None:
                self.progress_bar.done()
            if self.progress_bar is None and self.progress_hook is None:
                print('Done Uploading %s' % (self.filename, ))
            self.file_obj.close()

    def close(self):
        if not self.file_obj.closed:
            self.file_obj.close()
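Because get() is a generator, it can be passed directly as a request body; requests sends generator bodies with chunked transfer encoding. A usage sketch against a hypothetical endpoint:

import requests

fc = FileChunks('build.tar.gz')  # hypothetical local file
try:
    resp = requests.post('https://example.com/upload', data=fc.get())
    resp.raise_for_status()
finally:
    fc.close()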
Example #11
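Posts a firmware artifact as multipart/form-data, attaching a progress monitor only when stderr is a TTY and the optional requests_toolbelt/clint dependencies import cleanly.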
def do_artifacts_artifact_add(opts):
    logging.debug('add artifact %r', opts)
    url = artifacts_url(opts.service)
    image = {
        'name': opts.name,
        'description': opts.description,
    }
    # build contents of multipart/form-data, image meta must come first, hence
    # we use an OrderedDict to preserve the order
    files = OrderedDict()
    for k, v in image.items():
        files[k] = (None, io.StringIO(v))
    # followed by firmware data
    # but first, try to find out the size of firmware data
    files['size'] = str(os.stat(opts.infile).st_size)
    files['artifact'] = (opts.infile, open(opts.infile, 'rb'),
                         "application/octet-stream", {})

    encoder = MultipartEncoder(files)

    if sys.stderr.isatty():
        try:
            from requests_toolbelt import MultipartEncoderMonitor
            from clint.textui.progress import Bar as ProgressBar

            pb = ProgressBar(expected_size=encoder.len,
                             filled_char='=',
                             every=1024 * 1024)
            monitor = MultipartEncoderMonitor(
                encoder, lambda mon: pb.show(mon.bytes_read))
            encoder = monitor
        except ImportError:
            pass

    with api_from_opts(opts) as api:
        rsp = api.post(url,
                       data=encoder,
                       headers={'Content-Type': encoder.content_type})
        if rsp.status_code == 201:
            # created
            location = rsp.headers.get('Location', '')
            print("created with URL: {}".format(location))
            print('artifact ID: ', location.rsplit('/')[-1])
        else:
            errorprinter(rsp)
Example #12
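Starts Canvas course exports and polls each export's progress URL, yielding a course once its download URL is available.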
def course_exports(course_list):
    '''Do course exports for each course in the list'''
    #pprint.pprint(course_list)
    bar = Bar(label='Exporting Courses ', expected_size=len(course_list))
    export_list = []
    for idx, course in enumerate(course_list):
        export_list.append(course_export(course))
        bar.show(idx)

    blen = len(export_list)
    bar.label = "Checking Export Statuses: "
    bar.expected_size = blen
    bar.show(1)
    while export_list:
        if len(export_list) < 10:
            time.sleep(2)
        bar.show(blen - len(export_list))
        # iterate over a copy so finished exports can be removed safely
        for cm in list(export_list):
            course, res = cm
            url = res.get('progress_url', None)
            if not url:
                export_list.remove(cm)
                print('not able to generate export for course', course)
            else:
                download_progress = requests.get(
                    url, headers=config['headers']).json()

                bar.label = "Checking Export Status: {} {}% ".format(
                    course[course_id_field], download_progress['completion'])
                if download_progress['workflow_state'] not in [
                        'queued', 'running'
                ]:
                    if download_progress['workflow_state'] == 'completed':
                        url = "https://{domain}/api/v1/courses/{}/content_exports/{}".format(
                            course[course_id_field], res['id'], **config)
                        export_info = requests.get(
                            url, headers=config['headers']).json()
                        export_url = export_info['attachment']['url']
                        course['export_url'] = export_url
                        yield course
                    else:
                        course['export_url'] = None
                    export_list.remove(cm)
Example #13
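A wget-style download helper (Python 2 era) with proxy support, resumable downloads, hash verification, a local file cache, and an optional progress bar.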
def wget(url,target=None,printhook=None,proxies=None,connect_timeout=10,download_timeout=None,verify_cert=False,referer=None,user_agent=None,cert=None,resume=False,md5=None,sha1=None,sha256=None,cache_dir=None):
    r"""Copy the contents of a file from a given URL to a local file.

    Args:
        url (str): URL to document
        target (str) : full file path of downloaded file. If None, put in a temporary dir with supplied url filename (final part of url)
        printhook (func) : callback called as printhook(received,total,speed,url) to report download progress
        proxies (dict) : proxies to use. eg {'http':'http://wpad:3128','https':'http://wpad:3128'}
        connect_timeout (int) : seconds to wait for an answer before giving up
        download_timeout (int) : abort the download if it takes longer than this many seconds
        verify_cert (bool or str) : either False, True (verify with embedded CA list), or path to a directory or PEM encoded CA bundle file
                                    to check https certificate signature against.
        cert (list) : pair of (x509certfilename,pemkeyfilename) for authenticating the client
        referer (str): referer header to send
        user_agent (str): user-agent header to send
        resume (bool): resume a previous partial download if the server supports byte ranges
        md5 (str) : expected md5 hash of the downloaded file
        sha1 (str) : expected sha1 hash of the downloaded file
        sha256 (str) : expected sha256 hash of the downloaded file
        cache_dir (str) : if file exists here, and md5 matches, copy from here instead of downloading. If not, put a copy of the file here after downloading.

    Returns:
        str : path to downloaded file

    >>> respath = wget('http://wapt.tranquil.it/wapt/tis-firefox_28.0.0-1_all.wapt','c:\\tmp\\test.wapt',proxies={'http':'http://proxy:3128'})
    ???
    >>> os.stat(respath).st_size>10000
    True
    >>> respath = wget('http://localhost:8088/runstatus','c:\\tmp\\test.json')
    ???
    """
    start_time = time.time()
    last_time_display = 0.0
    last_downloaded = 0

    def reporthook(received,total):
        total = float(total)
        if total>1 and received>1:
            # print only every second or at end
            if (time.time()-start_time>1) and ((time.time()-last_time_display>=1) or (received>=total)):
                speed = received /(1024.0 * (time.time()-start_time))
                if printhook:
                    printhook(received,total,speed,url)
                elif sys.stdout is not None:
                    try:
                        if received == 0:
                            print(u"Downloading %s (%.1f Mb)" % (url,int(total)/1024/1024))
                        elif received>=total:
                            print(u"  -> download finished (%.0f Kb/s)" % (total /(1024.0*(time.time()+.001-start_time))))
                        else:
                            print(u'%i / %i (%.0f%%) (%.0f KB/s)\r' % (received,total,100.0*received/total,speed))
                    except:
                        return False
                return True
            else:
                return False

    if target is None:
        target = tempfile.gettempdir()

    if os.path.isdir(target):
        target = os.path.join(target,'')

    (dir,filename) = os.path.split(target)
    if not filename:
        url_parts = urlparse.urlparse(url)
        filename = url_parts.path.split('/')[-1]
    if not dir:
        dir = os.getcwd()

    if not os.path.isdir(dir):
        os.makedirs(dir)

    if verify_cert == False:
        requests.packages.urllib3.disable_warnings() # pylint: disable=no-member
    header=default_http_headers()
    if referer != None:
        header.update({'referer': '%s' % referer})
    if user_agent != None:
        header.update({'user-agent': '%s' % user_agent})

    target_fn = os.path.join(dir,filename)

    # return cached file if md5 matches.
    if (md5 is not None or sha1 is not None or sha256 is not None) and cache_dir is not None and os.path.isdir(cache_dir):
        cached_filename = os.path.join(cache_dir,filename)
        if os.path.isfile(cached_filename):
            if _check_hash_for_file(cached_filename,md5=md5,sha1=sha1,sha256=sha256):
                resume = False
                if cached_filename != target_fn:
                    shutil.copy2(cached_filename,target_fn)
                return target_fn
    else:
        cached_filename = None

    if os.path.isfile(target_fn) and resume:
        try:
            actual_size = os.stat(target_fn).st_size
            size_req = requests.head(url,
                proxies=proxies,
                timeout=connect_timeout,
                verify=verify_cert,
                headers=header,
                cert = cert,
                allow_redirects=True)

            target_size = int(size_req.headers['content-length'])
            file_date = size_req.headers.get('last-modified',None)

            if target_size > actual_size:
                header.update({'Range':'bytes=%s-' % (actual_size,)})
                write_mode = 'ab'
            elif target_size < actual_size:
                target_size = None
                write_mode = 'wb'
        except Exception as e:
            target_size = None
            write_mode = 'wb'

    else:
        file_date = None
        actual_size = 0
        target_size = None
        write_mode = 'wb'

    # check hashes if size equal
    if resume and (md5 is not None or sha1 is not None or sha256 is not None) and target_size is not None and (target_size == actual_size):
        if not _check_hash_for_file(target_fn,md5=md5,sha1=sha1,sha256=sha256):
            # restart download...
            target_size = None
            write_mode = 'wb'


    if not resume or target_size is None or (target_size - actual_size) > 0:
        httpreq = requests.get(url,stream=True,
            proxies=proxies,
            timeout=connect_timeout,
            verify=verify_cert,
            headers=header,
            cert = cert,
            allow_redirects=True)

        httpreq.raise_for_status()

        total_bytes = int(httpreq.headers['content-length'])
        target_free_bytes = get_disk_free_space(os.path.dirname(os.path.abspath(target)))
        if total_bytes > target_free_bytes:
            raise Exception('wget : not enough free space on target drive to get %s MB. Total size: %s MB. Free space: %s MB' % (url,total_bytes // (1024*1024),target_free_bytes // (1024*1024)))

        # 1MB max, 2KB min
        chunk_size = min([1024*1024,max([total_bytes//100,2048])])

        cnt = 0
        if printhook is None and ProgressBar is not None:
            progress_bar = ProgressBar(label=filename,expected_size=target_size or total_bytes, filled_char='=')
            progress_bar.show(actual_size)

        with open(target_fn,write_mode) as output_file:
            last_time_display = time.time()
            last_downloaded = 0
            if httpreq.ok:
                for chunk in httpreq.iter_content(chunk_size=chunk_size):
                    output_file.write(chunk)
                    output_file.flush()
                    if download_timeout is not None and (time.time()-start_time>download_timeout):
                        raise requests.Timeout(r'Download of %s takes more than the requested %ss'%(url,download_timeout))
                    if printhook is None and ProgressBar is not None:
                        if (time.time()-start_time>0.2) and (time.time()-last_time_display>=0.2):
                            progress_bar.show(actual_size + cnt*len(chunk))
                            last_time_display = time.time()
                    else:
                        if reporthook(cnt*len(chunk),total_bytes):
                            last_time_display = time.time()
                    last_downloaded += len(chunk)
                    cnt +=1
                if printhook is None and ProgressBar is not None:
                    progress_bar.show(total_bytes)
                    progress_bar.done()
                    last_time_display = time.time()
                elif reporthook(last_downloaded,total_bytes):
                    last_time_display = time.time()

        # check hashes
        if sha256 is not None:
            file_hash =  _hash_file(target_fn,hash_func=hashlib.sha256)
            if file_hash != sha256:
                raise Exception(u'Downloaded file %s sha256 %s does not match expected %s' % (url,file_hash,sha256))
        elif sha1 is not None:
            file_hash = _hash_file(target_fn,hash_func=hashlib.sha1)
            if file_hash != sha1:
                raise Exception(u'Downloaded file %s sha1 %s does not match expected %s' % (url,file_hash,sha1))
        elif md5 is not None:
            file_hash = _hash_file(target_fn,hash_func=hashlib.md5)
            if file_hash != md5:
                raise Exception(u'Downloaded file %s md5 %s does not match expected %s' % (url,file_hash,md5))

        file_date = httpreq.headers.get('last-modified',None)

    if file_date:
        file_datetime = httpdatetime2time(file_date)
        os.utime(target_fn,(file_datetime,file_datetime))

    # cache result
    if cache_dir:
        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)
        cached_filename = os.path.join(cache_dir,filename)
        if target_fn != cached_filename:
            shutil.copy2(target_fn,cached_filename)

    return target_fn
Example #14
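Compresses a directory into a .tar.gz, using tarfile's per-member filter callback to tick the progress bar, and purges the partial archive on permission errors, a full disk, or Ctrl-C.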
class DataCompressor(object):
    """
    Local Data Compression with progress bar.
    """
    def __init__(self, source_dir, filename):
        # Data directory to compress
        self.source_dir = source_dir
        # Archive (Tar file) name
        # e.g. "/tmp/contents.tar.gz"
        self.filename = filename

        # Progress bar for tracking data compression
        self.__compression_bar = None

        # Number of files to compress
        self.__files_to_compress = 0
        self.__get_nfiles_to_compress()

        # Number of files already compressed
        self.__files_compressed = 0

    def __get_nfiles_to_compress(self):
        """
        Count the number of files to compress

        Note: it should take about 0.1s for counting 100k files on a dual core machine
        """
        floyd_logger.info(
            "Get number of files to compress... (this could take a few seconds)"
        )
        paths = [self.source_dir]
        try:
            # Traverse each subdirs of source_dir and count files/dirs
            while paths:
                path = paths.pop()
                for item in scandir(path):
                    if item.is_dir():
                        paths.append(item.path)
                        self.__files_to_compress += 1
                    elif item.is_file():
                        self.__files_to_compress += 1
        except OSError as e:
            # OSError: [Errno 13] Permission denied
            if e.errno == errno.EACCES:
                if self.source_dir == '.':
                    self.source_dir = os.getcwd()  # Expand cwd
                sys.exit(
                    ("Permission denied. Make sure to have read permission "
                     "for all the files and directories in the path: %s") %
                    (self.source_dir))
        floyd_logger.info("Compressing %d files", self.__files_to_compress)

    def create_tarfile(self):
        """
        Create a tar file with the contents of the current directory
        """
        floyd_logger.info("Compressing data...")
        # Show progress bar (file_compressed/file_to_compress)
        self.__compression_bar = ProgressBar(
            expected_size=self.__files_to_compress, filled_char='=')

        # Auxiliary functions
        def dfilter_file_counter(tarinfo):
            """
            Pass-through tarfile filter used to track progress at the file level.
            """
            self.__compression_bar.show(self.__files_compressed)
            self.__files_compressed += 1
            return tarinfo

        def warn_purge_exit(info_msg, filename, progress_bar, exit_msg):
            """
            Warn the user that something went wrong,
            remove the tarball and provide an exit message.
            """
            progress_bar.done()
            floyd_logger.info(info_msg)
            rmtree(os.path.dirname(filename))
            sys.exit(exit_msg)

        try:
            # Define the default signal handler for catching: Ctrl-C
            signal.signal(signal.SIGINT, signal.default_int_handler)
            with tarfile.open(self.filename, "w:gz") as tar:
                tar.add(self.source_dir,
                        arcname=os.path.basename(self.source_dir),
                        filter=dfilter_file_counter)
            self.__compression_bar.done()
        except (OSError, IOError) as e:
            # OSError: [Errno 13] Permission denied
            if e.errno == errno.EACCES:
                if self.source_dir == '.':
                    self.source_dir = os.getcwd()  # Expand cwd
                warn_purge_exit(
                    info_msg="Permission denied. Removing compressed data...",
                    filename=self.filename,
                    progress_bar=self.__compression_bar,
                    exit_msg=(
                        "Permission denied. Make sure to have read permission "
                        "for all the files and directories in the path: %s") %
                    (self.source_dir))
            # OSError: [Errno 28] No Space Left on Device (IOError on python2.7)
            elif e.errno == errno.ENOSPC:
                dir_path = os.path.dirname(self.filename)
                warn_purge_exit(
                    info_msg="No space left. Removing compressed data...",
                    filename=self.filename,
                    progress_bar=self.__compression_bar,
                    exit_msg=
                    ("No space left when compressing your data in: %s.\n"
                     "Make sure to have enough space before uploading your data."
                     ) % (os.path.abspath(dir_path)))

        except KeyboardInterrupt:  # Purge tarball on Ctrl-C
            warn_purge_exit(
                info_msg="Ctrl-C signal detected: Removing compressed data...",
                filename=self.filename,
                progress_bar=self.__compression_bar,
                exit_msg="Stopped the data upload gracefully.")
Example #15
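Benchmarks whole-body read versus streamed download throughput, showing a progress bar for the streamed pass.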
    # DOWNLOAD TEST 1
    puts(colored.green('Test 1 | requests | Read | File size: %s' % humanizeFileSize(filesize)))
    _start = time.time()
    x = fileobj.read()
    _end = time.time()
    puts(colored.magenta("Network download speed %s/sec \n" % ( humanizeFileSize( (filesize / (_end-_start)) ) )))
    del x

    # DOWNLOAD TEST 2
    puts(colored.green('Test 2 | requests | Stream | File size: %s' % humanizeFileSize(filesize)))
    progr2 = ProgressBar(expected_size=filesize)
    _start = time.time()
    _bytesread = 0
    for chunk in fileobj.stream():
        _bytesread = _bytesread + len(chunk)
        progr2.show(_bytesread)
    _end = time.time()
    puts(colored.magenta("Network download speed %s/sec \n" % ( humanizeFileSize( (filesize / (_end-_start)) ) )))

    # TODO: PRINT STATS IN A TABLE FOR COMPARISON / BOOKKEEPING
    # Versions: jottalib, urllib3, requests, jottaAPI
    # Server version from jottacloud.com

    # TEST WITHOUT REQUESTS, ONLY urllib3
    # UPLOAD TEST
    #
    # 2015-07-30 Disabled until urllib3 supports streaming uploads. HG
    #
    #
    #puts(colored.green('Test4. urllib3 upload speed. File size: %s' % humanizeFileSize(filesize)))
    #_start = time.time()