Example #1
def download(node_id: str, local_path: str, exclude: list) -> int:
    node = query.get_node(node_id)

    if not node.is_available():
        return 0

    if node.is_folder():
        return download_folder(node_id, local_path, exclude)

    loc_name = node.name

    # # downloading a non-cached node
    # if not loc_name:
    #     loc_name = node_id

    for reg in exclude:
        if re.match(reg, loc_name):
            print('Skipping download of "%s" because of exclusion pattern.' % loc_name)
            return 0

    hasher = hashing.IncrementalHasher()

    try:
        print('Current file: %s' % loc_name)
        content.download_file(node_id, loc_name, local_path, length=node.size, write_callback=hasher.update)
    except RequestError as e:
        logger.error('Downloading "%s" failed. Code: %s, msg: %s' % (loc_name, e.status_code, e.msg))
        return UL_DL_FAILED

    return compare_hashes(hasher.get_result(), node.md5, loc_name)
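The compare_hashes() helper that this example returns through is not shown on this page. Below is a minimal sketch of what it might look like, assuming it simply compares the locally computed MD5 hex digest with the node's MD5 and returns 0 on a match; the HASH_MISMATCH constant and its value are assumptions made for illustration.

import logging

logger = logging.getLogger(__name__)

HASH_MISMATCH = 2  # assumed failure flag; the real constant and value may differ


def compare_hashes(local_md5: str, remote_md5: str, file_name: str) -> int:
    """Compare the locally computed digest with the node's MD5."""
    if local_md5 != remote_md5:
        logger.error('Hash mismatch for "%s": local %s, remote %s'
                     % (file_name, local_md5, remote_md5))
        return HASH_MISMATCH
    return 0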
Example #2
    def test_download(self):
        fn, sz = gen_rand_file()
        self.assertTrue(sz < content.CONSECUTIVE_DL_LIMIT)
        md5 = hashing.hash_file(fn)
        n = content.upload_file(fn)
        self.assertIn('id', n)
        os.remove(fn)
        self.assertFalse(os.path.exists(fn))
        content.download_file(n['id'], fn)
        md5_dl = hashing.hash_file(fn)
        self.assertEqual(md5, md5_dl)
        trash.move_to_trash(n['id'])
        os.remove(fn)
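The tests on this page rely on two fixture helpers that are not shown, gen_rand_file() and gen_rand_sz(). The following are hypothetical sketches consistent with how they are called; the size ranges and file naming are arbitrary choices made for illustration.

import os
import random
import string


def gen_rand_sz() -> int:
    """Return a random chunk size; the range is an arbitrary choice."""
    return random.randint(1, 32) * 1024


def gen_rand_file(size: int = 1024):
    """Write `size` random bytes to a new file in the working directory
    and return (file name, size), matching how the tests unpack it."""
    name = 'dl_test_' + ''.join(random.choice(string.ascii_lowercase) for _ in range(8))
    with open(name, 'wb') as f:
        f.write(os.urandom(size))
    return name, size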
Example #4
    def test_incomplete_download(self):
        ch_sz = gen_rand_sz()
        content.CHUNK_SIZE = ch_sz
        fn, sz = gen_rand_file(size=5 * ch_sz)
        md5 = hashing.hash_file(fn)
        n = content.upload_file(fn)
        self.assertEqual(n['contentProperties']['md5'], md5)
        os.remove(fn)
        self.assertFalse(os.path.exists(fn))
        with self.assertRaises(RequestError) as cm:
            content.download_file(n['id'], fn, length=sz + 1)

        #os.remove(fn + content.PARTIAL_SUFFIX)
        self.assertEqual(cm.exception.status_code, RequestError.CODE.INCOMPLETE_RESULT)
        content.download_file(n['id'], fn, length=sz)
        os.remove(fn)
Example #5
def download_file(node_id: str, local_path: str,
                  pg_handler: progress.FileProgress=None) -> RetryRetVal:
    node = query.get_node(node_id)
    name, md5, size = node.name, node.md5, node.size
    # db.Session.remove()  # otherwise, sqlalchemy will complain if thread crashes

    logger.info('Downloading "%s"' % name)

    hasher = hashing.IncrementalHasher()
    try:
        content.download_file(node_id, name, local_path, length=size,
                              write_callbacks=[hasher.update, pg_handler.update])
    except RequestError as e:
        logger.error('Downloading "%s" failed. Code: %s, msg: %s' % (name, e.status_code, e.msg))
        return UL_DL_FAILED
    else:
        return compare_hashes(hasher.get_result(), md5, name)
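Both download() and download_file() above feed downloaded chunks into hashing.IncrementalHasher.update() through write callback(s) and compare get_result() against node.md5, so an MD5 hex digest is implied; download_file() also calls pg_handler.update unconditionally, so a progress handler is evidently expected to be supplied despite the None default. Neither class is shown on this page; the stand-ins below are minimal sketches consistent with that usage, not the project's real hashing and progress modules.

import hashlib


class IncrementalHasher:
    """Accumulate a digest from downloaded chunks (MD5 assumed, since the
    result is checked against node.md5)."""

    def __init__(self):
        self._md5 = hashlib.md5()

    def update(self, chunk: bytes):
        self._md5.update(chunk)

    def get_result(self) -> str:
        return self._md5.hexdigest()


class FileProgress:
    """Hypothetical stand-in for progress.FileProgress: counts bytes as they
    are written so a progress display could be rendered elsewhere."""

    def __init__(self, total: int):
        self.total = total
        self.done = 0

    def update(self, chunk: bytes):
        self.done += len(chunk)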
Example #6
    def test_incomplete_download(self):
        ch_sz = gen_rand_sz()
        content.CHUNK_SIZE = ch_sz
        fn, sz = gen_rand_file(size=5 * ch_sz)
        md5 = hashing.hash_file(fn)
        n = content.upload_file(fn)
        self.assertEqual(n['contentProperties']['md5'], md5)
        os.remove(fn)
        self.assertFalse(os.path.exists(fn))
        with self.assertRaises(RequestError) as cm:
            content.download_file(n['id'], fn, length=sz + 1)

        #os.remove(fn + content.PARTIAL_SUFFIX)
        self.assertEqual(cm.exception.status_code,
                         RequestError.CODE.INCOMPLETE_RESULT)
        content.download_file(n['id'], fn, length=sz)
        trash.move_to_trash(n['id'])
        os.remove(fn)
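The incomplete-download tests above show that passing a too-large length makes content.download_file() raise RequestError with CODE.INCOMPLETE_RESULT while a partial file is kept on disk. The sketch below shows how a caller might handle that error and retry; the helper name and retry policy are made up, the calls mirror how the tests invoke content.download_file, and imports for content and RequestError are omitted because their package path is not shown on this page.

def download_with_retry(node_id: str, name: str, expected_size: int, attempts: int = 3) -> bool:
    """Retry a download that ends with an incomplete result; illustration only."""
    for _ in range(attempts):
        try:
            content.download_file(node_id, name, length=expected_size)
            return True
        except RequestError as e:
            if e.status_code != RequestError.CODE.INCOMPLETE_RESULT:
                raise  # unrelated failure: propagate
            # incomplete: the partial file is kept, so simply try again
    return False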
Example #8
    def test_download_resume(self):
        ch_sz = gen_rand_sz()
        content.CHUNK_SIZE = ch_sz
        content.CONSECUTIVE_DL_LIMIT = ch_sz
        fn, sz = gen_rand_file(size=5 * ch_sz)
        md5 = hashing.hash_file(fn)
        n = content.upload_file(fn)
        self.assertEqual(n['contentProperties']['md5'], md5)
        os.remove(fn)
        self.assertFalse(os.path.exists(fn))
        p_fn = fn + content.PARTIAL_SUFFIX
        with open(p_fn, 'wb') as f:
            content.chunked_download(n['id'], f, length=int(sz * random.random()))
        self.assertLess(os.path.getsize(p_fn), sz)
        content.download_file(n['id'], fn)
        trash.move_to_trash(n['id'])
        dl_md5 = hashing.hash_file(fn)
        self.assertEqual(md5, dl_md5)
        os.remove(fn)
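Example #8 exercises resuming: a partial file written with chunked_download() is later picked up and completed by download_file(). Purely for illustration, and not the project's actual implementation, resuming an HTTP download generally means appending to the partial file and requesting the remaining byte range; the URL handling, requests session and suffix value below are assumptions.

import os
import requests

PARTIAL_SUFFIX = '.__incomplete'  # assumed value, standing in for content.PARTIAL_SUFFIX


def resume_download(url: str, local_path: str, chunk_size: int = 512 * 1024):
    """Generic sketch of a resumable HTTP download using a Range request."""
    partial = local_path + PARTIAL_SUFFIX
    offset = os.path.getsize(partial) if os.path.exists(partial) else 0
    headers = {'Range': 'bytes=%d-' % offset} if offset else {}
    with requests.get(url, headers=headers, stream=True) as r:
        r.raise_for_status()
        with open(partial, 'ab') as f:  # append after any previously written bytes
            for chunk in r.iter_content(chunk_size=chunk_size):
                f.write(chunk)
    os.rename(partial, local_path)  # keep the partial name until the download completes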