Example #1
async def show(queue):
    with context(verbose=True, message='getting proxies in show'):
        async with AIOFile(temp_file_name,
                           'w') as tmp_f, AIOFile(file_name, 'a') as f:
            tmp_fp_writer = Writer(tmp_f)
            fp_writer = Writer(f)
            while True:
                proxy = await queue.get()
                if proxy is None:
                    break
                data = json.dumps(proxy.as_json())
                await tmp_fp_writer(f'{data}\n')
                await tmp_f.fsync()
                await fp_writer(f'{data}\n')
                await f.fsync()
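All the snippets on this page share the same core aiofile pattern: open the file with AIOFile, wrap the handle in a Writer, await the writer with each payload, and call fsync() to flush to disk. A minimal, self-contained sketch of that pattern (the path and payload are made up for illustration):

import asyncio

from aiofile import AIOFile, Writer


async def append_line(path: str, line: str) -> None:
    # The async context manager opens and closes the file for us.
    async with AIOFile(path, 'a') as afp:
        writer = Writer(afp)       # tracks the write offset across calls
        await writer(f'{line}\n')  # each call performs one async write
        await afp.fsync()          # flush kernel buffers to disk


asyncio.run(append_line('/tmp/example.log', 'hello'))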
Example #2
async def _monitor_docker_container(
    container: DockerContainer,
    log_cb: Callable[[LogType, str], Awaitable[None]],
    out_log_file: Optional[Path],
) -> None:
    # Avoids raising UnboundLocalError: local variable 'log_type' referenced before assignment
    log_type, parsed_line = LogType.INSTRUMENTATION, "Undefined"
    log_file = out_log_file or touch_tmpfile(extension=".dat")

    try:
        async with AIOFile(str(log_file), "w+") as afp:
            writer = Writer(afp)
            async for line in container.log(stdout=True,
                                            stderr=True,
                                            follow=True):
                log_type, parsed_line = await parse_line(line)
                await log_cb(log_type, parsed_line)
                await writer(f"{log_type.name}: {parsed_line}")

    except DockerError as e:
        log_type, parsed_line = await parse_line(
            f"Could not recover logs because: {e}")
        await log_cb(log_type, parsed_line)

    finally:
        if not out_log_file and log_file:
            log_file.unlink(missing_ok=True)
Example #3
async def save_html(url_: URL, html: str) -> str:
    file_name = f"{url_.host.replace('.', '_')}_{uuid.uuid4()}.html"
    path = PATH_TO_FILES.joinpath(file_name)
    async with AIOFile(path, "w") as f:
        writer = Writer(f)
        await writer(html)
    return str(path)
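save_html assumes a module-level PATH_TO_FILES (a pathlib.Path) and a yarl-style URL with a .host attribute. A hypothetical invocation, with PATH_TO_FILES stubbed in for illustration:

import asyncio
from pathlib import Path

from yarl import URL

PATH_TO_FILES = Path('/tmp/pages')  # assumed value, not from the original


async def main() -> None:
    PATH_TO_FILES.mkdir(parents=True, exist_ok=True)
    # Produces a name like example_com_<uuid>.html under PATH_TO_FILES.
    path = await save_html(URL('https://example.com/page'), '<html></html>')
    print(path)


asyncio.run(main())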
Example #4
    async def write(self, urls):
        async with AIOFile(self.filename, 'w') as aiodf:
            writer = Writer(aiodf)

            for url in urls:
                await writer("{}\n".format(url))
            await aiodf.fsync()
Example #5
async def download_image(session, url, folder):
    await logger.info(f'Downloading {url} to folder {folder}')

    try:
        async with session.get(url, timeout=60) as response:
            if response.status != 200:
                await logger.error(f'Failed to download: {url}')
            else:
                [filename, extension] = get_url_filename(url)

                async with AIOFile(f'{folder}/{filename}', 'wb') as afh:
                    writer = Writer(afh)
                    data = await response.read()  # avoid shadowing the bytes builtin
                    await writer(data)

                    await logger.info(
                        f'Correctly downloaded {url}, to folder {folder}')

    except asyncio.exceptions.TimeoutError:
        await logger.error(f'Error while downloading image: {url}, timed out!')

    except Exception as e:
        await logger.error(
            f'Error while downloading image: {url} , something went wrong ... {e}'
        )
Example #6
async def writing_results_task(result_dict, csv_file):
    async with AIOFile(csv_file, 'a') as afp:
        writer = Writer(afp)
        await writer(
            pd.DataFrame.from_dict(result_dict,
                                   orient='index').to_csv(header=False,
                                                          index=False))
        await afp.fsync()
Example #7
    async def save_default(self, filename: str,
                           view_adapter: AdapterBase) -> None:
        async with AIOFile(os.path.join(self.images_path, filename),
                           'wb') as f:
            writer = Writer(f)
            async for chunk in view_adapter.read():
                await writer(chunk)
            await f.fsync()
Example #8
    async def download(self, folder, name=None):
        path = self._path(folder, name)

        response = self.get()

        async with AIOFile(path, 'wb') as file:
            writer = Writer(file)
            async for data in response.content.iter_any():
                await writer(data)
Example #9
async def upload_chunk_by_patch(request):
    images_directory = request.app["images_directory"]
    repository = request.match_info["repository"]
    session_id = request.match_info["session_id"]

    request.app["token_checker"].authenticate(request, repository, ["push"])

    session = request.app["sessions"].get(session_id, None)
    if not session:
        raise exceptions.BlobUploadInvalid(session=session_id)

    uploads = images_directory / "uploads"
    if not uploads.exists():
        os.makedirs(uploads)

    upload_path = uploads / session_id

    content_range = request.headers.get("Content-Range", "")
    if content_range:
        size = 0
        if os.path.exists(upload_path):
            size = os.path.getsize(upload_path)

        left, right = content_range.split("-")

        if int(left) != size:
            raise web.HTTPRequestRangeNotSatisfiable(
                headers={
                    "Location": f"/v2/{repository}/blobs/uploads/{session_id}",
                    "Range": f"0-{size}",
                    "Content-Length": "0",
                    "Blob-Upload-Session-ID": session_id,
                })

    async with AIOFile(upload_path, "ab") as fp:
        writer = Writer(fp)
        chunk = await request.content.read(1024 * 1024)
        while chunk:
            await writer(chunk)
            session["hasher"].update(chunk)
            session["size"] += len(chunk)
            chunk = await request.content.read(1024 * 1024)
        await fp.fsync()

    size = os.stat(upload_path).st_size - 1  # the Range header end is inclusive

    return web.Response(
        status=202,
        headers={
            "Location": f"/v2/{repository}/blobs/uploads/{session_id}",
            "Blob-Upload-Session-ID": session_id,
            "Range": f"0-{size}",
        },
    )
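The read loop above (also used in Examples #10, #26 and #27) primes chunk once before the loop and re-reads at the bottom. On Python 3.8+ the same chunked copy can be written more compactly with an assignment expression; a sketch against a hypothetical aiohttp request:

from aiofile import AIOFile, Writer

CHUNK_SIZE = 1024 * 1024  # 1 MiB, matching the examples on this page


async def spool_body(request, upload_path) -> None:
    # Hypothetical helper: stream an aiohttp request body to disk in chunks.
    async with AIOFile(upload_path, 'ab') as fp:
        writer = Writer(fp)
        while chunk := await request.content.read(CHUNK_SIZE):
            await writer(chunk)
        await fp.fsync()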
Example #10
    async def _do_transfer(self, hash, repo, urls, destination):
        if destination.exists():
            logger.debug("%s already exists, not requesting", destination)
            return True

        if not urls:
            logger.debug("No urls for hash %s yet", hash)
            return False

        url = random.choice(urls)
        logger.critical("Starting download from %s to %s", url, destination)

        if not destination.parent.exists():
            os.makedirs(destination.parent)

        temporary_path = self.image_directory / "uploads" / str(uuid.uuid4())
        if not temporary_path.parent.exists():
            os.makedirs(temporary_path.parent)

        digest = hashlib.sha256()

        headers = {}

        # If auth is turned on we need to supply a JWT token
        if self.token_getter:
            token = await self.token_getter.get_token(repo, ["pull"])
            headers["Authorization"] = f"Bearer {token}"

        async with self.session.get(url, headers=headers) as resp:
            if resp.status != 200:
                logger.error("Failed to retrieve: %s, status %s", url,
                             resp.status)
                return False
            async with AIOFile(temporary_path, "wb") as fp:
                writer = Writer(fp)
                chunk = await resp.content.read(1024 * 1024)
                while chunk:
                    await writer(chunk)
                    digest.update(chunk)
                    chunk = await resp.content.read(1024 * 1024)
                await fp.fsync()

        mirrored_hash = "sha256:" + digest.hexdigest()

        if mirrored_hash != hash:
            os.unlink(temporary_path)
            return False

        os.rename(temporary_path, destination)

        for fut in self._futures.get(hash, []):
            fut.set_result(destination)

        return True
Example #11
async def downImage(url):
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.get(url) as resp:
            if resp.status == 200:
                filename = url[url.rfind('/') + 1:]
                async with AIOFile(os.path.join(dirname, filename),
                                   'wb') as afp:
                    writer = Writer(afp)
                    await writer(await resp.content.read())
                    await afp.fsync()
                    logging.info('image\t%s\tis done', filename)
Example #12
    async def __copy_file_object(self, file_from, file_to):
        async with AIOFile(file_from, 'rb') as fileFromObj:
            async with AIOFile(file_to, "wb+") as fileToObj:
                reader = Reader(fileFromObj, chunk_size=self.chunk_size)
                writer = Writer(fileToObj)
                async for chunk in reader:
                    await writer(chunk)
                    self.progress += len(chunk)
                    if self.progress_callback is not None:
                        self.progress_callback(Path(file_to).name, self.progress, self.max_progress)
                await fileToObj.fsync()
Example #13
async def download(url_):
    """
    Download an image.
    """
    pic_name = re.split(r'//|/', url_)[-1].split('.')
    async with aiohttp.ClientSession() as session:
        async with AIOFile('{}.{}'.format(pic_name[0], pic_name[1]),
                           'wb') as afo:
            write_ = Writer(afo)
            result = await fetch(session, url_, mode='pic')
            await write_(result)
            await afo.fsync()
Example #14
async def write_part(build_id, part):
    filename = part.filename.replace("/", "")  # no path separators allowed
    dir_path = buildout_path / str(build_id)
    if not dir_path.is_dir():
        dir_path.mkdir(parents=True)
    async with AIOFile("%s/%s" % (dir_path, filename), "xb") as afp:
        writer = Writer(afp)
        while True:  # FIXME: timeout
            # FIXME: might not return; needs a timeout and cancellation
            chunk = await part.read_chunk()  # 8192 bytes by default
            if not chunk:
                break
            await writer(chunk)
Example #15
async def writeFiles(txt, filename, dirname):
    '''async write files

    Arguments:
        txt {string} -- the content
        filename {string} -- filename
        dirname {string} -- dirname
    '''

    async with AIOFile(os.path.join(dirname, filename), 'wb') as afp:
        writer = Writer(afp)
        await writer(txt)
        await afp.fsync()
Example #16
class Emitter:

    events = queue.Queue()
    current_date = datetime.date.today()
    file_name = "logs/{}-{}-{}.txt".format(current_date.month,
                                           current_date.day, current_date.year)
    command_count = 0
    event_counter = 0
    running = True
    aio_file = None

    # Initial DEV_MODE message, printed the first time the bot runs in a dev environment
    init_dev_mode_b = False
    init_dev_mode_m = '\nIF YOU\'RE SEEING THIS, THAT MEANS YOU HAVE DEV MODE ON WHICH SHOULD BE TURNED OFF FOR PRODUCTION USE\n'

    # Note: AIOFile(...) only constructs the handle; it still has to be
    # opened (await aio_file.open()) before the first write.
    if os.path.isfile(file_name):
        aio_file = AIOFile(file_name, 'a+')
    else:
        aio_file = AIOFile(file_name, 'w+')

    writer = Writer(aio_file)

    @staticmethod
    async def emit(event, event_data):
        time = datetime.datetime.now()
        time_str = time.strftime("%H:%M:%S")  # zero-padded, unlike manual formatting
        log_string = "[{}]-[{}]-:{}\n".format(time_str, event, event_data)

        await Emitter.writer(log_string)  # the file is in text mode, so pass str

        if Emitter.event_counter == 99:
            Emitter.event_counter = 0
            await Emitter.aio_file.fsync()
        else:
            Emitter.event_counter += 1

        if config.DEV_MODE:
            if not Emitter.init_dev_mode_b:
                Emitter.init_dev_mode_b = True
                print(Emitter.init_dev_mode_m)

            print('---[EVENT]---')
            print(log_string + ' was written to file')
            print('--------------\n')

    @staticmethod
    async def shutdown():
        await Emitter.aio_file.fsync()
        await Emitter.aio_file.close()
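Note that AIOFile(...) only constructs a handle; outside an async with block it must be opened with await before the first write, which the class body above cannot do. A minimal sketch of the explicit lifecycle this class appears to need (LogSink is a hypothetical rework, not part of the original):

from aiofile import AIOFile, Writer


class LogSink:
    def __init__(self, file_name: str) -> None:
        self.aio_file = AIOFile(file_name, 'a+')  # not opened yet
        self.writer = Writer(self.aio_file)

    async def start(self) -> None:
        await self.aio_file.open()  # must run before the first write

    async def stop(self) -> None:
        await self.aio_file.fsync()
        await self.aio_file.close()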
Example #17
async def read_chat(filename, chat_connector):

    assert filename and filename.isprintable(), "Filename has to be non-empty and printable."

    async with AIOFile(filename, mode="a", encoding="utf-8") as file:
        writer = Writer(file)

        try:
            await chat_connector(writer)

            await file.fsync()

        except asyncio.CancelledError:
            await file.fsync()
            raise
Example #18
    async def write(self, urls):
        async with AIOFile(self.filename, 'w') as aiodf:
            writer = Writer(aiodf)
            await writer('<?xml version="1.0" encoding="utf-8"?>\n')
            await writer(
                '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"'
                ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"'
                ' xsi:schemaLocation="http://www.sitemaps.org/schemas/sitemap/0.9 http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd">\n'
            )
            await aiodf.fsync()
            for url in urls:
                await writer('<url><loc>{}</loc></url>\n'.format(url))
            await aiodf.fsync()

            await writer('</urlset>')
            await aiodf.fsync()
Example #19
async def help_func(name, name2):
    async with AIOFile(name, 'r') as file:
        async for line in LineReader(file):
            print(f'I\'m here: {line[:-2]}')
            async with aiohttp.ClientSession() as session:
                async with session.get(line) as response:
                    text_file = await response.text()
                    #print(text_file)
                    text_file = re.split(r'[\r\n]', str(text_file))
                    print('Lines that start with <a :')
                    for el in text_file:
                        if el.strip().startswith('<a '):
                            print(el)
                            async with AIOFile(name2, 'a') as out_file:
                                writer = Writer(out_file)
                                await writer(f'{el}\n')
    print('I finished')
Example #20
    async def collect_stats(self):
        logger.info("Monitoring streamed into: %s", self.monitor_filename)
        async with AIOFile(self.monitor_filename, "w") as monitor_file:
            writer = Writer(monitor_file)
            while True:
                await asyncio.sleep(1)
                results = []
                try:
                    for data in asyncio.as_completed(
                            [_fetch_stats(w) for w in self.state.all_workers],
                            timeout=1):
                        results.append(json.dumps(await data))
                except asyncio.TimeoutError:
                    logger.error("Fetching stats timed out")
                if results:
                    await writer("\n".join(results) + "\n")
                    await monitor_file.fsync()
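asyncio.as_completed yields awaitables in completion order, and with timeout=1 awaiting one of them raises asyncio.TimeoutError once the budget is exhausted, which is why the except clause wraps the whole for loop above. A self-contained sketch of that behavior (the sleepy workers are invented for illustration):

import asyncio


async def worker(delay: float) -> float:
    await asyncio.sleep(delay)
    return delay


async def main() -> None:
    results = []
    try:
        # worker(0.5) completes in time; worker(2) exceeds the one-second
        # budget, so awaiting its slot raises asyncio.TimeoutError.
        for fut in asyncio.as_completed([worker(0.5), worker(2)], timeout=1):
            results.append(await fut)
    except asyncio.TimeoutError:
        print('some workers timed out')
    print(results)  # [0.5]


asyncio.run(main())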
Example #21
async def buildlog_writer(build_id):
    filename = get_log_file_path(build_id)
    if not filename:
        logger.error("buildlog_writer: cannot get path for build %s", str(build_id))
        del buildlogs[build_id]
        return
    try:
        async with AIOFile(filename, 'a') as afp:
            writer = Writer(afp)
            while True:
                msg = await dequeue(buildlogs[build_id])
                if msg is None:
                    await enqueue_backend({"logging_done": build_id})
                    continue
                elif msg is False:
                    break
                await writer(msg)
                await afp.fsync()
    except Exception as exc:
        logger.exception(exc)

    del buildlogs[build_id]
Example #22
async def get_item(x, total):  # renamed from 'all' to avoid shadowing the builtin
    if f'data_{x["id"]}.csv' not in TRAIN_FILES:
        current_url = get_url(x)
        response = await get_page(current_url)
        all_items = []
        lvl = 0
        data = None
        if response:
            all_items = await get_page_items(response)
            if len(all_items) < 15:
                while len(all_items) < 15 and lvl <= 12:
                    lvl += 1
                    current_url = get_url(x, lvl)
                    response = await get_page(current_url)
                    if response:
                        all_items = await get_page_items(response)
                    else:
                        all_items = []
                if len(all_items) > 50:
                    lvl -= 1
                    current_url = get_url(x, lvl)
                    response = await get_page(current_url)
                    if response:
                        all_items = await get_page_items(response)
                    else:
                        all_items = []
                    if len(all_items) < 10:
                        lvl += 1
                        current_url = get_url(x, lvl)
                        response = await get_page(current_url)
                        if response:
                            all_items = await get_page_items(response)

        for item in all_items:
            try:
                new_data = await get_for_test_item(item, x, lvl)
                if new_data is None:
                    raise Exception('new_data is None')
                if data is None:
                    data = pd.DataFrame(columns=new_data.keys())
                data = data.append(new_data, ignore_index=True)
            except Exception as e:
                data = None
                print()
                print(f'{e=}')
                traceback.print_tb(e.__traceback__)
                break

        if data is not None:
            data_str = data.to_csv(encoding='utf-8', index=False)
            async with AIOFile(f'train/data_{x["id"]}.csv', 'w') as afp:
                writer = Writer(afp)
                await writer(data_str)
                await afp.fsync()

    global COUNTS, LEFT
    COUNTS += 1
    complete = int((COUNTS * 100) / total)
    if COUNTS != complete:
        LEFT = complete
    sys.stdout.write(f'Progress: {LEFT}% ({COUNTS} of {total})\r')
    sys.stdout.flush()
Example #23
async def DownloadDebSrc(repo_id, sourcedir, sourcename, build_id, version,
                         basemirror, projectversion):
    await buildlogtitle(build_id, "Source Package Republish")
    await buildlog(
        build_id, "I: downloading source package from {} ({})\n".format(
            projectversion, basemirror))
    cfg = Configuration()
    apt_url = cfg.aptly.get("apt_url")
    sources_url = "{}/{}/repos/{}/dists/stable/main/source/Sources".format(
        apt_url, basemirror, projectversion)

    # download Sources file
    Sources = ""
    try:
        async with aiohttp.ClientSession() as http:
            async with http.get(sources_url) as resp:
                if resp.status == 200:
                    Sources = await resp.text()
    except Exception:
        pass

    if not Sources:
        await buildlog(build_id,
                       "E: Error downloading {}\n".format(sources_url))
        return False

    # parse Sources file
    files = []
    directory = None
    found_package_entry = False
    found_directory_entry = False
    found_files_section = False
    for line in Sources.split('\n'):
        if not found_package_entry:
            if line != "Package: {}".format(sourcename):
                continue
            found_package_entry = True
            continue
        else:  # Package section
            if not found_directory_entry:
                if line == "":
                    break
                if not line.startswith("Directory: "):
                    continue
                found_directory_entry = True
                directory = line.split(" ")[1]
                continue
            elif not found_files_section:
                if line == "":
                    break
                if line != "Files:":
                    continue
                found_files_section = True
                continue
            else:  # Files section
                if line.startswith(" "):
                    files.append(line[1:].split(" "))
                else:
                    break

    if not found_directory_entry:
        await buildlog(
            build_id, "E: Could not find {}/{} in Sources file: {}\n".format(
                sourcename, version, sources_url))
        return False

    await buildlog(build_id, "I: found directory: {}\n".format(directory))
    await buildlog(build_id, "I: downloading source files:\n")
    sourcepath = None
    sourcetype = None
    source_files = []
    repopath = f"/var/lib/molior/repositories/{repo_id}"
    tmpdir = mkdtemp(dir=repopath)
    for f in files:
        await buildlog(build_id, " - {}\n".format(f[2]))

        file_url = "{}/{}/repos/{}/{}/{}".format(apt_url, basemirror,
                                                 projectversion, directory,
                                                 f[2])
        body = None
        async with aiohttp.ClientSession() as http:
            async with http.get(file_url) as resp:
                if resp.status != 200:
                    await buildlog(
                        build_id, "E: Error downloading {}\n".format(file_url))
                    continue
                body = await resp.read()

        filepath = f"{tmpdir}/{f[2]}"
        async with AIOFile(filepath, "wb") as afp:
            writer = Writer(afp)
            await writer(body)

        source_files.append(f[2])

        if filepath.endswith(".git"):
            sourcetype = "git"
            sourcepath = filepath
        elif filepath.endswith(".tar.gz") or filepath.endswith(".tar.xz"):
            sourcetype = "tar"
            sourcepath = filepath

    # extract source, if git, checkout version tag
    ret = None
    if sourcetype:
        output = ""

        async def outh(line):
            nonlocal output
            await buildlog(build_id, "{}\n".format(line))
            output += line

        if sourcetype == "tar":
            cmd = "tar xf {}".format(sourcepath)
            await buildlog(build_id, "$ {}\n".format(cmd))
            process = Launchy(cmd, outh, outh, cwd=tmpdir)
            await process.launch()
            ret = await process.wait()
        elif sourcetype == "git":
            cmd = f"git clone -b v{version.replace('~', '-')} {filepath} {sourcedir}"
            await buildlog(build_id, "$ {}\n".format(cmd))
            process = Launchy(cmd, outh, outh, cwd=tmpdir)
            await process.launch()
            ret = await process.wait()
            output = ""

        if ret == 0:
            cmd = "dpkg-genchanges -S"
            await buildlog(build_id, "$ {}\n".format(cmd))
            process = Launchy(cmd, outh, outh, cwd=f"{tmpdir}/{sourcedir}")
            await process.launch()
            ret = await process.wait()

        if ret == 0:
            cmd = "dpkg-genbuildinfo --build=source"
            await buildlog(build_id, "$ {}\n".format(cmd))
            process = Launchy(cmd, outh, outh, cwd=f"{tmpdir}/{sourcedir}")
            await process.launch()
            ret = await process.wait()

        source_files.append(f"{sourcename}_{version}_source.buildinfo")

    for source_file in source_files:
        try:
            os.rename(f"{tmpdir}/{source_file}", f"{repopath}/{source_file}")
        except Exception as exc:
            logger.exception(exc)

    try:
        rmtree(tmpdir)
    except Exception as exc:
        logger.exception(exc)

    return ret == 0
Example #24
    async def writeResult(self):
        async with AIOFile('ico.json', 'a') as afp:
            writer = Writer(afp)
            await writer(json.dumps(self.result))  # Writer expects str/bytes, not a dict
            await afp.fsync()
            logging.info('{} json is written'.format(self.result['id']))
Example #25
async def put_manifest(request):
    mirrorer = request.app["mirrorer"]
    images_directory = request.app["images_directory"]
    repository = request.match_info["repository"]
    tag = request.match_info["tag"]

    request.app["token_checker"].authenticate(request, repository, ["push"])

    content_type = request.headers.get("Content-Type", "")
    if not content_type:
        raise exceptions.ManifestInvalid(reason="no_content_type")

    manifest = await request.read()
    content_size = len(manifest)

    # This makes sure the manifest is somewhat valid
    try:
        direct_deps, dependencies = await recursive_analyze(
            mirrorer, repository, content_type, manifest)
    except Exception:
        logger.exception("Error while validating manifest")
        raise

    hash = hashlib.sha256(manifest).hexdigest()
    prefixed_hash = f"sha256:{hash}"

    manifest_path = get_manifest_path(images_directory, prefixed_hash)
    manifests_dir = manifest_path.parent

    if not os.path.exists(manifests_dir):
        os.makedirs(manifests_dir)

    async with AIOFile(manifest_path, "wb") as fp:
        writer = Writer(fp)
        await writer(manifest)
        await fp.fsync()

    send_action = request.app["send_action"]
    identifier = request.app["identifier"]

    success = await send_action(dependencies + [
        {
            "type": RegistryActions.MANIFEST_MOUNTED,
            "hash": prefixed_hash,
            "repository": repository,
            "user": request["user"],
        },
        {
            "type": RegistryActions.MANIFEST_STORED,
            "hash": prefixed_hash,
            "location": identifier,
            "user": request["user"],
        },
        {
            "type": RegistryActions.MANIFEST_STAT,
            "hash": prefixed_hash,
            "size": content_size,
        },
        {
            "type": RegistryActions.MANIFEST_INFO,
            "hash": prefixed_hash,
            "dependencies": direct_deps,
            "content_type": content_type,
        },
        {
            "type": RegistryActions.HASH_TAGGED,
            "repository": repository,
            "tag": tag,
            "hash": prefixed_hash,
            "user": request["user"],
        },
    ])

    if not success:
        raise exceptions.ManifestInvalid()

    request.app["wh_manager"].send({
        "id": str(uuid.uuid4()),
        "timestamp": "2016-03-09T14:44:26.402973972-08:00",
        "action": "push",
        "target": {
            "mediaType": content_type,
            "size": 708,
            "digest": prefixed_hash,
            "length": 708,
            "repository": repository,
            "url": f"/v2/{repository}/manifests/{prefixed_hash}",
            "tag": tag,
        },
        "request": {
            "id": str(uuid.uuid4()),
            # "addr": "192.168.64.11:42961",
            # "host": "192.168.100.227:5000",
            "method": "PUT",
            # "useragent": "curl/7.38.0",
        },
        "actor": {},
        # "source": {
        #    "addr": "xtal.local:5000",
        #    "instanceID": "a53db899-3b4b-4a62-a067-8dd013beaca4",
        # },
    })

    return web.Response(
        status=201,
        headers={
            "Location": f"/v2/{repository}/manifests/{prefixed_hash}",
            "Docker-Content-Digest": prefixed_hash,
        },
    )
Example #26
async def upload_finish(request):
    images_directory = request.app["images_directory"]
    repository = request.match_info["repository"]
    session_id = request.match_info["session_id"]
    expected_digest = request.query.get("digest", "")

    request.app["token_checker"].authenticate(request, repository, ["push"])

    session = request.app["sessions"].get(session_id, None)
    if not session:
        raise exceptions.BlobUploadInvalid(session=session_id)

    uploads = images_directory / "uploads"
    if not uploads.exists():
        os.makedirs(uploads)

    upload_path = uploads / session_id

    async with AIOFile(upload_path, "ab") as fp:
        writer = Writer(fp)
        chunk = await request.content.read(1024 * 1024)
        while chunk:
            await writer(chunk)
            session["hasher"].update(chunk)
            session["size"] += len(chunk)
            chunk = await request.content.read(1024 * 1024)
        await fp.fsync()

    hash = session["hasher"].hexdigest()
    digest = f"sha256:{hash}"

    if expected_digest != digest:
        raise exceptions.BlobUploadInvalid()

    blob_path = get_blob_path(images_directory, digest)
    blob_dir = blob_path.parent
    if not blob_dir.exists():
        os.makedirs(blob_dir)

    os.rename(upload_path, blob_path)

    send_action = request.app["send_action"]
    identifier = request.app["identifier"]

    success = await send_action([
        {
            "type": RegistryActions.BLOB_MOUNTED,
            "hash": digest,
            "repository": repository,
            "user": request["user"],
        },
        {
            "type": RegistryActions.BLOB_STAT,
            "hash": digest,
            "size": session["size"],
        },
        {
            "type": RegistryActions.BLOB_STORED,
            "hash": digest,
            "location": identifier,
            "user": request["user"],
        },
    ])

    if not success:
        raise exceptions.BlobUploadInvalid()

    return web.Response(
        status=201,
        headers={
            "Location": f"/v2/{repository}/blobs/{digest}",
            "Content-Length": "0",
            "Docker-Content-Digest": digest,
        },
    )
Example #27
async def start_upload(request):
    images_directory = request.app["images_directory"]

    repository = request.match_info["repository"]
    mount_digest = request.query.get("mount", "")
    mount_repository = request.query.get("from", "")

    request.app["token_checker"].authenticate(request, repository, ["push"])

    if mount_digest and mount_repository:
        request.app["token_checker"].authenticate(request, mount_repository,
                                                  ["pull"])

        if mount_repository == repository:
            raise exceptions.BlobUploadInvalid(mount=mount_digest,
                                               repository=mount_repository)

        registry_state = request.app["registry_state"]
        if registry_state.is_blob_available(mount_repository, mount_digest):
            send_action = request.app["send_action"]

            success = await send_action([
                {
                    "type": RegistryActions.BLOB_MOUNTED,
                    "hash": mount_digest,
                    "repository": repository,
                    "user": request["user"],
                },
            ])

            if not success:
                logger.warning(
                    "Can cross-mount %s from %s to %s but failed to commit to journal",
                    mount_digest,
                    mount_repository,
                    repository,
                )
                raise exceptions.BlobUploadInvalid(mount=mount_digest,
                                                   repository=mount_repository)

            return web.Response(
                status=201,
                headers={
                    "Location": f"/v2/{repository}/blobs/{mount_digest}",
                    "Content-Length": "0",
                    "Docker-Content-Digest": mount_digest,
                },
            )

    session_id = str(uuid.uuid4())

    session = request.app["sessions"][session_id] = {
        "hasher": hashlib.sha256(),
        "size": 0,
    }

    expected_digest = request.query.get("digest", None)
    if expected_digest:
        uploads = images_directory / "uploads"
        if not uploads.exists():
            os.makedirs(uploads)

        upload_path = uploads / session_id

        async with AIOFile(upload_path, "ab") as fp:
            writer = Writer(fp)
            chunk = await request.content.read(1024 * 1024)
            while chunk:
                await writer(chunk)
                session["hasher"].update(chunk)
                session["size"] += len(chunk)
                chunk = await request.content.read(1024 * 1024)
            await fp.fsync()

        hash = session["hasher"].hexdigest()
        digest = f"sha256:{hash}"

        if expected_digest != digest:
            raise exceptions.BlobUploadInvalid()

        blob_path = get_blob_path(images_directory, digest)
        blob_dir = blob_path.parent
        if not blob_dir.exists():
            os.makedirs(blob_dir)

        os.rename(upload_path, blob_path)

        send_action = request.app["send_action"]
        identifier = request.app["identifier"]

        success = await send_action([
            {
                "type": RegistryActions.BLOB_MOUNTED,
                "hash": digest,
                "repository": repository,
                "user": request["user"],
            },
            {
                "type": RegistryActions.BLOB_STAT,
                "hash": digest,
                "size": session["size"],
            },
            {
                "type": RegistryActions.BLOB_STORED,
                "hash": digest,
                "location": identifier,
                "user": request["user"],
            },
        ])

        if not success:
            raise exceptions.BlobUploadInvalid()

        return web.Response(
            status=201,
            headers={
                "Location": f"/v2/{repository}/blobs/{digest}",
                "Docker-Content-Digest": digest,
            },
        )

    return web.Response(
        status=202,
        headers={
            "Location": f"/v2/{repository}/blobs/uploads/{session_id}",
            "Range": "0-0",
            "Blob-Upload-Session-ID": session_id,
        },
    )