    def execute(self, task_status_queue=None):
        api_client = api_factory.get_api(
            self._source_resource.storage_url.scheme)
        if copy_util.check_for_cloud_clobber(self._user_request_args,
                                             api_client,
                                             self._destination_resource):
            log.status.Print(
                copy_util.get_no_clobber_message(
                    self._destination_resource.storage_url))
            if self._send_manifest_messages:
                manifest_util.send_skip_message(
                    task_status_queue, self._source_resource,
                    self._destination_resource,
                    copy_util.get_no_clobber_message(
                        self._destination_resource.storage_url))
            return

        progress_callback = progress_callbacks.FilesAndBytesProgressCallback(
            status_queue=task_status_queue,
            offset=0,
            length=self._source_resource.size,
            source_url=self._source_resource.storage_url,
            destination_url=self._destination_resource.storage_url,
            operation_name=task_status.OperationName.INTRA_CLOUD_COPYING,
            process_id=os.getpid(),
            thread_id=threading.get_ident(),
        )

        request_config = request_config_factory.get_request_config(
            self._destination_resource.storage_url,
            decryption_key_hash=self._source_resource.decryption_key_hash,
            user_request_args=self._user_request_args)
        # TODO(b/161900052): Support all of copy_object's parameters
        result_resource = api_client.copy_object(
            self._source_resource,
            self._destination_resource,
            request_config,
            progress_callback=progress_callback)

        if self._print_created_message:
            log.status.Print('Created: {}'.format(result_resource.storage_url))
        if self._send_manifest_messages:
            manifest_util.send_success_message(
                task_status_queue,
                self._source_resource,
                self._destination_resource,
                md5_hash=result_resource.md5_hash)
        if self._delete_source:
            return task.Output(additional_task_iterators=[[
                delete_object_task.DeleteObjectTask(
                    self._source_resource.storage_url)
            ]],
                               messages=None)
    def _get_output(self, destination_resource):
        messages = []
        if self._component_number is not None:
            messages.append(
                task.Message(topic=task.Topic.UPLOADED_COMPONENT,
                             payload=UploadedComponent(
                                 component_number=self._component_number,
                                 object_resource=destination_resource)))
        else:
            messages.append(
                task.Message(topic=task.Topic.CREATED_RESOURCE,
                             payload=destination_resource))
        return task.Output(additional_task_iterators=None, messages=messages)
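Most of the copy tasks on this page open with the same no-clobber guard. The sketch below is a hedged, self-contained illustration of that check; UserRequestArgs and existing_object_names are illustrative stand-ins for the real copy_util/API-client plumbing, not the gcloud implementation.

import collections

UserRequestArgs = collections.namedtuple('UserRequestArgs', ['no_clobber'])


def check_for_cloud_clobber(user_request_args, existing_object_names,
                            destination_name):
    """Returns True when the copy should be skipped to avoid an overwrite."""
    if not (user_request_args and user_request_args.no_clobber):
        return False
    return destination_name in existing_object_names


# With no_clobber set, an existing destination short-circuits the copy.
args = UserRequestArgs(no_clobber=True)
assert check_for_cloud_clobber(args, {'gs://bucket/obj.txt'},
                               'gs://bucket/obj.txt')
assert not check_for_cloud_clobber(args, set(), 'gs://bucket/new.txt')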
Example #3
    def execute(self, task_status_queue=None):
        """Deletes temporary components and associated tracker files.

    Args:
      task_status_queue: See base class.

    Returns:
      A task.Output with tasks for deleting temporary components.
    """
        del task_status_queue

        component_tracker_path_prefix = tracker_file_util.get_tracker_file_path(
            copy_component_util.get_temporary_component_resource(
                self._source_resource,
                self._destination_resource,
                self._random_prefix,
                component_id='').storage_url,
            tracker_file_util.TrackerFileType.UPLOAD,
            # TODO(b/190093425): Setting component_number will not be necessary
            # after using the final destination to generate component tracker paths.
            component_number='')
        # Matches all paths, regardless of component number:
        component_tracker_paths = glob.iglob(component_tracker_path_prefix +
                                             '*')

        delete_tasks = []
        for component_tracker_path in component_tracker_paths:
            tracker_data = tracker_file_util.read_resumable_upload_tracker_file(
                component_tracker_path)
            if tracker_data.complete:
                _, _, component_number = component_tracker_path.rpartition('_')
                component_resource = (
                    copy_component_util.get_temporary_component_resource(
                        self._source_resource,
                        self._destination_resource,
                        self._random_prefix,
                        component_id=component_number))

                delete_tasks.append(
                    delete_object_task.DeleteObjectTask(
                        component_resource.storage_url, verbose=False))
            os.remove(component_tracker_path)

        # TODO(b/228956264): May be able to remove after task graph improvements.
        additional_task_iterators = [delete_tasks] if delete_tasks else None
        return task.Output(additional_task_iterators=additional_task_iterators,
                           messages=None)
    def execute(self, task_status_queue=None):
        del task_status_queue  # Unused.
        request_config = request_config_factory.get_request_config(
            self._destination_resource.storage_url,
            user_request_args=self._user_request_args)

        provider = self._destination_resource.storage_url.scheme
        created_resource = api_factory.get_api(provider).compose_objects(
            self._source_resources,
            self._destination_resource,
            request_config,
            original_source_resource=self._original_source_resource)
        return task.Output(messages=[
            task.Message(topic=task.Topic.CREATED_RESOURCE,
                         payload=created_resource),
        ],
                           additional_task_iterators=[])
def _thread_worker(task_queue, task_output_queue, task_status_queue,
                   idle_thread_count):
    """A consumer thread run in a child process.

  Args:
    task_queue (multiprocessing.Queue): Holds task_graph.TaskWrapper instances.
    task_output_queue (multiprocessing.Queue): Sends information about completed
      tasks back to the main process.
    task_status_queue (multiprocessing.Queue|None): Used by task to report it
      progress to a central location.
    idle_thread_count (multiprocessing.Semaphore): Keeps track of how many
      threads are busy. Useful for spawning new workers if all threads are busy.
  """
    while True:
        with _task_queue_lock():
            task_wrapper = task_queue.get()
        if task_wrapper == _SHUTDOWN:
            break
        idle_thread_count.acquire()

        task_execution_error = None
        try:
            task_output = task_wrapper.task.execute(
                task_status_queue=task_status_queue)
        # pylint: disable=broad-except
        # If any exception is raised, it will prevent the executor from exiting.
        except Exception as exception:
            task_execution_error = exception
            log.error(exception)
            log.debug(exception, exc_info=sys.exc_info())
            if task_wrapper.task.report_error:
                task_output = task.Output(additional_task_iterators=None,
                                          messages=[
                                              task.Message(
                                                  topic=task.Topic.FATAL_ERROR,
                                                  payload={})
                                          ])
            else:
                task_output = None
        # pylint: enable=broad-except
        finally:
            task_wrapper.task.exit_handler(task_execution_error,
                                           task_status_queue)

        task_output_queue.put((task_wrapper, task_output))
        idle_thread_count.release()
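The worker loop above blocks on a shared queue, executes each task, forwards the output, and exits when it sees a shutdown sentinel. The following self-contained sketch shows the same queue-plus-sentinel pattern in miniature; all names (_SHUTDOWN, _worker, run_tasks) are illustrative and are not the gcloud task_graph_executor.

import queue
import threading

_SHUTDOWN = object()  # Sentinel telling each worker to exit its loop.


def _worker(task_queue, output_queue):
    while True:
        work_item = task_queue.get()
        if work_item is _SHUTDOWN:
            break
        try:
            output_queue.put(work_item())
        except Exception as error:  # Keep the worker alive if a task fails.
            output_queue.put(error)


def run_tasks(tasks, thread_count=4):
    tasks = list(tasks)
    task_queue = queue.Queue()
    output_queue = queue.Queue()
    threads = [
        threading.Thread(target=_worker, args=(task_queue, output_queue))
        for _ in range(thread_count)
    ]
    for thread in threads:
        thread.start()
    for task_callable in tasks:
        task_queue.put(task_callable)
    for _ in threads:
        task_queue.put(_SHUTDOWN)  # One sentinel per worker thread.
    for thread in threads:
        thread.join()
    return [output_queue.get() for _ in range(len(tasks))]

For example, run_tasks([lambda: 1 + 1, lambda: 2 * 3]) returns [2, 6] in completion order.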
Example #6
    def execute(self, task_status_queue=None):
        """Performs download."""
        digesters = _get_digesters(self._component_number,
                                   self._source_resource)

        progress_callback = progress_callbacks.FilesAndBytesProgressCallback(
            status_queue=task_status_queue,
            offset=self._offset,
            length=self._length,
            source_url=self._source_resource.storage_url,
            destination_url=self._destination_resource.storage_url,
            component_number=self._component_number,
            total_components=self._total_components,
            operation_name=task_status.OperationName.DOWNLOADING,
            process_id=os.getpid(),
            thread_id=threading.get_ident(),
        )

        request_config = request_config_factory.get_request_config(
            self._source_resource.storage_url,
            decryption_key_hash=self._source_resource.decryption_key_hash,
            user_request_args=self._user_request_args,
        )

        if self._source_resource.size and self._component_number is not None:
            try:
                api_download_result = self._perform_component_download(
                    request_config, progress_callback, digesters)
            # pylint:disable=broad-except
            except Exception as e:
                # pylint:enable=broad-except
                return task.Output(
                    additional_task_iterators=None,
                    messages=[task.Message(topic=task.Topic.ERROR, payload=e)])

        elif self._strategy is cloud_api.DownloadStrategy.RESUMABLE:
            api_download_result = self._perform_resumable_download(
                request_config, progress_callback, digesters)
        else:
            api_download_result = self._perform_one_shot_download(
                request_config, progress_callback, digesters)
        return self._get_output(digesters, api_download_result)
Example #7
    def _get_output(self, digesters, api_download_result):
        """Generates task.Output from download execution results.

    Args:
      digesters (dict): Contains hash objects for download checksums.
      api_download_result (cloud_api.DownloadApiClientReturnValue|None): Generic
        information from API client about the download results.

    Returns:
      task.Output: Data the parent download or finalize download class would
        like to have.
    """
        messages = []
        if hash_util.HashAlgorithm.MD5 in digesters:
            md5_digest = hash_util.get_base64_hash_digest_string(
                digesters[hash_util.HashAlgorithm.MD5])
            messages.append(
                task.Message(topic=task.Topic.MD5, payload=md5_digest))

        if hash_util.HashAlgorithm.CRC32C in digesters:
            crc32c_checksum = crc32c.get_checksum(
                digesters[hash_util.HashAlgorithm.CRC32C])
            messages.append(
                task.Message(topic=task.Topic.CRC32C,
                             payload={
                                 'component_number': self._component_number,
                                 'crc32c_checksum': crc32c_checksum,
                                 'length': self._length,
                             }))

        if (api_download_result and self._user_request_args
                and self._user_request_args.system_posix_data):
            messages.append(
                task.Message(topic=task.Topic.API_DOWNLOAD_RESULT,
                             payload=api_download_result))

        return task.Output(additional_task_iterators=None, messages=messages)
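The digest handling above ultimately produces base64-encoded hash strings, the encoding cloud object metadata typically uses for MD5. The helper below is a minimal sketch of what a function like hash_util.get_base64_hash_digest_string is assumed to do; it is not the actual gcloud implementation.

import base64
import hashlib


def get_base64_md5_digest(data):
    digester = hashlib.md5()
    digester.update(data)  # In practice the digester is fed incrementally.
    return base64.b64encode(digester.digest()).decode('ascii')


# Example: get_base64_md5_digest(b'hello world') == 'XrY7u+Ae7tCTyyK7j1rNww=='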
    def execute(self, task_status_queue=None):
        """Validates and clean ups after sliced download."""
        component_error_occurred = False
        for message in self.received_messages:
            if message.topic is task.Topic.ERROR:
                log.error(message.payload)
                component_error_occurred = True
        if component_error_occurred:
            raise errors.Error(
                'Failed to download one or more components of a sliced download.')

        temporary_object_path = (
            self._temporary_destination_resource.storage_url.object_name)
        final_destination_object_path = (
            self._final_destination_resource.storage_url.object_name)
        if (properties.VALUES.storage.check_hashes.Get() !=
                properties.CheckHashes.NEVER.value
                and self._source_resource.crc32c_hash):

            component_payloads = [
                message.payload for message in self.received_messages
                if message.topic == task.Topic.CRC32C
            ]
            if component_payloads:
                # Returns list of payload values sorted by component number.
                sorted_component_payloads = sorted(
                    component_payloads, key=lambda d: d['component_number'])

                downloaded_file_checksum = sorted_component_payloads[0][
                    'crc32c_checksum']
                for i in range(1, len(sorted_component_payloads)):
                    payload = sorted_component_payloads[i]
                    downloaded_file_checksum = crc32c.concat_checksums(
                        downloaded_file_checksum,
                        payload['crc32c_checksum'],
                        b_byte_count=payload['length'])

                downloaded_file_hash_object = crc32c.get_crc32c_from_checksum(
                    downloaded_file_checksum)
                downloaded_file_hash_digest = crc32c.get_hash(
                    downloaded_file_hash_object)

                download_util.validate_download_hash_and_delete_corrupt_files(
                    temporary_object_path, self._source_resource.crc32c_hash,
                    downloaded_file_hash_digest)

        download_util.decompress_or_rename_file(
            self._source_resource,
            temporary_object_path,
            final_destination_object_path,
            do_not_decompress_flag=self._do_not_decompress)

        if self._user_request_args and self._user_request_args.system_posix_data:
            posix_util.set_posix_attributes_on_file(
                final_destination_object_path,
                task_util.get_first_matching_message_payload(
                    self.received_messages,
                    task.Topic.API_DOWNLOAD_RESULT).posix_attributes)

        tracker_file_util.delete_download_tracker_files(
            self._temporary_destination_resource.storage_url)

        if self._print_created_message:
            log.status.Print(
                'Created: {}'.format(final_destination_object_path))
        if self._send_manifest_messages:
            # Does not send md5_hash because sliced download uses CRC32C.
            manifest_util.send_success_message(
                task_status_queue, self._source_resource,
                self._final_destination_resource)

        if self._delete_source:
            return task.Output(additional_task_iterators=[[
                delete_object_task.DeleteObjectTask(
                    self._source_resource.storage_url),
            ]],
                               messages=None)
Example #9
    def execute(self, task_status_queue=None):
        destination_provider = self._destination_resource.storage_url.scheme
        if copy_util.check_for_cloud_clobber(
                self._user_request_args,
                api_factory.get_api(destination_provider),
                self._destination_resource):
            log.status.Print(
                copy_util.get_no_clobber_message(
                    self._destination_resource.storage_url))
            if self._send_manifest_messages:
                manifest_util.send_skip_message(
                    task_status_queue, self._source_resource,
                    self._destination_resource,
                    copy_util.get_no_clobber_message(
                        self._destination_resource.storage_url))
            return

        source_url = self._source_resource.storage_url
        original_source_path = source_url.object_name
        should_gzip_locally = gzip_util.should_gzip_locally(
            getattr(self._user_request_args, 'gzip_settings', None),
            original_source_path)

        if source_url.is_pipe:
            size = None
            source_path = original_source_path
        else:
            if should_gzip_locally:
                source_path = gzip_util.get_temporary_gzipped_file(
                    original_source_path)
            else:
                source_path = original_source_path
            size = os.path.getsize(source_path)

        api_capabilities = api_factory.get_capabilities(destination_provider)
        component_count = copy_component_util.get_component_count(
            size,
            properties.VALUES.storage.parallel_composite_upload_component_size.
            Get(),
            # TODO(b/232550921): This is a big no-no. Keep API references out of the
            # task-level. Porting because in the process of solving a major bug.
            gcs_api.MAX_OBJECTS_PER_COMPOSE_CALL)
        should_perform_single_transfer = (
            source_url.is_pipe or size < self._composite_upload_threshold
            or not self._composite_upload_threshold
            or cloud_api.Capability.COMPOSE_OBJECTS not in api_capabilities
            or not task_util.should_use_parallelism() or component_count <= 1)

        if should_perform_single_transfer:
            task_output = file_part_upload_task.FilePartUploadTask(
                self._source_resource,
                self._destination_resource,
                source_path,
                offset=0,
                length=size,
                user_request_args=self._user_request_args).execute(
                    task_status_queue)
            result_resource = task_util.get_first_matching_message_payload(
                task_output.messages, task.Topic.CREATED_RESOURCE)
            if result_resource:
                if self._print_created_message:
                    log.status.Print('Created: {}'.format(
                        result_resource.storage_url))
                if self._send_manifest_messages:
                    manifest_util.send_success_message(
                        task_status_queue,
                        self._source_resource,
                        self._destination_resource,
                        md5_hash=result_resource.md5_hash)

            if should_gzip_locally:
                # Delete temporary gzipped version of source file.
                os.remove(source_path)
            if self._delete_source:
                # Delete original source file.
                os.remove(self._source_resource.storage_url.object_name)
        else:
            component_offsets_and_lengths = (
                copy_component_util.get_component_offsets_and_lengths(
                    size, component_count))

            tracker_file_path = tracker_file_util.get_tracker_file_path(
                self._destination_resource.storage_url,
                tracker_file_util.TrackerFileType.PARALLEL_UPLOAD,
                source_url=source_url)
            tracker_data = tracker_file_util.read_composite_upload_tracker_file(
                tracker_file_path)

            if tracker_data:
                random_prefix = tracker_data.random_prefix
            else:
                random_prefix = _get_random_prefix()

            tracker_file_util.write_composite_upload_tracker_file(
                tracker_file_path, random_prefix)

            file_part_upload_tasks = []
            for i, (offset,
                    length) in enumerate(component_offsets_and_lengths):

                temporary_component_resource = (
                    copy_component_util.get_temporary_component_resource(
                        self._source_resource, self._destination_resource,
                        random_prefix, i))

                upload_task = file_part_upload_task.FilePartUploadTask(
                    self._source_resource,
                    temporary_component_resource,
                    source_path,
                    offset,
                    length,
                    component_number=i,
                    total_components=len(component_offsets_and_lengths),
                    user_request_args=self._user_request_args)

                file_part_upload_tasks.append(upload_task)

            finalize_upload_task = (
                finalize_composite_upload_task.FinalizeCompositeUploadTask(
                    expected_component_count=len(file_part_upload_tasks),
                    source_resource=self._source_resource,
                    destination_resource=self._destination_resource,
                    source_path=source_path,
                    random_prefix=random_prefix,
                    delete_source=self._delete_source,
                    print_created_message=self._print_created_message,
                    user_request_args=self._user_request_args))

            return task.Output(additional_task_iterators=[
                file_part_upload_tasks, [finalize_upload_task]
            ],
                               messages=None)
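The parallel composite path above asks copy_component_util.get_component_offsets_and_lengths for the byte ranges each FilePartUploadTask should cover. The function below is only a hypothetical sketch of how such ranges could be computed from the file size and component count; the real helper may split differently.

def get_component_offsets_and_lengths(size, component_count):
    """Splits size bytes into contiguous (offset, length) pairs."""
    offsets_and_lengths = []
    base_length, remainder = divmod(size, component_count)
    offset = 0
    for i in range(component_count):
        # Spread any remainder across the first components, one byte each.
        length = base_length + (1 if i < remainder else 0)
        if length == 0:
            break
        offsets_and_lengths.append((offset, length))
        offset += length
    return offsets_and_lengths


# Example: get_component_offsets_and_lengths(10, 3) -> [(0, 4), (4, 3), (7, 3)]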
Example #10
    def execute(self, task_status_queue=None):
        uploaded_components = [
            message.payload for message in self.received_messages
            if message.topic == task.Topic.UPLOADED_COMPONENT
        ]

        if len(uploaded_components) != self._expected_component_count:
            raise errors.Error(
                'Temporary components were not uploaded correctly.'
                ' Please retry this upload.')

        uploaded_objects = [
            component.object_resource for component in sorted(
                uploaded_components,
                key=lambda component: component.component_number)
        ]

        compose_task = compose_objects_task.ComposeObjectsTask(
            uploaded_objects,
            self._destination_resource,
            original_source_resource=self._source_resource,
            user_request_args=self._user_request_args)
        compose_task_output = compose_task.execute(
            task_status_queue=task_status_queue)

        result_resource = task_util.get_first_matching_message_payload(
            compose_task_output.messages, task.Topic.CREATED_RESOURCE)
        if result_resource:
            if self._print_created_message:
                log.status.Print('Created: {}'.format(
                    result_resource.storage_url))
            if self._send_manifest_messages:
                manifest_util.send_success_message(
                    task_status_queue,
                    self._source_resource,
                    self._destination_resource,
                    md5_hash=result_resource.md5_hash)

        # After a successful compose call, we consider the upload complete and can
        # delete tracker files.
        tracker_file_path = tracker_file_util.get_tracker_file_path(
            self._destination_resource.storage_url,
            tracker_file_util.TrackerFileType.PARALLEL_UPLOAD,
            source_url=self._source_resource)
        tracker_file_util.delete_tracker_file(tracker_file_path)

        if gzip_util.should_gzip_locally(
                getattr(self._user_request_args, 'gzip_settings', None),
                self._source_path) and self._source_path.endswith(
                    storage_url.TEMPORARY_FILE_SUFFIX):
            # Delete temporary gzipped version of source file.
            os.remove(self._source_path)
        if self._delete_source:
            # Delete original source file.
            os.remove(self._source_resource.storage_url.object_name)

        return task.Output(additional_task_iterators=[[
            delete_temporary_components_task.DeleteTemporaryComponentsTask(
                self._source_resource,
                self._destination_resource,
                self._random_prefix,
            )
        ]],
                           messages=None)
Example #11
    def execute(self, task_status_queue=None):
        """Copies file by downloading and uploading in parallel."""
        # TODO (b/168712813): Add option to use the Data Transfer component.
        destination_client = api_factory.get_api(
            self._destination_resource.storage_url.scheme)
        if copy_util.check_for_cloud_clobber(self._user_request_args,
                                             destination_client,
                                             self._destination_resource):
            log.status.Print(
                copy_util.get_no_clobber_message(
                    self._destination_resource.storage_url))
            if self._send_manifest_messages:
                manifest_util.send_skip_message(
                    task_status_queue, self._source_resource,
                    self._destination_resource,
                    copy_util.get_no_clobber_message(
                        self._destination_resource.storage_url))
            return

        progress_callback = progress_callbacks.FilesAndBytesProgressCallback(
            status_queue=task_status_queue,
            offset=0,
            length=self._source_resource.size,
            source_url=self._source_resource.storage_url,
            destination_url=self._destination_resource.storage_url,
            operation_name=task_status.OperationName.DAISY_CHAIN_COPYING,
            process_id=os.getpid(),
            thread_id=threading.get_ident(),
        )

        buffer_controller = BufferController(
            self._source_resource,
            self._destination_resource.storage_url.scheme,
            self._user_request_args, progress_callback)

        # Perform download in a separate thread so that upload can be performed
        # simultaneously.
        buffer_controller.start_download_thread()

        content_type = (self._source_resource.content_type
                        or request_config_factory.DEFAULT_CONTENT_TYPE)

        request_config = request_config_factory.get_request_config(
            self._destination_resource.storage_url,
            content_type=content_type,
            md5_hash=self._get_md5_hash(),
            size=self._source_resource.size,
            user_request_args=self._user_request_args)

        result_resource = None
        try:
            upload_strategy = upload_util.get_upload_strategy(
                api=destination_client,
                object_length=self._source_resource.size)
            result_resource = destination_client.upload_object(
                buffer_controller.readable_stream,
                self._destination_resource,
                request_config,
                source_resource=self._source_resource,
                upload_strategy=upload_strategy)
        except _AbruptShutdownError:
            # Not raising daisy_chain_stream.exception_raised here because we want
            # to wait for the download thread to finish.
            pass
        except Exception as e:  # pylint: disable=broad-except
            # For all the other errors raised during upload, we want to make
            # sure that the download thread is terminated before we re-raise.
            # Hence we catch any exception and store it to be re-raised later.
            buffer_controller.shutdown(e)

        buffer_controller.wait_for_download_thread_to_terminate()
        buffer_controller.readable_stream.close()
        if buffer_controller.exception_raised:
            raise buffer_controller.exception_raised

        if result_resource:
            if self._print_created_message:
                log.status.Print('Created: {}'.format(
                    result_resource.storage_url))
            if self._send_manifest_messages:
                manifest_util.send_success_message(
                    task_status_queue,
                    self._source_resource,
                    self._destination_resource,
                    md5_hash=result_resource.md5_hash)

        if self._delete_source:
            return task.Output(additional_task_iterators=[[
                delete_object_task.DeleteObjectTask(
                    self._source_resource.storage_url)
            ]],
                               messages=None)
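The daisy-chain task above overlaps a download thread with an upload by pushing chunks through an in-memory buffer that the upload reads as a stream. The sketch below reproduces that idea in a self-contained form; QueueReader and daisy_chain are illustrative stand-ins and omit the error propagation, seeking, and shutdown logic that the real BufferController handles.

import queue
import threading

_END = b''  # An empty chunk marks end of stream.


class QueueReader:
    """File-like object that reads chunks out of a queue."""

    def __init__(self, chunk_queue):
        self._queue = chunk_queue
        self._buffer = b''
        self._finished = False

    def read(self, size=-1):
        # Pull chunks until we can satisfy the request or the stream ends.
        while not self._finished and (size < 0 or len(self._buffer) < size):
            chunk = self._queue.get()
            if chunk == _END:
                self._finished = True
            else:
                self._buffer += chunk
        if size < 0:
            result, self._buffer = self._buffer, b''
        else:
            result, self._buffer = self._buffer[:size], self._buffer[size:]
        return result


def daisy_chain(chunks, max_buffered_chunks=4):
    """Starts a 'download' thread and drains the stream as the 'upload'."""
    chunk_queue = queue.Queue(maxsize=max_buffered_chunks)  # Bounds memory use.

    def download():
        for chunk in chunks:
            chunk_queue.put(chunk)
        chunk_queue.put(_END)

    threading.Thread(target=download, daemon=True).start()
    return QueueReader(chunk_queue).read()


# Example: daisy_chain([b'abc', b'def', b'ghi']) -> b'abcdefghi'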
    def execute(self, task_status_queue=None):
        """Creates appropriate download tasks."""
        destination_url = self._destination_resource.storage_url
        # We need to call os.remove here for two reasons:
        # 1. It saves on disk space during a transfer.
        # 2. os.rename fails if a file exists at the destination. Avoiding this by
        # removing files after a download makes us susceptible to a race condition
        # between two running instances of gcloud storage. See the following PR for
        # more information: https://github.com/GoogleCloudPlatform/gsutil/pull/1202.
        if destination_url.exists():
            if self._user_request_args and self._user_request_args.no_clobber:
                log.status.Print(
                    copy_util.get_no_clobber_message(destination_url))
                if self._send_manifest_messages:
                    manifest_util.send_skip_message(
                        task_status_queue, self._source_resource,
                        self._destination_resource,
                        copy_util.get_no_clobber_message(destination_url))
                return
            os.remove(destination_url.object_name)

        temporary_download_file_exists = (
            self._temporary_destination_resource.storage_url.exists())
        if temporary_download_file_exists and os.path.getsize(
                self._temporary_destination_resource.storage_url.object_name
        ) > self._source_resource.size:
            self._restart_download()

        if _should_perform_sliced_download(self._source_resource,
                                           self._destination_resource):
            download_component_task_list, finalize_sliced_download_task_list = (
                self._get_sliced_download_tasks())

            _, found_tracker_file = (
                tracker_file_util.read_or_create_download_tracker_file(
                    self._source_resource,
                    self._temporary_destination_resource.storage_url,
                    total_components=len(download_component_task_list),
                ))
            if found_tracker_file:
                log.debug(
                    'Resuming sliced download with {} components.'.format(
                        len(download_component_task_list)))
            else:
                if temporary_download_file_exists:
                    # Component count may have changed, invalidating earlier download.
                    self._restart_download()
                log.debug(
                    'Launching sliced download with {} components.'.format(
                        len(download_component_task_list)))

            copy_component_util.create_file_if_needed(
                self._source_resource, self._temporary_destination_resource)

            return task.Output(additional_task_iterators=[
                download_component_task_list,
                finalize_sliced_download_task_list,
            ],
                               messages=None)

        part_download_task_output = file_part_download_task.FilePartDownloadTask(
            self._source_resource,
            self._temporary_destination_resource,
            offset=0,
            length=self._source_resource.size,
            do_not_decompress=self._do_not_decompress,
            strategy=self._strategy,
            user_request_args=self._user_request_args,
        ).execute(task_status_queue=task_status_queue)

        temporary_file_url = self._temporary_destination_resource.storage_url
        download_util.decompress_or_rename_file(
            self._source_resource,
            temporary_file_url.object_name,
            destination_url.object_name,
            do_not_decompress_flag=self._do_not_decompress)

        if self._user_request_args and self._user_request_args.system_posix_data:
            posix_util.set_posix_attributes_on_file(
                destination_url.object_name,
                task_util.get_first_matching_message_payload(
                    part_download_task_output.messages,
                    task.Topic.API_DOWNLOAD_RESULT).posix_attributes)

        # For sliced downloads, cleanup is done in the finalize sliced download
        # task. We perform cleanup here for all other types in case some
        # corrupt files were left behind.
        tracker_file_util.delete_download_tracker_files(temporary_file_url)

        if self._print_created_message:
            log.status.Print('Created: {}'.format(destination_url))
        if self._send_manifest_messages:
            manifest_util.send_success_message(
                task_status_queue,
                self._source_resource,
                self._destination_resource,
                md5_hash=task_util.get_first_matching_message_payload(
                    part_download_task_output.messages, task.Topic.MD5))

        if self._delete_source:
            return task.Output(additional_task_iterators=[[
                delete_object_task.DeleteObjectTask(
                    self._source_resource.storage_url),
            ]],
                               messages=None)
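For completeness, the predicate _should_perform_sliced_download referenced above is not shown on this page. The function below is only a hypothetical sketch of the kind of criteria such a check might use; the real implementation may differ.

def should_perform_sliced_download(source_resource, threshold_bytes,
                                   parallelism_enabled):
    # Only slice when the object is large enough to benefit, a CRC32C hash is
    # available so the composed result can be validated, and component
    # downloads are allowed to run in parallel. (Illustrative only.)
    return bool(source_resource.size
                and source_resource.size >= threshold_bytes
                and source_resource.crc32c_hash
                and parallelism_enabled)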