Example #1
0
    def _clone_worker(self, objects_list):
        """Clone the given objects into the target container.

        Ensures the target container exists, creates a temporary local
        workspace that mirrors the objects' directory layout, runs the clone
        across the worker pool, and always removes the workspace afterwards.

        :param objects_list: ``list`` of ``dict`` items from an object
                             listing; each item must carry a ``name`` key.
        """
        log_msg = self.indicator_options['msg'] = 'Ensuring Target Container'
        LOG.info(log_msg)
        with indicator.Spinner(**self.indicator_options):
            self.job.put_container(
                url=self.target_args['storage_url'],
                container=self.target_args['container']
            )

        log_msg = self.indicator_options['msg'] = 'Creating workspace'
        LOG.info(log_msg)
        with indicator.Spinner(**self.indicator_options):
            workspace = self.job_args['clone_workspace'] = tempfile.mkdtemp(
                suffix='_clone',
                prefix='turbolift_',
                dir=self.job_args.get('workspace')
            )
            # Set comprehension de-duplicates directory names so each local
            # directory is only created once.
            working_dirs = {os.path.dirname(i['name']) for i in objects_list}

        for item in working_dirs:
            self.mkdir(
                path=os.path.join(
                    self.job_args['clone_workspace'],
                    item
                )
            )

        LOG.info('Running Clone')
        try:
            self._multi_processor(self._check_clone, items=objects_list)
        finally:
            # Always remove the temporary workspace, even if the clone fails.
            self.remove_dirs(workspace)
Example #2
0
    def start(self):
        """Compress the indexed files and upload the resulting archive."""
        LOG.info('Archiving...')
        with indicator.Spinner(**self.indicator_options):
            archive_info = self._compressor(file_list=self._index_fs())

        LOG.info('Ensuring Container...')
        with indicator.Spinner(**self.indicator_options):
            self._put_container()

        LOG.info('Uploading Archive...')
        with indicator.Spinner(**self.indicator_options):
            self._upload(**archive_info)

        # Delete the local archive unless the user asked to keep it.
        if not self.job_args.get('no_cleanup'):
            os.remove(archive_info['local_object'])
Example #3
0
    def start(self):
        """Index the local file system and upload everything found."""
        LOG.info('Indexing File System...')
        with indicator.Spinner(**self.indicator_options):
            upload_items = self._index_fs()
            # Nothing to upload is a user error, not a no-op.
            if not upload_items:
                raise exceptions.DirectoryFailure(
                    'No objects found to process. Check your command.'
                )

        LOG.info('Ensuring Container...')
        with indicator.Spinner(**self.indicator_options):
            self._put_container()

        self._multi_processor(self._upload, items=upload_items)
Example #4
0
    def start(self):
        """Return a list of objects from the API for a container."""
        LOG.info('Interacting with the CDN...')
        with indicator.Spinner(run=self.run_indicator):
            response = self._cdn()

        # Render the CDN response headers as a vertical table.
        self.print_virt_table(response.headers)
Example #5
0
    def start(self):
        """Return a list of objects from the API for a container.

        Lists the container contents, optionally drops DLO segment objects
        (whose names embed a hash of the container name) and objects not
        matching a substring filter, then prints the result as a table.
        """
        LOG.info('Listing options...')
        with indicator.Spinner(**self.indicator_options):
            objects_list = self._list_contents()
            if not objects_list:
                return

        if isinstance(objects_list[0], dict):
            filter_dlo = self.job_args.get('filter_dlo')
            if filter_dlo:
                container_name = self.job_args.get('container')
                # BUG FIX: hashlib.sha256() requires bytes on Python 3;
                # encode a str container name before hashing.
                if isinstance(container_name, str):
                    container_name = container_name.encode('utf-8')
                dynamic_hash = hashlib.sha256(container_name)
                dynamic_hash = dynamic_hash.hexdigest()
                objects_list = [
                    i for i in objects_list
                    if dynamic_hash not in i.get('name')
                ]
            string_filter = self.job_args.get('filter')
            if string_filter:
                objects_list = [
                    i for i in objects_list if string_filter in i.get('name')
                ]
            self.print_horiz_table(objects_list)
        else:
            # Non-dict results carry their data in response headers.
            self.print_virt_table(objects_list[0].headers)
Example #6
0
    def _multi_processor(self, func, items):
        """Fan *items* out to *func* across multiple worker processes.

        Work is fed through queues produced by ``self._queue_generator``; for
        each queue a batch of ``multiprocessing.Process`` workers is started
        and joined under a progress spinner.

        :param func: callable each worker runs via ``self._process_func``.
        :param items: sized iterable of work items; its length caps the
                      number of workers spawned.
        """
        base_queue = multiprocessing.Queue(maxsize=self.max_jobs)
        concurrency = self.job_args.get('concurrency')
        item_count = len(items)
        # Never spawn more workers than there are items to process.
        if concurrency > item_count:
            concurrency = item_count

        # Yield a queue of objects with a max input as set by `max_jobs`
        for queue in self._queue_generator(items, base_queue):
            self.indicator_options['msg'] = 'Processing workload...'
            # The spinner watches the work queue to report progress.
            self.indicator_options['work_q'] = queue
            with indicator.Spinner(**self.indicator_options):
                concurrent_jobs = [
                    multiprocessing.Process(target=self._process_func,
                                            args=(
                                                func,
                                                queue,
                                            )) for _ in range(concurrency)
                ]

                # Create an empty list to join later.
                join_jobs = list()
                try:
                    for job in concurrent_jobs:
                        join_jobs.append(job)
                        job.start()

                    # Join finished jobs
                    for job in join_jobs:
                        job.join()
                except KeyboardInterrupt:
                    for job in join_jobs:
                        job.terminate()
                    # NOTE(review): this is a for/else — the else clause runs
                    # whenever the loop completes without ``break``, which is
                    # always here, so emergency_kill() fires after every
                    # terminate pass. Presumably intentional; confirm.
                    else:
                        exceptions.emergency_kill()
Example #7
0
    def start(self):
        """Clone objects from one container to another.

        This method was built to clone a container between data-centers while
        using the same credentials. The method assumes that an authentication
        token will be valid within the two data centers.
        """
        LOG.info('Clone warm up...')
        # Create the target args
        self._target_auth()

        # Page through the listing; the marker is the last object seen.
        marker = None
        while True:
            self.indicator_options['msg'] = 'Gathering object list'
            with indicator.Spinner(**self.indicator_options):
                objects_list = self._list_contents(
                    single_page_return=True,
                    last_obj=marker
                )
                if not objects_list:
                    return

            newest_obj = utils.byte_encode(objects_list[-1].get('name'))
            LOG.info(
                'Last object [ %s ] Last object in the list [ %s ]',
                newest_obj,
                marker
            )
            # Stop once the listing no longer advances.
            if marker == newest_obj:
                return

            marker = newest_obj
            self._clone_worker(objects_list=objects_list)
Example #8
0
    def start(self):
        """Fetch and print details for a container or its objects."""
        LOG.info('Grabbing details...')
        with indicator.Spinner(**self.indicator_options):
            detail_items = self._show(
                container=self.job_args['container'],
                container_objects=self.job_args.get('object')
            )

        # One vertical table per returned response.
        for detail in detail_items:
            self.print_virt_table(detail.headers)
Example #9
0
    def start(self):
        """Archive the indexed files and upload via the worker pool."""
        LOG.info('Archiving...')
        with indicator.Spinner(**self.indicator_options):
            archive_info = self._compressor(file_list=self._index_fs())

        LOG.info('Ensuring Container...')
        with indicator.Spinner(**self.indicator_options):
            self._put_container()

        LOG.info('Uploading Archive...')
        local_path = archive_info['local_object']
        upload_objects = self._return_deque()
        upload_objects.append(
            self._encapsulate_object(
                full_path=local_path,
                split_path=os.path.dirname(local_path)
            )
        )

        self._multi_processor(self._upload, items=upload_objects)

        # Delete the local archive unless the user asked to keep it.
        if not self.job_args.get('no_cleanup'):
            os.remove(local_path)
Example #10
0
    def run_manager(self, job_override=None):
        """The run manager.

        The run manager is responsible for loading the plugin required based on
        what the user has inputted using the parsed_command value as found in
        the job_args dict. If the user provides a *job_override* the method
        will attempt to import the module and class as provided by the user.

        Before the method attempts to run any job the run manager will first
        authenticate to the cloud provider.

        :param job_override: ``str`` DOT notation for import with a colon used
                             to separate the class used for the job.
        """
        # Normalize every "*_headers" argument into a dict.
        for arg_name, arg_value in self.job_args.items():
            if arg_name.endswith('_headers'):
                if isinstance(arg_value, list):
                    self.job_args[arg_name] = self._list_headers(
                        headers=arg_value)
                elif isinstance(arg_value, str):
                    # BUG FIX: this was ``elif not arg_name`` which is always
                    # False here (arg_name ends with '_headers' so it is
                    # non-empty), leaving the string-header branch
                    # unreachable and collapsing string values to ``dict()``.
                    self.job_args[arg_name] = self._str_headers(
                        header=arg_value)
                else:
                    self.job_args[arg_name] = dict()

        # Set base header for the user-agent
        self.job_args['base_headers']['User-Agent'] = 'turbolift'

        LOG.info('Authenticating')
        indicator_options = {'run': self.job_args.get('run_indicator', True)}
        with indicator.Spinner(**indicator_options):
            LOG.debug('Authenticate against the Service API')
            self.job_args.update(auth.authenticate(job_args=self.job_args))

        if job_override:
            action = self._get_method(method=job_override)
        else:
            parsed_command = self.job_args.get('parsed_command')
            if not parsed_command:
                raise exceptions.NoCommandProvided(
                    'Please provide a command. Basic commands are: %s',
                    list(self.job_map.keys()))
            else:
                action = self._get_method(method=self.job_map[parsed_command])

        # Instantiate the resolved job class and run it.
        run = action(job_args=self.job_args)
        run.start()
Example #11
0
    def start(self):
        """Return a list of objects from the API for a container."""
        LOG.info('Listing options...')
        with indicator.Spinner(**self.indicator_options):
            objects_list = self._list_contents()
            if not objects_list:
                return

        # Index items
        self._index_objects(objects_list=objects_list)
        # Create the underlying structure
        self._make_directory_structure()

        # Download everything
        LOG.debug('Download Items: %s', self.download_items)
        pending = [
            entry
            for entries in self.download_items.values()
            for entry in entries
        ]
        self._multi_processor(self._get, items=pending)
Example #12
0
    def _index_objects(self, objects_list):
        """Build the map of local directories to objects to download.

        For every listed object compute the local target directory and record
        a ``container_object``/``local_object`` pair under that directory in
        ``self.download_items``.

        :param objects_list: ``list`` of ``dict`` items, each with a ``name``.
        """
        # BUG FIX: log-message typo ("dowload" -> "download").
        LOG.info('Indexing download objects...')

        # Guarantee the base directory ends with the path separator so the
        # local path concatenation below produces a valid path.
        if not self.job_args['directory'].endswith(os.sep):
            self.job_args['directory'] = '%s%s' % (self.job_args['directory'],
                                                   os.sep)

        with indicator.Spinner(**self.indicator_options):
            for item in objects_list:
                # Strip leading separators so the object name is relative.
                normalized_name = item['name'].lstrip(os.sep)
                directory = os.path.join(
                    self.job_args['directory'].rstrip(os.sep),
                    os.path.dirname(normalized_name))

                self.download_items[directory].append({
                    'container_object':
                    item.get('name'),
                    'local_object':
                    '%s%s' % (self.job_args['directory'], normalized_name)
                })
Example #13
0
def main():
    """Run the main application."""
    args, run_spinner, log = preload_for_main()
    # Map each known command to its runner arguments:
    # [module, function, store_repos(, build_releases)].
    dispatch = {
        'create-report': ['yaprt.packaging_report', 'create_report', True],
        'build-wheels': ['yaprt.wheel_builder', 'build_wheels', True, True],
        'create-html-indexes': [
            'yaprt.html_indexer', 'create_html_indexes', False
        ],
        'store-repos': [None, None, True],
    }
    with indicator.Spinner(run=run_spinner):
        command = args['parsed_command']
        if command not in dispatch:
            # This is imported here because its not used unless there is an
            # error. If imported above, this caused a double log entry.
            from yaprt import utils
            raise utils.AError('No known parsed command, Current Args: "%s"',
                               args)

        runner(args, *dispatch[command])
Example #14
0
 def _make_directory_structure(self):
     """Create every local directory needed for the download set."""
     LOG.info('Creating local directory structure...')
     with indicator.Spinner(**self.indicator_options):
         # Iterating the dict yields its keys (the directory paths).
         for directory in self.download_items:
             self.mkdir(directory)
Example #15
0
 def test_class_objects_msg(self):
     """Spinner(msg=...) stores the message and keeps the other defaults."""
     test_spinner = indicator.Spinner(msg='test')
     self.assertEqual(test_spinner.work_q, None)
     self.assertEqual(test_spinner.run, True)
     self.assertEqual(test_spinner.msg, 'test')
Example #16
0
 def test_class_objects_work_q(self):
     """Spinner(work_q=...) stores the queue and keeps the other defaults."""
     work_queue = queue.Queue()
     test_spinner = indicator.Spinner(work_q=work_queue)
     self.assertEqual(isinstance(test_spinner.work_q, queue.Queue), True)
     self.assertEqual(test_spinner.run, True)
     self.assertEqual(test_spinner.msg, None)
Example #17
0
 def test_run_indicator(self):
     """Entering the Spinner context writes to the patched stdout."""
     spinner_context = indicator.Spinner()
     with spinner_context:
         self.sys.write.assert_called()
Example #18
0
 def test_run_indicator_object(self):
     """A manually started Spinner returns a truthy handle and stops."""
     test_spinner = indicator.Spinner()
     started = test_spinner.start()
     self.multi.assert_called()
     self.assertTrue(started)
     test_spinner.stop()