def get_renderables(self):
    summary_tasks, other_tasks = split(
        lambda task: task.fields.get("progress_type") == "summary",
        self.tasks,
    )
    self.columns = self.SUMMARY_COLS
    yield self.make_tasks_table(summary_tasks)
    self.columns = self.TRANSFER_COLS
    yield self.make_tasks_table(other_tasks)
def main():
    command = sys.argv[1]
    kwargs, args = split(r'^--', sys.argv[2:])
    kwargs = dict(map(r'^--(\w+)=(.+)', kwargs))

    from .ext import django
    django.register(args, kwargs)

    files = FileSet(args, base=kwargs.get('base'), ignore=kwargs.get('ignore'))
    if command == 'global':
        global_usage(files)
    elif command == 'local':
        local_usage(files)
    else:
        print('Unknown command', command)
def main():
    command = sys.argv[1]
    opts, args = split(r'^--', sys.argv[2:])
    opts = dict(map(r'^--([\w-]+)(?:=(.+))?', opts))

    # Run ipdb on exception
    if 'ipdb' in opts:
        import ipdb, traceback

        def info(type, value, tb):
            traceback.print_exception(type, value, tb)
            print()
            # Insert look-around helpers into the frame
            import inspect, ast
            from .asttools import to_source
            frame = inspect.getinnerframes(tb)[-1][0]
            frame.f_globals.setdefault('ast', ast)
            frame.f_globals.setdefault('to_source', to_source)
            # Run debugger
            ipdb.pm()

        sys.excepthook = info

    # Register plugins
    from .ext import django
    django.register(args, opts)

    # Do the job
    files = FileSet(args, base=opts.get('base'), ignore=opts.get('ignore'),
                    entry_points=opts.get('entry-points'))
    if command == 'global':
        global_usage(files)
    elif command == 'local':
        local_usage(files)
    elif command == 'scope':
        assert len(files) == 1
        (filename, file), = files.items()
        print(file.scope)
    else:
        print('Unknown command', command)
def search(request):
    q = request.GET.get('q')
    if not q:
        return {'series': None}

    exclude_tags = keep(silent(int), request.GET.getlist('exclude_tags'))
    serie_tags, tag_series, tag_ids = series_tags_data()

    q_string, q_tags = _parse_query(q)
    q_tags, wrong_tags = split(lambda t: t.lower() in tag_ids, q_tags)
    if wrong_tags:
        message = 'Unknown tag%s %s.' % ('s' if len(wrong_tags) > 1 else '',
                                         ', '.join(wrong_tags))
        messages.warning(request, message)
    if not q_string and not q_tags:
        return {'series': None}

    qs = search_series_qs(q_string)

    if q_tags:
        q_tag_ids = keep(tag_ids.get(t.lower()) for t in q_tags)
        include_series = reduce(set.intersection,
                                (tag_series[t] for t in q_tag_ids))
        if include_series:
            qs = qs.filter(id__in=include_series)
        else:
            message = 'No series annotated with %s.' \
                % (q_tags[0] if len(q_tags) == 1 else 'all these tags simultaneously')
            messages.warning(request, message)
            return {'series': []}

    if exclude_tags:
        exclude_series = join(tag_series[t] for t in exclude_tags)
        qs = qs.exclude(id__in=exclude_series)

    series_ids = qs.values_list('id', flat=True)
    tags = distinct(imapcat(serie_tags, series_ids), key=itemgetter('id'))
    # TODO: do not hide excluded tags

    return {
        'series': qs,
        'tags': tags,
        'serie_tags': serie_tags,
    }
def get_data(self):
    all_transactions = filter(
        lambda t: t["type"] in ("request-changes", "accept"),
        cat(pluck("transactions", self.raw_data)),
    )
    accept_transactions, reject_transactions = split(
        lambda t: t["type"] == "accept", all_transactions)

    most_accepting_author, most_accepting_count = Counter(
        count_by(itemgetter("authorPHID"), accept_transactions)
    ).most_common(1)[0]
    most_rejecting_author, most_rejecting_count = Counter(
        count_by(itemgetter("authorPHID"), reject_transactions)
    ).most_common(1)[0]

    return (
        {
            "author": self.users_mapping[most_accepting_author],
            "count": most_accepting_count,
        },
        {
            "author": self.users_mapping[most_rejecting_author],
            "count": most_rejecting_count,
        },
    )
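# A minimal sketch with made-up data (not taken from the class above) of the
# count_by + Counter.most_common pattern used in get_data to pick the most
# frequent author within one partition returned by split:
from collections import Counter
from operator import itemgetter
from funcy import count_by

accepts = [{"authorPHID": "A"}, {"authorPHID": "B"}, {"authorPHID": "A"}]
top_author, top_count = Counter(
    count_by(itemgetter("authorPHID"), accepts)
).most_common(1)[0]
print(top_author, top_count)  # A 2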
def _do_transfer(
    src: "ObjectDB",
    dest: "ObjectDB",
    obj_ids: Iterable["HashInfo"],
    missing_ids: Iterable["HashInfo"],
    processor: Callable,
    src_index: Optional["ObjectDBIndexBase"] = None,
    dest_index: Optional["ObjectDBIndexBase"] = None,
    cache_odb: Optional["ObjectDB"] = None,
    **kwargs: Any,
):
    from dvc.exceptions import FileTransferError

    dir_ids, file_ids = split(lambda hash_info: hash_info.isdir, obj_ids)
    total_fails = 0
    succeeded_dir_objs = []
    all_file_ids = set(file_ids)

    for dir_hash in dir_ids:
        dir_obj = find_tree_by_obj_id([cache_odb, src], dir_hash)
        assert dir_obj

        entry_ids = {oid for _, _, oid in dir_obj}
        bound_file_ids = all_file_ids & entry_ids
        all_file_ids -= entry_ids

        dir_fails = sum(processor(bound_file_ids))
        if dir_fails:
            logger.debug(
                "failed to upload full contents of '%s', "
                "aborting .dir file upload",
                dir_obj.name,
            )
            logger.error(
                "failed to upload '%s' to '%s'",
                src.get(dir_obj.hash_info).path_info,
                dest.get(dir_obj.hash_info).path_info,
            )
            total_fails += dir_fails + 1
        elif entry_ids.intersection(missing_ids):
            # if for some reason a file contained in this dir is
            # missing both locally and in the remote, we want to
            # push whatever file content we have, but should not
            # push .dir file
            logger.debug(
                "directory '%s' contains missing files, "
                "skipping .dir file upload",
                dir_obj.name,
            )
        else:
            is_dir_failed = sum(processor([dir_obj.hash_info]))
            total_fails += is_dir_failed
            if not is_dir_failed:
                succeeded_dir_objs.append(dir_obj)

    # insert the rest
    total_fails += sum(processor(all_file_ids))
    if total_fails:
        if src_index:
            src_index.clear()
        raise FileTransferError(total_fails)

    # index successfully pushed dirs
    if dest_index:
        for dir_obj in succeeded_dir_objs:
            file_hashes = {oid.value for _, _, oid in dir_obj}
            logger.debug(
                "Indexing pushed dir '%s' with '%s' nested files",
                dir_obj.hash_info,
                len(file_hashes),
            )
            assert dir_obj.hash_info and dir_obj.hash_info.value
            dest_index.update([dir_obj.hash_info.value], file_hashes)
def _parse_query(q):
    tags, words = split(r'^tag:', q.split())
    tags = map(r'^tag:(.*)', tags)
    return ' '.join(words), tags
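# A minimal standalone sketch (assumptions: funcy is installed; the data is made
# up, not taken from the examples above). It shows the funcy.split idiom shared
# by these snippets: split(pred, seq) partitions seq into a (passed, failed) pair
# of lazy sequences, and a regex string such as r'^--' or r'^tag:' may serve as
# the predicate thanks to funcy's extended function semantics.
from funcy import split

evens, odds = split(lambda x: x % 2 == 0, range(6))
print(list(evens), list(odds))    # [0, 2, 4] [1, 3, 5]

flags, args = split(r'^--', ['--base=.', 'project', '--ipdb'])
print(list(flags), list(args))    # ['--base=.', '--ipdb'] ['project']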