def _process_logo(self, logo, event):
    """Convert a legacy event logo to PNG and attach it to the event.

    Skips (with an error/warning) when the file is missing from the archive
    or cannot be opened as an image.
    """
    # Locate the logo file in one of the legacy archive directories.
    path = get_archived_file(logo, self.archive_dirs)[1]
    if path is None:
        self.print_error(cformat('%{red!}Logo not found on disk; skipping it'), event_id=event.id)
        return
    try:
        logo_image = Image.open(path)
    except IOError as e:
        self.print_warning("Cannot open {}: {}".format(path, e), event_id=event.id)
        return
    if logo_image.mode == 'CMYK':
        self.print_warning("Logo is a CMYK {}; converting to RGB".format(logo_image.format), event_id=event.id)
        # this may result in wrong colors, but there's not much we can do...
        logo_image = logo_image.convert('RGB')
    # Re-encode as PNG regardless of the original format.
    logo_bytes = BytesIO()
    logo_image.save(logo_bytes, 'PNG')
    logo_bytes.seek(0)
    logo_content = logo_bytes.read()
    # Sanitize the filename and force a .png extension to match the new data.
    logo_filename = secure_filename(convert_to_unicode(logo.fileName), 'logo')
    logo_filename = os.path.splitext(logo_filename)[0] + '.png'
    event.logo_metadata = {
        'size': len(logo_content),
        'hash': crc32(logo_content),
        'filename': logo_filename,
        'content_type': 'image/png'
    }
    event.logo = logo_content
    if not self.quiet:
        self.print_success(cformat('- %{cyan}[Logo] {}').format(logo.fileName), event_id=event.id)
def main():
    """Regenerate the '# relationship backrefs:' comment blocks in model sources.

    For each model class with backrefs, the class body is located in its source
    file, the old backref comment lines are dropped and a freshly generated,
    sorted list is written in their place.  Files are rewritten only when the
    content actually changed.
    """
    import_all_models()
    # Deterministic class order so repeated runs produce identical output.
    for cls, rels in sorted(_find_backrefs().iteritems(), key=lambda x: x[0].__name__):
        path = _get_source_file(cls)
        with open(path, "r") as f:
            source = [line.rstrip("\n") for line in f]
        new_source = []
        # Line-oriented state machine:
        #   in_class         - inside the body of `cls`
        #   in_backrefs      - inside the backref comment block
        #   backrefs_written - regenerated comments already emitted
        in_class = in_backrefs = backrefs_written = False
        for i, line in enumerate(source):
            if in_backrefs:
                if not backrefs_written:
                    # Emit the regenerated backref list once, sorted by name.
                    for backref_name, target, target_rel_name in sorted(rels, key=itemgetter(0)):
                        new_source.append(" # - {} ({}.{})".format(backref_name, target, target_rel_name))
                    backrefs_written = True
                if not line.startswith(" # - "):
                    in_backrefs = False
                else:
                    # Drop the stale backref comment line.
                    continue
            elif in_class:
                if line == " # relationship backrefs:":
                    in_backrefs = True
                elif line and not line.startswith(" " * 4):
                    # end of the indented class block
                    in_class = False
            else:
                if line.startswith("class {}(".format(cls.__name__)):
                    in_class = True
            new_source.append(line)
        if not backrefs_written:
            print cformat("%{yellow}Class {} has no comment for backref information").format(cls.__name__)
        if source != new_source:
            print cformat("%{green!}Updating backref info for {} in {}").format(cls.__name__, path)
            with open(path, "w") as f:
                f.writelines(line + "\n" for line in new_source)
def process_principal(self, event, principals, legacy_principal, name, color, full_access=None, roles=None):
    """Convert a legacy principal and accumulate its rights in ``principals``.

    Returns the converted principal, or None when it could not be resolved.
    """
    if isinstance(legacy_principal, basestring):
        # Plain email: resolve to a known user if possible, otherwise keep it
        # as an email-based ACL entry.
        user = self.all_users_by_email.get(legacy_principal)
        principal = user or EmailPrincipal(legacy_principal)
    else:
        principal = self.convert_principal(legacy_principal)
    if principal is None:
        self.print_warning(
            cformat("%%{%s}{}%%{reset}%%{yellow} does not exist:%%{reset} {}" % color).format(
                name, legacy_principal
            ),
            event_id=event.id,
        )
        return
    # Get or create the ACL entry for this principal.
    try:
        entry = principals[principal]
    except KeyError:
        entry = EventPrincipal(event_id=event.id, principal=principal, full_access=False, roles=[])
        principals[principal] = entry
    # Rights are only ever widened here, never revoked.
    if full_access:
        entry.full_access = True
    if roles:
        entry.roles = sorted(set(entry.roles) | set(roles))
    if not self.quiet:
        self.print_msg(cformat(" - %%{%s}[{}]%%{reset} {}" % color).format(name.lower(), principal))
    return principal
def migrate_settings(self): print cformat('%{white!}migrating settings') LiveSyncPlugin.settings.delete_all() opts = self.zodb_root['plugins']['livesync']._PluginBase__options LiveSyncPlugin.settings.set('excluded_categories', [{'id': x} for x in opts['excludedCategories']._PluginOption__value]) db.session.commit()
def migrate_settings(self): print cformat('%{white!}migrating settings') VidyoPlugin.settings.delete_all() opts = self.zodb_root['plugins']['Collaboration']._PluginType__plugins['Vidyo']._PluginBase__options VidyoPlugin.settings.set('managers', convert_principal_list(opts['admins'])) VidyoPlugin.settings.set('acl', convert_principal_list(opts['AuthorisedUsersGroups'])) settings_map = { 'adminAPIURL': 'admin_api_wsdl', 'userAPIURL': 'user_api_wsdl', 'prefix': 'indico_room_prefix', 'indicoGroup': 'room_group_name', 'phoneNumbers': 'vidyo_phone_link', 'maxDaysBeforeClean': 'num_days_old', 'indicoUsername': '******', 'indicoPassword': '******', 'contactSupport': 'support_email', 'cleanWarningAmount': 'max_rooms_warning', 'additionalEmails': 'notification_emails' } for old, new in settings_map.iteritems(): value = option_value(opts[old]) if old == 'prefix': value = int(value) elif old == 'phoneNumbers': match = next((re.search(r'https?://[^"]+', convert_to_unicode(v)) for v in value), None) if match is None: continue value = match.group(0) elif old == 'additionalEmails': value = list(set(value) | {x.email for x in option_value(opts['admins'])}) VidyoPlugin.settings.set(new, value) db.session.commit()
def _copy(src, dst, force=False):
    """Copy ``src`` to ``dst``, refusing to overwrite unless ``force`` is set."""
    if os.path.exists(dst) and not force:
        _echo(cformat('%{yellow!}{}%{reset}%{yellow} already exists; not copying %{yellow!}{}')
              .format(dst, src))
        return
    _echo(cformat('%{green}Copying %{green!}{}%{reset}%{green} -> %{green!}{}').format(src, dst))
    shutil.copy(src, dst)
def migrate_settings(self): print cformat('%{white!}migrating settings') ImporterInvenioPlugin.settings.delete_all() opts = self.zodb_root['plugins']['importer']._PluginType__plugins['invenio']._PluginBase__options ImporterInvenioPlugin.settings.set('server_url', convert_to_unicode(opts['location']._PluginOption__value).strip()) db.session.commit()
def migrate_event_images(self):
    """Migrate legacy event pictures to ImageFile rows plus a legacy mapping."""
    self.print_step('migrating event images')
    for event, picture in committing_iterator(self._iter_pictures()):
        local_file = picture._localFile
        # Fall back to a generic content type when the extension is unknown.
        content_type = mimetypes.guess_type(local_file.fileName)[0] or 'application/octet-stream'
        storage_backend, storage_path, size = self._get_local_file_info(local_file)
        if storage_path is None:
            self.print_warning(cformat('%{yellow}[{}]%{reset} -> %{red!}Not found in filesystem').format(
                local_file.id), event_id=event.id)
            continue
        filename = secure_filename(convert_to_unicode(local_file.fileName), 'image')
        image = ImageFile(event_id=event.id, filename=filename, content_type=content_type,
                          created_dt=now_utc(), size=size, storage_backend=storage_backend,
                          storage_file_id=storage_path)
        db.session.add(image)
        # Flush so image.id is available for the legacy mapping row.
        db.session.flush()
        map_entry = LegacyImageMapping(event_id=event.id, legacy_image_id=local_file.id, image_id=image.id)
        db.session.add(map_entry)
        if not self.quiet:
            self.print_success(cformat('%{cyan}[{}]%{reset} -> %{blue!}{}').format(local_file.id, image),
                               event_id=event.id)
def migrate_layout_settings(self):
    """Migrate per-event layout settings, logos and custom CSS."""
    print cformat('%{white!}migrating layout settings, event logos and custom stylesheets')
    default_styles = self.zodb_root['MaKaCInfo']['main']._styleMgr._defaultEventStylesheet
    for event, event_type, dmgr, logo, custom_css in committing_iterator(self._iter_event_layout_data()):
        if event_type != 'conference':
            # Non-conference events only carry a timetable theme override;
            # skip when it matches the type's default.
            theme = dmgr._defaultstyle
            if not theme or theme == default_styles[event_type]:
                continue
            layout_settings.set(event, 'timetable_theme', theme)
            if not self.quiet:
                self.print_success(cformat('- %{cyan}Default timetable theme: {}').format(theme),
                                   event_id=event.id)
            continue
        settings = self._get_event_settings(event, dmgr)
        layout_settings.set_multi(event, settings)
        if not self.quiet:
            self.print_success(cformat('- %{cyan}Layout settings'), event_id=event.id)
        if logo or custom_css:
            # Logo/CSS need the SQLAlchemy event object, which may be gone.
            sa_event = Event.get(event.id)
            if not sa_event:
                self.print_warning('Event does not exist (anymore)! Logo and/or CSS file not saved!',
                                   event_id=event.id)
                continue
            if logo:
                self._process_logo(logo, sa_event)
            if custom_css:
                self._process_css(custom_css, sa_event)
def create_all_tables(db, verbose=False, add_initial_data=True):
    """Create all tables and required initial objects"""
    from indico.modules.categories import Category
    from indico.modules.designer import TemplateType
    from indico.modules.designer.models.templates import DesignerTemplate
    from indico.modules.oauth.models.applications import OAuthApplication, SystemAppType
    from indico.modules.users import User

    def _log(message):
        # Progress messages only appear in verbose mode.
        if verbose:
            print(cformat(message))

    _log('%{green}Creating tables')
    db.create_all()
    if not add_initial_data:
        return
    _log('%{green}Creating system user')
    db.session.add(User(id=0, is_system=True, first_name='Indico', last_name='System'))
    _log('%{green}Creating root category')
    root = Category(id=0, title='Home', protection_mode=ProtectionMode.public)
    db.session.add(root)
    db.session.flush()
    _log('%{green}Creating default ticket template for root category ')
    default_ticket = DesignerTemplate(category_id=0, title='Default ticket', type=TemplateType.badge,
                                      data=DEFAULT_TEMPLATE_DATA, is_system_template=True)
    root.default_ticket_template = default_ticket
    db.session.add(default_ticket)
    _log('%{green}Creating system oauth apps')
    for app_type in SystemAppType:
        if app_type != SystemAppType.none:
            db.session.add(OAuthApplication(system_app_type=app_type, **app_type.default_data))
    db.session.commit()
def migrate_vidyo_room(self, booking):
    """Create a VCRoom (+ VidyoExtension) from a legacy Vidyo booking."""
    booking_params = booking._bookingParams
    # Rooms are attributed to the janitor user.
    vc_room = VCRoom(created_by_id=Config.getInstance().getJanitorUserId())
    vc_room.type = 'vidyo'
    vc_room.status = VCRoomStatus.created if booking._created else VCRoomStatus.deleted
    vc_room.name = booking_params['roomName']
    vc_room.data = {
        'description': booking_params['roomDescription'],
        'room_pin': booking._pin,
        'moderation_pin': getattr(booking, '_moderatorPin', ''),
        'vidyo_id': booking._roomId,
        'url': booking._url,
        'owner': ('User', int(booking._owner.id)),
        'owner_identity': booking._ownerVidyoAccount,
        'auto_mute': booking_params.get('autoMute', True)
    }
    vc_room.modified_dt = booking._modificationDate
    vc_room.created_dt = booking._creationDate
    db.session.add(vc_room)
    vidyo_ext = VidyoExtension(vc_room=vc_room, extension=int(booking._extension),
                               owned_by_id=int(booking._owner.id))
    db.session.add(vidyo_ext)
    # Flush so the objects are persisted before indexing by extension.
    db.session.flush()
    self.vc_rooms_by_extension[vidyo_ext.extension] = vc_room
    print cformat('%{green}+++%{reset} %{cyan}{}%{reset} [%{yellow!}{}%{reset}]').format(
        vc_room.name, booking._roomId)
    return vc_room
def create(grant_admin):
    """Creates a new user"""
    user_type = 'user' if not grant_admin else 'admin'
    # Prompt until we get an email that is not already in use
    # (prompt_email returning None means the user aborted).
    while True:
        email = prompt_email()
        if email is None:
            return
        email = email.lower()
        if not User.query.filter(User.all_emails == email, ~User.is_deleted, ~User.is_pending).has_rows():
            break
        print(cformat('%{red}Email already exists'))
    first_name = click.prompt("First name").strip()
    last_name = click.prompt("Last name").strip()
    affiliation = click.prompt("Affiliation", '').strip()
    print()
    # Prompt until we get a username with no existing local identity.
    while True:
        username = click.prompt("Enter username").lower().strip()
        if not Identity.find(provider='indico', identifier=username).count():
            break
        print(cformat('%{red}Username already exists'))
    password = prompt_pass()
    if password is None:
        return
    identity = Identity(provider='indico', identifier=username, password=password)
    user = create_user(email, {'first_name': to_unicode(first_name),
                               'last_name': to_unicode(last_name),
                               'affiliation': to_unicode(affiliation)}, identity)
    user.is_admin = grant_admin
    _print_user_info(user)
    # Nothing is persisted unless the operator confirms.
    if click.confirm(cformat("%{yellow}Create the new {}?").format(user_type), default=True):
        db.session.add(user)
        db.session.commit()
        print(cformat("%{green}New {} created successfully with ID: %{green!}{}").format(user_type, user.id))
def _print_occurrences(user, occurrences, _defaults={}, _overrides={}):
    """Print upcoming reservation occurrences for one user.

    ``_defaults`` and ``_overrides`` are *deliberate* mutable defaults used as
    per-process caches: the notification settings and the per-room override
    accessors are looked up only on the first call.
    """
    if not _defaults or not _overrides:
        _defaults.update({RepeatFrequency.WEEK: rb_settings.get('notification_before_days_weekly'),
                          RepeatFrequency.MONTH: rb_settings.get('notification_before_days_monthly'),
                          RepeatFrequency.NEVER: rb_settings.get('notification_before_days'),
                          RepeatFrequency.DAY: rb_settings.get('notification_before_days')})
        _overrides.update({RepeatFrequency.WEEK: lambda r: r.notification_before_days_weekly,
                           RepeatFrequency.MONTH: lambda r: r.notification_before_days_monthly,
                           RepeatFrequency.NEVER: lambda r: r.notification_before_days,
                           RepeatFrequency.DAY: lambda r: r.notification_before_days})
    print cformat('%{grey!}*** {} ({}) ***').format(user.full_name, user.email)
    for occ in occurrences:
        default = _defaults[occ.reservation.repeat_frequency]
        override = _overrides[occ.reservation.repeat_frequency](occ.reservation.room)
        # A room-level override wins over the global default.
        days = default if override is None else override
        days_until = (occ.start_dt.date() - date.today()).days
        print cformat(' * %{yellow}{}%{reset} %{green}{:5}%{reset} {} {} {} \t %{blue!}{}%{reset} {} ({})').format(
            occ.start_dt.date(),
            occ.reservation.repeat_frequency.name,
            days,
            # Show the overridden default only when it differs.
            default if override is not None and override != default else ' ',
            days_until,
            occ.reservation.id,
            occ.reservation.room.full_name,
            occ.reservation.room.id
        )
def migrate_event_notes(self):
    """Migrate legacy minutes files to EventNote objects (janitor-authored)."""
    self.print_step('migrating event notes')
    janitor_user = User.get_one(self.janitor_user_id)
    self.print_msg('Using janitor user {}'.format(janitor_user), always=True)
    for event, obj, minutes, special_prot in committing_iterator(self._iter_minutes()):
        if special_prot:
            # Minutes with custom protection cannot be mapped; skip them.
            self.print_warning(
                cformat('%{yellow!}{} minutes have special permissions; skipping them').format(obj),
                event_id=event.id
            )
            continue
        path = get_archived_file(minutes, self.archive_dirs)[1]
        if path is None:
            self.print_error(cformat('%{red!}{} minutes not found on disk; skipping them').format(obj),
                             event_id=event.id)
            continue
        with open(path, 'r') as f:
            data = convert_to_unicode(f.read()).strip()
        if not data:
            self.print_warning(cformat('%{yellow}{} minutes are empty; skipping them').format(obj),
                               always=False, event_id=event.id)
            continue
        note = EventNote(linked_object=obj)
        note.create_revision(RenderMode.html, data, janitor_user)
        db.session.add(note)
        if not self.quiet:
            self.print_success(cformat('%{cyan}{}').format(obj), event_id=event.id)
def migrate_settings(self):
    """Migrate the legacy XMPP chat plugin options to ChatPlugin settings."""
    print cformat('%{white!}migrating settings')
    ChatPlugin.settings.delete_all()
    type_opts = self.zodb_root['plugins']['InstantMessaging']._PluginBase__options
    opts = self.zodb_root['plugins']['InstantMessaging']._PluginType__plugins['XMPP']._PluginBase__options
    host = convert_to_unicode(opts['chatServerHost']._PluginOption__value)
    admin_emails = [x.email for x in opts['admins']._PluginOption__value]
    ChatPlugin.settings.set('admins', convert_principal_list(opts['admins']))
    ChatPlugin.settings.set('server', host)
    ChatPlugin.settings.set('muc_server', 'conference.{}'.format(host))
    # legacy option name -> new setting name
    settings_map = {
        'additionalEmails': 'notify_emails',
        'indicoUsername': '******',
        'indicoPassword': '******',
        'ckEditor': 'how_to_connect'
    }
    for old, new in settings_map.iteritems():
        value = opts[old]._PluginOption__value
        if isinstance(value, basestring):
            value = convert_to_unicode(value).strip()
        elif new == 'notify_emails':
            # Merge with admin emails and keep only valid addresses.
            value = [email for email in set(value + admin_emails) if is_valid_mail(email, multi=False)]
        ChatPlugin.settings.set(new, value)
    if opts['activateLogs']._PluginOption__value:
        ChatPlugin.settings.set('log_url', 'https://{}/logs/'.format(host))
    chat_links = []
    for item in type_opts['customLinks']._PluginOption__value:
        # Convert the legacy placeholders to the new ones.
        link = item['structure'].replace('[chatroom]', '{room}').replace('[host]', '{server}')
        # Substitute the real host, except where '{server}' means the MUC server.
        link = re.sub(r'(?<!conference\.)\{server}', host, link)
        link = link.replace('conference.{server}', '{server}')  # {server} is now the MUC server
        chat_links.append({'title': item['name'], 'link': link})
    ChatPlugin.settings.set('chat_links', chat_links)
    db.session.commit()
def migrate(self):
    """Run the livesync migration steps inside the plugin context."""
    # noinspection PyAttributeOutsideInit
    self.livesync_root = self.zodb_root['plugins']['livesync']._storage
    with LiveSyncPlugin.instance.plugin_context():
        self.migrate_settings()
        self.migrate_agents()
    # The legacy queue contents are intentionally dropped.
    print cformat('%{cyan!}Note: The old queue is not preserved!%{reset}')
def main(main_uri, rb_uri, sqla_uri, photo_path, drop, merged_avatars): update_session_options(db) # get rid of the zope transaction extension main_root, rb_root, app = setup(main_uri, rb_uri, sqla_uri) global tz try: tz = pytz.timezone(main_root['MaKaCInfo']['main'].getTimezone()) except KeyError: tz = pytz.utc start = time.clock() with app.app_context(): if drop: print cformat('%{yellow!}*** DANGER') print cformat('%{yellow!}***%{reset} ' '%{red!}ALL DATA%{reset} in your database %{yellow!}{!r}%{reset} will be ' '%{red!}PERMANENTLY ERASED%{reset}!').format(db.engine.url) if raw_input(cformat('%{yellow!}***%{reset} To confirm this, enter %{yellow!}YES%{reset}: ')) != 'YES': print 'Aborting' sys.exit(1) delete_all_tables(db) stamp() db.create_all() if Location.find().count(): # Usually there's no good reason to migrate with data in the DB. However, during development one might # comment out some migration tasks and run the migration anyway. print cformat('%{yellow!}*** WARNING') print cformat('%{yellow!}***%{reset} Your database is not empty, migration will most likely fail!') if raw_input(cformat('%{yellow!}***%{reset} To confirm this, enter %{yellow!}YES%{reset}: ')) != 'YES': print 'Aborting' sys.exit(1) migrate(main_root, rb_root, photo_path, merged_avatars) print 'migration took {} seconds'.format((time.clock() - start))
def search(substring, include_deleted, include_pending, include_external, include_system, **criteria):
    """Searches users matching some criteria"""
    assert set(criteria.viewkeys()) == {'first_name', 'last_name', 'email', 'affiliation'}
    # Drop criteria the caller did not set.
    criteria = {k: v for k, v in criteria.viewitems() if v is not None}
    res = search_users(exact=(not substring), include_deleted=include_deleted, include_pending=include_pending,
                       external=include_external, allow_system_user=include_system, **criteria)
    if not res:
        print(cformat('%{yellow}No results found'))
        return
    elif len(res) > 100:
        # Avoid flooding the terminal without asking first.
        click.confirm('{} results found. Show them anyway?'.format(len(res)), abort=True)
    # Local users and external identities are rendered in separate tables.
    users = sorted((u for u in res if isinstance(u, User)),
                   key=lambda x: (x.first_name.lower(), x.last_name.lower(), x.email))
    externals = sorted((ii for ii in res if isinstance(ii, IdentityInfo)),
                       key=lambda x: (_safe_lower(x.data.get('first_name')),
                                      _safe_lower(x.data.get('last_name')),
                                      _safe_lower(x.data['email'])))
    if users:
        table_data = [['ID', 'First Name', 'Last Name', 'Email', 'Affiliation']]
        for user in users:
            table_data.append([unicode(user.id), user.first_name, user.last_name, user.email, user.affiliation])
        table = AsciiTable(table_data, cformat('%{white!}Users%{reset}'))
        table.justify_columns[0] = 'right'
        print(table.table)
    if externals:
        if users:
            print()
        table_data = [['First Name', 'Last Name', 'Email', 'Affiliation', 'Source', 'Identifier']]
        for ii in externals:
            data = ii.data
            table_data.append([data.get('first_name', ''), data.get('last_name', ''), data['email'],
                               data.get('affiliation', '-'), ii.provider.name, ii.identifier])
        table = AsciiTable(table_data, cformat('%{white!}Externals%{reset}'))
        print(table.table)
def migrate_legacy_events(self):
    """Re-number legacy (non-numeric) event IDs and record the old->new mapping."""
    print cformat('%{white!}migrating legacy events')
    # XXX: removed display manager / internal page manager update
    # don't forget to handle them when updating this for 2.0!
    wfr = WebFactoryRegistry()._getConfRegistry()
    for event in self._committing_iterator(self._get_events()):
        if not hasattr(event, '_old_id'):
            new_id = self.gen_event_id()
            # Re-key the event (and its web factory entry) in the ZODB under
            # the new numeric id; keep the old id on the event itself.
            event.unindexConf()
            del self.zodb_root['conferences'][event.id]
            wf = wfr.pop(event.id, None)
            event._old_id = event.id
            event.id = new_id
            if wf is not None:
                wfr[event.id] = wf
            self.zodb_root['conferences'][event.id] = event
            event.indexConf()
            # Move settings over to the new id and store the legacy mapping.
            EventSetting.find(event_id=event._old_id).update({EventSetting.event_id: event.id})
            EventSettingPrincipal.find(event_id=event._old_id).update({EventSettingPrincipal.event_id: event.id})
            db.session.add(LegacyEventMapping(legacy_event_id=event._old_id, event_id=int(event.id)))
            if not self.quiet:
                self.print_success(cformat('%{cyan}{}').format(event.id), event_id=event._old_id)
        else:
            # happens if this importer was executed before but you want to add the mapping to your DB again
            db.session.add(LegacyEventMapping(legacy_event_id=event._old_id, event_id=int(event.id)))
            if not self.quiet:
                self.print_success(cformat('%{cyan}{}%{reset} %{yellow}(already updated in zodb)').format(event.id),
                                   event_id=event._old_id)
def prepare():
    """Initializes an empty database (creates tables, sets alembic rev to HEAD)"""
    tables = get_all_tables(db)
    if 'alembic_version' not in tables['public']:
        # Stamp the core schema and every active plugin's schema to HEAD.
        print colored('Setting the alembic version to HEAD', 'green')
        stamp()
        PluginScriptDirectory.dir = os.path.join(current_app.root_path, 'core', 'plugins', 'alembic')
        alembic.command.ScriptDirectory = PluginScriptDirectory
        plugin_msg = cformat("%{cyan}Setting the alembic version of the %{cyan!}{}%{reset}%{cyan} "
                             "plugin to HEAD%{reset}")
        for plugin in plugin_engine.get_active_plugins().itervalues():
            if not os.path.exists(plugin.alembic_versions_path):
                continue
            print plugin_msg.format(plugin.name)
            with plugin.plugin_context():
                stamp()
        # Retrieve the table list again, just in case we created unexpected tables
        tables = get_all_tables(db)
    # The alembic bookkeeping tables don't count as "data".
    tables['public'] = [t for t in tables['public'] if not t.startswith('alembic_version')]
    if any(tables.viewvalues()):
        print colored('Your database is not empty!', 'red')
        print colored('If you just added a new table/model, create an alembic revision instead!', 'yellow')
        print
        print 'Tables in your database:'
        for schema, schema_tables in sorted(tables.items()):
            for t in schema_tables:
                print cformat(' * %{cyan}{}%{reset}.%{cyan!}{}%{reset}').format(schema, t)
        return
    if not _require_extensions('unaccent', 'pg_trgm'):
        return
    print colored('Creating tables', 'green')
    db.create_all()
def migrate_event_acls(self):
    """Migrate legacy event access protection and ACL entries.

    Fix: ``event_id=event.id`` was being passed to ``str.format()`` (where it
    was silently ignored) instead of ``print_success()``.
    """
    self.print_step('migrating event ACLs')
    protection_mode_map = {-1: ProtectionMode.public, 0: ProtectionMode.inheriting, 1: ProtectionMode.protected}
    for legacy_event, event in committing_iterator(self._iter_events(), 5000):
        ac = legacy_event._Conference__ac
        self.print_success('', event_id=event.id)
        old_protection_mode = protection_mode_map[ac._accessProtection]
        if old_protection_mode == ProtectionMode.public and ac.requiredDomains:
            # Domain-restricted "public" events become protected + domain ACL.
            event.protection_mode = ProtectionMode.protected
            self._migrate_domains(event, ac.requiredDomains)
        else:
            event.protection_mode = old_protection_mode
        # The legacy placeholder text means "no contact info"; don't store it.
        no_access_contact = convert_to_unicode(getattr(ac, 'contactInfo', ''))
        if no_access_contact != 'no contact info defined':
            event.own_no_access_contact = no_access_contact
        event.access_key = convert_to_unicode(getattr(legacy_event, '_accessKey', ''))
        if not self.quiet:
            # BUGFIX: event_id used to be a (discarded) format() kwarg.
            self.print_success('Protection mode set to {}'.format(event.protection_mode.name),
                               event_id=event.id)
        for legacy_acl in ac.allowed:
            event_acl = self.convert_acl(legacy_acl)
            if event_acl is None:
                self.print_warning(cformat('%{red}ACL%{reset}%{yellow} does not exist:%{reset} {}')
                                   .format(legacy_acl), event_id=event.id)
                continue
            event.update_principal(event_acl, read_access=True, quiet=True)
            if not self.quiet:
                self.print_msg(cformat('%{green}[{}]%{reset} {}').format('Event ACL', event_acl))
def _require_encoding(encoding):
    """Check the database server encoding, printing instructions on mismatch.

    Returns True when the check passes, False otherwise.
    """
    current = db.engine.execute("SELECT current_setting('server_encoding')").scalar()
    # NOTE(review): this is a lexicographic string comparison, not a real
    # encoding compatibility check -- confirm `==` was not intended.
    if current >= encoding:
        return True
    print(cformat('%{red}Database encoding must be {}; got {}').format(encoding, current))
    print(cformat('%{yellow}Recreate your database using `createdb -E {} -T template0 ...`').format(encoding))
    return False
def migrate(self): print cformat('%{white!}migrating static sites') for item in committing_iterator(chain.from_iterable( self.zodb_root['modules']['offlineEvents']._idxConf.itervalues())): event_id = item.conference.id if is_legacy_id(event_id): print cformat('%{red!}!!!%{reset} ' '%{white!}{0:6s}%{reset} %{yellow!}Event has non-numeric/broken ID').format(event_id) continue if event_id not in self.zodb_root['conferences']: print cformat('%{red!}!!!%{reset} ' '%{white!}{0:6s}%{reset} %{yellow!}Event deleted, skipping static site').format(event_id) continue event_id = int(event_id) user = self._get_user(item.avatar.id) state = STATE_MAPPING[item.status] requested_dt = item.requestTime file_name, file_path = self._get_file_data(item.file) if file_path is None and state == StaticSiteState.success: print cformat('%{yellow!}!!!%{reset} %{white!}{0:6d}%{reset} ' '%{yellow!}file missing, marking static site as expired.').format(event_id) state = StaticSite.expired static_site = StaticSite(creator=user, event_id=event_id, state=state, requested_dt=requested_dt) if static_site.state == StaticSiteState.success: static_site.path = file_path db.session.add(static_site) print cformat('%{green}+++%{reset} %{white!}{0.event_id:6d}%{reset} ' '%{cyan}{0}%{reset}').format(static_site)
def main():
    """Update copyright headers for a path, a single file, or all git-tracked files."""
    if '-h' in sys.argv or '--help' in sys.argv:
        print(USAGE)
        sys.exit(1)
    year, path, file_ = _process_args(sys.argv[1:])
    if path is not None:
        print(cformat("Updating headers to the year %{yellow!}{year}%{reset} for all the files in "
                      "%{yellow!}{path}%{reset}...").format(year=year, path=path))
        for dirpath, _, names in os.walk(path):
            for name in names:
                update_header(os.path.join(dirpath, name), year)
    elif file_ is not None:
        print(cformat("Updating headers to the year %{yellow!}{year}%{reset} for the file "
                      "%{yellow!}{file}%{reset}...").format(year=year, file=file_))
        update_header(file_, year)
    else:
        print(cformat("Updating headers to the year %{yellow!}{year}%{reset} for all "
                      "git-tracked files...").format(year=year))
        try:
            for tracked in check_output(['git', 'ls-files']).splitlines():
                update_header(os.path.abspath(tracked), year)
        except CalledProcessError:
            print(cformat('%{red!}[ERROR] you must be within a git repository to run this script.'),
                  file=sys.stderr)
            print(USAGE)
            sys.exit(1)
def migrate_event_attachments(self):
    """Migrate legacy event materials/resources to folders and attachments."""
    self.print_step("migrating event attachments")
    for event, obj, material, resources in self._committing_iterator(self._iter_event_materials()):
        folder = self._folder_from_material(material, obj)
        # Queue the legacy material -> folder mapping for bulk insert.
        lm = LegacyAttachmentFolderMapping(linked_object=obj, material_id=material.id, folder_id=folder["id"])
        self.todo[LegacyAttachmentFolderMapping].append(_sa_to_dict(lm))
        if not self.quiet:
            self.print_success(
                cformat("%{cyan}[{}]%{reset} %{blue!}({})").format(folder["title"], _link_repr(folder)),
                event_id=event.id,
            )
        for resource in resources:
            attachment = self._attachment_from_resource(folder, material, resource, event)
            if attachment is None:
                continue
            lm = LegacyAttachmentMapping(
                linked_object=obj, material_id=material.id, resource_id=resource.id,
                attachment_id=attachment["id"]
            )
            self.todo[LegacyAttachmentMapping].append(_sa_to_dict(lm))
            if not self.quiet:
                # Links and files are highlighted differently.
                if attachment["type"] == AttachmentType.link:
                    self.print_success(cformat("- %{cyan}{}").format(attachment["title"]), event_id=event.id)
                else:
                    self.print_success(cformat("- %{cyan!}{}").format(attachment["title"]), event_id=event.id)
def find_merged_avatars(main_root): print cformat('%{white!}checking for merged avatars') avatar_id_map = {} for avatar in verbose_iterator(main_root['avatars'].itervalues(), len(main_root['avatars']), attrgetter('id'), lambda av: '{}, {}'.format(safe_upper(av.surName), av.name)): for merged_avatar in getattr(avatar, '_mergeFrom', []): avatar_id_map[merged_avatar.getId()] = avatar.getId() return avatar_id_map
def print_prefixed(self, prefix, prefix_color, msg, always=False, event_id=None):
    """Prints a prefixed message to the console.

    Fix: wrap ``event_id`` in ``unicode()`` — the ``{:>6s}`` format spec
    rejects non-string values (event ids are often ints), matching the
    sibling implementation of this method.
    """
    parts = [
        cformat("%%{%s}{}%%{reset}" % prefix_color).format(prefix),
        cformat("%{white!}{:>6s}%{reset}").format(unicode(event_id)) if event_id is not None else None,
        msg,
    ]
    # Drop the empty/None parts before joining.
    self.print_msg(" ".join(filter(None, parts)), always)
def _add_to_context(namespace, info, element, name=None, doc=None, color='green'):
    """Register one object in the shell namespace and append an info line."""
    if not name:
        name = element.__name__
    namespace[name] = element
    # Include the description in the info line only when one was given.
    if doc:
        line = cformat('+ %%{%s}{}%%{white!} ({})' % color).format(name, doc)
    else:
        line = cformat('+ %%{%s}{}' % color).format(name)
    info.append(line)
def run_initial_export(self, events): uploader = DebugUploader(self) uploader.run_initial(events) for i, batch in enumerate(grouper(events, 10, skip_missing=True), 1): print print cformat('%{white!}Batch {}:%{reset}').format(i) print MARCXMLGenerator.objects_to_xml(event for event in batch if event is not None) print
def _add_to_context_multi(namespace, info, elements, names=None, doc=None, color='green'):
    """Register several objects in the shell namespace and append one info line."""
    if not names:
        names = [element.__name__ for element in elements]
    for key, element in zip(names, elements):
        namespace[key] = element
    joined = ', '.join(names)
    # Include the description in the info line only when one was given.
    if doc:
        info.append(cformat('+ %%{white!}{}:%%{reset} %%{%s}{}' % color).format(doc, joined))
    else:
        info.append(cformat('+ %%{%s}{}' % color).format(joined))
def migrate_admins(self): print cformat('%{white!}migrating admins') for avatar in committing_iterator( self.zodb_root['adminlist']._AdminList__list): try: user = User.get(int(avatar.id)) except ValueError: continue if user is None or user.is_deleted: continue user.is_admin = True print cformat('%{green}+++%{reset} %{cyan}{}').format(user)
def _get_event_settings(self, event, dmgr):
    """Collect layout settings from a legacy display manager.

    Returns a dict containing only the settings that could actually be read;
    missing legacy attributes are reported and the related settings skipped.
    """
    format_opts = getattr(dmgr, '_format', None)
    tt = getattr(dmgr, '_tickerTape', None)
    style_mgr = getattr(dmgr, '_styleMngr', None)
    event_id = event.id
    menu = getattr(dmgr, '_menu', None)
    settings = {
        'is_searchable': getattr(dmgr, '_searchEnabled', None),
        'show_nav_bar': getattr(dmgr, '_displayNavigationBar', None),
        'show_social_badges': getattr(dmgr, '_showSocialApps', None),
    }
    if format_opts:
        settings['header_text_color'] = format_opts._data.get('titleTextColor')
        settings['header_background_color'] = format_opts._data.get('titleBgColor')
    else:
        self.print_error(cformat('%{red!} Skipping some settings, missing _format attribute'),
                         event_id=event_id)
    if tt:
        settings['show_banner'] = getattr(tt, '_enabledNowPlaying', None)
        settings['announcement'] = getattr(tt, '_text', None)
        settings['show_announcement'] = getattr(tt, '_enabledSimpleText', None)
    else:
        self.print_error(cformat('%{red!} Skipping some settings, missing _tickerTape attribute'),
                         event_id=event_id)
    if style_mgr:
        template = getattr(style_mgr, '_usingTemplate', None)
        theme = getattr(template, 'templateId', None)
        # Only keep themes that still exist in the new system.
        settings['theme'] = theme if theme in ALLOWED_THEMES else None
        settings['use_custom_css'] = getattr(style_mgr, '_css', None) is not None
    else:
        self.print_error(cformat('%{red!} Skipping some settings, missing _styleMngr attribute'),
                         event_id=event_id)
    if menu:
        settings['timetable_by_room'] = getattr(menu, '_timetable_layout', None) == 'room'
        settings['timetable_detailed'] = getattr(menu, '_timetable_detailed_view', False)
    else:
        self.print_error(cformat('%{red!} Skipping some settings, missing _menu attribute'),
                         event_id=event_id)
    # Drop settings that could not be determined.
    return {k: v for k, v in settings.iteritems() if v is not None}
def print_result(self, slow=float('inf'), veryslow=float('inf')):
    """Print this timing, color-coded by how slow it was.

    -inf means the step was skipped; +inf means it is still running.
    """
    elapsed = float(self)
    if elapsed == float('-inf'):
        print(cformat('%{blue!}skipped'))
        return
    if elapsed == float('inf'):
        print(cformat('%{red}running'))
        return
    # Pick the highlight color from the thresholds (checked worst-first).
    if elapsed >= veryslow:
        color = '%{red!}'
    elif elapsed >= slow:
        color = '%{yellow!}'
    else:
        color = '%{green!}'
    print(cformat(color + '{}').format(self))
def migrate_settings(self): print cformat('%{white!}migrating settings') InvenioSearchPlugin.settings.delete_all() opts = self.zodb_root['plugins']['search']._PluginType__plugins['invenio']._PluginBase__options InvenioSearchPlugin.settings.set('search_url', convert_to_unicode(opts['serverUrl']._PluginOption__value).strip()) type_map = {'public': 'api_public', 'private': 'api_private', 'redirect': 'redirect'} display_mode = type_map[opts['type']._PluginOption__value] InvenioSearchPlugin.settings.set('display_mode', display_mode) db.session.commit()
def populate_db(): """Populate DB with fun stuff""" # set tileserver URL rb_settings.set( 'tileserver_url', 'https://indico-maps.web.cern.ch/styles/cern/{z}/{x}/{y}.png') location = Location(name="CERN") owner = User.get(0) for area in MAP_AREAS: map_area = MapArea(name=area[0], top_left_latitude=area[1], top_left_longitude=area[2], bottom_right_latitude=area[3], bottom_right_longitude=area[4]) db.session.add(map_area) for name in shower_names: # split name in parts building, floor, number = ROOM_RE.match(name).groups() # random number of showers, since we don't have time # to figure out what it really is num_showers = random.choice([2, 3, 4]) file_name = './photos/{}.png'.format(name.replace('/', '_')) photo_data = None # Check if there's a photo in './photos/xxxx' and use it if os.path.exists(file_name): with open(file_name, 'r') as f: photo_data = f.read() else: print cformat("%{yellow}!%{reset} Photo for {} not found!").format( name) for num_shower in range(num_showers): room = Room(building=building, floor=floor, number=number, verbose_name="Shower {}".format(num_shower + 1), location=location, division='CERN', owner=owner, capacity=1) if photo_data: room.photo = Photo(data=photo_data) if building in GEO_INFO: room.latitude, room.longitude = GEO_INFO[building] db.session.add(room) db.session.commit()
def print_prefixed(self, prefix, prefix_color, msg, always=False, event_id=None):
    """Prints a prefixed message to the console."""
    pieces = [cformat('%%{%s}{}%%{reset}' % prefix_color).format(prefix)]
    if event_id is not None:
        # Right-align the event id in a fixed-width white column.
        pieces.append(cformat('%{white!}{:>6s}%{reset}').format(unicode(event_id)))
    pieces.append(msg)
    # filter(None, ...) also drops an empty msg, matching the original behavior.
    self.print_msg(' '.join(filter(None, pieces)), always)
def _launch(self, quiet=False, retry=0):
    """Spawn the Indico dev server as a child process, retrying on failure.

    :param quiet: suppress the initial "Launching" message
    :param retry: internal recursion counter; also scales the retry delay

    NOTE(review): there is no upper bound on retries, so a persistent
    spawn failure loops forever -- confirm this is intended.
    """
    # A previous child process must have been cleaned up before relaunching.
    assert not self._proc
    if not quiet and not retry:
        # Only announce on the very first attempt.
        print(cformat('%{green!}Launching Indico'))
    try:
        # presumably strips reloader-related args so the child does not
        # spawn its own reloader -- verify against _disable_reloader
        argv = _disable_reloader(sys.argv)
        self._proc = subprocess.Popen(argv)
    except OSError as exc:
        # Spawn failed (e.g. executable missing); back off linearly and retry.
        delay = (retry + 1) * 0.5
        print(cformat('%{red!}Could not launch Indico: {}').format(exc))
        print(cformat('%{yellow}Retrying in {}s').format(delay))
        time.sleep(delay)
        self._launch(quiet=quiet, retry=(retry + 1))
def migrate_settings(self): print cformat('%{white!}migrating settings') settings_map = { '_apiHTTPSRequired': 'require_https', '_apiPersistentAllowed': 'allow_persistent', '_apiMode': 'security_mode', '_apiCacheTTL': 'cache_ttl', '_apiSignatureTTL': 'signature_ttl' } for old, new in settings_map.iteritems(): api_settings.set(new, getattr(self.zodb_root['MaKaCInfo']['main'], old)) db.session.commit()
def _require_encoding(encoding):
    """Check that the database server encoding is exactly *encoding*.

    :param encoding: the required Postgres server encoding name, e.g. ``'UTF8'``
    :return: ``True`` if the encoding matches; otherwise prints how to
             recreate the database with the right encoding and returns ``False``
    """
    cur_encoding = db.engine.execute(
        "SELECT current_setting('server_encoding')").scalar()
    # Encoding names are not ordered; `>=` performed a meaningless lexicographic
    # comparison (e.g. 'WIN1252' >= 'UTF8' is true), so compare for equality.
    if cur_encoding == encoding:
        return True
    print(
        cformat('%{red}Database encoding must be {}; got {}').format(
            encoding, cur_encoding))
    print(
        cformat(
            '%{yellow}Recreate your database using `createdb -E {} -T template0 ...`'
        ).format(encoding))
    return False
def setup(self):
    """Bootstrap the migration environment.

    Creates a minimal Flask app, loads plugins, initializes the SQL
    database and alembic, connects to the source ZODB, and asks for
    confirmation if the target database already contains data.
    """
    self.app = app = IndicoFlask('indico_zodbimport')
    app.config['PLUGINENGINE_NAMESPACE'] = 'indico.plugins'
    app.config['PLUGINENGINE_PLUGINS'] = self.plugins
    app.config['SQLALCHEMY_DATABASE_URI'] = self.sqlalchemy_uri
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
    plugin_engine.init_app(app)
    # All requested plugins must load; migrating without them would silently
    # skip their data.
    if not plugin_engine.load_plugins(app):
        print(
            cformat(
                '%{red!}Could not load some plugins: {}%{reset}').format(
                    ', '.join(plugin_engine.get_failed_plugins(app))))
        sys.exit(1)
    db.init_app(app)
    setup_request_stats(app)
    if self.dblog:
        # Enable SQL statement logging for debugging the migration.
        app.debug = True
        apply_db_loggers(app)
    import_all_models()
    alembic_migrate.init_app(app, db, os.path.join(app.root_path, 'migrations'))
    self.connect_zodb()
    # Use the timezone configured in the old ZODB, falling back to UTC
    # if the MaKaCInfo object is missing.
    try:
        self.tz = pytz.timezone(
            getattr(self.zodb_root['MaKaCInfo']['main'], '_timezone', 'UTC'))
    except KeyError:
        self.tz = pytz.utc
    with app.app_context():
        request_stats_request_started()
        if not self.pre_check():
            sys.exit(1)
        if self.has_data():
            # Usually there's no good reason to migrate with data in the DB. However, during development one might
            # comment out some migration tasks and run the migration anyway.
            print(cformat('%{yellow!}*** WARNING'))
            print(
                cformat(
                    '%{yellow!}***%{reset} Your database is not empty, migration may fail or add duplicate '
                    'data!'))
            if raw_input(
                    cformat(
                        '%{yellow!}***%{reset} To confirm this, enter %{yellow!}YES%{reset}: '
                    )) != 'YES':
                print('Aborting')
                sys.exit(1)
def prepare_db(empty=False, root_path=None, verbose=True):
    """Initialize an empty database (create tables, set alembic rev to HEAD).

    :param empty: if True, skip adding the initial data rows
    :param root_path: application root (defaults to the current app's)
    :param verbose: print progress messages
    """
    # Bail out early if the Postgres server does not meet the requirements.
    if not _require_pg_version('9.6'):
        return
    if not _require_encoding('UTF8'):
        return
    if not _require_extensions('unaccent', 'pg_trgm'):
        return
    root_path = root_path or current_app.root_path
    tables = get_all_tables(db)
    if 'alembic_version' not in tables['public']:
        # No alembic metadata yet: stamp the core schema and every active
        # plugin's schema to their latest revision.
        if verbose:
            print(cformat('%{green}Setting the alembic version to HEAD'))
        stamp(directory=os.path.join(root_path, 'migrations'), revision='heads')
        PluginScriptDirectory.dir = os.path.join(root_path, 'core', 'plugins', 'alembic')
        alembic.command.ScriptDirectory = PluginScriptDirectory
        plugin_msg = cformat(
            "%{cyan}Setting the alembic version of the %{cyan!}{}%{reset}%{cyan} "
            "plugin to HEAD%{reset}")
        for plugin in plugin_engine.get_active_plugins().itervalues():
            # Plugins without alembic revisions have nothing to stamp.
            if not os.path.exists(plugin.alembic_versions_path):
                continue
            if verbose:
                print(plugin_msg.format(plugin.name))
            with plugin.plugin_context():
                stamp(revision='heads')
        # Retrieve the table list again, just in case we created unexpected tables
        tables = get_all_tables(db)
    # alembic_version* tables are expected and don't count as "data".
    tables['public'] = [
        t for t in tables['public'] if not t.startswith('alembic_version')
    ]
    if any(tables.viewvalues()):
        # Refuse to touch a non-empty database; list what's in it.
        if verbose:
            print(cformat('%{red}Your database is not empty!'))
            print(
                cformat(
                    '%{yellow}If you just added a new table/model, create an alembic revision instead!'
                ))
            print()
            print('Tables in your database:')
            for schema, schema_tables in sorted(tables.items()):
                for t in schema_tables:
                    print(
                        cformat(
                            ' * %{cyan}{}%{reset}.%{cyan!}{}%{reset}').format(
                                schema, t))
        return
    create_all_tables(db, verbose=verbose, add_initial_data=(not empty))
def update_merged_ids(self):
    """Re-point attachment/folder ACL entries at the final merge target.

    For every principal whose user was merged into another user, follow
    the chain of ``merged_into_user`` links to the end and update the
    ACL entry, logging ``old -> new`` for each change.
    """
    self.print_step('updating merged users in attachment acls')
    for p in AttachmentPrincipal.find(User.merged_into_id != None, _join=AttachmentPrincipal.user):  # noqa
        user = p.user
        while p.user.merged_into_user:
            p.user = p.user.merged_into_user
        self.print_success(cformat('%{cyan}{}%{reset} -> %{cyan}{}%{reset}').format(user, p.user), always=True)
    self.print_step('updating merged users in folder acls')
    for p in AttachmentFolderPrincipal.find(User.merged_into_id != None,
                                            _join=AttachmentFolderPrincipal.user):  # noqa
        # Remember the original user so the log shows old -> new; previously
        # this loop reused the stale `user` left over from the loop above,
        # logging the wrong source user for every folder ACL entry.
        user = p.user
        while p.user.merged_into_user:
            p.user = p.user.merged_into_user
        self.print_success(cformat('%{cyan}{}%{reset} -> %{cyan}{}%{reset}').format(user, p.user), always=True)
    db.session.commit()
def unblock(user_id):
    """Unblock a given user."""
    user = User.get(user_id)
    # Guard: unknown user id.
    if user is None:
        print(cformat('%{red}This user does not exist'))
        return
    _print_user_info(user)
    # Guard: nothing to do for a user who isn't blocked.
    if not user.is_blocked:
        print(cformat('%{yellow}This user is not blocked'))
        return
    # Ask for confirmation before changing anything.
    if not click.confirm(cformat('%{yellow}Unblock this user?')):
        return
    user.is_blocked = False
    db.session.commit()
    print(cformat('%{green}Successfully unblocked user'))
def grant_admin(user_id):
    """Grants administration rights to a given user"""
    user = User.get(user_id)
    # Guard: unknown user id.
    if user is None:
        print(cformat("%{red}This user does not exist"))
        return
    _print_user_info(user)
    # Guard: already an admin, nothing to change.
    if user.is_admin:
        print(cformat("%{yellow}This user already has administration rights"))
        return
    # Ask for confirmation before changing anything.
    if not click.confirm(cformat("%{yellow}Grant administration rights to this user?")):
        return
    user.is_admin = True
    db.session.commit()
    print(cformat("%{green}Administration rights granted successfully"))
def revoke_admin(user_id):
    """Revokes administration rights from a given user"""
    user = User.get(user_id)
    # Guard: unknown user id.
    if user is None:
        print(cformat("%{red}This user does not exist"))
        return
    _print_user_info(user)
    # Guard: not an admin, nothing to revoke.
    if not user.is_admin:
        print(cformat("%{yellow}This user does not have administration rights"))
        return
    # Ask for confirmation before changing anything.
    if not click.confirm(cformat("%{yellow}Revoke administration rights from this user?")):
        return
    user.is_admin = False
    db.session.commit()
    print(cformat("%{green}Administration rights revoked successfully"))
def _add_to_context(namespace, info, element, name=None, doc=None, color='green'):
    """Expose *element* in the shell namespace and record a colored help line."""
    # Fall back to the element's own name when none is given.
    name = name or element.__name__
    namespace[name] = element
    if not doc:
        info.append(cformat('+ %%{%s}{}' % color).format(name))
    else:
        # Include the description in white after the colored name.
        info.append(
            cformat('+ %%{%s}{}%%{white!} ({})' % color).format(name, doc))
def block(user_id):
    """Block a given user."""
    user = User.get(user_id)
    # Guard: unknown user id.
    if user is None:
        print(cformat("%{red}This user does not exist"))
        return
    _print_user_info(user)
    # Guard: already blocked, nothing to change.
    if user.is_blocked:
        print(cformat("%{yellow}This user is already blocked"))
        return
    # Ask for confirmation before changing anything.
    if not click.confirm(cformat("%{yellow}Block this user?")):
        return
    user.is_blocked = True
    db.session.commit()
    print(cformat("%{green}Successfully blocked user"))
def migrate_agents(self):
    """Migrate the legacy livesync agents from ZODB to LiveSyncAgent rows.

    Inactive and unrecognized agent types are skipped with a message.
    """
    print cformat('%{white!}migrating agents')
    for old_agent in committing_iterator(
            self.livesync_root['agent_manager']._agents.itervalues()):
        if not old_agent._active:
            print cformat(
                '%{yellow}skipping inactive agent {} ({})%{reset}').format(
                    old_agent._id, old_agent._name)
            continue
    # initial_data_exported=True: the old agent was already running, so no
    # initial export is needed for the migrated one.
        agent = LiveSyncAgent(name=convert_to_unicode(old_agent._name),
                              initial_data_exported=True)
        # Dispatch on the legacy class name since the old classes are not
        # importable here.
        old_agent_class = old_agent.__class__.__name__
        if old_agent_class == 'InvenioBatchUploaderAgent':
            agent.backend_name = 'invenio'
            agent.settings = {'server_url': old_agent._url}
        elif old_agent_class == 'CERNSearchUploadAgent':
            agent.backend_name = 'cernsearch'
            agent.settings = {
                'server_url': old_agent._url,
                'username': old_agent._username,
                'password': old_agent._password,
            }
        else:
            print cformat('%{red!}skipping unknown agent type: {}%{reset}'
                          ).format(old_agent_class)
            continue
        print cformat('- %{cyan}{} ({})').format(agent.name, agent.backend_name)
        db.session.add(agent)
def migrate_event_shorturls(self):
    """Migrate event short URLs, validating them and resolving case conflicts.

    Shorturls are collected first (``todo``) and only applied at the end,
    so conflicting entries can be discarded before anything is written.
    """
    self.print_step("Migrating event shorturls")
    todo = {}  # event -> shorturl to apply
    done = {}  # lowercased shorturl -> (original shorturl, event)
    for shorturl, conf, event in self._iter_shorturls():
        shorturl = convert_to_unicode(shorturl)
        event_shorturl = convert_to_unicode(conf._sortUrlTag)
        if event_shorturl.lower() != shorturl.lower():
            # warn about mismatch but keep the one from the mapping.
            # this is a bit weird but like this we never risk breaking urls
            self.print_warning(cformat(
                '%{yellow}Shorturl %{yellow!}{}%{reset}%{yellow} from mapping not matching '
                'event shorturl %{yellow!}{}%{reset}%{yellow}').format(
                    shorturl, event_shorturl), event_id=event.id)
        error = self._validate_shorturl(shorturl)
        if error == 'url':
            # show obvious garbage in a less prominent way
            self.print_warning(cformat(
                '%{yellow}Shorturl %{yellow!}{}%{reset}%{yellow} is invalid: %{yellow!}{}'
            ).format(shorturl, error), event_id=event.id)
            continue
        elif error:
            # Other validation failures are shown more prominently in red.
            self.print_warning(cformat(
                '%{red}Shorturl %{yellow!}{}%{reset}%{red} is invalid: %{red!}{}'
            ).format(shorturl, error), event_id=event.id)
            continue
        conflict = done.get(shorturl.lower())
        if conflict and conflict[1] != event:
            # if there's a conflict caused by the previously case-sensitive url shortcuts,
            # discard them in both events - it's better to get a 404 error than a wrong event
            self.print_error(cformat(
                '%{red!}Shorturl %{reset}%{red}{}%{red!} collides with '
                '%{reset}%{red}{}%{red!} in event %{reset}%{red}{}%{red!}; discarding both'
            ).format(shorturl, conflict[0], conflict[1]), event_id=event.id)
            del done[shorturl.lower()]
            del todo[conflict[1]]
            continue
        done[shorturl.lower()] = (shorturl, event)
        todo[event] = shorturl
    # Apply the surviving shorturls, committing in batches.
    it = verbose_iterator(todo.iteritems(), len(todo), lambda x: x[0].id, lambda x: '')
    for event, shorturl in committing_iterator(it):
        event.url_shortcut = shorturl
        self.print_success('{} -> {}'.format(shorturl, event.title), event_id=event.id)
def _require_pg_version(version): # convert version string such as '9.4.10' to `90410` which is the # format used by server_version_num req_version = sum( segment * 10**(4 - 2 * i) for i, segment in enumerate(map(int, version.split('.')))) cur_version = db.engine.execute( "SELECT current_setting('server_version_num')::int").scalar() if cur_version >= req_version: return True print cformat( '%{red}Postgres version too old; you need at least {} (or newer)' ).format(version) return False
def migrate_links(self):
    """Migrate the per-avatar `linkedTo` structures to UserLink rows.

    Avatars are processed in chunks of 2500, matched to their migrated
    User rows by id, with periodic flushing/committing to bound memory.
    """
    print cformat('%{white!}migrating links')
    for avatars in grouper(self._iter_avatars(), 2500, skip_missing=True):
        avatars = {int(a.id): a for a in avatars}
        # Pair each existing User row with its source avatar.
        users = ((u, avatars[u.id]) for u in User.find(User.id.in_(avatars)))
        for user, avatar in committing_iterator(
                self.flushing_iterator(users, 250), 2500):
            registrants = set()
            user_shown = False
            for type_, entries in avatar.linkedTo.iteritems():
                # store registrant roles, in order to avoid duplication below
                for role, objects in entries.iteritems():
                    # Category favorites and group links are migrated elsewhere / skipped.
                    if (type_ == 'category' and role == 'favorite') or type_ == 'group':
                        continue
                    if not objects:
                        continue
                    if type_ == 'registration' and role == 'registrant':
                        registrants |= set(objects)
                    if not user_shown:
                        # Print the user header only once, and only if they have links.
                        print cformat(
                            '%{green}+++%{reset} '
                            '%{white!}{:6d}%{reset} %{cyan}{}%{reset}'
                        ).format(user.id, user.full_name)
                        user_shown = True
                    print cformat(
                        '%{blue!}<->%{reset} '
                        '%{yellow!}{:4d}%{reset}x %{green!}{:12} %{cyan!}{}%{reset}'
                    ).format(len(objects), type_, role)
                    for obj in objects:
                        try:
                            UserLink.create_link(user, obj, role, type_)
                        except Exception as e:
                            # Keep going on individual failures; just report them.
                            print cformat(
                                '%{red!}!!!%{reset} '
                                '%{red!}linking failed%{reset} (%{yellow!}{}%{reset}): '
                                '{}').format(unicode(e), obj)
            # add old "registrant" entries to registration/registrant
            for reg in getattr(avatar, 'registrants', {}).itervalues():
                # Skip registrations already linked above or whose event is gone.
                if reg.getConference().getOwner() and reg not in registrants:
                    UserLink.create_link(user, reg, 'registrant', 'registration')
                    print cformat(
                        '%{cyan!}<->%{reset} '
                        '%{yellow!} 1%{reset}x %{green!}{:12} %{cyan!}{}%{reset}'
                    ).format('registration', 'registrant')
def create_all_tables(db, verbose=False, add_initial_data=True):
    """Create all tables and required initial objects"""
    # Imported locally to avoid circular imports at module load time.
    from indico.modules.categories import Category
    from indico.modules.designer import TemplateType
    from indico.modules.designer.models.templates import DesignerTemplate
    from indico.modules.oauth.models.applications import OAuthApplication, SystemAppType
    from indico.modules.users import User
    if verbose:
        print(cformat('%{green}Creating tables'))
    db.create_all()
    if add_initial_data:
        if verbose:
            print(cformat('%{green}Creating system user'))
        # The system user always has id 0.
        db.session.add(
            User(id=0, is_system=True, first_name='Indico', last_name='System'))
        if verbose:
            print(cformat('%{green}Creating root category'))
        # The root ("Home") category always has id 0 and is public.
        cat = Category(id=0, title='Home', protection_mode=ProtectionMode.public)
        db.session.add(cat)
        # Flush so the category row exists before the templates reference it.
        db.session.flush()
        if verbose:
            print(
                cformat(
                    '%{green}Creating default ticket template for root category '
                ))
        # NOTE(review): both templates use TemplateType.badge, including the
        # ticket one -- presumably tickets are a kind of badge template; confirm.
        dtt = DesignerTemplate(category_id=0,
                               title='Default ticket',
                               type=TemplateType.badge,
                               data=DEFAULT_TICKET_DATA,
                               is_system_template=True)
        dbt = DesignerTemplate(category_id=0,
                               title='Default badge',
                               type=TemplateType.badge,
                               data=DEFAULT_BADGE_DATA,
                               is_system_template=True)
        cat.default_ticket_template = dtt
        cat.default_badge_template = dbt
        db.session.add(dtt)
        db.session.add(dbt)
        if verbose:
            print(cformat('%{green}Creating system oauth apps'))
        for sat in SystemAppType:
            # `none` is a placeholder enum member, not a real app.
            if sat != SystemAppType.none:
                db.session.add(
                    OAuthApplication(system_app_type=sat, **sat.default_data))
        db.session.commit()
def _print_schedule(self, deleted): table_data = [['Name', 'Schedule']] for entry in sorted(self.app.conf['CELERYBEAT_SCHEDULE'].itervalues(), key=itemgetter('task')): table_data.append([ cformat('%{yellow!}{}%{reset}').format(entry['task']), cformat('%{green}{!r}%{reset}').format(entry['schedule']) ]) for task_name in sorted(deleted): table_data.append([ cformat('%{yellow}{}%{reset}').format(task_name), cformat('%{red!}Disabled%{reset}') ]) print AsciiTable(table_data, cformat('%{white!}Periodic Tasks%{reset}')).table
def unlock(name):
    """Unlock a locked task.

    Use this if your celery worker was e.g. killed by your kernel's
    oom-killer and thus a task never got unlocked.

    Examples:

    indico celery unlock event_reminders
    """
    # unlock_task returns falsy when the task was not locked to begin with.
    if not unlock_task(name):
        print(cformat('%{yellow}Task {} is not locked').format(name))
    else:
        print(cformat('%{green!}Task {} unlocked').format(name))
def migrate_settings(self):
    """Migrate the legacy RoomBooking plugin options into rb_settings."""
    print cformat('%{white!}migrating settings')
    # Start from a clean slate so re-running the migration does not leave stale keys.
    rb_settings.delete_all()
    # Access the old plugin options through their name-mangled private attributes.
    opts = self.zodb_root['plugins']['RoomBooking']._PluginBase__options
    # Admins & authorized users/groups
    rb_settings.set('authorized_principals', convert_principal_list(opts['AuthorisedUsersGroups']))
    rb_settings.set('admin_principals', convert_principal_list(opts['Managers']))
    # Assistance emails (drop entries that are not valid addresses)
    emails = [email for email in opts['assistanceNotificationEmails']._PluginOption__value
              if is_valid_mail(email, False)]
    rb_settings.set('assistance_emails', emails)
    # Simple settings
    rb_settings.set('notification_before_days', opts['notificationBefore']._PluginOption__value)
    db.session.commit()
def check_plugin_schema(self, name): """Checks if a plugin schema exists in the database. :param name: Name of the plugin """ sql = 'SELECT COUNT(*) FROM "information_schema"."schemata" WHERE "schema_name" = :name' count = db.engine.execute(db.text(sql), name='plugin_{}'.format(name)).scalar() if not count: print cformat('%{red!}Plugin schema does not exist%{reset}') print cformat( 'Run %{yellow!}indico plugindb upgrade --plugin {}%{reset} to create it' ).format(name) return False return True
def _require_extensions(*names):
    """Check that every named Postgres extension is installed.

    Returns True when nothing is missing; otherwise prints the CREATE
    EXTENSION commands needed and returns False.
    """
    missing = [ext for ext in names if not has_extension(db.engine, ext)]
    missing.sort()
    if missing:
        print(
            cformat('%{red}Required Postgres extensions missing: {}').format(
                ', '.join(missing)))
        print(
            cformat(
                '%{yellow}Create them using these SQL commands (as a Postgres superuser):'
            ))
        for ext in missing:
            print(cformat('%{white!} CREATE EXTENSION {};').format(ext))
        return False
    return True
def run_watchman():
    """Run the watchman-based file watcher, reporting watchman errors nicely."""
    # Imported lazily so the module can be loaded without pywatchman's
    # optional machinery being set up.
    from .watchman import Watchman
    try:
        Watchman().run()
    except pywatchman.WatchmanError as exc:
        # Local import keeps the error path self-contained.
        from indico.util.console import cformat
        print(cformat('%{red!}watchman error: {}').format(exc))
def flush_rclone(self):
    """Upload all queued file trees to their buckets via rclone.

    For each queued bucket, runs `rclone copy`, reports the transfer
    time, removes the local link tree, and finally clears the queue.
    Raises CalledProcessError if rclone fails.
    """
    # Nothing to do without a configured remote or pending uploads.
    if not self.rclone_remote or not self.rclone_queue:
        return
    click.echo()
    # Make sure every known bucket exists before copying into it.
    for name, data in self.buckets.viewitems():
        if not data['exists']:
            self.create_bucket(name)
            data['exists'] = True
    for bucket, data in self.rclone_queue.viewitems():
        click.echo(
            cformat(
                'Copying %{cyan}{}%{reset} files (%{cyan}{}%{reset}) to %{cyan}{}%{reset} via rclone'
            ).format(data['files'], do_filesizeformat(data['bytes']),
                     bucket))
        start = datetime.now()
        try:
            # --copy-links follows symlinks in the staged tree.
            subprocess.check_call([
                'rclone', 'copy', '--copy-links', data['path'],
                '{}:{}'.format(self.rclone_remote, bucket)
            ])
        except subprocess.CalledProcessError:
            # Leave the queue intact so a retry can resume; re-raise after reporting.
            click.secho('\nError while running rclone', fg='red')
            raise
        duration = (datetime.now() - start)
        click.echo('...finished after {}'.format(
            format_human_timedelta(duration, 'minutes', narrow=True)))
        # The staged link tree is no longer needed after a successful copy.
        rmlinktree(data['path'])
    self.rclone_queue.clear()
def check_format_strings():
    """Check whether format strings match.

    This helps finding cases where e.g. the original string uses
    ``{error}`` but the translation uses ``{erro}``, resulting in
    errors when using the translated string.
    """
    root_path = 'indico/translations'
    # Collect every .po file under the translations tree.
    po_files = {os.path.join(root, name)
                for root, dirs, files in os.walk(root_path)
                for name in files
                if name.endswith('.po')}
    all_valid = True
    for po_file in po_files:
        invalid = _get_invalid_po_format_strings(po_file)
        if not invalid:
            continue
        all_valid = False
        click.echo('Found invalid format strings in {}'.format(
            os.path.relpath(po_file, root_path)))
        for item in invalid:
            click.echo(
                cformat(
                    '%{yellow}{}%{reset} | %{yellow!}{}%{reset}\n%{red}{}%{reset} != %{red!}{}%{reset}'
                ).format(item['orig'], item['trans'],
                         list(item['orig_placeholders']),
                         list(item['trans_placeholders'])))
        click.echo()
    # Non-zero exit status signals failure to CI.
    sys.exit(0 if all_valid else 1)