def migrate_event_dates_titles(self):
    """Copy titles, descriptions, timezones and start/end dates from ZODB events into the DB."""
    self.print_step("Migrating event dates and titles")
    for old_event in committing_iterator(self._iter_events()):
        zodb_data = old_event.__dict__
        if 'title' not in zodb_data:
            self.print_error('Event has no title in ZODB', old_event.id)
            continue
        tz = zodb_data.get('timezone', 'UTC')
        # column -> value mapping for a bulk UPDATE on the event row
        new_values = {
            Event.title: convert_to_unicode(zodb_data['title']) or '(no title)',
            Event.description: convert_to_unicode(zodb_data['description']) or '',
            Event.timezone: tz,
            Event.start_dt: self._fix_naive(old_event, zodb_data['startDate'], tz),
            Event.end_dt: self._fix_naive(old_event, zodb_data['endDate'], tz),
        }
        Event.query.filter_by(id=int(old_event.id)).update(new_values, synchronize_session=False)
        if not self.quiet:
            self.print_success('', event_id=old_event.id)
    # deleted events are not in zodb but also need data
    placeholder_values = {
        Event.title: '***deleted***',
        Event.description: '',
        Event.timezone: 'UTC',
        Event.start_dt: datetime(1970, 1, 1, tzinfo=pytz.utc),
        Event.end_dt: datetime(1970, 1, 1, tzinfo=pytz.utc),
    }
    Event.query.filter_by(is_deleted=True).update(placeholder_values, synchronize_session=False)
    db.session.commit()
def _migrate_category(self, old_cat, position):
    """Recursively convert a legacy ZODB category (and its subtree) to a `Category`.

    :param old_cat: legacy category object from ZODB
    :param position: 1-based position of the category among its siblings
    :return: the new `Category` with all children already migrated
    """
    # unlimited visibility is 999 but we have a 994 for some reason.. since nobody
    # has 900 levels of nesting we can just go for that threshold instead
    visibility = None if old_cat._visibility > 900 else old_cat._visibility
    if visibility == 0:
        self.print_warning("Raising visibility from 'invisible' to 'category-only'", event_id=old_cat.id)
        visibility = 1
    # the legacy notification list is a free-form string; split on whitespace/';'/','
    emails = re.split(r'[\s;,]+', convert_to_unicode(getattr(old_cat, '_notifyCreationList', '')))
    emails = {sanitize_email(email).lower() for email in emails}
    emails = sorted(email for email in emails if is_valid_mail(email, False))
    default_themes = self._process_default_themes(old_cat)
    title = self._fix_title(convert_to_unicode(old_cat.name), old_cat.id)
    cat = Category(id=int(old_cat.id), position=position, title=title,
                   description=convert_to_unicode(old_cat.description), visibility=visibility,
                   timezone=convert_to_unicode(old_cat._timezone),
                   event_creation_notification_emails=emails,
                   default_event_themes=default_themes,
                   suggestions_disabled=getattr(old_cat, '_suggestions_disabled', False))
    if not self.quiet:
        self.print_success(cat.title, event_id=cat.id)
    if old_cat._icon:
        self._process_icon(cat, old_cat._icon)
    self._process_protection(cat, old_cat)
    # migrate children in their legacy display order (_order), positions starting at 1
    cat.children = [(self._migrate_category(old_subcat, i))
                    for i, old_subcat in enumerate(sorted(old_cat.subcategories.itervalues(),
                                                          key=attrgetter('_order')), 1)]
    return cat
def migrate_event_locations(self):
    """Migrate per-event venue/room data, mapping names to room-booking rooms when possible."""
    self.print_step("Migrating event locations")
    for old_event in committing_iterator(self._iter_events()):
        # legacy events keep lists of places/rooms; only the first entry is used
        custom_location = old_event.places[0] if getattr(old_event, 'places', None) else None
        custom_room = old_event.rooms[0] if getattr(old_event, 'rooms', None) else None
        location_name = None
        room_name = None
        has_room = False
        updates = {}
        if custom_location:
            location_name = convert_to_unicode(fix_broken_string(custom_location.name, True))
            if custom_location.address:
                updates[Event.own_address] = convert_to_unicode(fix_broken_string(custom_location.address, True))
        if custom_room:
            room_name = convert_to_unicode(fix_broken_string(custom_room.name, True))
        if location_name and room_name:
            # try to resolve the (venue, room) name pair to a room-booking room
            mapping = self.room_mapping.get((location_name, room_name))
            if mapping:
                has_room = True
                updates[Event.own_venue_id] = mapping[0]
                updates[Event.own_room_id] = mapping[1]
        # if we don't have a RB room set, use whatever location/room name we have
        if not has_room:
            venue_id = self.venue_mapping.get(location_name)
            if venue_id is not None:
                updates[Event.own_venue_id] = venue_id
                updates[Event.own_venue_name] = ''
            else:
                updates[Event.own_venue_name] = location_name or ''
            updates[Event.own_room_name] = room_name or ''
        if updates:
            Event.query.filter_by(id=int(old_event.id)).update(updates, synchronize_session=False)
            if not self.quiet:
                self.print_success(repr(updates), event_id=old_event.id)
def migrate_settings(self):
    """Migrate the legacy InstantMessaging (XMPP) plugin options to ChatPlugin settings."""
    print cformat('%{white!}migrating settings')
    ChatPlugin.settings.delete_all()
    # options live both on the plugin *type* and on the XMPP plugin itself
    type_opts = self.zodb_root['plugins']['InstantMessaging']._PluginBase__options
    opts = self.zodb_root['plugins']['InstantMessaging']._PluginType__plugins['XMPP']._PluginBase__options
    host = convert_to_unicode(opts['chatServerHost']._PluginOption__value)
    admin_emails = [x.email for x in opts['admins']._PluginOption__value]
    ChatPlugin.settings.set('admins', convert_principal_list(opts['admins']))
    ChatPlugin.settings.set('server', host)
    ChatPlugin.settings.set('muc_server', 'conference.{}'.format(host))
    # NOTE(review): the '******' target names look redacted in this source — confirm the real setting keys
    settings_map = {
        'additionalEmails': 'notify_emails',
        'indicoUsername': '******',
        'indicoPassword': '******',
        'ckEditor': 'how_to_connect'
    }
    for old, new in settings_map.iteritems():
        value = opts[old]._PluginOption__value
        if isinstance(value, basestring):
            value = convert_to_unicode(value).strip()
        elif new == 'notify_emails':
            # merge explicit notification addresses with the admin emails, drop invalid ones
            value = [email for email in set(value + admin_emails) if is_valid_mail(email, multi=False)]
        ChatPlugin.settings.set(new, value)
    if opts['activateLogs']._PluginOption__value:
        ChatPlugin.settings.set('log_url', 'https://{}/logs/'.format(host))
    chat_links = []
    for item in type_opts['customLinks']._PluginOption__value:
        link = item['structure'].replace('[chatroom]', '{room}').replace('[host]', '{server}')
        # substitute the real host for {server} unless it is the MUC-server placeholder
        link = re.sub(r'(?<!conference\.)\{server}', host, link)
        link = link.replace('conference.{server}', '{server}')  # {server} is now the MUC server
        chat_links.append({'title': item['name'], 'link': link})
    ChatPlugin.settings.set('chat_links', chat_links)
    db.session.commit()
def _event_person_from_legacy(self, old_person):
    """Build an `AbstractPersonLink` (with its `EventPerson`) from a legacy person.

    Re-uses an existing EventPerson when one matches by email, by linked user,
    or — for persons without an email — by (first name, last name, affiliation).
    """
    data = dict(first_name=convert_to_unicode(old_person._firstName),
                last_name=convert_to_unicode(old_person._surName),
                _title=self.USER_TITLE_MAP.get(getattr(old_person, '_title', ''), UserTitle.none),
                affiliation=convert_to_unicode(old_person._affilliation),
                address=convert_to_unicode(old_person._address),
                phone=convert_to_unicode(old_person._telephone))
    email = strict_sanitize_email(old_person._email)
    if email:
        person = (self.event_persons_by_email.get(email) or
                  self.event_persons_by_user.get(self.importer.all_users_by_email.get(email)))
    else:
        person = self.event_persons_noemail_by_data.get((data['first_name'], data['last_name'],
                                                         data['affiliation']))
    if not person:
        user = self.importer.all_users_by_email.get(email)
        person = EventPerson(event_new=self.event, user=user, email=email, **data)
        # register the freshly created person in the lookup caches for later re-use
        if email:
            self.event_persons_by_email[email] = person
        if user:
            self.event_persons_by_user[user] = person
        if not email and not user:
            self.event_persons_noemail_by_data[(person.first_name, person.last_name,
                                                person.affiliation)] = person
    person_link = AbstractPersonLink(person=person)
    person_link.populate_from_dict(data)
    return person_link
def _migrate_action_log(self, event, item):
    """Convert a generic legacy ('action') log item into a simple event log entry."""
    log_info = item._logInfo
    entry = self._migrate_log(event, item)
    entry.realm = EventLogRealm.event
    entry.type = "simple"
    entry.summary = convert_to_unicode(log_info["subject"]).strip()
    # everything except the subject is kept as structured entry data
    entry.data = {convert_to_unicode(key): _convert_data(event, val)
                  for key, val in log_info.iteritems()
                  if key != "subject"}
    return entry
def _convert_data(event, value):
    """Render a legacy log value as a stripped unicode string.

    Timedeltas/datetimes are formatted; legacy ContributionType and
    AbstractFieldContent objects are unpacked; everything else is converted as-is.
    """
    cls_name = value.__class__.__name__
    if isinstance(value, timedelta):
        rendered = format_human_timedelta(value)
    elif isinstance(value, datetime):
        rendered = format_datetime(value, locale="en_GB", timezone=event.timezone)
    elif cls_name == "ContributionType":
        rendered = value._name
    elif cls_name == "AbstractFieldContent":
        rendered = '{}: "{}"'.format(convert_to_unicode(value.field._caption),
                                     convert_to_unicode(value.value))
    else:
        rendered = value
    return convert_to_unicode(rendered).strip()
def migrate_locations(self):
    """Migrate room-booking locations, their map aspects and custom room attributes."""
    print cformat("%{white!}migrating locations")
    default_location_name = self.zodb_root["DefaultRoomBookingLocation"]
    custom_attributes_dict = self.rb_root["CustomAttributesList"]
    for old_location in self.zodb_root["RoomBookingLocationList"]:
        # create location
        l = Location(
            name=convert_to_unicode(old_location.friendlyName),
            is_default=(old_location.friendlyName == default_location_name),
        )
        print cformat("- %{cyan}{}").format(l.name)
        # add aspects (map views defined for this location)
        for old_aspect in old_location._aspects.values():
            a = Aspect(
                name=convert_to_unicode(old_aspect.name),
                center_latitude=old_aspect.centerLatitude,
                center_longitude=old_aspect.centerLongitude,
                zoom_level=old_aspect.zoomLevel,
                top_left_latitude=old_aspect.topLeftLatitude,
                top_left_longitude=old_aspect.topLeftLongitude,
                bottom_right_latitude=old_aspect.bottomRightLatitude,
                bottom_right_longitude=old_aspect.bottomRightLongitude,
            )
            print cformat(" %{blue!}Aspect:%{reset} {}").format(a.name)
            l.aspects.append(a)
            if old_aspect.defaultOnStartup:
                l.default_aspect = a
        # add custom attributes
        for ca in custom_attributes_dict.get(l.name, []):
            if ca["type"] != "str":
                raise RuntimeError("Non-str custom attributes are unsupported: {}".format(ca))
            attr_name = attribute_map.get(ca["name"], ca["name"])
            attr = RoomAttribute(
                name=attr_name.replace(" ", "-").lower(),
                title=attr_name,
                type=ca["type"],
                is_required=ca["required"],
                is_hidden=ca["hidden"],
            )
            l.attributes.append(attr)
            print cformat(" %{blue!}Attribute:%{reset} {}").format(attr.title)
        # add new created location
        db.session.add(l)
        print
        print
    db.session.commit()
def migrate_networks(self):
    """Convert legacy access-control domains into IP network groups."""
    self.print_step('migrating networks')
    for domain in committing_iterator(self._iter_domains()):
        ip_networks = filter(None, map(self._to_network, set(domain.filterList)))
        if not ip_networks:
            # no valid entries; the (empty) group is still created below
            self.print_warning(cformat('%{yellow}Domain has no valid IPs: {}')
                               .format(convert_to_unicode(domain.name)))
        group = IPNetworkGroup(name=convert_to_unicode(domain.name),
                               description=convert_to_unicode(domain.description),
                               networks=ip_networks)
        db.session.add(group)
        self.print_success(repr(group))
    db.session.flush()
def _attachment_from_resource(self, folder, material, resource, base_object=None):
    """Queue an Attachment row (plus AttachmentFile for local files) for a legacy resource.

    :param folder: the todo-dict of the folder this attachment belongs to
    :param material: the legacy material (supplies the modification-date fallback)
    :param resource: the legacy ``Link`` or local-file resource
    :param base_object: object supplying fallback dates and the event id for error messages
    :return: the attachment's todo-dict, or ``None`` if the resource was skipped
    """
    # best-effort modification date: material, then the base object's dates, then now
    modified_dt = (
        getattr(material, "_modificationDS", None)
        or getattr(base_object, "startDate", None)
        or getattr(base_object, "_modificationDS", None)
        or now_utc()
    )
    data = {
        "id": self._get_id(Attachment),
        "folder_id": folder["id"],
        "user_id": self.janitor_user_id,
        "title": convert_to_unicode(resource.name).strip() or folder["title"],
        "description": convert_to_unicode(resource.description),
        "modified_dt": modified_dt,
    }
    if resource.__class__.__name__ == "Link":
        data["type"] = AttachmentType.link
        data["link_url"] = convert_to_unicode(resource.url).strip()
        if not data["link_url"]:
            self.print_error(
                cformat("%{red!}[{}] Skipping link, missing URL").format(data["title"]), event_id=base_object.id
            )
            return None
    else:
        data["type"] = AttachmentType.file
        storage_backend, storage_path, size = self._get_local_file_info(resource)
        if storage_path is None:
            self.print_error(
                cformat("%{red!}File {} not found on disk").format(resource._LocalFile__archivedId),
                event_id=base_object.id,
            )
            return None
        filename = secure_filename(convert_to_unicode(resource.fileName), "attachment")
        file_data = {
            "id": self._get_id(AttachmentFile),
            "attachment_id": data["id"],
            "user_id": self.janitor_user_id,
            "created_dt": modified_dt,
            "filename": filename,
            "content_type": mimetypes.guess_type(filename)[0] or "application/octet-stream",
            "size": size,
            "storage_backend": storage_backend,
            "storage_file_id": storage_path,
        }
        # link the attachment row to its (only) file row
        data["file_id"] = file_data["id"]
        self.todo[AttachmentFile].append(file_data)
    # translate the legacy ACL into principal rows + a protection mode
    tmp = ProtectionTarget()
    protection_from_ac(tmp, resource._Resource__ac)
    self.todo[AttachmentPrincipal] += tmp.make_principal_rows(attachment_id=data["id"])
    data["protection_mode"] = tmp.protection_mode
    self.todo[Attachment].append(data)
    return data
def _folder_from_material(self, material, linked_object):
    """Build an AttachmentFolder todo-dict from a legacy material and queue it."""
    folder_obj = AttachmentFolder(
        id=self._get_id(AttachmentFolder),
        title=convert_to_unicode(material.title).strip() or 'Material',
        description=convert_to_unicode(material.description),
        linked_object=linked_object,
        is_always_visible=not getattr(material._Material__ac, '_hideFromUnauthorizedUsers', False))
    folder = _sa_to_dict(folder_obj)
    self.todo[AttachmentFolder].append(folder)
    # translate the legacy ACL into principal rows + a protection mode
    acl_target = ProtectionTarget()
    protection_from_ac(acl_target, material._Material__ac)
    self.todo[AttachmentFolderPrincipal] += acl_target.make_principal_rows(folder_id=folder['id'])
    folder['protection_mode'] = acl_target.protection_mode
    return folder
def _migrate_participant(self, old_part):
    """Create a Registration from a legacy participant (not yet persisted)."""
    state = PARTICIPANT_STATUS_MAP.get(old_part._status, RegistrationState.complete)
    registration = Registration(
        first_name=convert_to_unicode(old_part._firstName),
        last_name=convert_to_unicode(old_part._familyName),
        email=self._fix_email(old_part._email),
        submitted_dt=self.event._creationDS,
        base_price=0,
        price_adjustment=0,
        checked_in=old_part._present,
        state=state)
    self.importer.print_info(cformat('%{yellow}Registration%{reset} - %{cyan}{}%{reset} [{}]')
                             .format(registration.full_name, state.title))
    # fill in the user link, custom data fields and status-dependent attributes
    self._migrate_participant_user(old_part, registration)
    self._migrate_participant_data(old_part, registration)
    self._migrate_participant_status(old_part, registration)
    return registration
def migrate_settings(self): print cformat('%{white!}migrating settings') VidyoPlugin.settings.delete_all() opts = self.zodb_root['plugins']['Collaboration']._PluginType__plugins['Vidyo']._PluginBase__options VidyoPlugin.settings.set('managers', convert_principal_list(opts['admins'])) VidyoPlugin.settings.set('acl', convert_principal_list(opts['AuthorisedUsersGroups'])) settings_map = { 'adminAPIURL': 'admin_api_wsdl', 'userAPIURL': 'user_api_wsdl', 'prefix': 'indico_room_prefix', 'indicoGroup': 'room_group_name', 'phoneNumbers': 'vidyo_phone_link', 'maxDaysBeforeClean': 'num_days_old', 'indicoUsername': '******', 'indicoPassword': '******', 'contactSupport': 'support_email', 'cleanWarningAmount': 'max_rooms_warning', 'additionalEmails': 'notification_emails' } for old, new in settings_map.iteritems(): value = option_value(opts[old]) if old == 'prefix': value = int(value) elif old == 'phoneNumbers': match = next((re.search(r'https?://[^"]+', convert_to_unicode(v)) for v in value), None) if match is None: continue value = match.group(0) elif old == 'additionalEmails': value = list(set(value) | {x.email for x in option_value(opts['admins'])}) VidyoPlugin.settings.set(new, value) db.session.commit()
def migrate_event_notes(self):
    """Migrate legacy minutes files into EventNote HTML revisions authored by the janitor user."""
    self.print_step('migrating event notes')
    janitor_user = User.get_one(self.janitor_user_id)
    self.print_msg('Using janitor user {}'.format(janitor_user), always=True)
    for event, obj, minutes, special_prot in committing_iterator(self._iter_minutes()):
        if special_prot:
            # minutes with their own protection settings cannot be represented as notes
            self.print_warning(
                cformat('%{yellow!}{} minutes have special permissions; skipping them').format(obj),
                event_id=event.id
            )
            continue
        path = get_archived_file(minutes, self.archive_dirs)[1]
        if path is None:
            self.print_error(cformat('%{red!}{} minutes not found on disk; skipping them').format(obj),
                             event_id=event.id)
            continue
        with open(path, 'r') as f:
            data = convert_to_unicode(f.read()).strip()
        if not data:
            self.print_warning(cformat('%{yellow}{} minutes are empty; skipping them').format(obj),
                               always=False, event_id=event.id)
            continue
        note = EventNote(linked_object=obj)
        note.create_revision(RenderMode.html, data, janitor_user)
        db.session.add(note)
        if not self.quiet:
            self.print_success(cformat('%{cyan}{}').format(obj), event_id=event.id)
def migrate_groups(self):
    """Migrate legacy local groups and their deduplicated, non-deleted members."""
    print cformat('%{white!}migrating groups')
    for old_group in committing_iterator(self.zodb_root['groups'].itervalues()):
        if old_group.__class__.__name__ != 'Group':
            continue  # anything that is not a plain 'Group' is skipped
        group = LocalGroup(id=int(old_group.id), name=convert_to_unicode(old_group.name).strip())
        print cformat('%{green}+++%{reset} %{white!}{:6d}%{reset} %{cyan}{}%{reset}').format(group.id, group.name)
        members = set()
        for old_member in old_group.members:
            if old_member.__class__.__name__ != 'Avatar':
                print cformat('%{yellow!}!!! Unsupported group member type: {}').format(
                    old_member.__class__.__name__)
                continue
            user = User.get(int(old_member.id))
            if user is None:
                print cformat('%{yellow!}!!! User not found: {}').format(old_member.id)
                continue
            # follow the merge chain to the account the user was merged into
            while user.merged_into_user:
                user = user.merged_into_user
            if user.is_deleted:
                print cformat('%{yellow!}!!! User deleted: {}').format(user.id)
                continue
            members.add(user)
        for member in sorted(members, key=attrgetter('full_name')):
            print cformat('%{blue!}<->%{reset} %{white!}{:6d} %{yellow}{} ({})').format(
                member.id, member.full_name, member.email)
        group.members = members
        db.session.add(group)
def _migrate_question(self, old_question, pos=None, is_deleted=False):
    """Create an AbstractReviewQuestion from a legacy question and attach it to the event."""
    # each legacy question must only be migrated once
    assert old_question not in self.question_map
    new_question = AbstractReviewQuestion(position=pos,
                                          text=convert_to_unicode(old_question._text),
                                          is_deleted=is_deleted)
    self.question_map[old_question] = new_question
    self.event.abstract_review_questions.append(new_question)
    return new_question
def migrate_event_images(self):
    """Migrate legacy event pictures into ImageFile rows plus legacy-id mappings."""
    self.print_step('migrating event images')
    for event, picture in committing_iterator(self._iter_pictures()):
        local_file = picture._localFile
        content_type = mimetypes.guess_type(local_file.fileName)[0] or 'application/octet-stream'
        storage_backend, storage_path, size = self._get_local_file_info(local_file)
        if storage_path is None:
            self.print_warning(cformat('%{yellow}[{}]%{reset} -> %{red!}Not found in filesystem')
                               .format(local_file.id), event_id=event.id)
            continue
        safe_name = secure_filename(convert_to_unicode(local_file.fileName), 'image')
        image = ImageFile(event_id=event.id, filename=safe_name, content_type=content_type,
                          created_dt=now_utc(), size=size, storage_backend=storage_backend,
                          storage_file_id=storage_path)
        db.session.add(image)
        # flush so the image gets its id before the mapping references it
        db.session.flush()
        map_entry = LegacyImageMapping(event_id=event.id, legacy_image_id=local_file.id,
                                       image_id=image.id)
        db.session.add(map_entry)
        if not self.quiet:
            self.print_success(cformat('%{cyan}[{}]%{reset} -> %{blue!}{}').format(local_file.id, image),
                               event_id=event.id)
def migrate_agents(self):
    """Migrate active legacy LiveSync agents to LiveSyncAgent rows; skip inactive/unknown ones."""
    print cformat('%{white!}migrating agents')
    for old_agent in committing_iterator(self.livesync_root['agent_manager']._agents.itervalues()):
        if not old_agent._active:
            print cformat('%{yellow}skipping inactive agent {} ({})%{reset}').format(old_agent._id, old_agent._name)
            continue
        # NOTE(review): initial_data_exported=True assumes existing agents already did their
        # initial export in the legacy system — confirm
        agent = LiveSyncAgent(name=convert_to_unicode(old_agent._name), initial_data_exported=True)
        old_agent_class = old_agent.__class__.__name__
        if old_agent_class == 'InvenioBatchUploaderAgent':
            agent.backend_name = 'invenio'
            agent.settings = {
                'server_url': old_agent._url
            }
        elif old_agent_class == 'CERNSearchUploadAgent':
            agent.backend_name = 'cernsearch'
            agent.settings = {
                'server_url': old_agent._url,
                'username': old_agent._username,
                'password': old_agent._password,
            }
        else:
            print cformat('%{red!}skipping unknown agent type: {}%{reset}').format(old_agent_class)
            continue
        print cformat('- %{cyan}{} ({})').format(agent.name, agent.backend_name)
        db.session.add(agent)
def _process_logo(self, logo, event):
    """Load a legacy event logo from the archive, re-encode it as PNG and store it on the event."""
    path = get_archived_file(logo, self.archive_dirs)[1]
    if path is None:
        self.print_error(cformat('%{red!}Logo not found on disk; skipping it'), event_id=event.id)
        return
    try:
        logo_image = Image.open(path)
    except IOError as e:
        self.print_warning("Cannot open {}: {}".format(path, e), event_id=event.id)
        return
    if logo_image.mode == 'CMYK':
        self.print_warning("Logo is a CMYK {}; converting to RGB".format(logo_image.format),
                           event_id=event.id)
        # this may result in wrong colors, but there's not much we can do...
        logo_image = logo_image.convert('RGB')
    logo_bytes = BytesIO()
    logo_image.save(logo_bytes, 'PNG')
    logo_bytes.seek(0)
    logo_content = logo_bytes.read()
    logo_filename = secure_filename(convert_to_unicode(logo.fileName), 'logo')
    # the image was re-encoded as PNG, so the stored filename gets a .png extension
    logo_filename = os.path.splitext(logo_filename)[0] + '.png'
    event.logo_metadata = {
        'size': len(logo_content),
        'hash': crc32(logo_content),
        'filename': logo_filename,
        'content_type': 'image/png'
    }
    event.logo = logo_content
    if not self.quiet:
        self.print_success(cformat('- %{cyan}[Logo] {}').format(logo.fileName), event_id=event.id)
def _migrate_settings(self):
    """Migrate the legacy abstract manager's submission settings onto the event."""
    start_dt = self._event_to_utc(self.amgr._submissionStartDate)
    end_dt = self._event_to_utc(self.amgr._submissionEndDate)
    modification_end_dt = (self._event_to_utc(self.amgr._modifDeadline)
                           if getattr(self.amgr, '_modifDeadline', None)
                           else None)
    assert start_dt < end_dt
    # a modification deadline less than a minute past the submission end is pointless; drop it
    if modification_end_dt and modification_end_dt - end_dt < timedelta(minutes=1):
        if modification_end_dt != end_dt:
            self.importer.print_warning('Ignoring mod deadline ({} > {})'.format(end_dt, modification_end_dt),
                                        event_id=self.event.id)
        modification_end_dt = None
    abstracts_settings.set_multi(self.event, {
        'start_dt': start_dt,
        'end_dt': end_dt,
        'modification_end_dt': modification_end_dt,
        'announcement': convert_to_unicode(self.amgr._announcement),
        'announcement_render_mode': RenderMode.html,
        'allow_multiple_tracks': bool(getattr(self.amgr, '_multipleTracks', True)),
        'tracks_required': bool(getattr(self.amgr, '_tracksMandatory', False)),
        'allow_attachments': bool(getattr(self.amgr, '_attachFiles', False)),
        'allow_speakers': bool(getattr(self.amgr, '_showSelectAsSpeaker', True)),
        'speakers_required': bool(getattr(self.amgr, '_selectSpeakerMandatory', True)),
        'authorized_submitters': set(filter(None, map(self._user_from_legacy, self.amgr._authorizedSubmitter)))
    })
def migrate_settings(self): print cformat('%{white!}migrating settings') ImporterInvenioPlugin.settings.delete_all() opts = self.zodb_root['plugins']['importer']._PluginType__plugins['invenio']._PluginBase__options ImporterInvenioPlugin.settings.set('server_url', convert_to_unicode(opts['location']._PluginOption__value).strip()) db.session.commit()
def _process_css(self, css, event):
    """Attach a legacy custom stylesheet (and its metadata) to the event."""
    stylesheet = css._localFile
    path = get_archived_file(stylesheet, self.archive_dirs)[1]
    if path is None:
        self.print_error(cformat('%{red!}CSS file not found on disk; skipping it'), event_id=event.id)
        return
    with open(path, 'rb') as f:
        stylesheet_content = convert_to_unicode(f.read())
    # NOTE(review): len()/crc32() are applied to the *unicode* content; for non-ASCII
    # stylesheets size/hash may differ from the on-disk bytes — confirm this is intended
    event.stylesheet_metadata = {
        'size': len(stylesheet_content),
        'hash': crc32(stylesheet_content),
        'filename': secure_filename(convert_to_unicode(stylesheet.fileName), 'stylesheet.css'),
        'content_type': 'text/css'
    }
    event.stylesheet = stylesheet_content
    if not self.quiet:
        self.print_success(cformat('- %{cyan}[CSS] {}').format(stylesheet.fileName), event_id=event.id)
def migrate_settings(self):
    """Copy the legacy legal disclaimer texts into the new legal settings."""
    self.print_step('migrating settings')
    minfo = self.zodb_root['MaKaCInfo']['main']
    legacy_to_new = {'_protectionDisclaimerProtected': 'network_protected_disclaimer',
                     '_protectionDisclaimerRestricted': 'restricted_disclaimer'}
    for legacy_attr, setting_name in legacy_to_new.iteritems():
        legal_settings.set(setting_name, convert_to_unicode(getattr(minfo, legacy_attr)))
    db.session.commit()
def convert_principal(self, old_principal):
    """Convert a legacy principal, falling back to an email-based user lookup for avatars."""
    principal = convert_principal(old_principal)
    if principal is not None:
        return principal
    # unresolved avatars may still be matched through their stored email address
    is_avatar = old_principal.__class__.__name__ in ('Avatar', 'AvatarUserWrapper')
    if is_avatar and 'email' in old_principal.__dict__:
        email = convert_to_unicode(old_principal.__dict__['email']).lower()
        principal = User.find_first(~User.is_deleted, User.all_emails.contains(email))
        if principal is not None:
            self.print_warning('Using {} for {} (matched via {})'.format(principal, old_principal, email))
    return principal
def _migrate_email_log(self, event, item):
    """Convert a legacy email log item into an 'email' event log entry."""
    info = item._logInfo
    entry = self._migrate_log(event, item)
    entry.realm = EventLogRealm.emails
    entry.type = 'email'
    entry.summary = 'Sent email: {}'.format(convert_to_unicode(info['subject']).strip())
    # fall back to sniffing the body when no explicit content type was logged
    content_type = convert_to_unicode(info.get('contentType'))
    if not content_type:
        content_type = 'text/html' if seems_html(info['body']) else 'text/plain'
    entry.data = {
        'from': convert_to_unicode(info['fromAddr']),
        'to': [convert_to_unicode(addr) for addr in set(info['toList'])],
        'cc': [convert_to_unicode(addr) for addr in set(info['ccList'])],
        'bcc': [convert_to_unicode(addr) for addr in set(info.get('bccList', []))],
        'subject': convert_to_unicode(info['subject']),
        'body': convert_to_unicode(info['body']),
        'content_type': content_type,
    }
    return entry
def _migrate_abstract_email_log(self, abstract, zodb_abstract):
    """Recreate the abstract's notification log from the legacy notification entries.

    Legacy entries only carry template and metadata, so recipients/subject/body
    are stored as '<not available>' placeholders with a ``_legacy`` marker.
    """
    for old_entry in zodb_abstract._notifLog._entries:
        email_template = self.email_template_map.get(old_entry._tpl)
        # NOTE(review): when the template is unmapped we read old_entry._tpl._name directly;
        # this assumes _tpl is never None in the ZODB data — confirm
        email_template_name = email_template.title if email_template else convert_to_unicode(old_entry._tpl._name)
        entry = AbstractEmailLogEntry(email_template=email_template, sent_dt=old_entry._date,
                                      user=self._user_from_legacy(old_entry._responsible),
                                      recipients=[], subject='<not available>', body='<not available>',
                                      data={'_legacy': True,
                                            'template_name': email_template_name or '<unnamed>'})
        abstract.email_logs.append(entry)
def migrate_blockings(self):
    """Migrate room-booking blockings, their blocked rooms and allowed principals."""
    # legacy `active` tristate: None = pending, False = rejected, True = accepted
    state_map = {
        None: BlockedRoom.State.pending,
        False: BlockedRoom.State.rejected,
        True: BlockedRoom.State.accepted,
    }
    print cformat("%{white!}migrating blockings")
    for old_blocking_id, old_blocking in self.rb_root["RoomBlocking"]["Blockings"].iteritems():
        b = Blocking(
            id=old_blocking.id,
            # remap creators of merged avatars to the surviving avatar id
            created_by_id=self.merged_avatars.get(old_blocking._createdBy, old_blocking._createdBy),
            created_dt=as_utc(old_blocking._utcCreatedDT),
            start_date=old_blocking.startDate,
            end_date=old_blocking.endDate,
            reason=convert_to_unicode(old_blocking.message),
        )
        print cformat(u"- %{cyan}{}").format(b.reason)
        for old_blocked_room in old_blocking.blockedRooms:
            br = BlockedRoom(
                state=state_map[old_blocked_room.active],
                rejected_by=old_blocked_room.rejectedBy,
                rejection_reason=convert_to_unicode(old_blocked_room.rejectionReason),
            )
            room = Room.get(get_room_id(old_blocked_room.roomGUID))
            room.blocked_rooms.append(br)
            b.blocked_rooms.append(br)
            print cformat(u" %{blue!}Room:%{reset} {} ({})").format(
                room.full_name, BlockedRoom.State(br.state).title
            )
        for old_principal in old_blocking.allowed:
            principal_id = old_principal._id
            if old_principal._type == "Avatar":
                # avatars become users; resolve merged avatars to the surviving id
                principal_id = int(self.merged_avatars.get(old_principal._id, old_principal._id))
                principal_type = "User"
            else:
                principal_type = "Group"
            bp = BlockingPrincipal(_principal=[principal_type, principal_id])
            b._allowed.add(bp)
            print cformat(u" %{blue!}Allowed:%{reset} {}({})").format(bp.entity_type, bp.entity_id)
        db.session.add(b)
    db.session.commit()
def migrate_news(self):
    """Migrate legacy news items, oldest first so database ids follow creation order."""
    self.print_step('migrating news')
    ordered_items = sorted(self.zodb_root['modules']['news']._newsItems,
                           key=attrgetter('_creationDate'))
    for old_item in ordered_items:
        item = NewsItem(title=self._sanitize_title(old_item._title),
                        content=convert_to_unicode(old_item._content),
                        created_dt=old_item._creationDate)
        db.session.add(item)
        db.session.flush()
        self.print_success(item.title)
    db.session.commit()
def _migrate_email_log(self, event, item):
    """Convert a legacy email log item into an 'email' event log entry."""
    info = item._logInfo
    entry = self._migrate_log(event, item)
    entry.realm = EventLogRealm.emails
    entry.type = "email"
    entry.summary = "Sent email: {}".format(convert_to_unicode(info["subject"]).strip())
    # fall back to sniffing the body when no explicit content type was logged
    content_type = convert_to_unicode(info.get("contentType")) or (
        "text/html" if seems_html(info["body"]) else "text/plain"
    )
    entry.data = {
        "from": convert_to_unicode(info["fromAddr"]),
        # recipient lists are deduplicated via set() before conversion
        "to": map(convert_to_unicode, set(info["toList"])),
        "cc": map(convert_to_unicode, set(info["ccList"])),
        "bcc": map(convert_to_unicode, set(info.get("bccList", []))),
        "subject": convert_to_unicode(info["subject"]),
        "body": convert_to_unicode(info["body"]),
        "content_type": content_type,
    }
    return entry
def _user_from_legacy(self, legacy_user, janitor=False):
    """Resolve a legacy avatar/wrapper to a `User`.

    Falls back to an email lookup when the direct link is broken; returns the
    janitor user (when ``janitor`` is set) or ``None`` if nothing matches.
    """
    if isinstance(legacy_user, AvatarUserWrapper):
        user = legacy_user.user
        email = convert_to_unicode(legacy_user.__dict__.get('email', '')).lower() or None
    elif legacy_user.__class__.__name__ == 'Avatar':
        user = AvatarUserWrapper(legacy_user.id).user
        email = convert_to_unicode(legacy_user.email).lower()
    else:
        self.importer.print_error(cformat('%{red!}Invalid legacy user: {}').format(legacy_user),
                                  event_id=self.event.id)
        return self.importer.janitor_user if janitor else None
    if user is None:
        user = self.importer.all_users_by_email.get(email) if email else None
        if user is not None:
            msg = 'Using {} for {} (matched via {})'.format(user, legacy_user, email)
        else:
            msg = cformat('%{yellow}Invalid legacy user: {}').format(legacy_user)
        # repeat warnings for the same legacy user are only forced once
        self.importer.print_warning(msg, event_id=self.event.id,
                                    always=(msg not in self.legacy_warnings_shown))
        self.legacy_warnings_shown.add(msg)
    return user or (self.importer.janitor_user if janitor else None)
def migrate_settings(self):
    """Migrate the legacy legal disclaimer texts to the new legal settings."""
    self.print_step('migrating settings')
    mapping = {'_protectionDisclaimerProtected': 'network_protected_disclaimer',
               '_protectionDisclaimerRestricted': 'restricted_disclaimer'}
    main_info = self.zodb_root['MaKaCInfo']['main']
    for legacy_name, new_name in mapping.iteritems():
        legal_settings.set(new_name, convert_to_unicode(getattr(main_info, legacy_name)))
    db.session.commit()
def convert_principal(self, old_principal):
    """Convert a legacy principal; for unresolved avatars, retry by the stored email address."""
    principal = convert_principal(old_principal)
    if (principal is None and old_principal.__class__.__name__ in ('Avatar', 'AvatarUserWrapper')
            and 'email' in old_principal.__dict__):
        email = convert_to_unicode(old_principal.__dict__['email']).lower()
        # match against any of the user's emails, excluding deleted users
        principal = User.find_first(~User.is_deleted, User.all_emails.contains(email))
        if principal is not None:
            self.print_warning('Using {} for {} (matched via {})'.format(
                principal, old_principal, email))
    return principal
def migrate_settings(self): print cformat('%{white!}migrating settings') InvenioSearchPlugin.settings.delete_all() opts = self.zodb_root['plugins']['search']._PluginType__plugins['invenio']._PluginBase__options InvenioSearchPlugin.settings.set('search_url', convert_to_unicode(opts['serverUrl']._PluginOption__value).strip()) type_map = {'public': 'api_public', 'private': 'api_private', 'redirect': 'redirect'} display_mode = type_map[opts['type']._PluginOption__value] InvenioSearchPlugin.settings.set('display_mode', display_mode) db.session.commit()
def _attachment_from_resource(self, folder, material, resource, base_object=None):
    """Queue an Attachment row (plus AttachmentFile for local files) for a legacy resource.

    Fix: the attachment dict never recorded its file's id, leaving the queued
    AttachmentFile row orphaned; ``data['file_id']`` is now set like in the
    sibling implementation of this method.

    :param folder: the todo-dict of the folder this attachment belongs to
    :param material: the legacy material (supplies the modification-date fallback)
    :param resource: the legacy ``Link`` or local-file resource
    :param base_object: object supplying fallback dates and the event id for error messages
    :return: the attachment's todo-dict, or ``None`` if the resource was skipped
    """
    # best-effort modification date: material, then the base object's dates, then now
    modified_dt = (getattr(material, '_modificationDS', None) or getattr(base_object, 'startDate', None) or
                   getattr(base_object, '_modificationDS', None) or now_utc())
    data = {'id': self._get_id(Attachment),
            'folder_id': folder['id'],
            'user_id': self.janitor_user_id,
            'title': convert_to_unicode(resource.name).strip() or folder['title'],
            'description': convert_to_unicode(resource.description),
            'modified_dt': modified_dt}
    if resource.__class__.__name__ == 'Link':
        data['type'] = AttachmentType.link
        data['link_url'] = convert_to_unicode(resource.url).strip()
        if not data['link_url']:
            self.print_error(cformat('%{red!}[{}] Skipping link, missing URL').format(data['title']),
                             event_id=base_object.id)
            return None
    else:
        data['type'] = AttachmentType.file
        storage_backend, storage_path, size = self._get_local_file_info(resource)
        if storage_path is None:
            self.print_error(cformat('%{red!}File {} not found on disk').format(resource._LocalFile__archivedId),
                             event_id=base_object.id)
            return None
        filename = secure_filename(convert_to_unicode(resource.fileName), 'attachment')
        file_data = {'id': self._get_id(AttachmentFile),
                     'attachment_id': data['id'],
                     'user_id': self.janitor_user_id,
                     'created_dt': modified_dt,
                     'filename': filename,
                     'content_type': mimetypes.guess_type(filename)[0] or 'application/octet-stream',
                     'size': size,
                     'storage_backend': storage_backend,
                     'storage_file_id': storage_path}
        # link the attachment row to its (only) file row (was missing here)
        data['file_id'] = file_data['id']
        self.todo[AttachmentFile].append(file_data)
    # translate the legacy ACL into principal rows + a protection mode
    tmp = ProtectionTarget()
    protection_from_ac(tmp, resource._Resource__ac)
    self.todo[AttachmentPrincipal] += tmp.make_principal_rows(attachment_id=data['id'])
    data['protection_mode'] = tmp.protection_mode
    self.todo[Attachment].append(data)
    return data
def migrate_event_locations(self):
    """Migrate per-event venue/room data, mapping names to room-booking rooms when possible."""
    self.print_step("Migrating event locations")
    for old_event in committing_iterator(self._iter_events()):
        # legacy events keep lists of places/rooms; only the first entry is used
        custom_location = old_event.places[0] if getattr(
            old_event, 'places', None) else None
        custom_room = old_event.rooms[0] if getattr(
            old_event, 'rooms', None) else None
        location_name = None
        room_name = None
        has_room = False
        updates = {}
        if custom_location:
            location_name = convert_to_unicode(
                fix_broken_string(custom_location.name, True))
            if custom_location.address:
                updates[Event.own_address] = convert_to_unicode(
                    fix_broken_string(custom_location.address, True))
        if custom_room:
            room_name = convert_to_unicode(
                fix_broken_string(custom_room.name, True))
        if location_name and room_name:
            # try to resolve the (venue, room) name pair to a room-booking room
            mapping = self.room_mapping.get((location_name, room_name))
            if mapping:
                has_room = True
                updates[Event.own_venue_id] = mapping[0]
                updates[Event.own_room_id] = mapping[1]
        # if we don't have a RB room set, use whatever location/room name we have
        if not has_room:
            venue_id = self.venue_mapping.get(location_name)
            if venue_id is not None:
                updates[Event.own_venue_id] = venue_id
                updates[Event.own_venue_name] = ''
            else:
                updates[Event.own_venue_name] = location_name or ''
            updates[Event.own_room_name] = room_name or ''
        if updates:
            Event.query.filter_by(id=int(old_event.id)).update(
                updates, synchronize_session=False)
            if not self.quiet:
                self.print_success(repr(updates), event_id=old_event.id)
def _migrate_category(self, old_cat, position):
    """Recursively convert a legacy category (and its whole subtree) to a `Category`."""
    # unlimited visibility is 999 but we have a 994 for some reason.. since nobody
    # has 900 levels of nesting we can just go for that threshold instead
    visibility = None if old_cat._visibility > 900 else old_cat._visibility
    if visibility == 0:
        # 'invisible' no longer exists; the closest remaining level is 1
        self.print_warning("Raising visibility from 'invisible' to 'category-only'", event_id=old_cat.id)
        visibility = 1
    # the legacy notification list is free-form text; split on any separator people used
    emails = re.split(r'[\s;,]+', convert_to_unicode(getattr(old_cat, '_notifyCreationList', '')))
    emails = {sanitize_email(email).lower() for email in emails}
    emails = sorted(email for email in emails if is_valid_mail(email, False))
    default_themes = self._process_default_themes(old_cat)
    title = self._fix_title(convert_to_unicode(old_cat.name), old_cat.id)
    cat = Category(id=int(old_cat.id), position=position, title=title,
                   description=convert_to_unicode(old_cat.description), visibility=visibility,
                   timezone=convert_to_unicode(old_cat._timezone), event_creation_notification_emails=emails,
                   default_event_themes=default_themes,
                   suggestions_disabled=getattr(old_cat, '_suggestions_disabled', False))
    if not self.quiet:
        self.print_success(cat.title, event_id=cat.id)
    if old_cat._icon:
        self._process_icon(cat, old_cat._icon)
    self._process_protection(cat, old_cat)
    # children keep the legacy `_order` via their 1-based position argument
    cat.children = [(self._migrate_category(old_subcat, i))
                    for i, old_subcat in enumerate(sorted(old_cat.subcategories.itervalues(),
                                                          key=attrgetter('_order')), 1)]
    return cat
def migrate_event_dates_titles(self):
    """Populate title/description/timezone/start/end for all events.

    Events that were deleted from ZODB still need non-NULL column data,
    so they receive placeholder values in one bulk update at the end.
    """
    self.print_step("Migrating event dates and titles")
    for old_event in committing_iterator(self._iter_events()):
        if 'title' not in old_event.__dict__:
            self.print_error('Event has no title in ZODB', old_event.id)
            continue
        tz = old_event.__dict__.get('timezone', 'UTC')
        updates = {
            Event.title: convert_to_unicode(old_event.__dict__['title']) or '(no title)',
            Event.description: convert_to_unicode(old_event.__dict__['description']) or '',
            Event.timezone: tz,
            # legacy datetimes may be naive; interpret them in the event's timezone
            Event.start_dt: self._fix_naive(old_event, old_event.__dict__['startDate'], tz),
            Event.end_dt: self._fix_naive(old_event, old_event.__dict__['endDate'], tz)
        }
        Event.query.filter_by(id=int(old_event.id)).update(updates, synchronize_session=False)
        if not self.quiet:
            self.print_success('', event_id=old_event.id)
    # deleted events are not in zodb but also need data
    updates = {Event.title: '***deleted***',
               Event.description: '',
               Event.timezone: 'UTC',
               Event.start_dt: datetime(1970, 1, 1, tzinfo=pytz.utc),
               Event.end_dt: datetime(1970, 1, 1, tzinfo=pytz.utc)}
    Event.query.filter_by(is_deleted=True).update(updates, synchronize_session=False)
    db.session.commit()
def _migrate_api_keys(self, avatar, user): ak = getattr(avatar, 'apiKey', None) if not ak: return last_used_uri = None if ak._lastPath and ak._lastQuery: last_used_uri = '{}?{}'.format(convert_to_unicode(ak._lastPath), convert_to_unicode(ak._lastQuery)) elif ak._lastPath: last_used_uri = convert_to_unicode(ak._lastPath) api_key = APIKey(token=ak._key, secret=ak._signKey, is_blocked=ak._isBlocked, is_persistent_allowed=getattr(ak, '_persistentAllowed', False), created_dt=self._to_utc(ak._createdDT), last_used_dt=self._to_utc(ak._lastUsedDT), last_used_ip=ak._lastUsedIP, last_used_uri=last_used_uri, last_used_auth=ak._lastUseAuthenticated, use_count=ak._useCount) user.api_key = api_key print cformat('%{blue!}<->%{reset} %{yellow}{}%{reset}').format( api_key) for old_key in ak._oldKeys: # We have no creation time so we use *something* older.. fake_created_dt = self._to_utc(ak._createdDT) - timedelta(hours=1) # We don't have anything besides the api key for old keys, so we use a random secret user.old_api_keys.append( APIKey(token=old_key, secret=unicode(uuid4()), created_dt=fake_created_dt, is_active=False))
def process_emails(self, event, principals, emails, name, color, full_access=None, roles=None):
    """Sanitize and deduplicate e-mail addresses, then register each valid one as a principal."""
    cleaned = set()
    for raw in emails:
        cleaned.add(sanitize_email(convert_to_unicode(raw).lower()))
    for address in cleaned:
        if is_valid_mail(address, False):
            self.process_principal(event, principals, address, name, color, full_access, roles)
def _migrate_abstract_email_log(self, abstract, zodb_abstract):
    """Recreate the abstract's notification log.

    Legacy entries carry no recipients/subject/body, so placeholders are
    stored and each row is flagged with ``_legacy`` in its data.
    """
    for old_entry in zodb_abstract._notifLog._entries:
        email_template = self.email_template_map.get(old_entry._tpl)
        # the template may have been deleted meanwhile; fall back to its stored name
        email_template_name = email_template.title if email_template else convert_to_unicode(old_entry._tpl._name)
        entry = AbstractEmailLogEntry(email_template=email_template, sent_dt=old_entry._date,
                                      user=self._user_from_legacy(old_entry._responsible),
                                      recipients=[], subject='<not available>', body='<not available>',
                                      data={'_legacy': True, 'template_name': email_template_name or '<unnamed>'})
        abstract.email_logs.append(entry)
def migrate_event_settings(self):
    """Enable the PayPal payment plugin for events that had a usable PayPal setup."""
    print cformat('%{white!}migrating event settings')
    default_method_name = PaypalPaymentPlugin.settings.get('method_name')
    # wipe any data from a previous run before re-importing
    EventSetting.delete_all(PaypalPaymentPlugin.event_settings.module)
    # PayPal merchant account IDs are exactly 13 alphanumeric characters
    account_id_re = re.compile(r'^[a-zA-Z0-9]{13}$')
    for event in committing_iterator(self._iter_events(), 25):
        pp = event._modPay.payMods['PayPal']
        business = pp._business.strip()
        # the business must look like either an e-mail address or an account id
        if not business or (not is_valid_mail(business, multi=False) and not account_id_re.match(business)):
            print cformat(' - %{yellow!}event {} skipped (business: {})').format(event.id, business or '(none)')
            continue
        PaypalPaymentPlugin.event_settings.set(event, 'enabled', True)
        method_name = convert_to_unicode(pp._title)
        if method_name.lower() == 'paypal':
            # replace the plain legacy default with the globally configured name
            method_name = default_method_name
        PaypalPaymentPlugin.event_settings.set(event, 'method_name', method_name)
        PaypalPaymentPlugin.event_settings.set(event, 'business', pp._business)
        print cformat(' - %{cyan}event {} (business: {})').format(event.id, pp._business)
def _migrate_settings(self):
    """Convert the legacy abstract-manager configuration into `abstracts_settings`."""
    start_dt = self._event_to_utc(self.amgr._submissionStartDate)
    end_dt = self._event_to_utc(self.amgr._submissionEndDate)
    modification_end_dt = (self._event_to_utc(self.amgr._modifDeadline)
                           if getattr(self.amgr, '_modifDeadline', None)
                           else None)
    # migration aborts on inverted submission periods on purpose
    assert start_dt < end_dt
    # a modification deadline that isn't meaningfully after the submission end
    # is useless; drop it (warn unless it's exactly the same moment)
    if modification_end_dt and modification_end_dt - end_dt < timedelta(minutes=1):
        if modification_end_dt != end_dt:
            self.importer.print_warning('Ignoring mod deadline ({} > {})'.format(end_dt, modification_end_dt),
                                        event_id=self.event.id)
        modification_end_dt = None
    abstracts_settings.set_multi(self.event, {
        'start_dt': start_dt,
        'end_dt': end_dt,
        'modification_end_dt': modification_end_dt,
        'announcement': convert_to_unicode(self.amgr._announcement),
        'announcement_render_mode': RenderMode.html,
        'allow_multiple_tracks': bool(getattr(self.amgr, '_multipleTracks', True)),
        'tracks_required': bool(getattr(self.amgr, '_tracksMandatory', False)),
        'allow_attachments': bool(getattr(self.amgr, '_attachFiles', False)),
        'allow_speakers': bool(getattr(self.amgr, '_showSelectAsSpeaker', True)),
        'speakers_required': bool(getattr(self.amgr, '_selectSpeakerMandatory', True)),
        # drop legacy users that could not be mapped to new users
        'authorized_submitters': set(filter(None, map(self._user_from_legacy, self.amgr._authorizedSubmitter)))
    })
def _process_icon(self, cat, icon):
    """Load a legacy category icon from the archive, normalize it to a
    16x16 PNG and store it (plus metadata) on the category."""
    path = get_archived_file(icon, self.archive_dirs)[1]
    if path is None:
        self.print_error(cformat('%{red!}Icon not found on disk; skipping it'), event_id=cat.id)
        return
    try:
        icon_image = Image.open(path)
    except IOError as e:
        self.print_warning("Cannot open {}: {}".format(path, e), event_id=cat.id)
        return
    if icon_image.mode == 'CMYK':
        self.print_warning("Icon is a CMYK {}; converting to RGB".format(icon_image.format), always=False,
                           event_id=cat.id)
        # this may result in wrong colors, but there's not much we can do...
        icon_image = icon_image.convert('RGB')
    if icon_image.size != (16, 16):
        self.print_warning("Icon is {}x{}; resizing to 16x16".format(*icon_image.size), always=False,
                           event_id=cat.id)
        icon_image = icon_image.resize((16, 16), Image.ANTIALIAS)
    icon_bytes = BytesIO()
    icon_image.save(icon_bytes, 'PNG')
    icon_bytes.seek(0)
    icon_content = icon_bytes.read()
    # always store as .png regardless of the original file extension
    icon_filename = secure_filename(convert_to_unicode(icon.fileName), 'icon')
    icon_filename = os.path.splitext(icon_filename)[0] + '.png'
    cat.icon_metadata = {
        'size': len(icon_content),
        'hash': crc32(icon_content),
        'filename': icon_filename,
        'content_type': 'image/png'
    }
    cat.icon = icon_content
def _to_network(self, mask):
    """Convert a legacy IP-prefix string into an `ip_network`, or ``None`` if invalid.

    Legacy masks were prefix strings (e.g. ``192.168`` or ``2001:db8:``);
    missing trailing segments are zero-filled and the prefix length is
    derived from the number of segments given.  Invalid masks produce a
    warning and ``None`` instead of aborting the migration.
    """
    mask = convert_to_unicode(mask).strip()
    net = None
    try:
        if re.match(r'^[0-9.]+$', mask):
            # ipv4 mask
            mask = mask.rstrip('.')
            segments = mask.split('.')
            if len(segments) <= 4:
                addr = '.'.join(segments + ['0'] * (4 - len(segments)))
                net = ip_network('{}/{}'.format(addr, 8 * len(segments)))
        # anchored with `$` to match the ipv4 branch; previously a mask with
        # trailing garbage slipped through and crashed ip_network below
        elif re.match(r'^[0-9a-f:]+$', mask):
            # ipv6 mask
            # there shouldn't be a `::` in the IP as it was a startswith-like check before
            mask = mask.rstrip(':')
            segments = mask.split(':')
            if len(segments) <= 8:
                addr = ':'.join(segments + ['0'] * (8 - len(segments)))
                net = ip_network('{}/{}'.format(addr, 16 * len(segments)))
    except ValueError:
        # out-of-range octets/segments etc. -- treat like any other bad mask
        net = None
    if net is None:
        self.print_warning(cformat('%{yellow!}Skipped invalid mask: {}').format(mask))
    return net
def _translate_tpl_data(self, tpl_data):
    """Convert a legacy badge/poster template tuple into the new template data dict.

    Returns ``None`` (after logging an error) when the stored width or
    height is not a valid number.
    """
    width = _sane_float(tpl_data[1]['width'])
    height = _sane_float(tpl_data[1]['height'])
    if width is None or height is None:
        self.importer.print_error(cformat('%{red!}Template has invalid dimensions ({}, {})').format(width, height),
                                  event_id=self.event_id)
        return None
    return {
        'title': convert_to_unicode(tpl_data[0]),
        'data': {
            # legacy templates use a different unit; scale both dimensions
            'width': width * self.zoom_multiplier,
            # reuse the already-validated `height` instead of re-parsing it
            'height': height * self.zoom_multiplier,
            'items': [self._translate_tpl_item(item) for item in tpl_data[4]]
        }
    }
def _process_logo(self, logo, event):
    """Load a legacy event logo from the archive, convert it to PNG and
    store it (plus metadata) on the event."""
    path = get_archived_file(logo, self.archive_dirs)[1]
    if path is None:
        self.print_error(cformat('%{red!}Logo not found on disk; skipping it'), event_id=event.id)
        return
    try:
        logo_image = Image.open(path)
    except IOError as e:
        self.print_warning("Cannot open {}: {}".format(path, e), event_id=event.id)
        return
    if logo_image.mode == 'CMYK':
        self.print_warning("Logo is a CMYK {}; converting to RGB".format(logo_image.format), event_id=event.id)
        # this may result in wrong colors, but there's not much we can do...
        logo_image = logo_image.convert('RGB')
    logo_bytes = BytesIO()
    logo_image.save(logo_bytes, 'PNG')
    logo_bytes.seek(0)
    logo_content = logo_bytes.read()
    # always store as .png regardless of the original file extension
    logo_filename = secure_filename(convert_to_unicode(logo.fileName), 'logo')
    logo_filename = os.path.splitext(logo_filename)[0] + '.png'
    event.logo_metadata = {
        'size': len(logo_content),
        'hash': crc32(logo_content),
        'filename': logo_filename,
        'content_type': 'image/png'
    }
    event.logo = logo_content
    if not self.quiet:
        self.print_success(cformat('- %{cyan}[Logo] {}').format(logo.fileName), event_id=event.id)
def migrate_settings(self): print cformat('%{white!}migrating settings') VidyoPlugin.settings.delete_all() opts = self.zodb_root['plugins']['Collaboration']._PluginType__plugins[ 'Vidyo']._PluginBase__options VidyoPlugin.settings.set('managers', convert_principal_list(opts['admins'])) VidyoPlugin.settings.set( 'acl', convert_principal_list(opts['AuthorisedUsersGroups'])) settings_map = { 'adminAPIURL': 'admin_api_wsdl', 'userAPIURL': 'user_api_wsdl', 'prefix': 'indico_room_prefix', 'indicoGroup': 'room_group_name', 'phoneNumbers': 'vidyo_phone_link', 'maxDaysBeforeClean': 'num_days_old', 'indicoUsername': '******', 'indicoPassword': '******', 'contactSupport': 'support_email', 'cleanWarningAmount': 'max_rooms_warning', 'additionalEmails': 'notification_emails' } for old, new in settings_map.iteritems(): value = option_value(opts[old]) if old == 'prefix': value = int(value) elif old == 'phoneNumbers': match = next( (re.search(r'https?://[^"]+', convert_to_unicode(v)) for v in value), None) if match is None: continue value = match.group(0) elif old == 'additionalEmails': value = list( set(value) | {x.email for x in option_value(opts['admins'])}) VidyoPlugin.settings.set(new, value) db.session.commit()
def migrate_event_images(self):
    """Migrate legacy event pictures to `ImageFile` rows, keeping a legacy-id
    mapping so old image URLs can be redirected."""
    self.print_step('migrating event images')
    for event, picture in committing_iterator(self._iter_pictures()):
        local_file = picture._localFile
        content_type = mimetypes.guess_type(local_file.fileName)[0] or 'application/octet-stream'
        storage_backend, storage_path, size = self._get_local_file_info(local_file)
        if storage_path is None:
            self.print_warning(cformat('%{yellow}[{}]%{reset} -> %{red!}Not found in filesystem')
                               .format(local_file.id), event_id=event.id)
            continue
        filename = secure_filename(convert_to_unicode(local_file.fileName), 'image')
        image = ImageFile(event_id=event.id,
                          filename=filename,
                          content_type=content_type,
                          created_dt=now_utc(),
                          size=size,
                          storage_backend=storage_backend,
                          storage_file_id=storage_path)
        db.session.add(image)
        # flush so `image.id` is available for the mapping row below
        db.session.flush()
        map_entry = LegacyImageMapping(event_id=event.id, legacy_image_id=local_file.id, image_id=image.id)
        db.session.add(map_entry)
        if not self.quiet:
            self.print_success(cformat('%{cyan}[{}]%{reset} -> %{blue!}{}').format(local_file.id, image),
                               event_id=event.id)
def _process_protection(self, cat, old_cat):
    """Translate the legacy access-control data of a category.

    Maps the legacy protection level, domain restrictions and the three
    legacy ACLs (readers, managers, event creators) onto the new model.
    """
    ac = old_cat._Category__ac
    cat.event_creation_restricted = bool(old_cat._Category__confCreationRestricted)
    cat.own_no_access_contact = convert_to_unicode(getattr(ac, 'contactInfo', ''))
    # legacy levels: -1=public, 0=inheriting, 1=protected; the root category
    # (id '0') has nothing to inherit from, so 'inheriting' means public there
    if ac._accessProtection == -1 or (old_cat.id == '0' and ac._accessProtection == 0):
        cat.protection_mode = ProtectionMode.public
    elif ac._accessProtection == 0:
        cat.protection_mode = ProtectionMode.inheriting
    elif ac._accessProtection == 1:
        cat.protection_mode = ProtectionMode.protected
    # migrate domain protection only if the category is public; switch to protected in that case
    if cat.protection_mode == ProtectionMode.public and ac.requiredDomains:
        cat.protection_mode = ProtectionMode.protected
        self._process_domains(cat, ac.requiredDomains)
    # read access
    for manager in ac.allowed:
        self.process_principal(cat, manager, 'User', 'cyan', read_access=True)
    # management access
    for manager in ac.managers:
        self.process_principal(cat, manager, 'Manager', 'blue!', full_access=True)
    # creation access
    for creator in old_cat._Category__confCreators:
        self.process_principal(cat, creator, 'Creator', 'magenta', roles={'create'})
def _user_from_avatar(self, avatar, **kwargs):
    """Build a `User` from a legacy avatar.

    The user is marked deleted if requested via ``is_deleted`` or if the
    primary e-mail is invalid.  Extra kwargs are passed to the `User`
    constructor (e.g. ``merged_into_id``).
    """
    email = sanitize_email(convert_to_unicode(avatar.email).lower().strip())
    secondary_emails = {sanitize_email(convert_to_unicode(x).lower().strip()) for x in avatar.secondaryEmails}
    secondary_emails = {x for x in secondary_emails if x and is_valid_mail(x, False) and x != email}
    # we handle deletion later. otherwise it might be set before secondary_emails which would
    # result in those emails not being marked as deleted
    is_deleted = kwargs.pop('is_deleted', False)
    user = User(id=int(avatar.id),
                email=email,
                first_name=convert_to_unicode(avatar.name).strip() or 'UNKNOWN',
                last_name=convert_to_unicode(avatar.surName).strip() or 'UNKNOWN',
                title=USER_TITLE_MAP.get(avatar.title, UserTitle.none),
                phone=convert_to_unicode(avatar.telephone[0]).strip(),
                affiliation=convert_to_unicode(avatar.organisation[0]).strip(),
                address=convert_to_unicode(avatar.address[0]).strip(),
                secondary_emails=secondary_emails,
                is_blocked=avatar.status == 'disabled',
                **kwargs)
    if is_deleted or not is_valid_mail(user.email):
        user.is_deleted = True
    return user
def migrate_event_notes(self):
    """Migrate legacy minutes files to `EventNote` revisions authored by the
    janitor user; minutes with special permissions are skipped."""
    self.print_step('migrating event notes')
    janitor_user = User.get_one(self.janitor_user_id)
    self.print_msg('Using janitor user {}'.format(janitor_user), always=True)
    for event, obj, minutes, special_prot in committing_iterator(self._iter_minutes()):
        if special_prot:
            # per-minutes protection cannot be represented on notes
            self.print_warning(cformat('%{yellow!}{} minutes have special permissions; skipping them').format(obj),
                               event_id=event.id)
            continue
        path = get_archived_file(minutes, self.archive_dirs)[1]
        if path is None:
            self.print_error(cformat('%{red!}{} minutes not found on disk; skipping them').format(obj),
                             event_id=event.id)
            continue
        with open(path, 'r') as f:
            data = convert_to_unicode(f.read()).strip()
        if not data:
            self.print_warning(cformat('%{yellow}{} minutes are empty; skipping them').format(obj),
                               always=False, event_id=event.id)
            continue
        note = EventNote(linked_object=obj)
        note.create_revision(RenderMode.html, data, janitor_user)
        db.session.add(note)
        if not self.quiet:
            self.print_success(cformat('%{cyan}{}').format(obj), event_id=event.id)
def _fix_email(self, email):
    """Normalize a legacy e-mail address and make it unique within this event.

    Garbage addresses without ``@`` get an ``@example.com`` suffix; addresses
    already seen get a ``user+N@host`` suffix.  The chosen address is recorded
    in ``self.emails`` and returned.
    """
    email = convert_to_unicode(email).lower() or '*****@*****.**'
    no_email = email == '*****@*****.**'
    if '@' in email:
        user, host = email.split('@', 1)
    else:
        self.importer.print_warning(
            cformat('Garbage email %{red}{0}%{reset}; using %{green}{0}@example.com%{reset} instead').format(email),
            event_id=self.event.id)
        user, host = email, 'example.com'
        email = email + '@example.com'
    counter = 1
    while email in self.emails:
        email = '{}+{}@{}'.format(user, counter, host)
        counter += 1
    if counter > 1 and not no_email:
        self.importer.print_warning(
            cformat('Duplicate email %{yellow}{}@{}%{reset}; using %{green}{}%{reset} '
                    'instead').format(user, host, email),
            event_id=self.event.id)
    self.emails.add(email)
    return email
def _sanitize(title):
    """Strip tags/HTML entities from a legacy title and collapse whitespace."""
    text = convert_to_unicode(title)
    text = HTMLParser().unescape(strip_tags(text))
    return WHITESPACE_RE.sub(' ', text).strip()
def _sanitize_title(self, title, _ws_re=re.compile(r'\s+')):
    """Return *title* as plain text: entities unescaped, tags stripped, whitespace collapsed."""
    # `_ws_re` is compiled once at definition time on purpose
    plain = HTMLParser().unescape(strip_tags(convert_to_unicode(title)))
    return _ws_re.sub(' ', plain).strip()
def _migrate_abstract_reviews(self, abstract, zodb_abstract, old_abstract, as_duplicate_reviews):
    """Recreate `AbstractReview` rows from the legacy per-track judgment history.

    ``as_duplicate_reviews`` collects ``(review, original_abstract)`` pairs for
    mark-as-duplicate judgments so they can be linked once all abstracts exist.
    Only the latest judgment per (track, judge) is kept.
    """
    old_judgments = {(j.track_id, j.judge): j for j in old_abstract.judgments}
    for old_track_id, zodb_judgments in getattr(zodb_abstract, '_trackJudgementsHistorical', {}).iteritems():
        seen_judges = set()
        for zodb_judgment in zodb_judgments:
            if zodb_judgment is None:
                continue
            if zodb_judgment.__class__.__name__ == 'AbstractUnMarkedAsDuplicated':
                # we don't have "unmarked as duplicate" anymore
                continue
            try:
                track = self.track_map_by_id[int(zodb_judgment._track.id)]
            except KeyError:
                self.importer.print_warning(cformat('%{blue!}Abstract {} {yellow}judged in invalid track {}%{reset}')
                                            .format(zodb_abstract._id, int(zodb_judgment._track.id)),
                                            event_id=self.event.id)
                continue
            judge = self._user_from_legacy(zodb_judgment._responsible)
            if not judge:
                # self.importer.print_warning(
                #     cformat('%{blue!}Abstract {} {yellow}had an empty judge ({})!%{reset}').format(
                #         zodb_abstract._id, zodb_judgment), event_id=self.event.id)
                continue
            elif judge in seen_judges:
                # self.importer.print_warning(
                #     cformat("%{blue!}Abstract {}: {yellow}judge '{}' seen more than once ({})!%{reset}")
                #     .format(zodb_abstract._id, judge, zodb_judgment), event_id=self.event.id)
                continue
            seen_judges.add(judge)
            try:
                created_dt = as_utc(zodb_judgment._date)
            except AttributeError:
                # very old judgments have no date; approximate with the event start
                created_dt = self.event.start_dt
            review = AbstractReview(created_dt=created_dt,
                                    proposed_action=self.ACTION_MAP[zodb_judgment.__class__.__name__],
                                    comment=convert_to_unicode(zodb_judgment._comment))
            if review.proposed_action == AbstractAction.accept:
                # acceptances need the matching new-style judgment for type/track
                try:
                    old_judgment = old_judgments[int(old_track_id), judge]
                except KeyError:
                    self.importer.print_error(cformat('%{yellow!}Abstract #{} has no new judgment for {} / {}')
                                              .format(abstract.friendly_id, int(old_track_id), judge),
                                              event_id=self.event.id)
                    continue
                review.proposed_contribution_type = old_judgment.accepted_type
                review.proposed_track = self.track_map_by_id[old_judgment.track_id]
            elif review.proposed_action == AbstractAction.change_tracks:
                review.proposed_tracks = {self.track_map[t] for t in zodb_judgment._proposedTracks}
            elif review.proposed_action == AbstractAction.mark_as_duplicate:
                # link to the original abstract later, once all abstracts exist
                as_duplicate_reviews.add((review, zodb_judgment._originalAbst))
            review.user = judge
            review.track = track
            answered_questions = set()
            for old_answer in getattr(zodb_judgment, '_answers', []):
                if old_answer._question in answered_questions:
                    self.importer.print_warning(
                        cformat("%{blue!}Abstract {}: {yellow}question answered more than once!")
                        .format(abstract.friendly_id), event_id=self.event.id)
                    continue
                try:
                    question = self.question_map[old_answer._question]
                except KeyError:
                    # the question was deleted; migrate it as a deleted question
                    question = self._migrate_question(old_answer._question, is_deleted=True)
                    self.importer.print_warning(
                        cformat("%{blue!}Abstract {}: {yellow}answer for deleted question")
                        .format(abstract.friendly_id), event_id=self.event.id)
                rating = AbstractReviewRating(question=question, value=self._convert_scale(old_answer))
                review.ratings.append(rating)
                answered_questions.add(old_answer._question)
            abstract.reviews.append(review)
def migrate_users(self):
    """Migrate avatars to `User` rows: settings, favorites, API keys, login
    identities and users merged into this account.

    Skips avatars that were merged away, never activated, or are empty
    shells with no names, identities or links.
    """
    print cformat('%{white!}migrating users')
    # (provider, username) pairs already assigned, to avoid duplicate identities
    seen_identities = set()
    for avatar in committing_iterator(self._iter_avatars(), 5000):
        if getattr(avatar, '_mergeTo', None):
            print cformat('%{red!}!!!%{reset} '
                          '%{yellow!}Skipping {} - merged into {}').format(avatar.id, avatar._mergeTo.id)
            continue
        elif avatar.status == 'Not confirmed':
            print cformat('%{yellow!}!!!%{reset} '
                          '%{yellow!}Skipping {} - not activated').format(avatar.id)
            continue
        elif not avatar.name.strip() and not avatar.surName.strip():
            links = {(obj, role): list(objs)
                     for obj, x in avatar.linkedTo.iteritems()
                     for role, objs in x.iteritems()
                     if objs}
            if not avatar.identities and not links:
                print cformat('%{yellow!}!!!%{reset} '
                              '%{yellow!}Skipping {} - no names and no identities/links').format(avatar.id)
                continue
        user = self._user_from_avatar(avatar)
        self._fix_collisions(user, avatar)
        db.session.add(user)
        settings = self._settings_from_avatar(avatar)
        user_settings.set_multi(user, settings)
        # favorite users cannot be migrated here since the target user might not have been migrated yet
        # XXX: adapt to new categories for 2.0
        user.favorite_categories = set(filter(None, avatar.linkedTo['category']['favorite']))
        db.session.flush()
        print cformat('%{green}+++%{reset} '
                      '%{white!}{:6d}%{reset} %{cyan}{}%{reset} [%{blue!}{}%{reset}] '
                      '{{%{cyan!}{}%{reset}}}').format(user.id, user.full_name, user.email,
                                                       ', '.join(user.secondary_emails))
        # migrate API keys
        self._migrate_api_keys(avatar, user)
        # migrate identities of non-deleted avatars
        if not user.is_deleted:
            for old_identity in avatar.identities:
                identity = None
                username = convert_to_unicode(old_identity.login).strip().lower()
                if not username:
                    print cformat("%{red!}!!!%{reset} "
                                  "%{yellow!}Empty username: {}. Skipping identity.").format(old_identity)
                    continue
                provider = {'LocalIdentity': 'indico',
                            'LDAPIdentity': self.ldap_provider_name}.get(old_identity.__class__.__name__)
                if provider is None:
                    print cformat("%{red!}!!!%{reset} "
                                  "%{yellow!}Unsupported provider: {}. Skipping identity.").format(
                        old_identity.__class__.__name__)
                    continue
                if (provider, username) in seen_identities:
                    print cformat("%{red!}!!!%{reset} "
                                  "%{yellow!}Duplicate identity: {}, {}. Skipping.").format(provider, username)
                    continue
                if provider == 'indico' and not self.ignore_local_accounts:
                    identity = Identity(provider=provider, identifier=username)
                    if not hasattr(old_identity, 'algorithm'):  # plaintext password
                        if not old_identity.password:
                            # password is empty, skip identity
                            print cformat("%{red!}!!!%{reset} "
                                          "%{yellow!}Identity '{}' has empty password. Skipping identity.").format(
                                old_identity.login)
                            continue
                        identity.password = old_identity.password
                    else:
                        assert old_identity.algorithm == 'bcrypt'
                        identity.password_hash = old_identity.password
                elif provider == self.ldap_provider_name:
                    identity = Identity(provider=provider, identifier=username)
                if identity:
                    print cformat('%{blue!}<->%{reset} %{yellow}{}%{reset}').format(identity)
                    user.identities.add(identity)
                seen_identities.add((provider, username))
        # users previously merged into this avatar become deleted users
        # pointing at the surviving account
        for merged_avatar in getattr(avatar, '_mergeFrom', ()):
            if merged_avatar.id == avatar.id:
                continue
            merged = self._user_from_avatar(merged_avatar, is_deleted=True, merged_into_id=user.id)
            print cformat('%{blue!}***%{reset} '
                          '%{white!}{:6d}%{reset} %{cyan}{}%{reset} [%{blue!}{}%{reset}] '
                          '{{%{cyan!}{}%{reset}}}').format(merged.id, merged.full_name, merged.email,
                                                           ', '.join(merged.secondary_emails))
            self._fix_collisions(merged, merged_avatar)
            db.session.add(merged)
            db.session.flush()
def migrate_rooms(self):
    """Migrate legacy room-booking data.

    Three passes: per-location equipment names, VC equipment (as children of
    the generic 'Video conference' type), and finally the rooms themselves
    with photos, bookable/nonbookable periods and custom attributes.
    """
    eq = defaultdict(set)
    vc = defaultdict(set)
    # first pass: collect equipment/VC names used per location
    # (legacy equipment is a backtick-separated string; note the legacy typo 'avaibleVC')
    for old_room_id, old_room in self.rb_root['Rooms'].iteritems():
        eq[old_room._locationName].update(e for e in old_room._equipment.split('`') if e)
        vc[old_room._locationName].update(e for e in getattr(old_room, 'avaibleVC', []) if e)
    print cformat('%{white!}migrating equipment')
    for name, eqs in eq.iteritems():
        l = Location.find_first(name=name)
        if l is None:
            print cformat('%{yellow!}*** WARNING')
            print cformat("%{{yellow!}}***%{{reset}} Location '{}' does not exist. Skipped equipment: {}"
                          .format(name, eqs))
            continue
        l.equipment_types.extend(EquipmentType(name=x) for x in eqs)
        print cformat('- [%{cyan}{}%{reset}] {}').format(name, eqs)
        db.session.add(l)
    db.session.commit()
    print
    print cformat('%{white!}migrating vc equipment')
    for name, vcs in vc.iteritems():
        l = Location.find_first(name=name)
        if l is None:
            print cformat('%{yellow!}*** WARNING')
            print cformat("%{{yellow!}}***%{{reset}} Location '{}' does not exist. Skipped VC equipment: {}"
                          .format(name, vcs))
            continue
        # VC equipment entries become children of the generic 'Video conference' type
        pvc = l.get_equipment_by_name('Video conference')
        for vc_name in vcs:
            req = EquipmentType(name=vc_name)
            req.parent = pvc
            l.equipment_types.append(req)
            print cformat('- [%{cyan}{}%{reset}] {}').format(name, req.name)
        db.session.add(l)
    db.session.commit()
    print
    print cformat('%{white!}migrating rooms')
    for old_room_id, old_room in self.rb_root['Rooms'].iteritems():
        l = Location.find_first(name=old_room._locationName)
        if l is None:
            print cformat('%{yellow!}*** WARNING')
            print cformat("%{{yellow!}}***%{{reset}} Location '{}' does not exist. Skipped room '{}'"
                          .format(old_room._locationName, old_room.id))
            continue
        r = Room(id=old_room_id,
                 name=convert_to_unicode((old_room._name or '').strip() or generate_name(old_room)),
                 site=convert_to_unicode(old_room.site),
                 division=convert_to_unicode(old_room.division),
                 building=convert_to_unicode(old_room.building),
                 floor=convert_to_unicode(old_room.floor),
                 number=convert_to_unicode(old_room.roomNr),
                 # days-before value only matters if notifications were enabled
                 notification_before_days=((old_room.resvStartNotificationBefore or None)
                                           if getattr(old_room, 'resvStartNotification', False)
                                           else None),
                 notification_for_responsible=getattr(old_room, 'resvNotificationToResponsible', False),
                 notification_for_assistance=getattr(old_room, 'resvNotificationAssistance', False),
                 reservations_need_confirmation=old_room.resvsNeedConfirmation,
                 telephone=convert_to_unicode(getattr(old_room, 'telephone', None)),
                 key_location=convert_to_unicode(getattr(old_room, 'whereIsKey', None)),
                 capacity=getattr(old_room, 'capacity', None),
                 surface_area=getattr(old_room, 'surfaceArea', None),
                 latitude=getattr(old_room, 'latitude', None),
                 longitude=getattr(old_room, 'longitude', None),
                 comments=convert_to_unicode(getattr(old_room, 'comments', None)),
                 # the responsible avatar may have been merged into another one
                 owner_id=self.merged_avatars.get(old_room.responsibleId, old_room.responsibleId),
                 is_active=old_room.isActive,
                 is_reservable=old_room.isReservable,
                 max_advance_days=int(old_room.maxAdvanceDays) if getattr(old_room, 'maxAdvanceDays', None)
                 else None)
        print cformat('- [%{cyan}{}%{reset}] %{grey!}{:4}%{reset} %{green!}{}%{reset}').format(l.name, r.id, r.name)
        for old_bookable_time in getattr(old_room, '_dailyBookablePeriods', []):
            r.bookable_hours.append(BookableHours(start_time=old_bookable_time._startTime,
                                                  end_time=old_bookable_time._endTime))
            print cformat(' %{blue!}Bookable:%{reset} {}').format(r.bookable_hours[-1])
        for old_nonbookable_date in getattr(old_room, '_nonBookableDates', []):
            r.nonbookable_periods.append(NonBookablePeriod(start_dt=old_nonbookable_date._startDate,
                                                           end_dt=old_nonbookable_date._endDate))
            print cformat(' %{blue!}Nonbookable:%{reset} {}').format(r.nonbookable_periods[-1])
        if self.photo_path:
            # photos live on disk next to the legacy install; missing files are fine
            try:
                with open(os.path.join(self.photo_path, 'large_photos',
                                       get_canonical_name_of(old_room) + '.jpg'), 'rb') as f:
                    large_photo = f.read()
            except Exception:
                large_photo = None
            try:
                with open(os.path.join(self.photo_path, 'small_photos',
                                       get_canonical_name_of(old_room) + '.jpg'), 'rb') as f:
                    small_photo = f.read()
            except Exception:
                small_photo = None
            if large_photo and small_photo:
                r.photo = Photo(data=large_photo, thumbnail=small_photo)
                print cformat(' %{blue!}Photos')
        new_eq = []
        for old_equipment in ifilter(None, old_room._equipment.split('`') + old_room.avaibleVC):
            room_eq = l.get_equipment_by_name(old_equipment)
            new_eq.append(room_eq)
            r.available_equipment.append(room_eq)
        if new_eq:
            print cformat(' %{blue!}Equipment:%{reset} {}').format(', '.join(sorted(x.name for x in new_eq)))
        for attr_name, value in getattr(old_room, 'customAtts', {}).iteritems():
            value = convert_to_unicode(value)
            # skip empty values and a known legacy error placeholder
            if not value or ('Simba' in attr_name and value == u'Error: unknown mailing list'):
                continue
            attr_name = attribute_map.get(attr_name, attr_name).replace(' ', '-').lower()
            ca = l.get_attribute_by_name(attr_name)
            if not ca:
                print cformat(' %{blue!}Attribute:%{reset} {} %{red!}not found').format(attr_name)
                continue
            attr = RoomAttributeAssociation()
            attr.value = value
            attr.attribute = ca
            r.attributes.append(attr)
            print cformat(' %{blue!}Attribute:%{reset} {} = {}').format(attr.attribute.title, attr.value)
        l.rooms.append(r)
        db.session.add(l)
        print
    db.session.commit()
def migrate_reservations(self):
    """Migrate legacy reservations: edit logs, per-occurrence state
    (rejections/cancellations recovered from the history log) and links
    back to the owning event.  Commits in batches of 1000.
    """
    print cformat('%{white!}migrating reservations')
    i = 1
    for rid, v in self.rb_root['Reservations'].iteritems():
        room = Room.get(v.room.id)
        if room is None:
            print cformat(' %{red!}skipping resv for dead room {0.room.id}: {0.id} ({0._utcCreatedDT})').format(v)
            continue
        repeat_frequency, repeat_interval = RepeatMapping.convert_legacy_repeatability(v.repeatability)
        booked_for_id = getattr(v, 'bookedForId', None)
        r = Reservation(
            id=v.id,
            created_dt=as_utc(v._utcCreatedDT),
            start_dt=utc_to_local(v._utcStartDT),
            end_dt=utc_to_local(v._utcEndDT),
            # user ids may point at avatars that were merged into others
            booked_for_id=self.merged_avatars.get(booked_for_id, booked_for_id) or None,
            booked_for_name=convert_to_unicode(v.bookedForName),
            contact_email=convert_to_unicode(v.contactEmail),
            contact_phone=convert_to_unicode(getattr(v, 'contactPhone', None)),
            created_by_id=self.merged_avatars.get(v.createdBy, v.createdBy) or None,
            is_cancelled=v.isCancelled,
            is_accepted=v.isConfirmed,
            is_rejected=v.isRejected,
            booking_reason=convert_to_unicode(v.reason),
            rejection_reason=convert_to_unicode(getattr(v, 'rejectionReason', None)),
            repeat_frequency=repeat_frequency,
            repeat_interval=repeat_interval,
            uses_vc=getattr(v, 'usesAVC', False),
            needs_vc_assistance=getattr(v, 'needsAVCSupport', False),
            needs_assistance=getattr(v, 'needsAssistance', False))
        for eq_name in getattr(v, 'useVC', []):
            eq = room.location.get_equipment_by_name(eq_name)
            if eq:
                r.used_equipment.append(eq)
        occurrence_rejection_reasons = {}
        if getattr(v, 'resvHistory', None):
            # replay the history log (oldest first) to build edit-log entries and
            # recover per-occurrence rejection reasons from the log messages
            for h in reversed(v.resvHistory._entries):
                ts = as_utc(parse_dt_string(h._timestamp))
                if len(h._info) == 2:
                    possible_rejection_date, possible_rejection_reason = h._info
                    m = re.match(r'Booking occurrence of the (\d{1,2} \w{3} \d{4}) rejected',
                                 possible_rejection_reason)
                    if m:
                        d = datetime.strptime(m.group(1), '%d %b %Y')
                        occurrence_rejection_reasons[d] = possible_rejection_reason[9:].strip('\'')
                el = ReservationEditLog(timestamp=ts, user_name=h._responsibleUser,
                                        info=map(convert_to_unicode, h._info))
                r.edit_logs.append(el)
        notifications = getattr(v, 'startEndNotification', []) or []
        excluded_days = getattr(v, '_excludedDays', []) or []
        ReservationOccurrence.create_series_for_reservation(r)
        for occ in r.occurrences:
            occ.notification_sent = occ.date in notifications
            occ.is_rejected = r.is_rejected
            occ.is_cancelled = r.is_cancelled or occ.date in excluded_days
            occ.rejection_reason = (convert_to_unicode(occurrence_rejection_reasons[occ.date])
                                    if occ.date in occurrence_rejection_reasons else None)
        event_id = getattr(v, '_ReservationBase__owner', None)
        if hasattr(event_id, '_Impersistant__obj'):  # Impersistant object
            event_id = event_id._Impersistant__obj
        if event_id is not None:
            event = self.zodb_root['conferences'].get(event_id)
            if event:
                # For some stupid reason there are bookings in the database which have a completely unrelated parent
                guids = getattr(event, '_Conference__roomBookingGuids', [])
                if any(int(x.id) == v.id for x in guids if x.id is not None):
                    r.event_id = int(event_id)
                else:
                    print cformat(' %{red}event {} does not contain booking {}').format(event_id, v.id)
        print cformat('- [%{cyan}{}%{reset}/%{green!}{}%{reset}] %{grey!}{}%{reset} {}').format(
            room.location_name, room.name, r.id, r.created_dt.date())
        room.reservations.append(r)
        db.session.add(room)
        # commit every 1000 reservations to keep transactions small
        i = (i + 1) % 1000
        if not i:
            db.session.commit()
    db.session.commit()
def _sanitize(string, html=False):
    """Convert *string* to unicode and collapse whitespace.

    Unless *html* is set, HTML tags are stripped and entities unescaped first.
    """
    text = convert_to_unicode(string)
    if not html:
        text = HTMLParser().unescape(strip_tags(text))
    return WHITESPACE_RE.sub(' ', text).strip()