class Connect(SimpleItem):
    u"""Connect utility.

    Provides the functions needed to interact with Adobe Connect
    meetings through the Connect XML web-service API.
    """
    implements(IConnect)
    classProvides(
        IConnectLayout,
        IConnectModele,
    )
    security = ClassSecurityInfo()

    # General parameters (persisted control-panel settings).
    url_connexion = FieldProperty(IConnectLayout['url_connexion'])
    login = FieldProperty(IConnectLayout['login'])
    password = FieldProperty(IConnectLayout['password'])
    version = FieldProperty(IConnectLayout['version'])
    etablissement = FieldProperty(IConnectLayout['etablissement'])
    num_serveur = FieldProperty(IConnectLayout['num_serveur'])
    dossiers = FieldProperty(IConnectModele['dossiers'])
    #debug = []

    # Specific parameters.
    # Current Connect session cookie; set lazily by analyserReponse()
    # after a 'common-info' request, reset by connexion().
    session = None

    def getAttribut(self, param):
        """Return the attribute named by param["attribut"]."""
        return self.__getattribute__(param["attribut"])

    def getConnectProperty(self, key):
        """Return the utility property named ``key``."""
        return getattr(self, "%s" % key)

    def setProperties(self, form):
        """Store every form value as an attribute of this utility.

        Values whose key starts with "activer_" are coerced to int first.
        NOTE(review): an int has no .decode(); the "activer_" branch looks
        like it would raise on the following setattr -- confirm against the
        submitting form.
        """
        for key in form.keys():
            val = form[key]
            if key.startswith("activer_"):
                val = int(val)
            setattr(self, "%s" % key, val.decode("utf-8"))

    ### PUBLIC METHODS ###########################################################

    def creerUser(self, params):
        """Create a Connect user account.

        Arguments:
        params -- a dictionary with the user attributes
                  ("fullname", "email", "userid", "password").

        Returns the error of the last request (falsy on success).
        """
        #print params
        # Create the user's Connect account.
        firstname, lastname = params["fullname"].split(" ", 1)
        rep = self.requete(
            {
                'action': 'principal-update',
                'first-name': firstname,
                'last-name': lastname,
                'email': params['email'],
                'login': params['userid'],
                'password': params['password'],
                'type': 'user',
                'send-email': 'false',
                'has-children': '0',
            }, "principal/[0]")
        if not rep["error"]:
            # (Historical note kept from the original: a separate "add user
            # to the meeting-hosts group" step was once removed as useless,
            # since meeting hosts can create new meetings.)
            # Look up the new user's principal id.
            idUser = rep["response"].attrib["principal-id"]
            # Look up the meeting-hosts ("live-admins") group.
            rep = self.requete(
                {
                    'action': 'principal-list',
                    'filter-type': 'live-admins'
                }, "principal-list/principal/[0]")
            idGroupe = rep["response"].attrib["principal-id"]
            # Add the user to the meeting-hosts group.
            rep = self.requete({
                'action': 'group-membership-update',
                'group-id': idGroupe,
                'principal-id': idUser,
                'is-member': 'true'
            })
        return rep["error"]

    def creerReunion(self, params):
        """Create a Connect meeting in the user's meetings folder.

        params must contain "userid", "fullname", "repertoire"
        ("Webconference" or "Sonorisation") and "modele" (template sco-id).

        Returns {"id", "title", "url"} on success, None when the user or
        folder cannot be found; raises Invalid when making the meeting
        public fails.
        """
        #print "creerReunion"
        # Look up the user's id and meetings folder.
        idUser = self.rechercherIdUser(params["userid"])
        #print "idUser : %s" % str(idUser)
        idFolder = self.rechercherDossierUser(params["userid"], "user-meetings")
        #print "idFolder : %s" % str(idFolder)
        if idUser and idFolder:
            # Build the meeting title and URL path.
            titre = url = None
            if params["repertoire"] == "Webconference":
                titre = " ".join([
                    "Réunion de", params["fullname"],
                    "(%s)" % self.etablissement.encode('utf8')
                ])
                url = params["userid"]
            if params["repertoire"] == "Sonorisation":
                titre = " ".join([
                    "Sonorisation de", params["fullname"],
                    "(%s)" % self.etablissement.encode('utf8')
                ])
                url = params["userid"] + "sonorisation"
            # Strip non alphanumeric characters from the URL path.
            url = url.replace("@", "")
            url = url.replace(".", "")
            url = url.replace("-", "")
            url = url.replace("_", "")
            # Create the meeting.
            rep = self.requete(
                {
                    'action': 'sco-update',
                    'type': 'meeting',
                    'name': titre,
                    'url-path': url,
                    'folder-id': idFolder,
                    'source-sco-id': params["modele"],
                }, "sco/[0]")
            if not rep["error"]:
                # Add the user as host of the meeting.
                meeting = rep["response"]
                idReunion = meeting.attrib["sco-id"]
                urlReunion = self.xpath(meeting, "url-path/[0]").text
                titleReunion = self.xpath(meeting, "name/[0]").text.encode("utf-8")
                #print "@@@ AJOUT HOTE"
                #print idReunion
                rep = self.requete({
                    'action': 'permissions-update',
                    'principal-id': idUser,
                    'acl-id': idReunion,
                    'permission-id': 'host'
                })
                #print rep
                if not rep["error"]:
                    #print "@@@ NOUVELLE REUNION"
                    #print {"id" : idReunion, "title" : titleReunion, "url" : self.urlAbsolu(urlReunion) }
                    # Make the meeting public.
                    rep = self.requete({
                        'action': 'permissions-update',
                        'acl-id': idReunion,
                        'principal-id': 'public-access',
                        'permission-id': 'view-hidden'
                    })
                    if not rep["error"]:
                        return {
                            "id": idReunion,
                            "title": titleReunion,
                            "url": self.urlAbsolu(urlReunion)
                        }
                    else:
                        raise Invalid(u"Connect creerReunion : %s " % str(rep["error"]))
        return None

    def majPasswordUser(self, params):
        """Update the Connect password of params["userid"]."""
        #print "majPasswordUser"
        userid = self.rechercherIdUser(params["userid"])
        return self.requete(
            {
                'action': 'user-update-pwd',
                'user-id': userid,
                'password': params["password"],
                'password-verify': params["password"]
            }, 'status/code')

    def rechercherReunions(self, params):
        """Return the meetings of params["login"] created from template
        params["modele"], as a list of {"id", "title", "url"} dicts, or
        None when nothing matches.
        """
        print "@@@ RECHERCHER REUNIONS"
        print params
        # Check the parameters.
        if "login" in params:
            login = params["login"]
            #pwd = params["pass"]
            # Admin connection.
            if self.connexion():
                # Look up the user's meetings folder.
                idFolder = self.rechercherDossierUser(login, "user-meetings")
                if not idFolder:
                    return None
                # Look up the meetings inside the user's folder.
                meetings = self.requete(
                    {
                        'action': 'sco-contents',
                        'sco-id': idFolder,
                        'filter-type': 'meeting'
                    }, 'scos/sco')["response"]
                modeles = {}
                for meeting in meetings:
                    # Group the meetings by source template.
                    modele = meeting.attrib["source-sco-id"]
                    if not modele in modeles:
                        modeles[modele] = []
                    url = self.xpath(meeting, "url-path/[0]").text
                    modeles[modele].append({
                        "id": meeting.attrib["sco-id"],
                        "title": self.xpath(meeting, "name/[0]").text.encode("utf-8"),
                        "url": self.urlAbsolu(url),
                    })
                    #print "@@@ REUNION %s" % str({"id": meeting.attrib["sco-id"], "title": self.xpath(meeting, "name/[0]").text.encode("utf-8"),"url": self.urlAbsolu(url),})
                #print "@@@ REUNIONS"
                if params['modele'] in modeles:
                    #print modeles[params['modele']]
                    return modeles[params['modele']]
                else:
                    #print modeles
                    return None
        return None

    def rechercherEnregistrements(self, params):
        """Return the recordings of meeting params["id"], newest first.

        Each recording is a dict {"id", "title", "url", "created",
        "dateUS", "duration"}; as a side effect every returned recording
        is made publicly viewable.  Returns [] on a Connect error.
        """
        logger = logging.getLogger("Jalon.Connect[rechercherEnregistrements]")
        rep = self.requete(
            {
                'action': 'sco-contents',
                'sco-id': params['id'],
                'filter-icon': 'archive',
                'sort-date-created': 'desc'
            }, "scos/sco")
        if not rep["error"]:
            e = []
            #logger.info("--Reponse de Connect : %s" % rep["response"])
            version = int(self.version)
            #logger.info("--CONNECT version=%s" % version)
            for enregistrement in rep["response"]:
                url = self.xpath(enregistrement, "url-path/[0]").text
                #logger.info(url)
                # Duration: a child element before Connect 9, an attribute
                # from Connect 9 onwards.
                if version < 9:
                    duree = self.xpath(enregistrement, "duration/[0]")
                    if not duree == None:
                        duree = duree.text
                else:
                    duree = enregistrement.attrib["duration"]
                # Skip recordings without a duration (still in progress).
                if duree:
                    idEnregistrement = enregistrement.attrib["sco-id"]
                    if version < 9:
                        duree_f = self.convertirDuree(duree)
                    else:
                        duree_f = strftime('%Hh%Mm%Ss', gmtime(int(duree)))
                    # Make the recording public.
                    rep = self.requete({
                        'action': 'permissions-update',
                        'acl-id': idEnregistrement,
                        'principal-id': 'public-access',
                        'permission-id': 'view'
                    })
                    # Convert the creation date.
                    created = self.xpath(enregistrement, "date-created/[0]").text
                    created_f = self.convertirDate(created)
                    created_us = self.convertirDate(created, us=True)
                    e.append({
                        "id": "%s-s%s" % (idEnregistrement, self.num_serveur),
                        "title": self.xpath(enregistrement, "name/[0]").text.encode("utf-8"),
                        "url": self.urlAbsolu(url),
                        "created": created_f,
                        "dateUS": created_us,
                        "duration": duree_f,
                    })
            return e
        # When the requested sco-id does not exist, Connect 8 may return
        # <status code="no-access" subcode="denied"/>.
        logger.error("reunion : %s -- retour : %s " % (params['id'], rep["error"]))
        return []

    def genererSessionUser(self, params):
        """Log params["userid"] in against Connect and return the session
        cookie value (value of the first cookie in the Set-Cookie header).
        """
        #logger = logging.getLogger("Jalon.Connect[genererSessionUser]")
        url = "%s?action=login&login=%s&password=%s" % (
            self.url_connexion, params["userid"], params["password"])
        req = urllib2.Request(url)
        handle = urllib2.urlopen(req)
        cookie = handle.info()["Set-Cookie"]
        tab = cookie.split(";")
        return tab[0].split("=")[1]

    ### PRIVATE METHODS ##########################################################

    #security.declarePrivate('connexion')
    def connexion(self, param=None):
        """Open an admin session on the Connect server.

        Returns True when the admin login succeeded; raises Invalid when
        the initial common-info request fails.
        """
        #logger = logging.getLogger("Jalon.Connect[connexion]")
        # Reset the session.
        self.session = None
        # Fetch a session number ('common-info' stores it on self.session
        # via analyserReponse).
        rep = self.requete({"action": "common-info"})
        if not rep["error"]:
            # Admin login.
            rep = self.requete({
                'action': 'login',
                'login': self.login,
                'password': self.password
            })
            return not rep["error"]
        # On error.
        raise Invalid(u"Connexion Connect : %s " % str(rep))

    security.declarePrivate('rechercherIdUser')
    def rechercherIdUser(self, loginUser):
        """Return the principal-id of ``loginUser``, or None."""
        rep = self.requete(
            {
                'action': 'principal-list',
                'filter-login': loginUser
            }, "principal-list/principal/[0]")
        if not rep["error"]:
            return rep["response"].attrib["principal-id"]
        return None

    security.declarePrivate('rechercherShortcuts')
    def rechercherShortcuts(self, typesShortcuts):
        """Return the sco-ids of the folder shortcuts whose type is listed
        in ``typesShortcuts`` (in the order requested).
        """
        # Fetch the folder shortcuts.
        shortcuts = self.requete({'action': 'sco-shortcuts'}, "shortcuts/sco")
        s = []
        for typeShortcut in typesShortcuts:
            try:
                for shortcut in shortcuts["response"]:
                    # Match the requested folder type.
                    if shortcut.attrib["type"] == typeShortcut:
                        s.append(shortcut.attrib["sco-id"])
            except:
                raise Invalid(u"shortcuts : %s" % str(shortcuts))
        return s

    # NOTE(review): the name below repeats 'rechercherShortcuts'; this
    # looks like a copy-paste slip for 'rechercherDossierUser' -- confirm.
    security.declarePrivate('rechercherShortcuts')
    def rechercherDossierUser(self, login, shortcut):
        """Return the sco-id of the user's folder under the given shortcut
        type (e.g. "user-meetings"), or None.
        """
        # Find the shortcut folder.
        idShortcut = self.rechercherShortcuts([shortcut])[0]
        # Find the user's folder inside it.
        folder = self.requete(
            {
                'action': 'sco-contents',
                'sco-id': idShortcut,
                'filter-name': login
            }, "scos/sco/[0]")["response"]
        if folder is not None:
            return folder.attrib["sco-id"]
        return None

    security.declarePrivate('supprimerEnregistrement')
    def supprimerEnregistrement(self, params):
        """Delete recording params["idEnregistrement"]; True on success."""
        #print params["idEnregistrement"]
        rep = self.requete({
            'action': 'sco-delete',
            'sco-id': params["idEnregistrement"]
        })
        return not rep["error"]

    ### UTILITIES ################################################################

    security.declarePrivate('requete')
    def requete(self, params, xpath=None):
        """Send ``params`` to the Connect API.

        Returns {"response": xml, "error": error}.  The current session
        id is added to the request when available; when ``xpath`` is given
        the response is reduced to that node.  Raises Invalid on HTTP,
        XML or unexpected Connect errors.
        """
        #logger = logging.getLogger("Jalon.Connect[requete]")
        # Add the session number to the params.
        if self.session is not None:
            params['session'] = self.session
            #logger.info("--session = %s" % params['session'])
        # Query the server.
        data = urllib.urlencode(params)
        req = urllib2.Request("%s?%s" % (self.url_connexion, data))
        #logger.info("REQUETE : %s?%s" % (self.url_connexion, data))
        try:
            handle = urllib2.urlopen(req)
            rep = handle.read()
        except:
            # HTTP error.
            raise Invalid(u"HTTP ; response : None ; error : %s" % str(sys.exc_info()))
        #logger.info("REPONSE : %s" % rep)
        # XML conversion.
        try:
            xml = XML(rep)
        except:
            # XML error.
            raise Invalid(u"XML ; response : None ; error : %s" % str(sys.exc_info()))
        # Analyse the response.
        error = self.chercherErreur(xml)
        if not error:
            #print tostring(xml)
            self.analyserReponse(params, xml)
            # Extract a sub-node if requested.
            if xpath:
                xml = self.xpath(xml, xpath)
                if xml is None:
                    error = "absence de resultat xpath"
            # No error: return the response.
            return {"response": xml, "error": error}
        elif "error" in error:
            # chercherErreur returned {"error": ...} for "no-access"
            # statuses ("error" is then a dict key); returned as-is.
            return {"response": xml, "error": error}
        else:
            # Any other error: raise.
            raise Invalid(u"Erreur Connect; response : %s ; error : %s" % (rep, str(sys.exc_info())))

    security.declarePrivate('chercherErreur')
    def chercherErreur(self, xml):
        """Inspect the status code of a Connect response.

        Returns None when OK, a "field : subcode" string for an "invalid"
        status, or a {"error": ...} dict for "no-access".
        """
        logger = logging.getLogger("Jalon.Connect[chercherErreur]")
        # Look up the status code.
        statut = self.xpath(xml, "status/@code")
        #logger.info("-- STATUT : %s" % statut)
        if statut == "invalid":
            field = self.xpath(xml, "status/invalid/@field")
            subcode = self.xpath(xml, "status/invalid/@subcode")
            logger.error("ERREUR Connect / %s : %s" % (field, subcode))
            return "%s : %s" % (field, subcode)
        elif statut == "no-access":
            subcode = self.xpath(xml, "status/@subcode")
            logger.error("ERREUR Connect / %s : %s" % (statut, subcode))
            return {"error": "%s : %s" % (statut, subcode)}
        else:
            return None

    security.declarePrivate('analyserReponse')
    def analyserReponse(self, params, xml):
        """Record the session cookie from a 'common-info' response."""
        # Look up the session number.
        if not self.session and params['action'] == 'common-info':
            self.session = self.xpath(xml, "common/cookie/text()")
            #print "--- SESSION : %s" % self.session

    security.declarePrivate('xpath')
    def xpath(self, xml, xpath):
        """Minimal xpath-like lookup on an ElementTree node.

        Supports four forms for the last path segment:
        ``.../@attr`` -> attribute value, ``.../text()`` -> node text,
        ``.../[n]`` -> n-th matching node, plain path -> list of nodes.
        Returns None when nothing matches.
        """
        # Split the path into segments.
        xpaths = xpath.split('/')
        xpathparent = '/'.join(xpaths[0:-1])
        # Attribute lookup ( .../@attr ).
        if re.match(r"^@.*$", xpaths[-1]):
            parents = xml.findall(xpathparent)
            if len(parents) > 0:
                key = xpaths[-1][1:]
                if key in parents[0].attrib:
                    return parents[0].attrib[key]
        # Text content ( .../text() ).
        elif xpaths[-1] == "text()":
            parents = xml.findall(xpathparent)
            if len(parents) > 0:
                return parents[0].text
        # N-th node ( .../[n] ).
        elif re.match(r"^\[.*\]", xpaths[-1]):
            parents = xml.findall(xpathparent)
            if len(parents) > 0:
                return parents[int(xpaths[-1][1:-1])]
        # List of nodes.
        else:
            return xml.findall(xpath)
        return None

    security.declarePrivate('urlAbsolu')
    def urlAbsolu(self, url, session=None):
        """Build an absolute URL on the Connect server for ``url``,
        optionally appending a ``session`` query parameter.
        """
        o = urlparse(self.url_connexion)
        #return "%s://%s%s%s" % (o.scheme, o.netloc, url, "?session=%s" % self.session if session else "")
        return "%s://%s%s%s" % (o.scheme, o.netloc, url, "?session=%s" % session if session else "")
        # TODO: generate a user session.

    security.declarePrivate('convertirDate')
    def convertirDate(self, d, us=False):
        """Format date string ``d`` as "dd.mm.YYYY - HHhMM", or "YYYY-mm"
        when ``us`` is true.
        """
        if not us:
            return DateTime(d).strftime("%d.%m.%Y - %Hh%M")
        else:
            return DateTime(d).strftime("%Y-%m")

    security.declarePrivate('convertirDuree')
    def convertirDuree(self, d):
        """Convert an "HH:MM:SS.mmm" duration to "HHhMMmSSs"; "-" when
        ``d`` is empty.
        """
        if d:
            m = re.match(r"(\d{2}):(\d{2}):(\d{2})\.\d{3}", d)
            return "%sh%sm%ss" % m.groups()
        else:
            return "-"
class Dict(Item, Plugin):
    """
    Dictionary functionality.

    Provides commands for performing tasks such as defining words and checking
    the spelling of a word.
    """
    classProvides(IPlugin, IEridanusPluginProvider)
    typeName = 'eridanus_plugins_dict'
    dummy = integer()

    def formatResults(self, results):
        """
        Format dictionary definition results.
        """
        parts = [u'\002%s\002: %s' % (db, defn) for db, defn in results]
        return u' '.join(parts)

    def suggest(self, word, language):
        """
        Suggest spellings for a word in a specific language.
        """
        candidates = dict.spell(word, language)
        if candidates is None:
            return u'"%s" is spelled correctly.' % (word, )
        return u'Suggestions: ' + u', '.join(candidates)

    @usage(u'dicts')
    def cmd_dicts(self, source):
        """
        List available dictionaries.
        """
        def _replyWithDicts(available):
            lines = [u'\002%s\002: %s' % (db, desc) for db, desc in available]
            source.reply(u' '.join(lines))

        return dict.getDicts().addCallback(_replyWithDicts)

    @rest
    @usage(u'define <word>')
    def cmd_define(self, source, word):
        """
        Define a word from a dictionary.

        All available dictionaries are consulted, in order to only look up a
        word in a specific dictionary see the "definefor" command.
        """
        d = dict.define(word, None)
        d.addCallback(self.formatResults)
        return d.addCallback(source.reply)

    @rest
    @usage(u'definefor <database> <word>')
    def cmd_definefor(self, source, database, word):
        """
        Define a word for a specific dictionary.

        Look <word> up in <database>, if <database> is not specified then all
        available dictionaries are consulted.
        """
        d = dict.define(word, database)
        d.addCallback(self.formatResults)
        return d.addCallback(source.reply)

    @rest
    @usage(u'spell <word>')
    def cmd_spell(self, source, word):
        """
        Check the spelling of a word in English (UK).

        If <word> is spelt incorrectly, a list of suggestions are given.
        Checking the spelling of a word in a specific language can be done
        with the "spellfor" command.
        """
        source.reply(self.suggest(word, u'en_GB'))

    @rest
    @usage(u'spell <language> <word>')
    def cmd_spellfor(self, source, language, word):
        """
        Check the spelling of a word in a specific language.

        If <word> is spelt incorrectly, a list of suggestions are given.
        """
        source.reply(self.suggest(word, language))
class ManifestImporterSection(object):
    # Transmogrifier section that re-orders items emitted by the reader
    # section so they come out in manifest order rather than filesystem
    # order, and tags each manifest-listed item with its portal type.
    classProvides(ISectionBlueprint)
    implements(ISection)

    def __init__(self, transmogrifier, name, options, previous):
        """Wire the section into the pipeline and read its options."""
        self.previous = previous
        self.context = transmogrifier.context
        self.pathkey = defaultMatcher(options, 'path-key', name, 'path')
        self.fileskey = defaultMatcher(options, 'files-key', name, 'files')
        self.typekey = options.get('type-key', '_type').strip()
        self.enable_source_behaviour = options.get(
            'enable-source-behaviour', 'true') == 'true' and True or False
        # communication with logger
        self.anno = IAnnotations(transmogrifier)
        self.storage = self.anno.setdefault(VALIDATIONKEY, [])
        # we need this dictionary to store manifest data, because reader section
        # uses recursion when walking through content folders
        self.manifests = {}
        # The reader section spits out manifests in filesystem order,
        # we need to emit them in manifest order.
        self.buffer = {}
        extractor = self.iterExtractingManifests(previous)
        self.it = IteratorWithLookahead(extractor)

    def __iter__(self):
        """Yield items folder by folder, in manifest order.

        For each folder: emit the folder item itself, then each id listed
        in its manifest (buffering out-of-order siblings via bufferTo),
        synthesizing a stub item for manifest entries the reader never
        produced, then drain any unlisted leftovers of that folder.
        """
        item = None
        folder_path = None
        while True:
            if item:
                yield item
            manifest = self.manifests.get(folder_path, {})
            for id_ in manifest.keys():
                # Pull siblings into self.buffer until id_ shows up (or
                # the folder is exhausted).
                self.bufferTo(folder_path, id_, manifest)
                item = self.buffer.pop(id_, None)
                if item is None:
                    # Manifest lists an id the reader never emitted:
                    # synthesize a stub item and record the path for the
                    # logger (via the VALIDATIONKEY annotation).
                    if folder_path == '':
                        path = id_
                    else:
                        path = '/'.join([folder_path, id_])
                    self.storage.append(path)
                    # NOTE(review): 'pathkey' here is the variable bound at
                    # the bottom of a previous while-iteration, not a fresh
                    # lookup -- relies on at least one prior pass; confirm.
                    item = {pathkey: path}
                item[self.typekey] = manifest[id_]
                yield item
            manifest = {}
            # consume any remaining unlisted entries of this folder
            self.bufferTo(folder_path, None, manifest)
            if self.it.lookahead is None:
                break
            # Advance to the next folder: peek at the upcoming item and
            # derive its parent folder path.
            item = self.it.lookahead
            pathkey = self.pathkey(*item.keys())[0]
            path = item[pathkey]
            folder_path, item_id = os.path.split(path)
        # cleanup
        if VALIDATIONKEY in self.anno:
            del self.anno[VALIDATIONKEY]

    def iterExtractingManifests(self, previous):
        """Pass items through unchanged, parsing any manifest file found
        in an item's files mapping into self.manifests[path] as an
        OrderedDict of {object_id: type}.
        """
        for item in previous:
            pathkey = self.pathkey(*item.keys())[0]
            fileskey = self.fileskey(*item.keys())[0]
            if pathkey in item and fileskey in item and 'manifest' in item[
                    fileskey]:
                path = item[pathkey]
                data = item[fileskey]['manifest']['data']
                doc = minidom.parseString(data)
                objects = OrderedDict()
                for record in doc.getElementsByTagName('record'):
                    type_ = str(record.getAttribute('type'))
                    object_id = str(record.firstChild.nodeValue.strip())
                    objects[object_id] = type_
                self.manifests[path] = objects
            yield item

    def bufferTo(self, folder_path, id_, manifest):
        """Consume items of ``folder_path`` from the lookahead iterator
        into self.buffer until ``id_`` has been buffered, a different
        folder starts, or the stream ends.  Items whose id is not in
        ``manifest`` are silently dropped (with an id_ of None and an
        empty manifest this drains the folder's unlisted leftovers).
        """
        self.consumeMissingPaths()
        while self.it.lookahead is not None and id_ not in self.buffer:
            item = self.it.lookahead
            pathkey = self.pathkey(*item.keys())[0]
            path = item[pathkey]
            parent, item_id = os.path.split(path)
            if folder_path != parent:
                # Next item belongs to another folder; stop here.
                break
            self.it.next()
            if item_id not in manifest:
                # Not listed in the manifest: drop it and keep scanning.
                self.consumeMissingPaths()
                continue
            self.buffer[item_id] = item

    def consumeMissingPaths(self):
        """Skip over queued items that have no path key at all."""
        while self.it.lookahead is not None:
            item = self.it.lookahead
            pathkey = self.pathkey(*item.keys())[0]
            if pathkey:
                break
            self.it.next()
class BranchUpgradeJob(BranchJobDerived):
    """A Job that upgrades branches to the current stable format."""

    implements(IBranchUpgradeJob)
    classProvides(IBranchUpgradeJobSource)

    # Job-type marker used by the BranchJob storage layer.
    class_job_type = BranchJobType.UPGRADE_BRANCH

    # Exceptions reported to the requesting user rather than as oopses.
    user_error_types = (NotBranchError, )

    task_queue = 'branch_write_job'

    config = config.IBranchUpgradeJobSource

    def getOperationDescription(self):
        """Human-readable description used in error notifications."""
        return 'upgrading a branch'

    @classmethod
    def create(cls, branch, requester):
        """See `IBranchUpgradeJobSource`."""
        # Raises if the branch is not eligible for an upgrade.
        branch.checkUpgrade()
        branch_job = BranchJob(branch, cls.class_job_type, {},
                               requester=requester)
        return cls(branch_job)

    def run(self, _check_transaction=False):
        """See `IBranchUpgradeJob`.

        Copies the branch to a temporary location, upgrades the copy,
        pulls the upgraded data back over the original (keeping the old
        format in backup.bzr), and records the new formats on the DB
        branch.  The temporary copy is always removed.
        """
        # Set up the new branch structure
        with server(get_rw_server(), no_replace=True):
            upgrade_branch_path = tempfile.mkdtemp()
            try:
                upgrade_transport = get_transport(upgrade_branch_path)
                upgrade_transport.mkdir('.bzr')
                source_branch_transport = get_transport(
                    self.branch.getInternalBzrUrl())
                # Copy the on-disk branch data into the temp location.
                source_branch_transport.clone('.bzr').copy_tree_to_transport(
                    upgrade_transport.clone('.bzr'))
                transaction.commit()
                upgrade_branch = BzrBranch.open_from_transport(
                    upgrade_transport)

                # No transactions are open so the DB connection won't be
                # killed.
                with TransactionFreeOperation():
                    # Perform the upgrade.
                    upgrade(upgrade_branch.base)

                # Re-open the branch, since its format has changed.
                upgrade_branch = BzrBranch.open_from_transport(
                    upgrade_transport)
                source_branch = BzrBranch.open_from_transport(
                    source_branch_transport)

                # Bring the upgraded copy fully up to date with the source
                # before swapping it in.
                source_branch.lock_write()
                upgrade_branch.pull(source_branch)
                upgrade_branch.fetch(source_branch)
                source_branch.unlock()

                # Move the branch in the old format to backup.bzr
                try:
                    source_branch_transport.delete_tree('backup.bzr')
                except NoSuchFile:
                    pass
                source_branch_transport.rename('.bzr', 'backup.bzr')
                source_branch_transport.mkdir('.bzr')
                upgrade_transport.clone('.bzr').copy_tree_to_transport(
                    source_branch_transport.clone('.bzr'))

                # Re-open the source branch again.
                source_branch = BzrBranch.open_from_transport(
                    source_branch_transport)

                formats = get_branch_formats(source_branch)

                # Record the new on-disk formats on the database branch.
                self.branch.branchChanged(
                    self.branch.stacked_on,
                    self.branch.last_scanned_id,
                    *formats)
            finally:
                shutil.rmtree(upgrade_branch_path)
class TinyMCE(SimpleItem): """TinyMCE Utility""" implements(ITinyMCE) classProvides( ITinyMCELayout, ITinyMCEToolbar, ITinyMCELibraries, ITinyMCEResourceTypes ) security = ClassSecurityInfo() resizing = FieldProperty(ITinyMCELayout['resizing']) autoresize = FieldProperty(ITinyMCELayout['autoresize']) editor_width = FieldProperty(ITinyMCELayout['editor_width']) editor_height = FieldProperty(ITinyMCELayout['editor_height']) contextmenu = FieldProperty(ITinyMCELayout['contextmenu']) content_css = FieldProperty(ITinyMCELayout['content_css']) styles = FieldProperty(ITinyMCELayout['styles']) formats = FieldProperty(ITinyMCELayout['formats']) tablestyles = FieldProperty(ITinyMCELayout['tablestyles']) toolbar_width = FieldProperty(ITinyMCEToolbar['toolbar_width']) toolbar_external = FieldProperty(ITinyMCEToolbar['toolbar_external']) toolbar_save = FieldProperty(ITinyMCEToolbar['toolbar_save']) toolbar_cut = FieldProperty(ITinyMCEToolbar['toolbar_cut']) toolbar_copy = FieldProperty(ITinyMCEToolbar['toolbar_copy']) toolbar_paste = FieldProperty(ITinyMCEToolbar['toolbar_paste']) toolbar_pastetext = FieldProperty(ITinyMCEToolbar['toolbar_pastetext']) toolbar_pasteword = FieldProperty(ITinyMCEToolbar['toolbar_pasteword']) toolbar_undo = FieldProperty(ITinyMCEToolbar['toolbar_undo']) toolbar_redo = FieldProperty(ITinyMCEToolbar['toolbar_redo']) toolbar_search = FieldProperty(ITinyMCEToolbar['toolbar_search']) toolbar_replace = FieldProperty(ITinyMCEToolbar['toolbar_replace']) toolbar_style = FieldProperty(ITinyMCEToolbar['toolbar_style']) toolbar_bold = FieldProperty(ITinyMCEToolbar['toolbar_bold']) toolbar_italic = FieldProperty(ITinyMCEToolbar['toolbar_italic']) toolbar_underline = FieldProperty(ITinyMCEToolbar['toolbar_underline']) toolbar_strikethrough = FieldProperty(ITinyMCEToolbar['toolbar_strikethrough']) toolbar_sub = FieldProperty(ITinyMCEToolbar['toolbar_sub']) toolbar_sup = FieldProperty(ITinyMCEToolbar['toolbar_sup']) toolbar_forecolor = 
FieldProperty(ITinyMCEToolbar['toolbar_forecolor']) toolbar_backcolor = FieldProperty(ITinyMCEToolbar['toolbar_backcolor']) toolbar_justifyleft = FieldProperty(ITinyMCEToolbar['toolbar_justifyleft']) toolbar_justifycenter = FieldProperty(ITinyMCEToolbar['toolbar_justifycenter']) toolbar_justifyright = FieldProperty(ITinyMCEToolbar['toolbar_justifyright']) toolbar_justifyfull = FieldProperty(ITinyMCEToolbar['toolbar_justifyfull']) toolbar_bullist = FieldProperty(ITinyMCEToolbar['toolbar_bullist']) toolbar_numlist = FieldProperty(ITinyMCEToolbar['toolbar_numlist']) toolbar_outdent = FieldProperty(ITinyMCEToolbar['toolbar_outdent']) toolbar_indent = FieldProperty(ITinyMCEToolbar['toolbar_indent']) toolbar_tablecontrols = FieldProperty(ITinyMCEToolbar['toolbar_tablecontrols']) toolbar_link = FieldProperty(ITinyMCEToolbar['toolbar_link']) toolbar_unlink = FieldProperty(ITinyMCEToolbar['toolbar_unlink']) toolbar_anchor = FieldProperty(ITinyMCEToolbar['toolbar_anchor']) toolbar_image = FieldProperty(ITinyMCEToolbar['toolbar_image']) toolbar_media = FieldProperty(ITinyMCEToolbar['toolbar_media']) toolbar_charmap = FieldProperty(ITinyMCEToolbar['toolbar_charmap']) toolbar_hr = FieldProperty(ITinyMCEToolbar['toolbar_hr']) toolbar_advhr = FieldProperty(ITinyMCEToolbar['toolbar_advhr']) toolbar_insertdate = FieldProperty(ITinyMCEToolbar['toolbar_insertdate']) toolbar_inserttime = FieldProperty(ITinyMCEToolbar['toolbar_inserttime']) toolbar_emotions = FieldProperty(ITinyMCEToolbar['toolbar_emotions']) toolbar_nonbreaking = FieldProperty(ITinyMCEToolbar['toolbar_nonbreaking']) toolbar_pagebreak = FieldProperty(ITinyMCEToolbar['toolbar_pagebreak']) toolbar_print = FieldProperty(ITinyMCEToolbar['toolbar_print']) toolbar_preview = FieldProperty(ITinyMCEToolbar['toolbar_preview']) toolbar_spellchecker = FieldProperty(ITinyMCEToolbar['toolbar_spellchecker']) toolbar_removeformat = FieldProperty(ITinyMCEToolbar['toolbar_removeformat']) toolbar_cleanup = 
FieldProperty(ITinyMCEToolbar['toolbar_cleanup']) toolbar_visualaid = FieldProperty(ITinyMCEToolbar['toolbar_visualaid']) toolbar_visualchars = FieldProperty(ITinyMCEToolbar['toolbar_visualchars']) toolbar_attribs = FieldProperty(ITinyMCEToolbar['toolbar_attribs']) toolbar_code = FieldProperty(ITinyMCEToolbar['toolbar_code']) toolbar_fullscreen = FieldProperty(ITinyMCEToolbar['toolbar_fullscreen']) customtoolbarbuttons = FieldProperty(ITinyMCEToolbar['customtoolbarbuttons']) libraries_spellchecker_choice = FieldProperty(ITinyMCELibraries['libraries_spellchecker_choice']) libraries_atd_show_types = FieldProperty(ITinyMCELibraries['libraries_atd_show_types']) libraries_atd_ignore_strings = FieldProperty(ITinyMCELibraries['libraries_atd_ignore_strings']) libraries_atd_service_url = FieldProperty(ITinyMCELibraries['libraries_atd_service_url']) link_using_uids = FieldProperty(ITinyMCEResourceTypes['link_using_uids']) allow_captioned_images = FieldProperty(ITinyMCEResourceTypes['allow_captioned_images']) rooted = FieldProperty(ITinyMCEResourceTypes['rooted']) containsobjects = FieldProperty(ITinyMCEResourceTypes['containsobjects']) containsanchors = FieldProperty(ITinyMCEResourceTypes['containsanchors']) linkable = FieldProperty(ITinyMCEResourceTypes['linkable']) imageobjects = FieldProperty(ITinyMCEResourceTypes['imageobjects']) plugins = FieldProperty(ITinyMCEResourceTypes['plugins']) customplugins = FieldProperty(ITinyMCEResourceTypes['customplugins']) link_shortcuts = FieldProperty(ITinyMCEContentBrowser['link_shortcuts']) image_shortcuts = FieldProperty(ITinyMCEContentBrowser['image_shortcuts']) num_of_thumb_columns = FieldProperty(ITinyMCEContentBrowser['num_of_thumb_columns']) thumbnail_size = FieldProperty(ITinyMCEContentBrowser['thumbnail_size']) def getImageScales(self, field=None, context=None): """Return the image sizes for the drawer""" if field is None: from Products.ATContentTypes.content.image import ATImage field = ATImage.schema['image'] # in Archetypes 
1.5.x ImageField doesn't actually provide IImageField o.O if not isinstance(field, ImageField) and not implementedOrProvidedBy(IImageField, field): raise TypeError("Can't retrieve image scale info for non-image field.") field_name = field.getName() sizes = field.getAvailableSizes(field) # Extract image dimensions from context. if context is not None: width, height = context.getField(field_name).getSize(context) else: width, height = 0, 0 scales = [{'value': '@@images/%s/%s' % (field_name, key), 'size': [value[0], value[1]], 'title': key.capitalize()} for key, value in sizes.items()] scales.sort(key=lambda x: x['size'][0]) scales.insert(0, {'value': '', 'title': _(u'Original'), 'size': [width, height]}) return scales security.declarePrivate('getEnabledButtons') def getEnabledButtons(self, context): buttons = [] # Get enabled buttons from control panel if self.toolbar_save: if getattr(aq_base(context), 'checkCreationFlag', None): if not context.checkCreationFlag(): buttons.append('save') if self.toolbar_cut: buttons.append('cut') if self.toolbar_copy: buttons.append('copy') if self.toolbar_paste: buttons.append('paste') if self.toolbar_pastetext: buttons.append('pastetext') if self.toolbar_pasteword: buttons.append('pasteword') if self.toolbar_undo: buttons.append('undo') if self.toolbar_redo: buttons.append('redo') if self.toolbar_search: buttons.append('search') if self.toolbar_replace: buttons.append('replace') if self.toolbar_style: buttons.append('style') if self.toolbar_bold: buttons.append('bold') if self.toolbar_italic: buttons.append('italic') if self.toolbar_underline: buttons.append('underline') if self.toolbar_strikethrough: buttons.append('strikethrough') if self.toolbar_sub: buttons.append('sub') if self.toolbar_sup: buttons.append('sup') if self.toolbar_forecolor: buttons.append('forecolor') if self.toolbar_backcolor: buttons.append('backcolor') if self.toolbar_justifyleft: buttons.append('justifyleft') if self.toolbar_justifycenter: 
buttons.append('justifycenter') if self.toolbar_justifyright: buttons.append('justifyright') if self.toolbar_justifyfull: buttons.append('justifyfull') if self.toolbar_bullist: buttons.append('bullist') if self.toolbar_numlist: buttons.append('numlist') if self.toolbar_definitionlist: buttons.append('definitionlist') if self.toolbar_outdent: buttons.append('outdent') if self.toolbar_indent: buttons.append('indent') if self.toolbar_image: buttons.append('image') if self.toolbar_media: buttons.append('media') if self.toolbar_link: buttons.append('link') if self.toolbar_unlink: buttons.append('unlink') if self.toolbar_anchor: buttons.append('anchor') if self.toolbar_tablecontrols: buttons.append('tablecontrols') if self.toolbar_charmap: buttons.append('charmap') if self.toolbar_hr: buttons.append('hr') if self.toolbar_advhr: buttons.append('advhr') if self.toolbar_insertdate: buttons.append('insertdate') if self.toolbar_inserttime: buttons.append('inserttime') if self.toolbar_emotions: buttons.append('emotions') if self.toolbar_nonbreaking: buttons.append('nonbreaking') if self.toolbar_pagebreak: buttons.append('pagebreak') if self.toolbar_print: buttons.append('print') if self.toolbar_preview: buttons.append('preview') if self.toolbar_spellchecker: buttons.append(self.libraries_spellchecker_choice) if self.toolbar_removeformat: buttons.append('removeformat') if self.toolbar_cleanup: buttons.append('cleanup') if self.toolbar_visualaid: buttons.append('visualaid') if self.toolbar_visualchars: buttons.append('visualchars') if self.toolbar_attribs: buttons.append('attribs') if self.toolbar_code: buttons.append('code') if self.toolbar_fullscreen: buttons.append('fullscreen') if self.customtoolbarbuttons is not None: buttons.extend(self.customtoolbarbuttons.split('\n')) # Return the buttons return buttons security.declarePrivate('translateButtonsFromKupu') def translateButtonsFromKupu(self, context, buttons): """Given a set of buttons in Kupu, translate them to a set for 
        TinyMCE toolbar """
        # Map each Kupu button id to its TinyMCE equivalent.  The 'bg-*'
        # entries are Kupu button-group markers with no TinyMCE counterpart
        # and are dropped; unknown buttons pass through unchanged.
        return_buttons = []
        for button in buttons:
            if button == 'save-button':
                # 'save' only makes sense on already-created content;
                # checkCreationFlag is an Archetypes API, hence the guard.
                try:
                    if not context.checkCreationFlag():
                        return_buttons.append('save')
                except AttributeError:
                    pass
            elif button == 'bg-basicmarkup':
                pass
            elif button == 'bold-button':
                return_buttons.append('bold')
            elif button == 'italic-button':
                return_buttons.append('italic')
            elif button == 'bg-supsuper-button':
                pass
            elif button == 'subscript':
                return_buttons.append('sub')
            elif button == 'supscript':
                return_buttons.append('sup')
            elif button == 'bg-colorchooser':
                pass
            elif button == 'forecolor-button':
                return_buttons.append('forecolor')
            elif button == 'hilitecolor-button':
                return_buttons.append('backcolor')
            elif button == 'bg-justify':
                pass
            elif button == 'justifyleft-button':
                return_buttons.append('justifyleft')
            elif button == 'justifycenter-button':
                return_buttons.append('justifycenter')
            elif button == 'justifyright-button':
                return_buttons.append('justifyright')
            elif button == 'bg-list':
                pass
            elif button == 'list-ol-addbutton':
                return_buttons.append('numlist')
            elif button == 'list-ul-addbutton':
                return_buttons.append('bullist')
            elif button == 'definitionlist':
                pass
            elif button == 'bg-indent':
                pass
            elif button == 'outdent-button':
                return_buttons.append('outdent')
            elif button == 'indent-button':
                return_buttons.append('indent')
            elif button == 'bg-drawers':
                pass
            elif button == 'imagelibdrawer-button':
                return_buttons.append('image')
            elif button == 'linklibdrawer-button' or button == 'linkdrawer-button' or button == 'anchors-button':
                # Several Kupu link drawers collapse onto the single
                # TinyMCE 'link' button; add it only once.
                if 'link' not in return_buttons:
                    return_buttons.append('link')
            elif button == 'embed-tab':
                return_buttons.append('media')
            elif button == 'manage-anchors-tab':
                return_buttons.append('anchor')
            elif button == 'toc-tab':
                pass
            elif button == 'tabledrawer-button':
                return_buttons.append('tablecontrols')
            elif button == 'bg-remove':
                pass
            elif button == 'removeimage-button':
                pass
            elif button == 'removelink-button':
                return_buttons.append('unlink')
            elif button == 'bg-undo':
                pass
            elif button == 'undo-button':
                return_buttons.append('undo')
            elif button == 'redo-button':
                return_buttons.append('redo')
            elif button == 'spellchecker':
                return_buttons.append('iespell')
            elif button == 'source':
                return_buttons.append('code')
            elif button == 'styles' or button == 'ulstyles' or button == 'olstyles':
                # All three Kupu style menus map to one 'style' entry.
                if 'style' not in return_buttons:
                    return_buttons.append('style')
            elif button == 'zoom':
                return_buttons.append('fullscreen')
            else:
                # Unknown button: keep it as-is (de-duplicated).
                if button not in return_buttons:
                    return_buttons.append(button)
        return return_buttons

    security.declarePrivate('getValidElements')

    def getValidElements(self):
        """Return valid (X)HTML elements and their attributes that can be
        used within TinyMCE.

        Builds a dict mapping tag name -> set of allowed attribute names
        (or '*' for "anything", as used by 'embed'), then intersects it
        with the site's safe_html settings and finally converts the sets
        to sorted lists, the shape expected by the TinyMCE
        ``valid_elements`` setting.
        """
        # Baseline XHTML tag/attribute whitelist.
        XHTML_TAGS = set(
            'a abbr acronym address area b base bdo big blockquote body br '
            'button caption cite code col colgroup dd del div dfn dl dt em '
            'fieldset form h1 h2 h3 h4 h5 h6 head hr html i img input ins kbd '
            'label legend li link map meta noscript object ol optgroup option '
            'p param pre q samp script select small span strong style sub sup '
            'table tbody td textarea tfoot th thead title tr tt ul var'.split())
        CORE_ATTRS = set(
            'id style title class'.split())
        I18N_ATTRS = set(
            'lang dir'.split())
        FOCUS_ATTRS = set(
            'accesskey tabindex'.split())
        COMMON_ATTRS = CORE_ATTRS | I18N_ATTRS

        valid_elements = {
            'a': COMMON_ATTRS | FOCUS_ATTRS | set('charset type name href hreflang rel rev shape coords target'.split()),
            'abbr': COMMON_ATTRS.copy(),
            'acronym': COMMON_ATTRS.copy(),
            'address': COMMON_ATTRS.copy(),
            'applet': CORE_ATTRS | set('codebase archive code object alt name width height align hspace vspace'.split()),
            'area': COMMON_ATTRS | FOCUS_ATTRS | set('shape coords href nohref alt target'.split()),
            'b': COMMON_ATTRS.copy(),
            'base': set('id href target'.split()),
            'bdo': CORE_ATTRS | set('lang dir'.split()),
            'big': COMMON_ATTRS.copy(),
            'blockquote': COMMON_ATTRS | set('cite'.split()),
            'body': COMMON_ATTRS | set('background bgcolor text link vlink alink'.split()),
            'br': CORE_ATTRS | set('clear'.split()),
            'button': COMMON_ATTRS | FOCUS_ATTRS | set('name value type disabled'.split()),
            'caption': COMMON_ATTRS | set('align'.split()),
            'center': COMMON_ATTRS.copy(),
            'cite': COMMON_ATTRS.copy(),
            'code': COMMON_ATTRS.copy(),
            'col': COMMON_ATTRS | set('span width align char charoff valign'.split()),
            'colgroup': COMMON_ATTRS | set('span width align char charoff valign'.split()),
            'dd': COMMON_ATTRS.copy(),
            'del': COMMON_ATTRS | set('cite datetime'.split()),
            'dfn': COMMON_ATTRS.copy(),
            'div': COMMON_ATTRS | set('align'.split()),
            'dl': COMMON_ATTRS | set('compact'.split()),
            'dt': COMMON_ATTRS.copy(),
            'em': COMMON_ATTRS.copy(),
            # '*' means any attribute is allowed on <embed>.
            'embed': '*',
            'fieldset': COMMON_ATTRS.copy(),
            'form': COMMON_ATTRS | set('action method name enctype accept accept-charset target'.split()),
            'h1': COMMON_ATTRS | set('align'.split()),
            'h2': COMMON_ATTRS | set('align'.split()),
            'h3': COMMON_ATTRS | set('align'.split()),
            'h4': COMMON_ATTRS | set('align'.split()),
            'h5': COMMON_ATTRS | set('align'.split()),
            'h6': COMMON_ATTRS | set('align'.split()),
            'head': I18N_ATTRS | set('id profile'.split()),
            'hr': COMMON_ATTRS | set('align noshade size width'.split()),
            'html': I18N_ATTRS | set('id xmlns'.split()),
            'i': COMMON_ATTRS.copy(),
            # 'ismap<ismap' is TinyMCE valid_elements syntax (attribute
            # with a restricted value list) — presumably intentional,
            # see TinyMCE's valid_elements documentation.
            'img': COMMON_ATTRS | set('src alt name longdesc height width usemap ismap<ismap align border hspace vspace'.split()),
            'input': COMMON_ATTRS | FOCUS_ATTRS | set('type name value checked disabled readonly size maxlength src alt usemap accept align'.split()),
            'ins': COMMON_ATTRS | set('cite datetime'.split()),
            'kbd': COMMON_ATTRS.copy(),
            'label': COMMON_ATTRS | FOCUS_ATTRS | set('for'.split()),
            'legend': COMMON_ATTRS | set('accesskey align'.split()),
            'li': COMMON_ATTRS | set('type'.split()),
            'link': COMMON_ATTRS | set('charset href hreflang type rel rev media target'.split()),
            'map': I18N_ATTRS | set('id title name class'.split()),
            'meta': I18N_ATTRS | set('id http-equiv name content scheme'.split()),
            'noscript': COMMON_ATTRS.copy(),
            'object': COMMON_ATTRS | set('declare classid codebase data type codetype archive standby height width usemap name tabindex align border hspace vspace'.split()),
            'ol': COMMON_ATTRS | set('compact type'.split()),
            'optgroup': COMMON_ATTRS | set('disabled label'.split()),
            'option': COMMON_ATTRS | set('selected disabled label value'.split()),
            'p': COMMON_ATTRS | set('align'.split()),
            'param': set('id name value valuetype type'.split()),
            'pre': COMMON_ATTRS | set('width'.split()),
            'q': COMMON_ATTRS | set('cite'.split()),
            'samp': COMMON_ATTRS.copy(),
            'script': set('id charset type language src defer'.split()),
            'select': COMMON_ATTRS | FOCUS_ATTRS | set('type name value checked disabled readonly size maxlength src alt usemap accept align multiple'.split()),
            'small': COMMON_ATTRS.copy(),
            'span': COMMON_ATTRS.copy(),
            'strong': COMMON_ATTRS.copy(),
            'style': I18N_ATTRS | set('id type media title'.split()),
            'sub': COMMON_ATTRS.copy(),
            'sup': COMMON_ATTRS.copy(),
            'table': COMMON_ATTRS | set('summary width border frame rules cellspacing cellpadding align bgcolor'.split()),
            'tbody': COMMON_ATTRS | set('align char charoff valign'.split()),
            'td': COMMON_ATTRS | set('align char charoff valign bgcolor abbr axis headers scope rowspan colspan nowrap width height'.split()),
            'textarea': COMMON_ATTRS | FOCUS_ATTRS | set('name rows cols disabled readonly'.split()),
            'tfoot': COMMON_ATTRS | set('align char charoff valign'.split()),
            'th': COMMON_ATTRS | set('align char charoff valign bgcolor abbr axis headers scope rowspan colspan nowrap width height'.split()),
            'thead': COMMON_ATTRS | set('align char charoff valign'.split()),
            'title': I18N_ATTRS | set('id'.split()),
            'tr': COMMON_ATTRS | set('align char charoff valign bgcolor'.split()),
            'tt': COMMON_ATTRS.copy(),
            'ul': COMMON_ATTRS | set('compact type'.split()),
            'var': COMMON_ATTRS.copy(),
            'iframe': COMMON_ATTRS | set('src name scrolling frameborder longdesc align height width marginheight marginwidth'.split())
        }

        # Get safe html transform
        safe_html = getattr(getToolByName(self, 'portal_transforms'), 'safe_html')

        # Get custom tags: site-configured tags not in the XHTML baseline.
        valid_tags = set(safe_html.get_parameter_value('valid_tags'))
        custom_tags = valid_tags - XHTML_TAGS

        # Add custom tags with the common attribute set.
        for custom_tag in custom_tags:
            if custom_tag not in valid_elements:
                valid_elements[custom_tag] = COMMON_ATTRS

        # Get kupu library tool filter
        # Settings are stored on safe_html transform in Plone 4 and
        # on kupu tool in Plone 3.
        kupu_library_tool = getToolByName(self, 'kupu_library_tool', None)

        stripped_combinations = []
        # Get stripped (tag, attribute) combinations; fall back to the
        # Plone 3 kupu tool when safe_html has no such parameter.
        try:
            sc = safe_html.get_parameter_value('stripped_combinations')
            for ta in sc.keys():
                # Keys and values are comma- and/or space-separated lists.
                tags = ta.replace(',', ' ').split()
                attributes = sc[ta].replace(',', ' ').split()
                stripped_combinations.append((tags, attributes))
        except(KeyError, AttributeError):
            if kupu_library_tool is not None:
                stripped_combinations = kupu_library_tool.get_stripped_combinations()

        # Strip combinations: remove the listed attributes from each tag.
        for (stripped_combination_tags, stripped_combination_attributes) in stripped_combinations:
            stripped_combination_attributes_set = set(stripped_combination_attributes)
            for stripped_combination_tag in stripped_combination_tags:
                if stripped_combination_tag in valid_elements:
                    valid_elements[stripped_combination_tag] -= stripped_combination_attributes_set

        # Remove to be stripped attributes
        try:
            stripped_attributes = set(safe_html.get_parameter_value('stripped_attributes'))
            #style_whitelist = safe_html.get_parameter_value('style_whitelist')
        except (KeyError, AttributeError):
            if kupu_library_tool is not None:
                stripped_attributes = set(kupu_library_tool.get_stripped_attributes())
                #style_whitelist = kupu_library_tool.getStyleWhitelist()
            else:
                stripped_attributes = set()
                #style_whitelist = ()

        #style_attribute = "style"
        #if len(style_whitelist) > 0:
            #style_attribute = 'style<' + '?'.join(style_whitelist)

        # Remove elements which are not in valid_tags.
        # NOTE: Python 2 — .keys() returns a list, so deleting while
        # iterating is safe here.
        for valid_element in valid_elements.keys():
            if valid_element not in valid_tags:
                del valid_elements[valid_element]
            else:
                # '*' (embed) means "all attributes"; nothing to subtract.
                if valid_elements[valid_element] != '*':
                    valid_elements[valid_element] -= stripped_attributes
                #if 'style' in valid_elements[valid_element]:
                #    valid_elements[valid_element].remove('style')
                #    valid_elements[valid_element].add(style_attribute)

        # Convert sets to sorted lists for stable output.
        for valid_element in valid_elements.keys():
            valid_elements[valid_element] = sorted(valid_elements[valid_element])

        # p needs to be prepended with # to allow empty p tags
        # http://www.tinymce.com/wiki.php/Configuration:valid_elements
        valid_elements['#p'] = valid_elements.pop('p')

        return valid_elements

    security.declarePrivate('getPlugins')

    def getPlugins(self):
        """ See ITinyMCE interface.

        Return the comma-separated TinyMCE plugin list: the configured
        plugins plus the selected spellchecker, custom plugins (the part
        before any '|', which separates name from URL), and the
        contextmenu/autoresize plugins when those options are enabled.
        """
        plugins = self.plugins[:]
        sp = self.libraries_spellchecker_choice
        # "browser" means use native browser spellchecking — no plugin.
        if sp and sp != "browser":
            plugins.append(sp)
        if self.customplugins is not None:
            for plugin in self.customplugins.splitlines():
                if '|' in plugin:
                    plugin = plugin.split('|', 1)[0]
                if plugin not in plugins:
                    plugins.append(plugin)
        if self.contextmenu:
            plugins.append('contextmenu')
        if self.autoresize:
            plugins.append('autoresize')
        return ','.join(plugins)

    security.declarePrivate('getStyles')

    def getStyles(self, styles, labels):
        """ See ITinyMCE interface.

        Build the ``theme_advanced_styles`` value: a JS-array string of
        style descriptors grouped into Text / Selection / Tables / Lists /
        Print buckets.  Each input style is "title|tag|className"; the
        bucket is chosen from the tag.  A bucket is emitted only when it
        has at least one real entry beyond its title row.
        """
        h = {'Text': [], 'Selection': [], 'Tables': [], 'Lists': [], 'Print': []}
        styletype = ""

        # Push title rows (group headers).
        h['Text'].append('{ title: "Text", tag: "", className: "-", type: "Text" }')
        h['Selection'].append('{ title: "Selection", tag: "", className: "-", type: "Selection" }')
        h['Tables'].append('{ title: "Tables", tag: "table", className: "-", type: "Tables" }')
        h['Lists'].append('{ title: "Lists", tag: "ul", className: "-", type: "Lists" }')
        h['Lists'].append('{ title: "Lists", tag: "ol", className: "-", type: "Lists" }')
        h['Lists'].append('{ title: "Lists", tag: "dl", className: "-", type: "Lists" }')
        h['Print'].append('{ title: "Print", tag: "", className: "-", type: "Print" }')

        # Add translated default entries.
        h['Text'].append('{ title: "' + labels['label_paragraph'] + '", tag: "p", className: "-", type: "Text" }')
        h['Selection'].append('{ title: "' + labels['label_styles'] + '", tag: "", className: "-", type: "Selection" }')
        h['Tables'].append('{ title: "' + labels['label_plain_cell'] + '", tag: "td", className: "-", type: "Tables" }')
        h['Lists'].append('{ title: "' + labels['label_lists'] + '", tag: "dl", className: "-", type: "Lists" }')

        for i in styles:
            e = i.split('|')
            # Pad to at least [title, tag, className].
            while len(e) <= 2:
                e.append("")
            if e[1].lower() in ('del', 'ins', 'span'):
                styletype = "Selection"
            elif e[1].lower() in ('table', 'tr', 'td', 'th'):
                styletype = "Tables"
            elif e[1].lower() in ('ul', 'ol', 'li', 'dt', 'dd', 'dl'):
                styletype = "Lists"
            else:
                styletype = "Text"
            # The pageBreak class always goes to the Print group.
            if e[2] == "pageBreak":
                styletype = "Print"
            h[styletype].append('{ title: "' + e[0] + '", tag: "' + e[1] + '", className: "' + e[2] + '", type: "' + styletype + '" }')

        # Emit only groups that gained entries beyond their header row.
        a = []
        if len(h['Text']) > 1:
            a.extend(h['Text'])
        if len(h['Selection']) > 1:
            a.extend(h['Selection'])
        if len(h['Tables']) > 1:
            a.extend(h['Tables'])
        if len(h['Lists']) > 1:
            a.extend(h['Lists'])
        if len(h['Print']) > 1:
            a.extend(h['Print'])
        return '[' + ','.join(a) + ']'

    security.declarePrivate('getToolbars')

    def getToolbars(self, config):
        """Calculate number of toolbar rows from length of buttons.

        Greedily packs config['buttons'] into up to four rows of at most
        config['toolbar_width'] pixels, using per-button widths from
        BUTTON_WIDTHS (23px default).  Buttons beyond the fourth row are
        silently dropped.  Returns a list of four comma-joined strings.
        """
        t = [[], [], [], []]
        cur_toolbar = 0
        cur_x = 0
        for i in config['buttons']:
            button_width = BUTTON_WIDTHS.get(i, 23)
            if cur_x + button_width > int(config['toolbar_width']):
                # Row full: start the next row with this button.
                cur_x = button_width
                cur_toolbar += 1
            else:
                cur_x += button_width
            if cur_toolbar <= 3:
                t[cur_toolbar].append(i)
        return [','.join(toolbar) for toolbar in t]

    security.declareProtected('View', 'getContentType')

    def getContentType(self, object=None, fieldname=None):
        """Return the mime type of the named field on `object`.

        Supports Archetypes fields (via the field's getContentType) and
        plone.app.textfield RichTextValues (via the value's mimeType);
        falls back to 'text/html'.
        """
        context = aq_base(object)
        if IBaseObject.providedBy(context):
            # support Archetypes fields
            if fieldname is None:
                field = context.getPrimaryField()
            else:
                field = context.getField(fieldname) or getattr(context, fieldname, None)
            if field and hasattr(aq_base(field), 'getContentType'):
                return field.getContentType(context)
        elif '.widgets.' in fieldname:
            # support plone.app.textfield RichTextValues: strip the z3c.form
            # widget prefix to get the schema field name.
            fieldname = fieldname.split('.widgets.')[-1]

        field = getattr(context, fieldname, None)
        mimetype = getattr(field, 'mimeType', None)
        if mimetype is not None:
            return mimetype

        return 'text/html'

    security.declareProtected('View', 'getConfiguration')

    def getConfiguration(self, context=None, field=None, request=None,
                         script_url=None):
        """Return JSON configuration that is passed to javascript tinymce
        constructor.

        Gathers widget overrides, site safe-html/kupu filter settings,
        translated UI labels, styles, buttons/toolbars, sizing, language
        and spellchecker options into one dict and returns it serialized
        with json.dumps.
        """
        results = {}

        # Get widget attributes (per-field overrides of site defaults).
        widget = getattr(field, 'widget', None)
        filter_buttons = getattr(widget, 'filter_buttons', None)
        allow_buttons = getattr(widget, 'allow_buttons', None)
        redefine_parastyles = getattr(widget, 'redefine_parastyles', None)
        parastyles = getattr(widget, 'parastyles', None)
        rooted = getattr(widget, 'rooted', False)
        toolbar_width = getattr(widget, 'toolbar_width', self.toolbar_width)

        # Get safe html transform
        safe_html = getattr(getToolByName(self, 'portal_transforms'), 'safe_html')

        # Get kupu library tool filter
        # Settings are stored on safe_html transform in Plone 4 and
        # on kupu tool in Plone 3.
        kupu_library_tool = getToolByName(self, 'kupu_library_tool', None)

        # Inline style whitelist, with Plone 3 fallback.
        try:
            style_whitelist = safe_html.get_parameter_value('style_whitelist')
        except (KeyError, AttributeError):
            if kupu_library_tool is not None:
                style_whitelist = kupu_library_tool.getStyleWhitelist()
            else:
                style_whitelist = []
        results['valid_inline_styles'] = ','.join(style_whitelist)  # tinymce format

        # Replacing some hardcoded translations
        labels = {}
        labels['label_browseimage'] = translate(_('Image Browser'), context=request)
        labels['label_browselink'] = translate(_('Link Browser'), context=request)
        labels['label_addnewimage'] = translate(_('Add new Image'), context=request)
        labels['label_addnewfile'] = translate(_('Add new File'), context=request)
        labels['label_styles'] = translate(_('(remove style)'), context=request)
        labels['label_paragraph'] = translate(_('Normal paragraph'), context=request)
        labels['label_plain_cell'] = translate(_('Plain cell'), context=request)
        labels['label_style_ldots'] = translate(_('Style...'), context=request)
        labels['label_text'] = translate(_('Text'), context=request)
        labels['label_tables'] = translate(_('Tables'), context=request)
        labels['label_selection'] = translate(_('Selection'), context=request)
        labels['label_lists'] = translate(_('Lists'), context=request)
        labels['label_print'] = translate(_('Print'), context=request)
        labels['label_no_items'] = translate(_('No items in this folder'), context=request)
        labels['label_no_anchors'] = translate(_('No anchors in this page'), context=request)
        labels['label_browser'] = translate(_('Browser'), context=request)
        labels['label_shortcuts'] = translate(_('Shortcuts'), context=request)
        labels['label_search_results'] = translate(_('Search results:'), context=request)
        labels['label_internal_path'] = translate(_('You are here:'), context=request)
        results['labels'] = labels

        # Add styles to results ("title|tag|className" strings).
        results['styles'] = []
        table_styles = []
        if not redefine_parastyles:
            if isinstance(self.tablestyles, StringTypes):
                for tablestyle in self.tablestyles.split('\n'):
                    if not tablestyle:
                        # empty line
                        continue
                    tablestylefields = tablestyle.split('|')
                    tablestyletitle = tablestylefields[0]
                    tablestyleid = tablestylefields[1]
                    if tablestyleid == 'plain':
                        # Do not duplicate the default style hardcoded in the
                        # table.htm.pt
                        continue
                    if request is not None:
                        tablestyletitle = translate(_(tablestylefields[0]), context=request)
                    results['styles'].append(tablestyletitle + '|table|' + tablestyleid)
                    table_styles.append(tablestyletitle + '=' + tablestyleid)
            if isinstance(self.styles, StringTypes):
                styles = []
                for style in self.styles.split('\n'):
                    if not style:
                        # empty line
                        continue
                    stylefields = style.split('|')
                    styletitle = stylefields[0]
                    if request is not None:
                        styletitle = translate(_(stylefields[0]), context=request)
                    merge = styletitle + '|' + '|'.join(stylefields[1:])
                    styles.append(merge)
                results['styles'].extend(styles)
        results['table_styles'] = ';'.join(table_styles)  # tinymce config

        if parastyles is not None:
            results['styles'].extend(parastyles)

        # styles is consumed by getStyles() below, not sent to the client.
        styles = results.pop('styles')

        # Get buttons from control panel
        results['buttons'] = self.getEnabledButtons(context=context)

        # Filter buttons per widget allow/filter lists (Kupu names are
        # translated to TinyMCE names first).
        # NOTE: Python 2 — filter() returns a list here.
        if allow_buttons is not None:
            allow_buttons = self.translateButtonsFromKupu(context=context, buttons=allow_buttons)
            results['buttons'] = filter(lambda x: x in results['buttons'], allow_buttons)
        if filter_buttons is not None:
            filter_buttons = self.translateButtonsFromKupu(context=context, buttons=filter_buttons)
            results['buttons'] = filter(lambda x: x not in filter_buttons, results['buttons'])

        # Get valid html elements, serialized as "tag[attr|attr|...]".
        valid_elements = self.getValidElements()
        results['valid_elements'] = ','.join(["%s[%s]" % (key, '|'.join(value)) for key, value in valid_elements.iteritems()])

        results['customplugins'] = self.customplugins.splitlines()

        # Set toolbar_location
        if self.toolbar_external:
            results['theme_advanced_toolbar_location'] = 'external'
        else:
            results['theme_advanced_toolbar_location'] = 'top'

        # Autoresize and manual resizing are mutually exclusive.
        if self.autoresize:
            results['theme_advanced_path_location'] = 'none'
            results['theme_advanced_resizing_use_cookie'] = False
            results['theme_advanced_resizing'] = False
            results['autoresize'] = True
        else:
            results['theme_advanced_path_location'] = 'bottom'
            results['theme_advanced_resizing_use_cookie'] = True
            results['theme_advanced_resizing'] = self.resizing
            results['autoresize'] = False

        # Percentage widths cannot be resized horizontally.
        if '%' in self.editor_width:
            results['theme_advanced_resize_horizontal'] = False
        else:
            results['theme_advanced_resize_horizontal'] = True
        try:
            results['theme_advanced_source_editor_width'] = int(self.editor_width)
        except (TypeError, ValueError):
            results['theme_advanced_source_editor_width'] = 600
        try:
            results['theme_advanced_source_editor_height'] = int(self.editor_height)
        except (TypeError, ValueError):
            results['theme_advanced_source_editor_height'] = 400
        try:
            results['toolbar_width'] = int(toolbar_width)
        except (TypeError, ValueError):
            results['toolbar_width'] = 440

        portal_state = context.restrictedTraverse('@@plone_portal_state')
        # is_rtl handles every possible setting as far as RTL/LTR is concerned
        # pass that to tinmyce
        results['directionality'] = portal_state.is_rtl() and 'rtl' or 'ltr'

        portal = portal_state.portal()
        request = context.REQUEST
        portal_path = portal.getPhysicalPath()
        results['portal_url'] = request.physicalPathToURL(portal_path)
        results['navigation_root_url'] = portal_state.navigation_root_url()

        if self.content_css and self.content_css.strip() != "":
            results['content_css'] = self.content_css
        else:
            results['content_css'] = '/'.join([results['portal_url'], self.getId(), "@@tinymce-getstyle"])

        results['link_using_uids'] = self.link_using_uids
        results['contextmenu'] = self.contextmenu
        results['entity_encoding'] = self.entity_encoding

        if script_url:
            results['script_url'] = script_url

        if self.allow_captioned_images:
            results['allow_captioned_images'] = True
        else:
            results['allow_captioned_images'] = False

        # Rooted if set site-wide or on the widget.
        if self.rooted or rooted:
            results['rooted'] = True
        else:
            results['rooted'] = False

        props = getToolByName(self, 'portal_properties')
        livesearch = props.site_properties.getProperty('enable_livesearch', False)
        if livesearch:
            results['livesearch'] = True
        else:
            results['livesearch'] = False

        # Languages for which TinyMCE translations ship.
        AVAILABLE_LANGUAGES = set(
            'sq ar hy az eu be bn nb bs br bg ca ch zh hr cs da dv nl en et fi fr gl '
            'ka de el gu he hi hu is id ia it ja ko lv lt lb mk ms ml mn se no nn fa '
            'pl pt ps ro ru sc sr ii si sk sl es sv ta tt te th tr tw uk ur cy vi zu'.split())
        if 'LANGUAGE' in context.REQUEST:
            results['language'] = context.REQUEST.LANGUAGE[:2]
            if results['language'] not in AVAILABLE_LANGUAGES:
                results['language'] = "en"
        else:
            results['language'] = "en"

        # document_base_url: for content still being created
        # (checkCreationFlag), links must resolve relative to the eventual
        # parent, not the temporary factory location.
        try:
            results['document_url'] = context.absolute_url()
            if getattr(aq_base(context), 'checkCreationFlag', None):
                parent = aq_parent(aq_inner(context))
                if context.checkCreationFlag():
                    # In the portal_factory: step out of the two factory
                    # traversal levels to reach the real container.
                    parent = aq_parent(aq_parent(parent))
                    results['document_base_url'] = parent.absolute_url() + "/"
                else:
                    if IFolderish.providedBy(context):
                        results['document_base_url'] = context.absolute_url() + "/"
                    else:
                        results['document_base_url'] = parent.absolute_url() + "/"
            else:
                results['document_base_url'] = results['portal_url'] + "/"
        except AttributeError:
            results['document_base_url'] = results['portal_url'] + "/"
            results['document_url'] = results['portal_url']

        # Get Library options
        results['gecko_spellcheck'] = self.libraries_spellchecker_choice == 'browser'

        # Content Browser
        shortcuts_dict = dict(getUtilitiesFor(ITinyMCEShortcut))
        results['link_shortcuts_html'] = []
        results['image_shortcuts_html'] = []
        results['num_of_thumb_columns'] = self.num_of_thumb_columns
        results['thumbnail_size'] = self.thumbnail_size
        for name in self.link_shortcuts:
            results['link_shortcuts_html'].extend(shortcuts_dict.get(name).render(context))
        for name in self.image_shortcuts:
            results['image_shortcuts_html'].extend(shortcuts_dict.get(name).render(context))

        # init vars specific for "After the Deadline" spellchecker
        mtool = getToolByName(portal, 'portal_membership')
        member = mtool.getAuthenticatedMember()
        results['atd_rpc_id'] = 'Products.TinyMCE-' + (member.getId() or '')  # None when Anonymous User
        results['atd_rpc_url'] = "%s/@@" % portal.absolute_url()
        results['atd_show_types'] = self.libraries_atd_show_types.strip().replace('\n', ',')
        results['atd_ignore_strings'] = self.libraries_atd_ignore_strings.strip().replace('\n', ',')

        # generic configuration
        results['mode'] = "exact"
        results['theme'] = "advanced"
        results['skin'] = "plone"
        results['inlinepopups_skin'] = "plonepopup"
        results['body_class'] = "documentContent"
        results['body_id'] = "content"
        results['table_firstline_th'] = True
        results['fix_list_elements'] = False
        # allow embed tag if user removes it from
        # list of nasty tags - see #10681
        results['media_strict'] = False
        results['theme_advanced_path'] = False
        results['theme_advanced_toolbar_align'] = "left"
        results['plugins'] = self.getPlugins()
        results['theme_advanced_styles'] = self.getStyles(styles, labels)
        results['theme_advanced_buttons1'], results['theme_advanced_buttons2'], \
            results['theme_advanced_buttons3'], results['theme_advanced_buttons4'] = self.getToolbars(results)

        # Optional custom TinyMCE "formats" JSON blob.
        if self.formats and self.formats.strip():
            results['formats'] = json.loads(self.formats)

        return json.dumps(results)
class BranchMergeProposalJobSource(BaseRunnableJobSource):
    """Provide a job source for all merge proposal jobs.

    Only one job for any particular merge proposal is returned.
    """

    classProvides(IBranchMergeProposalJobSource)

    @staticmethod
    def get(job_id):
        """Get a job by id.

        :return: the BranchMergeProposalJob with the specified id, as the
            current BranchMergeProposalJobDereived subclass.
        :raises: SQLObjectNotFound if there is no job with the specified id,
            or its job_type does not match the desired subclass.
        """
        job = BranchMergeProposalJob.get(job_id)
        return job.makeDerived()

    @staticmethod
    def iterReady(job_type=None):
        """Iterate the ready jobs, at most one per merge proposal.

        :param job_type: optionally restrict results to a single
            BranchMergeProposalJob job_type.
        :return: a list of derived job instances.
        """
        # Imported here to avoid a circular import with the branch model.
        from lp.code.model.branch import Branch
        # Aliases so source and target branches can both join Branch.
        SourceBranch = ClassAlias(Branch)
        TargetBranch = ClassAlias(Branch)
        clauses = [
            BranchMergeProposalJob.job == Job.id,
            Job._status.is_in([JobStatus.WAITING, JobStatus.RUNNING]),
            BranchMergeProposalJob.branch_merge_proposal ==
                BranchMergeProposal.id,
            BranchMergeProposal.source_branch == SourceBranch.id,
            BranchMergeProposal.target_branch == TargetBranch.id,
            ]
        if job_type is not None:
            clauses.append(BranchMergeProposalJob.job_type == job_type)
        jobs = IMasterStore(Branch).find(
            (BranchMergeProposalJob, Job, BranchMergeProposal, SourceBranch,
             TargetBranch),
            And(*clauses))
        # Order by the job status first (to get running before waiting), then
        # the date_created, then job type. This should give us all creation
        # jobs before comment jobs.
        jobs = jobs.order_by(
            Desc(Job._status), Job.date_created,
            Desc(BranchMergeProposalJob.job_type))
        # Now only return one job for any given merge proposal.
        ready_jobs = []
        seen_merge_proposals = set()
        for bmp_job, job, bmp, source, target in jobs:
            # If we've seen this merge proposal already, skip this job.
            # (Because of the ordering above, the first job seen for a
            # proposal is the preferred one.)
            if bmp.id in seen_merge_proposals:
                continue
            # We have now seen this merge proposal.
            seen_merge_proposals.add(bmp.id)
            # If the job is running, then skip it
            if job.status == JobStatus.RUNNING:
                continue
            derived_job = bmp_job.makeDerived()
            # If the job is an update preview diff, then check that it is
            # ready.
            if IUpdatePreviewDiffJob.providedBy(derived_job):
                try:
                    derived_job.checkReady()
                except (UpdatePreviewDiffNotReady, BranchHasPendingWrites):
                    # If the job was created under 15 minutes ago wait a bit.
                    minutes = (
                        config.codehosting.update_preview_diff_ready_timeout)
                    cut_off_time = (
                        datetime.now(pytz.UTC) - timedelta(minutes=minutes))
                    if job.date_created > cut_off_time:
                        continue
            ready_jobs.append(derived_job)
        return ready_jobs
class SourcePackage(BugTargetBase, HasCodeImportsMixin,
                    HasTranslationImportsMixin, HasTranslationTemplatesMixin,
                    HasBranchesMixin, HasMergeProposalsMixin,
                    HasDriversMixin):
    """A source package, e.g. apache2, in a distroseries.

    This object is not a true database object, but rather attempts to
    represent the concept of a source package in a distro series, with
    links to the relevant database objects.
    """

    implements(
        IBugSummaryDimension, ISourcePackage, IHasBuildRecords,
        ISeriesBugTarget)

    classProvides(ISourcePackageFactory)

    def __init__(self, sourcepackagename, distroseries):
        # We store the ID of the sourcepackagename and distroseries
        # simply because Storm can break when accessing them
        # with implicit flush is blocked (like in a permission check when
        # storing the object in the permission cache).
        self.sourcepackagenameID = sourcepackagename.id
        self.sourcepackagename = sourcepackagename
        self.distroseries = distroseries
        self.distroseriesID = distroseries.id

    @classmethod
    def new(cls, sourcepackagename, distroseries):
        """See `ISourcePackageFactory`."""
        return cls(sourcepackagename, distroseries)

    def __repr__(self):
        return '<%s %r %r %r>' % (
            self.__class__.__name__, self.distribution, self.distroseries,
            self.sourcepackagename)

    def _getPublishingHistory(self, version=None, include_status=None,
                              exclude_status=None, order_by=None):
        """Build a query and return a list of SourcePackagePublishingHistory.

        This is mainly a helper function for this class so that code is
        not duplicated. include_status and exclude_status must be a
        sequence.
        """
        clauses = []
        clauses.append(
            """SourcePackagePublishingHistory.sourcepackagerelease =
               SourcePackageRelease.id AND
               SourcePackagePublishingHistory.sourcepackagename = %s AND
               SourcePackagePublishingHistory.distroseries = %s AND
               SourcePackagePublishingHistory.archive IN %s
            """ % sqlvalues(
                self.sourcepackagename, self.distroseries,
                self.distribution.all_distro_archive_ids))
        if version:
            clauses.append(
                "SourcePackageRelease.version = %s" % sqlvalues(version))

        if include_status:
            # sqlvalues needs a real list for the IN clause.
            if not isinstance(include_status, list):
                include_status = list(include_status)
            clauses.append(
                "SourcePackagePublishingHistory.status IN %s"
                % sqlvalues(include_status))

        if exclude_status:
            if not isinstance(exclude_status, list):
                exclude_status = list(exclude_status)
            clauses.append(
                "SourcePackagePublishingHistory.status NOT IN %s"
                % sqlvalues(exclude_status))

        query = " AND ".join(clauses)

        if not order_by:
            # Default: newest publication first.
            order_by = '-datepublished'

        return SourcePackagePublishingHistory.select(
            query, orderBy=order_by, clauseTables=['SourcePackageRelease'],
            prejoinClauseTables=['SourcePackageRelease'])

    def _getFirstPublishingHistory(self, version=None, include_status=None,
                                   exclude_status=None, order_by=None):
        """As _getPublishingHistory, but just returns the first item."""
        try:
            package = self._getPublishingHistory(
                version, include_status, exclude_status, order_by)[0]
        except IndexError:
            return None
        else:
            return package

    @property
    def currentrelease(self):
        # Delegates to the distroseries' bulk lookup, keyed by this
        # source package.
        releases = self.distroseries.getCurrentSourceReleases(
            [self.sourcepackagename])
        return releases.get(self)

    def __getitem__(self, version):
        """See `ISourcePackage`."""
        latest_package = self._getFirstPublishingHistory(version=version)
        if latest_package:
            return DistroSeriesSourcePackageRelease(
                self.distroseries, latest_package.sourcepackagerelease)
        else:
            return None

    @property
    def path(self):
        """See `ISourcePackage`."""
        return '/'.join([
            self.distribution.name,
            self.distroseries.name,
            self.sourcepackagename.name])

    @property
    def displayname(self):
        # e.g. "apache2 in Ubuntu Precise".
        return "%s in %s %s" % (
            self.sourcepackagename.name, self.distribution.displayname,
            self.distroseries.displayname)

    @property
    def bugtargetdisplayname(self):
        """See IBugTarget."""
        return "%s (%s)" % (self.name, self.distroseries.fullseriesname)

    @property
    def bugtargetname(self):
        """See `IBugTarget`."""
        return "%s (%s)" % (self.name, self.distroseries.fullseriesname)

    @property
    def bugtarget_parent(self):
        """See `ISeriesBugTarget`."""
        return self.distribution_sourcepackage

    @property
    def title(self):
        """See `ISourcePackage`."""
        return smartquote('"%s" source package in %s') % (
            self.sourcepackagename.name, self.distroseries.displayname)

    @property
    def summary(self):
        """See `ISourcePackage`."""
        # Summarize the binaries produced by the most recent release;
        # None when there are no releases or no sample binaries.
        releases = self.releases
        if len(releases) == 0:
            return None
        current = releases[0]
        name_summaries = [
            '%s: %s' % (binary.name, binary.summary)
            for binary in current.sample_binary_packages]
        if name_summaries == []:
            return None
        return '\n'.join(name_summaries)

    @property
    def distribution(self):
        # Convenience shortcut to the owning distribution.
        return self.distroseries.distribution

    @property
    def format(self):
        # Source package format of the current release, or None when the
        # package has no current release.
        if not self.currentrelease:
            return None
        return self.currentrelease.format

    @property
    def releases(self):
        """See `ISourcePackage`."""
        packages = self._getPublishingHistory(
            order_by=["SourcePackageRelease.version",
                      "SourcePackagePublishingHistory.datepublished"])
        return [
            DistributionSourcePackageRelease(
                distribution=self.distribution,
                sourcepackagerelease=package.sourcepackagerelease)
            for package in packages]

    @property
    def distinctreleases(self):
        """Return all distinct `SourcePackageReleases` for this sourcepackage.

        The results are ordered by descending version.
        """
        return IStore(SourcePackageRelease).using(
            SourcePackageRelease,
            Join(
                SourcePackagePublishingHistory,
                SourcePackagePublishingHistory.sourcepackagereleaseID ==
                    SourcePackageRelease.id)).find(
            SourcePackageRelease,
            SourcePackagePublishingHistory.archiveID.is_in(
                self.distribution.all_distro_archive_ids),
            SourcePackagePublishingHistory.distroseries == self.distroseries,
            SourcePackagePublishingHistory.sourcepackagename ==
                self.sourcepackagename).config(distinct=True).order_by(
                    Desc(SourcePackageRelease.version))

    @property
    def name(self):
        # Plain package name string, e.g. "apache2".
        return self.sourcepackagename.name

    @property
    def productseries(self):
        # See if we can find a relevant packaging record
        packaging = self.direct_packaging
        if packaging is None:
            return None
        return packaging.productseries

    @property
    def direct_packaging(self):
        """See `ISourcePackage`."""
        store = Store.of(self.sourcepackagename)
        return store.find(
            Packaging,
            sourcepackagename=self.sourcepackagename,
            distroseries=self.distroseries).one()

    @property
    def packaging(self):
        """See `ISourcePackage`"""
        # First we look to see if there is packaging data for this
        # distroseries and sourcepackagename. If not, we look up through
        # parent distroseries.
        result = self.direct_packaging
        if result is not None:
            return result

        # If we have a parent distroseries, try that.
        # NOTE(review): implicitly returns None when there is no
        # previous series.
        if self.distroseries.previous_series is not None:
            sp = SourcePackage(
                sourcepackagename=self.sourcepackagename,
                distroseries=self.distroseries.previous_series)
            return sp.packaging

    @property
    def published_by_pocket(self):
        """See `ISourcePackage`."""
        result = self._getPublishingHistory(
            include_status=[PackagePublishingStatus.PUBLISHED])
        # create the dictionary with the set of pockets as keys
        thedict = {}
        for pocket in PackagePublishingPocket.items:
            thedict[pocket] = []
        # add all the sourcepackagereleases in the right place
        for spr in result:
            thedict[spr.pocket].append(DistroSeriesSourcePackageRelease(
                spr.distroseries, spr.sourcepackagerelease))
        return thedict

    @property
    def development_version(self):
        """See `ISourcePackage`."""
        return self.__class__(
            self.sourcepackagename, self.distribution.currentseries)

    @property
    def distribution_sourcepackage(self):
        """See `ISourcePackage`."""
        return self.distribution.getSourcePackage(self.sourcepackagename)

    @property
    def bug_reporting_guidelines(self):
        """See `IBugTarget`."""
        return self.distribution.bug_reporting_guidelines

    @property
    def bug_reported_acknowledgement(self):
        """See `IBugTarget`."""
        return self.distribution.bug_reported_acknowledgement

    @property
    def enable_bugfiling_duplicate_search(self):
        """See `IBugTarget`."""
        return (
            self.distribution_sourcepackage.
                enable_bugfiling_duplicate_search)

    def _customizeSearchParams(self, search_params):
        """Customize `search_params` for this source package."""
        search_params.setSourcePackage(self)

    def _getOfficialTagClause(self):
        # Official tags are defined at the distroseries level.
        return self.distroseries._getOfficialTagClause()

    @property
    def official_bug_tags(self):
        """See `IHasBugs`."""
        return self.distroseries.official_bug_tags

    def getUsedBugTagsWithOpenCounts(self, user, tag_limit=0,
                                     include_tags=None):
        """See IBugTarget."""
        # Circular fail.
        from lp.bugs.model.bugsummary import BugSummary
        return get_bug_tags_open_count(
            And(BugSummary.distroseries == self.distroseries,
                BugSummary.sourcepackagename == self.sourcepackagename),
            user, tag_limit=tag_limit, include_tags=include_tags)

    @property
    def drivers(self):
        """See `IHasDrivers`."""
        return self.distroseries.drivers

    @property
    def owner(self):
        """See `IHasOwner`."""
        return self.distroseries.owner

    @property
    def pillar(self):
        """See `IBugTarget`."""
        return self.distroseries.distribution

    @property
    def series(self):
        """See `ISeriesBugTarget`."""
        return self.distroseries

    def getBugSummaryContextWhereClause(self):
        """See BugTargetBase."""
        # Circular fail.
        from lp.bugs.model.bugsummary import BugSummary
        return And(
            BugSummary.distroseries == self.distroseries,
            BugSummary.sourcepackagename == self.sourcepackagename)

    def setPackaging(self, productseries, owner):
        """See `ISourcePackage`."""
        target = self.direct_packaging
        if target is not None:
            if target.productseries == productseries:
                # Already linked to this productseries: nothing to do.
                return
            # Delete the current packaging and create a new one so
            # that the translation sharing jobs are started.
            self.direct_packaging.destroySelf()
        PackagingUtil.createPackaging(
            distroseries=self.distroseries,
            sourcepackagename=self.sourcepackagename,
            productseries=productseries,
            owner=owner,
            packaging=PackagingType.PRIME)
        # and make sure this change is immediately available
        flush_database_updates()

    def setPackagingReturnSharingDetailPermissions(self, productseries,
                                                   owner):
        """See `ISourcePackage`."""
        self.setPackaging(productseries, owner)
        return self.getSharingDetailPermissions()

    def getSharingDetailPermissions(self):
        # Compute what the current user may change on the translation
        # sharing details page; everything defaults to False for the
        # anonymous user.
        user = getUtility(ILaunchBag).user
        productseries = self.productseries
        permissions = {
            'user_can_change_product_series': False,
            'user_can_change_branch': False,
            'user_can_change_translation_usage': False,
            'user_can_change_translations_autoimport_mode': False}
        if user is None:
            pass
        elif productseries is None:
            permissions['user_can_change_product_series'] = user.canAccess(
                self, 'setPackaging')
        else:
            permissions.update({
                'user_can_change_product_series':
                    self.direct_packaging.userCanDelete(),
                'user_can_change_branch':
                    user.canWrite(productseries, 'branch'),
                'user_can_change_translation_usage':
                    user.canWrite(
                        productseries.product, 'translations_usage'),
                'user_can_change_translations_autoimport_mode':
                    user.canWrite(
                        productseries, 'translations_autoimport_mode'),
                })
        return permissions

    def deletePackaging(self):
        """See `ISourcePackage`."""
        if self.direct_packaging is None:
            return
        self.direct_packaging.destroySelf()

    def __hash__(self):
        """See `ISourcePackage`."""
        # Hash on the stored IDs so equal packages hash equally.
        return hash(self.distroseriesID) ^ hash(self.sourcepackagenameID)

    def __eq__(self, other):
        """See `ISourcePackage`."""
        return (
            (ISourcePackage.providedBy(other)) and
            (self.distroseries.id == other.distroseries.id) and
            (self.sourcepackagename.id == other.sourcepackagename.id))

    def __ne__(self, other):
        """See `ISourcePackage`."""
        return not self.__eq__(other)

    def getBuildRecords(self, build_state=None, name=None, pocket=None,
                        arch_tag=None, user=None, binary_only=True):
        """See `IHasBuildRecords`"""
        # Ignore
"user", since it would not make any difference to the # records returned here (private builds are only in PPA right # now and this method only returns records for SPRs in a # distribution). # We also ignore the name parameter (required as part of the # IHasBuildRecords interface) and use our own name and the # binary_only parameter as a source package can only have # binary builds. clauseTables = [ 'SourcePackageRelease', 'SourcePackagePublishingHistory' ] condition_clauses = [ """ BinaryPackageBuild.source_package_release = SourcePackageRelease.id AND SourcePackagePublishingHistory.sourcepackagename = %s AND SourcePackagePublishingHistory.distroseries = %s AND SourcePackagePublishingHistory.archive IN %s AND SourcePackagePublishingHistory.sourcepackagerelease = SourcePackageRelease.id AND SourcePackagePublishingHistory.archive = BinaryPackageBuild.archive """ % sqlvalues(self.sourcepackagename, self.distroseries, list(self.distribution.all_distro_archive_ids)) ] # We re-use the optional-parameter handling provided by BuildSet # here, but pass None for the name argument as we've already # matched on exact source package name. BinaryPackageBuildSet().handleOptionalParamsForBuildQueries( condition_clauses, clauseTables, build_state, name=None, pocket=pocket, arch_tag=arch_tag) # exclude gina-generated and security (dak-made) builds # buildstate == FULLYBUILT && datebuilt == null condition_clauses.append( "NOT (BinaryPackageBuild.status=%s AND " " BinaryPackageBuild.date_finished is NULL)" % sqlvalues(BuildStatus.FULLYBUILT)) # Ordering according status # * NEEDSBUILD, BUILDING & UPLOADING by -lastscore # * SUPERSEDED by -datecreated # * FULLYBUILT & FAILURES by -datebuilt # It should present the builds in a more natural order. 
if build_state in [ BuildStatus.NEEDSBUILD, BuildStatus.BUILDING, BuildStatus.UPLOADING, ]: orderBy = ["-BuildQueue.lastscore"] clauseTables.append('BuildPackageJob') condition_clauses.append( 'BuildPackageJob.build = BinaryPackageBuild.id') clauseTables.append('BuildQueue') condition_clauses.append('BuildQueue.job = BuildPackageJob.job') elif build_state == BuildStatus.SUPERSEDED or build_state is None: orderBy = [Desc("BinaryPackageBuild.date_created")] else: orderBy = [Desc("BinaryPackageBuild.date_finished")] # Fallback to ordering by -id as a tie-breaker. orderBy.append(Desc("id")) # End of duplication (see XXX cprov 2006-09-25 above). return IStore(BinaryPackageBuild).using(clauseTables).find( BinaryPackageBuild, *condition_clauses).order_by(*orderBy) @property def latest_published_component(self): """See `ISourcePackage`.""" latest_publishing = self._getFirstPublishingHistory( include_status=[PackagePublishingStatus.PUBLISHED]) if latest_publishing is not None: return latest_publishing.component else: return None @property def latest_published_component_name(self): """See `ISourcePackage`.""" if self.latest_published_component is not None: return self.latest_published_component.name else: return None def get_default_archive(self, component=None): """See `ISourcePackage`.""" if component is None: component = self.latest_published_component distribution = self.distribution if component is not None and component.name == 'partner': archive = getUtility(IArchiveSet).getByDistroPurpose( distribution, ArchivePurpose.PARTNER) if archive is None: raise NoPartnerArchive(distribution) else: return archive else: return distribution.main_archive def getTemplatesCollection(self): """See `IHasTranslationTemplates`.""" collection = TranslationTemplatesCollection() collection = collection.restrictDistroSeries(self.distroseries) return collection.restrictSourcePackageName(self.sourcepackagename) def getSharingPartner(self): """See `IHasTranslationTemplates`.""" return 
self.productseries def getBranch(self, pocket): """See `ISourcePackage`.""" store = Store.of(self.sourcepackagename) return store.find( Branch, SeriesSourcePackageBranch.distroseries == self.distroseries.id, (SeriesSourcePackageBranch.sourcepackagename == self.sourcepackagename.id), SeriesSourcePackageBranch.pocket == pocket, SeriesSourcePackageBranch.branch == Branch.id).one() def setBranch(self, pocket, branch, registrant): """See `ISourcePackage`.""" SeriesSourcePackageBranchSet.delete(self, pocket) if branch is not None: SeriesSourcePackageBranchSet.new(self.distroseries, pocket, self.sourcepackagename, branch, registrant) # Avoid circular imports. from lp.registry.model.distributionsourcepackage import ( DistributionSourcePackage, ) DistributionSourcePackage.ensure(sourcepackage=self) else: # Delete the official DSP if there is no publishing history. self.distribution_sourcepackage.delete() @property def linked_branches(self): """See `ISourcePackage`.""" store = Store.of(self.sourcepackagename) return store.find( (SeriesSourcePackageBranch.pocket, Branch), SeriesSourcePackageBranch.distroseries == self.distroseries.id, (SeriesSourcePackageBranch.sourcepackagename == self.sourcepackagename.id), SeriesSourcePackageBranch.branch == Branch.id).order_by( SeriesSourcePackageBranch.pocket) def getSuiteSourcePackage(self, pocket): """See `ISourcePackage`.""" return SuiteSourcePackage(self.distroseries, pocket, self.sourcepackagename) def getPocketPath(self, pocket): """See `ISourcePackage`.""" return '%s/%s/%s' % (self.distribution.name, self.distroseries.getSuite(pocket), self.name) def getLatestTranslationsUploads(self): """See `ISourcePackage`.""" our_format = PackageUploadCustomFormat.ROSETTA_TRANSLATIONS packagename = self.sourcepackagename.name distro = self.distroseries.distribution histories = distro.main_archive.getPublishedSources( name=packagename, distroseries=self.distroseries, status=PackagePublishingStatus.PUBLISHED, exact_match=True) histories = 
list(histories) builds = [] for history in histories: builds += list(history.getBuilds()) uploads = [ build.package_upload for build in builds if build.package_upload ] custom_files = [] for upload in uploads: custom_files += [ custom for custom in upload.customfiles if custom.customformat == our_format ] custom_files.sort(key=attrgetter('id')) return [custom.libraryfilealias for custom in custom_files] def linkedBranches(self): """See `ISourcePackage`.""" return dict((p.name, b) for (p, b) in self.linked_branches) def getBugTaskWeightFunction(self): """Provide a weight function to determine optimal bug task. We look for the source package task, followed by the distro source package, then the distroseries task, and lastly the distro task. """ sourcepackagenameID = self.sourcepackagename.id seriesID = self.distroseries.id distributionID = self.distroseries.distributionID def weight_function(bugtask): if bugtask.sourcepackagenameID == sourcepackagenameID: if bugtask.distroseriesID == seriesID: return OrderedBugTask(1, bugtask.id, bugtask) elif bugtask.distributionID == distributionID: return OrderedBugTask(2, bugtask.id, bugtask) elif bugtask.distroseriesID == seriesID: return OrderedBugTask(3, bugtask.id, bugtask) elif bugtask.distributionID == distributionID: return OrderedBugTask(4, bugtask.id, bugtask) # Catch the default case, and where there is a task for the same # sourcepackage on a different distro. return OrderedBugTask(5, bugtask.id, bugtask) return weight_function
class MembershipNotificationJob(PersonTransferJobDerived):
    """A Job that sends notifications about team membership changes."""

    implements(IMembershipNotificationJob)
    classProvides(IMembershipNotificationJobSource)

    class_job_type = PersonTransferJobType.MEMBERSHIP_NOTIFICATION

    # Config section for this job source (class attribute shadows the
    # global `config` module only at class-definition scope; methods
    # below still see the module).
    config = config.IMembershipNotificationJobSource

    @classmethod
    def create(cls, member, team, reviewer, old_status, new_status,
               last_change_comment=None):
        """Create a notification job for a membership status change.

        :raises TypeError: when team/reviewer/statuses are not of the
            expected interface/enumeration types.
        """
        if not ITeam.providedBy(team):
            raise TypeError('team must be ITeam: %s' % repr(team))
        if not IPerson.providedBy(reviewer):
            raise TypeError('reviewer must be IPerson: %s' % repr(reviewer))
        if old_status not in TeamMembershipStatus:
            raise TypeError("old_status must be TeamMembershipStatus: %s"
                            % repr(old_status))
        if new_status not in TeamMembershipStatus:
            raise TypeError("new_status must be TeamMembershipStatus: %s"
                            % repr(new_status))
        metadata = {
            'reviewer': reviewer.id,
            'old_status': old_status.name,
            'new_status': new_status.name,
            'last_change_comment': last_change_comment,
            }
        return super(MembershipNotificationJob, cls).create(
            minor_person=member, major_person=team, metadata=metadata)

    @property
    def member(self):
        # The affected member is stored as the job's minor person.
        return self.minor_person

    @property
    def team(self):
        # The team is stored as the job's major person.
        return self.major_person

    @property
    def reviewer(self):
        return getUtility(IPersonSet).get(self.metadata['reviewer'])

    @property
    def old_status(self):
        return TeamMembershipStatus.items[self.metadata['old_status']]

    @property
    def new_status(self):
        return TeamMembershipStatus.items[self.metadata['new_status']]

    @property
    def last_change_comment(self):
        return self.metadata['last_change_comment']

    def run(self):
        """See `IMembershipNotificationJob`."""
        from lp.services.scripts import log
        from_addr = format_address(
            self.team.displayname, config.canonical.noreply_from_address)
        admin_emails = self.team.getTeamAdminsEmailAddresses()
        # The member might be a team, so we can't rely on its
        # preferredemail.
        # NOTE(review): member_email, reviewer_name and member_template are
        # assigned as instance attributes although only used locally here;
        # kept as-is in case external code (e.g. tests) inspects them.
        self.member_email = get_contact_email_addresses(self.member)
        # Make sure we don't send the same notification twice to anybody.
        for email in self.member_email:
            if email in admin_emails:
                admin_emails.remove(email)

        if self.reviewer != self.member:
            self.reviewer_name = self.reviewer.unique_displayname
        else:
            self.reviewer_name = 'the user'

        if self.last_change_comment:
            comment = ("\n%s said:\n %s\n" % (
                self.reviewer.displayname, self.last_change_comment.strip()))
        else:
            comment = ""

        replacements = {
            'member_name': self.member.unique_displayname,
            'recipient_name': self.member.displayname,
            'team_name': self.team.unique_displayname,
            'team_url': canonical_url(self.team),
            'old_status': self.old_status.title,
            'new_status': self.new_status.title,
            'reviewer_name': self.reviewer_name,
            'comment': comment}

        # Default template/subject; the status-specific branches below
        # override them for the interesting transitions.
        template_name = 'membership-statuschange'
        subject = (
            'Membership change: %(member)s in %(team)s' % {
                'member': self.member.name,
                'team': self.team.name,
                })
        if self.new_status == TeamMembershipStatus.EXPIRED:
            template_name = 'membership-expired'
            subject = '%s expired from team' % self.member.name
        elif (self.new_status == TeamMembershipStatus.APPROVED and
                self.old_status != TeamMembershipStatus.ADMIN):
            if self.old_status == TeamMembershipStatus.INVITED:
                subject = ('Invitation to %s accepted by %s' % (
                    self.member.name, self.reviewer.name))
                template_name = 'membership-invitation-accepted'
            elif self.old_status == TeamMembershipStatus.PROPOSED:
                subject = '%s approved by %s' % (
                    self.member.name, self.reviewer.name)
            else:
                subject = '%s added by %s' % (
                    self.member.name, self.reviewer.name)
        elif self.new_status == TeamMembershipStatus.INVITATION_DECLINED:
            subject = ('Invitation to %s declined by %s' % (
                self.member.name, self.reviewer.name))
            template_name = 'membership-invitation-declined'
        elif self.new_status == TeamMembershipStatus.DEACTIVATED:
            subject = '%s deactivated by %s' % (
                self.member.name, self.reviewer.name)
        elif self.new_status == TeamMembershipStatus.ADMIN:
            subject = '%s made admin by %s' % (
                self.member.name, self.reviewer.name)
        elif self.new_status == TeamMembershipStatus.DECLINED:
            subject = '%s declined by %s' % (
                self.member.name, self.reviewer.name)
        else:
            # Use the default template and subject.
            pass

        # Must have someone to mail, and be a non-open team (because open
        # teams are unrestricted, notifications on join/leave do not help
        # the admins).
        if (len(admin_emails) != 0 and
                self.team.membership_policy != TeamMembershipPolicy.OPEN):
            admin_template = get_email_template(
                "%s-bulk.txt" % template_name, app='registry')
            for address in admin_emails:
                recipient = getUtility(IPersonSet).getByEmail(address)
                replacements['recipient_name'] = recipient.displayname
                msg = MailWrapper().format(
                    admin_template % replacements, force_wrap=True)
                simple_sendmail(from_addr, address, subject, msg)

        # The member can be a team without any members, and in this case
        # we won't have a single email address to send this notification
        # to.
        if self.member_email and self.reviewer != self.member:
            if self.member.is_team:
                template = '%s-bulk.txt' % template_name
            else:
                template = '%s-personal.txt' % template_name
            self.member_template = get_email_template(
                template, app='registry')
            for address in self.member_email:
                recipient = getUtility(IPersonSet).getByEmail(address)
                replacements['recipient_name'] = recipient.displayname
                msg = MailWrapper().format(
                    self.member_template % replacements, force_wrap=True)
                simple_sendmail(from_addr, address, subject, msg)
        log.debug('MembershipNotificationJob sent email')

    def __repr__(self):
        return (
            "<{self.__class__.__name__} about "
            "~{self.minor_person.name} in ~{self.major_person.name}; "
            "status={self.job.status}>").format(self=self)
class PersonMergeJob(PersonTransferJobDerived):
    """A Job that merges one person or team into another."""

    implements(IPersonMergeJob)
    classProvides(IPersonMergeJobSource)

    class_job_type = PersonTransferJobType.MERGE

    # Config section for this job source.
    config = config.IPersonMergeJobSource

    @classmethod
    def create(cls, from_person, to_person, requester, reviewer=None,
               delete=False):
        """See `IPersonMergeJobSource`."""
        # Refuse to queue a second merge for a person already being
        # merged; returns None rather than raising.
        if (from_person.isMergePending() or
                (not delete and to_person.isMergePending())):
            return None
        if from_person.is_team:
            metadata = {'reviewer': reviewer.id}
        else:
            metadata = {}
        metadata['delete'] = bool(delete)
        if metadata['delete']:
            # Ideally not needed, but the DB column is not-null at the
            # moment and this minor bit of friction isn't worth changing
            # that over.
            to_person = getUtility(ILaunchpadCelebrities).registry_experts
        return super(PersonMergeJob, cls).create(
            minor_person=from_person, major_person=to_person,
            metadata=metadata, requester=requester)

    @classmethod
    def find(cls, from_person=None, to_person=None, any_person=False):
        """See `IPersonMergeJobSource`."""
        conditions = [
            PersonTransferJob.job_type == cls.class_job_type,
            PersonTransferJob.job_id == Job.id,
            Job._status.is_in(Job.PENDING_STATUSES)]
        arg_conditions = []
        if from_person is not None:
            arg_conditions.append(
                PersonTransferJob.minor_person == from_person)
        if to_person is not None:
            arg_conditions.append(
                PersonTransferJob.major_person == to_person)
        # any_person=True turns the two person filters into an OR; it only
        # applies when both persons were supplied.
        if any_person and from_person is not None and to_person is not None:
            arg_conditions = [Or(*arg_conditions)]
        conditions.extend(arg_conditions)
        return DecoratedResultSet(
            IStore(PersonTransferJob).find(
                PersonTransferJob, *conditions), cls)

    @property
    def from_person(self):
        """See `IPersonMergeJob`."""
        return self.minor_person

    @property
    def to_person(self):
        """See `IPersonMergeJob`."""
        return self.major_person

    @property
    def reviewer(self):
        # Only set for team merges; see create().
        if 'reviewer' in self.metadata:
            return getUtility(IPersonSet).get(self.metadata['reviewer'])
        else:
            return None

    @property
    def log_name(self):
        return self.__class__.__name__

    def getErrorRecipients(self):
        """See `IPersonMergeJob`."""
        return [format_address_for_person(self.requester)]

    def run(self):
        """Perform the merge."""
        # Capture the names up front: the merge may rename from_person.
        from_person_name = self.from_person.name
        to_person_name = self.to_person.name

        from lp.services.scripts import log
        if self.metadata.get('delete', False):
            log.debug(
                "%s is about to delete ~%s", self.log_name,
                from_person_name)
            merge_people(
                from_person=self.from_person,
                to_person=getUtility(ILaunchpadCelebrities).registry_experts,
                reviewer=self.reviewer,
                delete=True)
            log.debug(
                "%s has deleted ~%s", self.log_name,
                from_person_name)
        else:
            log.debug(
                "%s is about to merge ~%s into ~%s", self.log_name,
                from_person_name, to_person_name)
            merge_people(
                from_person=self.from_person,
                to_person=self.to_person,
                reviewer=self.reviewer)
            log.debug(
                "%s has merged ~%s into ~%s", self.log_name,
                from_person_name, to_person_name)

    def __repr__(self):
        return (
            "<{self.__class__.__name__} to merge "
            "~{self.from_person.name} into ~{self.to_person.name}; "
            "status={self.job.status}>").format(self=self)

    def getOperationDescription(self):
        return ('merging ~%s into ~%s' % (
            self.from_person.name, self.to_person.name))
class PlainPackageCopyJob(PackageCopyJobDerived):
    """Job that copies a package from one archive to another."""
    # This job type serves in different places: it supports copying
    # packages between archives, but also the syncing of packages from
    # parents into a derived distroseries. We may split these into
    # separate types at some point, but for now we (allenap, bigjools,
    # jtv) chose to keep it as one.

    implements(IPlainPackageCopyJob)

    class_job_type = PackageCopyJobType.PLAIN
    classProvides(IPlainPackageCopyJobSource)
    config = config.IPlainPackageCopyJobSource
    # Errors the user can fix; reported, not retried.
    user_error_types = (CannotCopy, )

    # Raised when closing bugs ends up hitting another process and
    # deadlocking.
    retry_error_types = (TransactionRollbackError, )
    max_retries = 5

    @classmethod
    def _makeMetadata(cls, target_pocket, package_version,
                      include_binaries, sponsored=None, unembargo=False,
                      auto_approve=False, source_distroseries=None,
                      source_pocket=None, phased_update_percentage=None):
        """Produce a metadata dict for this job."""
        # Enum values/names are stored, not the objects, since the
        # metadata is serialized to JSON.
        return {
            'target_pocket': target_pocket.value,
            'package_version': package_version,
            'include_binaries': bool(include_binaries),
            'sponsored': sponsored.name if sponsored else None,
            'unembargo': unembargo,
            'auto_approve': auto_approve,
            'source_distroseries':
                source_distroseries.name if source_distroseries else None,
            'source_pocket': source_pocket.value if source_pocket else None,
            'phased_update_percentage': phased_update_percentage,
            }

    @classmethod
    def create(cls, package_name, source_archive, target_archive,
               target_distroseries, target_pocket, include_binaries=False,
               package_version=None, copy_policy=PackageCopyPolicy.INSECURE,
               requester=None, sponsored=None, unembargo=False,
               auto_approve=False, source_distroseries=None,
               source_pocket=None, phased_update_percentage=None):
        """See `IPlainPackageCopyJobSource`."""
        assert package_version is not None, "No package version specified."
        assert requester is not None, "No requester specified."
        metadata = cls._makeMetadata(
            target_pocket, package_version, include_binaries, sponsored,
            unembargo, auto_approve, source_distroseries, source_pocket,
            phased_update_percentage)
        job = PackageCopyJob(
            job_type=cls.class_job_type,
            source_archive=source_archive,
            target_archive=target_archive,
            target_distroseries=target_distroseries,
            package_name=package_name,
            copy_policy=copy_policy,
            metadata=metadata,
            requester=requester)
        IMasterStore(PackageCopyJob).add(job)
        derived = cls(job)
        derived.celeryRunOnCommit()
        return derived

    @classmethod
    def _composeJobInsertionTuple(cls, copy_policy, include_binaries,
                                  job_id, copy_task, sponsored, unembargo,
                                  auto_approve):
        """Create an SQL fragment for inserting a job into the database.

        :return: A string representing an SQL tuple containing initializers
            for a `PackageCopyJob` in the database (minus `id`, which is
            assigned automatically). Contents are escaped for use in SQL.

        NOTE(review): despite the docstring above, this returns a plain
        Python tuple; the column ordering must match the column list in
        `createMultiple` — verify before changing either.
        """
        (
            package_name,
            package_version,
            source_archive,
            target_archive,
            target_distroseries,
            target_pocket,
        ) = copy_task
        metadata = cls._makeMetadata(
            target_pocket, package_version, include_binaries, sponsored,
            unembargo, auto_approve)
        data = (
            cls.class_job_type, target_distroseries, copy_policy,
            source_archive, target_archive, package_name, job_id,
            metadata)
        return data

    @classmethod
    def createMultiple(cls, copy_tasks, requester,
                       copy_policy=PackageCopyPolicy.INSECURE,
                       include_binaries=False, sponsored=None,
                       unembargo=False, auto_approve=False):
        """See `IPlainPackageCopyJobSource`."""
        store = IMasterStore(Job)
        job_ids = Job.createMultiple(store, len(copy_tasks), requester)
        job_contents = [
            cls._composeJobInsertionTuple(
                copy_policy, include_binaries, job_id, task, sponsored,
                unembargo, auto_approve)
            for job_id, task in zip(job_ids, copy_tasks)]
        return bulk.create(
            (PackageCopyJob.job_type, PackageCopyJob.target_distroseries,
             PackageCopyJob.copy_policy, PackageCopyJob.source_archive,
             PackageCopyJob.target_archive, PackageCopyJob.package_name,
             PackageCopyJob.job_id, PackageCopyJob.metadata),
            job_contents, get_primary_keys=True)

    @classmethod
    def getActiveJobs(cls, target_archive):
        """See `IPlainPackageCopyJobSource`."""
        jobs = IStore(PackageCopyJob).find(
            PackageCopyJob,
            PackageCopyJob.job_type == cls.class_job_type,
            PackageCopyJob.target_archive == target_archive,
            Job.id == PackageCopyJob.job_id,
            Job._status == JobStatus.WAITING)
        jobs = jobs.order_by(PackageCopyJob.id)
        return DecoratedResultSet(jobs, cls)

    @classmethod
    def getPendingJobsForTargetSeries(cls, target_series):
        """Get upcoming jobs for `target_series`, ordered by age."""
        raw_jobs = IStore(PackageCopyJob).find(
            PackageCopyJob,
            Job.id == PackageCopyJob.job_id,
            PackageCopyJob.job_type == cls.class_job_type,
            PackageCopyJob.target_distroseries == target_series,
            Job._status.is_in(Job.PENDING_STATUSES))
        raw_jobs = raw_jobs.order_by(PackageCopyJob.id)
        return DecoratedResultSet(raw_jobs, cls)

    @classmethod
    def getPendingJobsPerPackage(cls, target_series):
        """See `IPlainPackageCopyJobSource`."""
        result = {}
        # Go through jobs in-order, picking the first matching job for
        # any (package, version) tuple. Because of how
        # getPendingJobsForTargetSeries orders its results, the first
        # will be the oldest and thus presumably the first to finish.
        for job in cls.getPendingJobsForTargetSeries(target_series):
            result.setdefault(job.package_name, job)
        return result

    @classmethod
    def getIncompleteJobsForArchive(cls, archive):
        """See `IPlainPackageCopyJobSource`."""
        jobs = IStore(PackageCopyJob).find(
            PackageCopyJob,
            PackageCopyJob.target_archive == archive,
            PackageCopyJob.job_type == cls.class_job_type,
            Job.id == PackageCopyJob.job_id,
            Job._status.is_in(
                [JobStatus.WAITING, JobStatus.RUNNING, JobStatus.FAILED]))
        return DecoratedResultSet(jobs, cls)

    @property
    def target_pocket(self):
        """The `PackagePublishingPocket` to copy into."""
        return PackagePublishingPocket.items[self.metadata['target_pocket']]

    @property
    def include_binaries(self):
        """Whether binaries are copied along with the source."""
        return self.metadata['include_binaries']

    @property
    def error_message(self):
        """See `IPackageCopyJob`."""
        return self.metadata.get("error_message")

    @property
    def sponsored(self):
        """The sponsored `IPerson`, or None."""
        name = self.metadata['sponsored']
        if name is None:
            return None
        return getUtility(IPersonSet).getByName(name)

    @property
    def unembargo(self):
        return self.metadata.get('unembargo', False)

    @property
    def auto_approve(self):
        return self.metadata.get('auto_approve', False)

    @property
    def source_distroseries(self):
        """The source `IDistroSeries`, or None when not recorded."""
        name = self.metadata.get('source_distroseries')
        if name is None:
            return None
        return self.source_archive.distribution[name]

    @property
    def source_pocket(self):
        """The source `PackagePublishingPocket`, or None."""
        name = self.metadata.get('source_pocket')
        if name is None:
            return None
        return PackagePublishingPocket.items[name]

    @property
    def phased_update_percentage(self):
        return self.metadata.get('phased_update_percentage')

    def _createPackageUpload(self, unapproved=False):
        # Queue a PackageUpload for this copy; used when policy checks
        # require manual (or unapproved) processing.
        pu = self.target_distroseries.createQueueEntry(
            pocket=self.target_pocket, archive=self.target_archive,
            package_copy_job=self.context)
        if unapproved:
            pu.setUnapproved()

    def addSourceOverride(self, override):
        """Add an `ISourceOverride` to the metadata."""
        metadata_changes = {}
        if override.component is not None:
            metadata_changes['component_override'] = override.component.name
        if override.section is not None:
            metadata_changes['section_override'] = override.section.name
        self.context.extendMetadata(metadata_changes)

    def setErrorMessage(self, message):
        """See `IPackageCopyJob`."""
        self.metadata["error_message"] = message

    def getSourceOverride(self):
        """Fetch an `ISourceOverride` from the metadata."""
        name = self.package_name
        component_name = self.component_name
        section_name = self.section_name
        source_package_name = getUtility(ISourcePackageNameSet)[name]
        # Unknown component/section names degrade to None rather than
        # failing the job.
        try:
            component = getUtility(IComponentSet)[component_name]
        except NotFoundError:
            component = None
        try:
            section = getUtility(ISectionSet)[section_name]
        except NotFoundError:
            section = None
        return SourceOverride(source_package_name, component, section)

    def findSourcePublication(self):
        """Find the appropriate origin `ISourcePackagePublishingHistory`."""
        name = self.package_name
        version = self.package_version
        source_package = self.source_archive.getPublishedSources(
            name=name, version=version, exact_match=True,
            distroseries=self.source_distroseries,
            pocket=self.source_pocket).first()
        if source_package is None:
            raise CannotCopy("Package %r %r not found." % (name, version))
        return source_package

    def _checkPolicies(self, source_name, source_component=None,
                       auto_approve=False):
        # This helper will only return if it's safe to carry on with the
        # copy, otherwise it raises SuspendJobException to tell the job
        # runner to suspend the job.
        override_policy = FromExistingOverridePolicy()
        ancestry = override_policy.calculateSourceOverrides(
            self.target_archive, self.target_distroseries,
            self.target_pocket, [source_name])

        copy_policy = self.getPolicyImplementation()

        if len(ancestry) == 0:
            # We need to get the default overrides and put them in the
            # metadata.
            defaults = UnknownOverridePolicy().calculateSourceOverrides(
                self.target_archive, self.target_distroseries,
                self.target_pocket, [source_name], source_component)
            self.addSourceOverride(defaults[0])
            # auto_approve is only honoured if the requester can actually
            # administer the queue for this component/pocket/series.
            if auto_approve:
                auto_approve = self.target_archive.canAdministerQueue(
                    self.requester, self.getSourceOverride().component,
                    self.target_pocket, self.target_distroseries)

            approve_new = auto_approve or copy_policy.autoApproveNew(
                self.target_archive, self.target_distroseries,
                self.target_pocket)

            if not approve_new:
                # There's no existing package with the same name and the
                # policy says unapproved, so we poke it in the NEW queue.
                self._createPackageUpload()
                raise SuspendJobException
        else:
            # Put the existing override in the metadata.
            self.addSourceOverride(ancestry[0])
            if auto_approve:
                auto_approve = self.target_archive.canAdministerQueue(
                    self.requester, self.getSourceOverride().component,
                    self.target_pocket, self.target_distroseries)

            # The package is not new (it has ancestry) so check the copy
            # policy for existing packages.
            approve_existing = auto_approve or copy_policy.autoApprove(
                self.target_archive, self.target_distroseries,
                self.target_pocket)
            if not approve_existing:
                self._createPackageUpload(unapproved=True)
                raise SuspendJobException

    def _rejectPackageUpload(self):
        # Helper to find and reject any associated PackageUpload.
        pu = getUtility(IPackageUploadSet).getByPackageCopyJobIDs(
            [self.context.id]).any()
        if pu is not None:
            pu.setRejected()

    def notifyOops(self, oops):
        """See `IRunnableJob`."""
        if not self.error_message:
            transaction.abort()
            self.reportFailure(
                "Launchpad encountered an internal error while copying this"
                " package. It was logged with id %s. Sorry for the"
                " inconvenience." % oops["id"])
            transaction.commit()
        super(PlainPackageCopyJob, self).notifyOops(oops)

    def run(self):
        """See `IRunnableJob`."""
        try:
            self.attemptCopy()
        except CannotCopy as e:
            # Remember the target archive purpose, as otherwise aborting
            # the transaction will forget it.
            target_archive_purpose = self.target_archive.purpose
            self.logger.info("Job:\n%s\nraised CannotCopy:\n%s" % (self, e))
            self.abort()  # Abort the txn.
            self.reportFailure(unicode(e))

            # If there is an associated PackageUpload we need to reject it,
            # else it will sit in ACCEPTED forever.
            self._rejectPackageUpload()

            if target_archive_purpose == ArchivePurpose.PPA:
                # If copying to a PPA, commit the failure and re-raise the
                # exception. We turn a copy failure into a job failure in
                # order that it can show up in the UI.
                transaction.commit()
                raise
            else:
                # Otherwise, rely on the job runner to do the final commit,
                # and do not consider a failure of a copy to be a failure
                # of the job. We will normally have a
                # DistroSeriesDifference in this case.
                pass
        except SuspendJobException:
            # Not an error: the job runner suspends the job for later.
            raise
        except:
            # Abort work done so far, but make sure that we commit the
            # rejection to the PackageUpload.
            transaction.abort()
            self._rejectPackageUpload()
            transaction.commit()
            raise

    def attemptCopy(self):
        """Attempt to perform the copy.

        :raise CannotCopy: If the copy fails for a reason that the user
            can deal with.
        """
        reason = self.target_archive.checkUploadToPocket(
            self.target_distroseries, self.target_pocket,
            person=self.requester)
        if reason:
            # Wrap any forbidden-pocket error in CannotCopy.
            raise CannotCopy(unicode(reason))

        source_package = self.findSourcePublication()

        # If there's a PackageUpload associated with this job then this
        # job has just been released by an archive admin from the queue.
        # We don't need to check any policies, but the admin may have
        # set overrides which we will get from the job's metadata.
        pu = getUtility(IPackageUploadSet).getByPackageCopyJobIDs(
            [self.context.id]).any()
        if pu is None:
            source_name = getUtility(ISourcePackageNameSet)[
                self.package_name]
            self._checkPolicies(
                source_name,
                source_package.sourcepackagerelease.component,
                self.auto_approve)

        # The package is free to go right in, so just copy it now.
        ancestry = self.target_archive.getPublishedSources(
            name=self.package_name, distroseries=self.target_distroseries,
            pocket=self.target_pocket, exact_match=True)
        override = self.getSourceOverride()
        copy_policy = self.getPolicyImplementation()
        send_email = copy_policy.send_email(self.target_archive)
        copied_publications = do_copy(
            sources=[source_package], archive=self.target_archive,
            series=self.target_distroseries, pocket=self.target_pocket,
            include_binaries=self.include_binaries, check_permissions=True,
            person=self.requester, overrides=[override],
            send_email=send_email, announce_from_person=self.requester,
            sponsored=self.sponsored, packageupload=pu,
            unembargo=self.unembargo,
            phased_update_percentage=self.phased_update_percentage)

        # Add a PackageDiff for this new upload if it has ancestry.
        if copied_publications and not ancestry.is_empty():
            from_spr = None
            for publication in copied_publications:
                if ISourcePackagePublishingHistory.providedBy(publication):
                    from_spr = publication.sourcepackagerelease
                    break
            if from_spr:
                # Only the first differing ancestor gets a diff request.
                for ancestor in ancestry:
                    to_spr = ancestor.sourcepackagerelease
                    if from_spr != to_spr:
                        try:
                            to_spr.requestDiffTo(self.requester, from_spr)
                        except PackageDiffAlreadyRequested:
                            pass
                        break

        if pu is not None:
            # A PackageUpload will only exist if the copy job had to be
            # held in the queue because of policy/ancestry checks. If one
            # does exist we need to make sure it gets moved to DONE.
            pu.setDone()

        if copied_publications:
            self.logger.debug(
                "Packages copied to %s:" % self.target_archive.displayname)
            for copy in copied_publications:
                self.logger.debug(copy.displayname)

    def abort(self):
        """Abort work."""
        transaction.abort()

    def findMatchingDSDs(self):
        """Find any `DistroSeriesDifference`s that this job might
        resolve."""
        dsd_source = getUtility(IDistroSeriesDifferenceSource)
        target_series = self.target_distroseries
        candidates = dsd_source.getForDistroSeries(
            distro_series=target_series, name_filter=self.package_name,
            status=DistroSeriesDifferenceStatus.NEEDS_ATTENTION)

        # The job doesn't know what distroseries a given package is
        # coming from, and the version number in the DSD may have
        # changed. We can however filter out DSDs that are from
        # different distributions, based on the job's target archive.
        source_distro_id = self.source_archive.distributionID
        return [
            dsd for dsd in candidates
            if dsd.parent_series.distributionID == source_distro_id]

    def reportFailure(self, message):
        """Attempt to report failure to the user."""
        if self.target_archive.purpose != ArchivePurpose.PPA:
            dsds = self.findMatchingDSDs()
            comment_source = getUtility(
                IDistroSeriesDifferenceCommentSource)

            # Register the error comment in the name of the Janitor. Not a
            # great choice, but we have no user identity to represent
            # Launchpad; it's far too costly to create one; and
            # impersonating the requester can be misleading and would also
            # involve extra bookkeeping.
            reporting_persona = getUtility(ILaunchpadCelebrities).janitor
            for dsd in dsds:
                comment_source.new(dsd, reporting_persona, message)
        else:
            self.setErrorMessage(message)

    def __repr__(self):
        """Returns an informative representation of the job."""
        parts = ["%s to copy" % self.__class__.__name__]
        if self.package_name is None:
            parts.append(" no package (!)")
        else:
            parts.append(" package %s" % self.package_name)
        parts.append(
            " from %s/%s" % (
                self.source_archive.distribution.name,
                self.source_archive.name))
        if self.source_pocket is not None:
            parts.append(", %s pocket," % self.source_pocket.name)
        if self.source_distroseries is not None:
            parts.append(" in %s" % self.source_distroseries)
        parts.append(
            " to %s/%s" % (
                self.target_archive.distribution.name,
                self.target_archive.name))
        parts.append(", %s pocket," % self.target_pocket.name)
        if self.target_distroseries is not None:
            parts.append(" in %s" % self.target_distroseries)
        if self.include_binaries:
            parts.append(", including binaries")
        return "<%s>" % "".join(parts)

    def getPolicyImplementation(self):
        """Return the `ICopyPolicy` applicable to this job."""
        return ICopyPolicy(self.copy_policy)
class PackageCopyJob(StormBase):
    """Base class for package copying jobs.

    Maps the 'PackageCopyJob' table via Storm; concrete job classes
    register themselves with `registerConcreteClass` and are resolved
    from a database row via `wrap`.
    """

    implements(IPackageCopyJob)
    classProvides(IPackageCopyJobSource)

    __storm_table__ = 'PackageCopyJob'

    id = Int(primary=True)

    job_id = Int(name='job')
    job = Reference(job_id, Job.id)

    source_archive_id = Int(name='source_archive')
    source_archive = Reference(source_archive_id, Archive.id)

    target_archive_id = Int(name='target_archive')
    target_archive = Reference(target_archive_id, Archive.id)

    target_distroseries_id = Int(name='target_distroseries')
    target_distroseries = Reference(target_distroseries_id, DistroSeries.id)

    package_name = Unicode('package_name')
    copy_policy = EnumCol(enum=PackageCopyPolicy)

    job_type = EnumCol(enum=PackageCopyJobType, notNull=True)

    # Free-form job data stored as JSON in the 'json_data' column.
    metadata = JSON('json_data')

    # Derived concrete classes.  The entire class gets one dict for
    # this; it's not meant to be on an instance.
    concrete_classes = {}

    @classmethod
    def registerConcreteClass(cls, new_class):
        """Register a concrete `IPackageCopyJob`-implementing class."""
        assert new_class.class_job_type not in cls.concrete_classes, (
            "Class %s is already registered." % new_class)
        cls.concrete_classes[new_class.class_job_type] = new_class

    @classmethod
    def wrap(cls, package_copy_job):
        """See `IPackageCopyJobSource`."""
        if package_copy_job is None:
            return None
        # Read job_type so You Don't Have To.  If any other code reads
        # job_type, that's probably a sign that the interfaces need more
        # work.
        job_type = removeSecurityProxy(package_copy_job).job_type
        concrete_class = cls.concrete_classes[job_type]
        return concrete_class(package_copy_job)

    @classmethod
    def getByID(cls, pcj_id):
        """See `IPackageCopyJobSource`."""
        return cls.wrap(IStore(PackageCopyJob).get(PackageCopyJob, pcj_id))

    def __init__(self, source_archive, target_archive, target_distroseries,
                 job_type, metadata, requester, package_name=None,
                 copy_policy=None):
        super(PackageCopyJob, self).__init__()
        self.job = Job()
        self.job.requester = requester
        self.job_type = job_type
        self.source_archive = source_archive
        self.target_archive = target_archive
        self.target_distroseries = target_distroseries
        # NOTE(review): unicode(None) yields u'None'; presumably callers
        # always supply a real package_name -- confirm.
        self.package_name = unicode(package_name)
        self.copy_policy = copy_policy
        self.metadata = metadata

    @property
    def package_version(self):
        # Raises KeyError if the metadata lacks 'package_version'.
        return self.metadata["package_version"]

    def extendMetadata(self, metadata_dict):
        """Add metadata_dict to the existing metadata."""
        existing = self.metadata
        existing.update(metadata_dict)
        # Reassign so the JSON property registers the mutation.
        self.metadata = existing

    @property
    def component_name(self):
        """See `IPackageCopyJob`."""
        return self.metadata.get("component_override")

    @property
    def section_name(self):
        """See `IPackageCopyJob`."""
        return self.metadata.get("section_override")

    def makeDerived(self):
        # Delegate to the derived-class machinery to pick the concrete type.
        return PackageCopyJobDerived.makeSubclass(self)
class SiteWalkerSection(object):
    """Transmogrifier source section that walks the Plone site tree and
    yields one item per content object, recording child entries and
    pushing each visited path onto a shared validation stack.
    """

    classProvides(ISectionBlueprint)
    implements(ISection)

    def __init__(self, transmogrifier, name, options, previous):
        self.previous = previous
        self.context = transmogrifier.context
        self.pathkey = options.get('path-key', '_path').strip()
        self.typekey = options.get('type-key', '_type').strip()
        self.entrieskey = options.get('entries-key', '_entries').strip()
        # If you only want to export a part of the site, you can
        # specify a start-path; use 'folder' to only export
        # '/plonesite/folder'.
        self.start_path = options.get('start-path', '').strip().split()
        # this is used for communication with 'logger' section
        self.anno = IAnnotations(transmogrifier)
        self.storage = self.anno.setdefault(VALIDATIONKEY, [])
        self.condition = Condition(options.get('condition', 'python:True'),
                                   transmogrifier, name, options)
        self.urltool = getToolByName(self.context, 'portal_url')

    def getContained(self, obj):
        """Return the (id, child) pairs of obj that pass the condition."""
        contained = [(k, v) for k, v in obj.contentItems()
                     if self.condition(None, context=v)]
        return tuple(contained)

    def walk(self, obj):
        """Depth-first traversal yielding (object, child type descriptors)."""
        if IFolderish.providedBy(obj) or IBaseFolder.providedBy(obj):
            contained = self.getContained(obj)
            yield obj, tuple([(k, v.getPortalTypeName())
                              for k, v in contained])
            for k, v in contained:
                for x in self.walk(v):
                    yield x
        else:
            # Leaf object: no child entries.
            yield obj, ()

    def walker(self, start_obj):
        """ build items stack for each of start paths"""
        for obj, contained in self.walk(start_obj):
            item = {
                self.pathkey: self.urltool.getRelativeContentURL(obj),
                self.typekey: obj.getPortalTypeName(),
            }
            if contained:
                item[self.entrieskey] = contained
            # add item path to stack
            self.storage.append(item[self.pathkey])
            yield item

    def __iter__(self):
        # Pass through anything produced upstream first.
        for item in self.previous:
            yield item

        # Determine the object from which to start walking.
        if self.start_path:
            # We only want to export a part of the site.
            for cur_start_path in self.start_path:
                start_obj = self.context.restrictedTraverse(cur_start_path)
                for item in self.walker(start_obj):
                    yield item
        else:
            start_obj = self.context
            for item in self.walker(start_obj):
                yield item

        # cleanup
        if VALIDATIONKEY in self.anno:
            del self.anno[VALIDATIONKEY]
class DexterityUpdateSection(object):
    """Transmogrifier section that updates Dexterity objects' schema
    fields from pipeline items, roughly mirroring what z3c.form widgets
    would do (pipeline value, else current value, else default).
    """

    classProvides(ISectionBlueprint)

    def __init__(self, transmogrifier, name, options, previous):
        self.previous = previous
        self.context = transmogrifier.context
        self.name = name
        self.pathkey = defaultMatcher(options, 'path-key', name, 'path')
        self.fileskey = options.get('files-key', '_files').strip()
        self.disable_constraints = Expression(
            options.get('disable-constraints', 'python: False'),
            transmogrifier,
            name,
            options,
        )

        # create logger
        if options.get('logger'):
            self.logger = logging.getLogger(options['logger'])
            self.loglevel = getattr(logging, options['loglevel'], None)
            if self.loglevel is None:
                # Assume it's an integer:
                self.loglevel = int(options['loglevel'])
            self.logger.setLevel(self.loglevel)
            self.log = lambda s: self.logger.log(self.loglevel, s)
        else:
            self.log = None

        # Shared list of item paths that failed, for downstream sections.
        self.errored = IAnnotations(transmogrifier).setdefault(ERROREDKEY, [])

    def __iter__(self):
        for item in self.previous:
            pathkey = self.pathkey(*item.keys())[0]

            # not enough info
            if not pathkey:
                yield item
                continue

            path = item[pathkey]
            # Skip the Plone site object itself
            if not path:
                yield item
                continue

            obj = self.context.unrestrictedTraverse(
                path.encode().lstrip('/'), None)

            # path doesn't exist
            if obj is None:
                yield item
                continue

            if IDexterityContent.providedBy(obj):
                uuid = item.get('plone.uuid')
                if uuid is not None:
                    try:
                        IMutableUUID(obj).set(str(uuid))
                    except:
                        # NOTE(review): bare except silently records the
                        # failure path; consider narrowing the exception.
                        self.errored.append(item['_original_path'])

                files = item.setdefault(self.fileskey, {})
                # For all fields in the schema, update in roughly the same
                # way z3c.form.widget.py would
                for schemata in iterSchemata(obj):
                    for name, field in getFieldsInOrder(schemata):
                        if name == 'id':
                            continue
                        if field.readonly:
                            continue
                        # setting value from the blueprint cue
                        value = item.get(name, _marker)
                        if value is not _marker:
                            # Value was given in pipeline, so set it
                            deserializer = IDeserializer(field)
                            value = deserializer(
                                value,
                                files,
                                item,
                                self.disable_constraints,
                                logger=self.log,
                            )
                            field.set(field.interface(obj), value)
                            continue

                        # Get the widget's current value, if it has one
                        # then leave it alone
                        value = getMultiAdapter(
                            (obj, field), interfaces.IDataManager).query()
                        if not (value is field.missing_value
                                or value is interfaces.NO_VALUE):
                            continue

                        # Finally, set a default value if nothing is set
                        # so far
                        default = queryMultiAdapter(
                            (
                                obj,
                                obj.REQUEST,  # request
                                None,  # form
                                field,
                                None,  # Widget
                            ),
                            interfaces.IValue,
                            name='default')
                        if default is not None:
                            default = default.get()
                        if default is None:
                            default = getattr(field, 'default', None)
                        if default is None:
                            try:
                                default = field.missing_value
                            except AttributeError:
                                pass
                        field.set(field.interface(obj), default)

                try:
                    notify(ObjectModifiedEvent(obj))
                except:
                    print 'Error probably in linkintegrity transform'

            yield item
class Club(grok.GlobalUtility): interface.classProvides(IClub, ISpikyClub) grok.direct()
class MergeProposalUpdatedEmailJob(BranchMergeProposalJobDerived): """Send email to the subscribers informing them of updated fields. When attributes of the merge proposal are edited, we inform the subscribers. """ implements(IMergeProposalUpdatedEmailJob) classProvides(IMergeProposalUpdatedEmailJobSource) class_job_type = BranchMergeProposalJobType.MERGE_PROPOSAL_UPDATED config = config.IBranchMergeProposalJobSource def run(self): """See `IRunnableJob`.""" mailer = BMPMailer.forModification(self.branch_merge_proposal, self.delta_text, self.editor) mailer.sendAll() @classmethod def create(cls, merge_proposal, delta_text, editor): """See `IReviewRequestedEmailJobSource`.""" metadata = cls.getMetadata(delta_text, editor) return cls._create(merge_proposal, metadata) @staticmethod def getMetadata(delta_text, editor): metadata = {'delta_text': delta_text} if editor is not None: metadata['editor'] = editor.name return metadata @property def editor(self): """The person who updated the merge proposal.""" editor_name = self.metadata.get('editor') if editor_name is None: return None else: return getUtility(IPersonSet).getByName(editor_name) @property def delta_text(self): """The changes that were made to the merge proposal.""" return self.metadata['delta_text'] def getOopsVars(self): """See `IRunnableJob`.""" vars = BranchMergeProposalJobDerived.getOopsVars(self) vars.extend([ ('editor', self.metadata.get('editor', '(not set)')), ('delta_text', self.metadata['delta_text']), ]) return vars def getErrorRecipients(self): """Return a list of email-ids to notify about user errors.""" recipients = [] if self.editor is not None: recipients.append(format_address_for_person(self.editor)) return recipients def getOperationDescription(self): return 'emailing subscribers about merge proposal changes'
class PersonDeactivateJob(PersonTransferJobDerived): """A Job that deactivates a person.""" implements(IPersonDeactivateJob) classProvides(IPersonDeactivateJobSource) class_job_type = PersonTransferJobType.DEACTIVATE config = config.IPersonMergeJobSource @classmethod def create(cls, person): """See `IPersonMergeJobSource`.""" # Minor person has to be not null, so use the janitor. janitor = getUtility(ILaunchpadCelebrities).janitor return super(PersonDeactivateJob, cls).create( minor_person=janitor, major_person=person, metadata={}) @classmethod def find(cls, person=None): """See `IPersonMergeJobSource`.""" conditions = [ PersonTransferJob.job_type == cls.class_job_type, PersonTransferJob.job_id == Job.id, Job._status.is_in(Job.PENDING_STATUSES)] arg_conditions = [] if person: arg_conditions.append(PersonTransferJob.major_person == person) conditions.extend(arg_conditions) return DecoratedResultSet( IStore(PersonTransferJob).find( PersonTransferJob, *conditions), cls) @property def person(self): """See `IPersonMergeJob`.""" return self.major_person @property def log_name(self): return self.__class__.__name__ def getErrorRecipients(self): """See `IPersonMergeJob`.""" return [format_address_for_person(self.person)] def run(self): """Perform the merge.""" from lp.services.scripts import log person_name = self.person.name log.debug('about to deactivate ~%s', person_name) self.person.deactivate(validate=False, pre_deactivate=False) log.debug('done deactivating ~%s', person_name) def __repr__(self): return ( "<{self.__class__.__name__} to deactivate " "~{self.person.name}").format(self=self) def getOperationDescription(self): return 'deactivating ~%s' % self.person.name
class GenerateIncrementalDiffJob(BranchMergeProposalJobDerived): """A job to generate an incremental diff for a branch merge proposal. Provides class methods to create and retrieve such jobs. """ implements(IGenerateIncrementalDiffJob) classProvides(IGenerateIncrementalDiffJobSource) class_job_type = BranchMergeProposalJobType.GENERATE_INCREMENTAL_DIFF task_queue = 'bzrsyncd_job' config = config.IBranchMergeProposalJobSource def acquireLease(self, duration=600): return self.job.acquireLease(duration) def run(self): revision_set = getUtility(IRevisionSet) old_revision = revision_set.getByRevisionId(self.old_revision_id) new_revision = revision_set.getByRevisionId(self.new_revision_id) with server(get_ro_server(), no_replace=True): self.branch_merge_proposal.generateIncrementalDiff( old_revision, new_revision) @classmethod def create(cls, merge_proposal, old_revision_id, new_revision_id): metadata = cls.getMetadata(old_revision_id, new_revision_id) return cls._create(merge_proposal, metadata) @staticmethod def getMetadata(old_revision_id, new_revision_id): return { 'old_revision_id': old_revision_id, 'new_revision_id': new_revision_id, } @property def old_revision_id(self): """The old revision id for the diff.""" return self.metadata['old_revision_id'] @property def new_revision_id(self): """The new revision id for the diff.""" return self.metadata['new_revision_id'] def getOopsVars(self): """See `IRunnableJob`.""" vars = BranchMergeProposalJobDerived.getOopsVars(self) vars.extend([ ('old_revision_id', self.metadata['old_revision_id']), ('new_revision_id', self.metadata['new_revision_id']), ]) return vars def getOperationDescription(self): return ('generating an incremental diff for a merge proposal') def getErrorRecipients(self): """Return a list of email-ids to notify about user errors.""" registrant = self.branch_merge_proposal.registrant return format_address_for_person(registrant)
class Twitter(Item, Plugin):
    """IRC plugin exposing Twitter lookups (statuses, users, search,
    conversations) and ambiently expanding Twitter status URLs seen in
    public messages.
    """

    classProvides(IPlugin, IEridanusPluginProvider, IAmbientEventObserver)

    dummy = integer()

    def displayResults(self, results, source):
        source.reply(u'; '.join(results))

    def snarfStatusIDs(self, text):
        """
        Find Twitter status URLs in a line of text and extract the status
        IDs.
        """
        for url in iriparse.parseURLs(text):
            id = twitter.extractStatusIDFromURL(url)
            if id is not None:
                yield id

    def snarfURLs(self, source, text):
        """
        Find Twitter status URLs in a line of text and display information
        about the status.
        """
        for id in self.snarfStatusIDs(text):
            d = twitter.query('statuses/show', id)
            d.addCallback(self.formatStatus)
            d.addCallback(source.notice)
            # Swallow lookup failures silently for ambient snarfing.
            d.addErrback(lambda f: None)
            yield d

    def formatUserInfo(self, user):
        """
        Format a user info LXML C{ObjectifiedElement}.
        """
        for key, value in twitter.formatUserInfo(user):
            yield '\002%s\002: %s' % (key, value)

    def formatStatus(self, *a, **kw):
        """
        Format a status LXML C{ObjectifiedElement}.
        """
        parts = twitter.formatStatus(*a, **kw)
        if parts.get('reply'):
            parts['reply'] = u' (in reply to #%(reply)s)' % parts
        else:
            parts['reply'] = u''
        return u'\002%(name)s\002%(reply)s: %(text)s (posted %(timestamp)s)' % parts

    def formatResults(self, results):
        """
        Format Twitter search results.
        """
        for entry in results.entry:
            link = entry.find(
                '{http://www.w3.org/2005/Atom}link[@type="text/html"]')
            yield u'\002%s\002 by \002%s\002: <%s>' % (truncate(
                entry.title.text, 30), entry.author.name, link.get('href'))

    @usage(u'status <id>')
    def cmd_status(self, source, id):
        """
        Retrieve a status by ID.
        """
        d = twitter.query('statuses/show', id)
        d.addCallback(self.formatStatus)
        return d.addCallback(source.reply)

    @usage(u'user <nameOrID>')
    def cmd_user(self, source, nameOrID):
        """
        Retrieve user information for a screen name or user ID.
        """
        d = twitter.query('users/show', nameOrID)
        d.addCallback(self.formatUserInfo)
        d.addCallback(self.displayResults, source)
        return d

    @rest
    @usage(u'search <term>')
    def cmd_search(self, source, term):
        """
        Search Twitter.

        For more information about search operators see
        <http://search.twitter.com/operators>.
        """
        d = twitter.search(term)
        d.addCallback(self.formatResults)
        d.addCallback(self.displayResults, source)
        return d

    @usage(u'recent <nameOrID> [limit]')
    def cmd_recent(self, source, nameOrID, limit=3):
        """
        Retrieve recent statuses (defaulting to 3) for a screen name or
        user ID.
        """
        d = twitter.query('statuses/user_timeline', nameOrID, count=limit)

        @d.addCallback
        def displayStatuses(timeline):
            map(source.reply, map(self.formatStatus, timeline.status))

        return d

    @usage(u'conversation <id_or_url> [limit]')
    def cmd_conversation(self, source, idOrURL, limit=None):
        """
        Retrieve a Twitter conversation.

        The ID or URL of the latest tweet in the thread should be used, the
        conversation is followed backwards until the beginning or <limit>.
        """
        def displayStatuses(statuses):
            formatStatus = partial(self.formatStatus, includeReplyTo=False)
            map(source.notice, map(formatStatus, reversed(statuses)))

        # Accept either a raw status ID or a URL containing one.
        ids = list(self.snarfStatusIDs(idOrURL))
        if not ids:
            ids = [idOrURL]
        d = twitter.conversation(ids[0], limit)
        d.addCallback(displayStatuses)
        return d

    cmd_convo = alias(cmd_conversation, 'cmd_convo')

    # IAmbientEventObserver

    def publicMessageReceived(self, source, text):
        return gatherResults(list(self.snarfURLs(source, text)))
class OrderSection(object): classProvides(ISectionBlueprint) implements(ISection) def __init__(self, transmogrifier, name, options, previous): self.every = int(options.get('every', 1000)) self.previous = previous self.context = transmogrifier.context self.pathkey = defaultMatcher(options, 'path-key', name, 'path') self.poskey = defaultMatcher(options, 'pos-key', name, 'gopip') # Position of items without a position value self.default_pos = int(options.get('default-pos', 1000000)) def __iter__(self): # Store positions in a mapping containing an id to position mapping for # each parent path {parent_path: {item_id: item_pos}}. positions_mapping = {} for item in self.previous: keys = item.keys() pathkey = self.pathkey(*keys)[0] poskey = self.poskey(*keys)[0] if not (pathkey and poskey): yield item continue item_id = item[pathkey].split('/')[-1] parent_path = '/'.join(item[pathkey].split('/')[:-1]) if parent_path not in positions_mapping: positions_mapping[parent_path] = {} positions_mapping[parent_path][item_id] = item[poskey] yield item # Set positions on every parent for path, positions in positions_mapping.items(): # Normalize positions ordered_keys = sorted(positions.keys(), key=lambda x: positions[x]) normalized_positions = {} for pos, key in enumerate(ordered_keys): normalized_positions[key] = pos path = safe_unicode(path.lstrip('/')).encode('ascii') parent = traverse(self.context, path, None) if not parent: continue parent_base = aq_base(parent) if hasattr(parent_base, 'getOrdering'): ordering = parent.getOrdering() # Only DefaultOrdering of p.folder is supported if (not hasattr(ordering, '_order') and not hasattr(ordering, '_pos')): continue order = ordering._order() pos = ordering._pos() order.sort(key=lambda x: normalized_positions.get( x, pos.get(x, self.default_pos))) for i, id_ in enumerate(order): pos[id_] = i notifyContainerModified(parent)
class Properties(object):
    """Transmogrifier section that applies exported Zope properties to
    existing objects, with several Plone 4 -> Plone 5 migration fixups
    (exclude_from_nav, event dates, effective/expiration dates, Lineage
    subsites).
    """

    classProvides(ISectionBlueprint)
    implements(ISection)

    def __init__(self, transmogrifier, name, options, previous):
        self.transmogrifier = transmogrifier
        self.name = name
        self.options = options
        self.previous = previous
        self.context = transmogrifier.context

        if 'path-key' in options:
            pathkeys = options['path-key'].splitlines()
        else:
            pathkeys = defaultKeys(options['blueprint'], name, 'path')
        self.pathkey = Matcher(*pathkeys)

        if 'properties-key' in options:
            propertieskeys = options['properties-key'].splitlines()
        else:
            propertieskeys = defaultKeys(options['blueprint'], name,
                                         'properties')
        self.propertieskey = Matcher(*propertieskeys)

    def __iter__(self):
        for item in self.previous:
            pathkey = self.pathkey(*item.keys())[0]
            propertieskey = self.propertieskey(*item.keys())[0]

            if not pathkey or not propertieskey or \
               propertieskey not in item:
                # not enough info
                yield item
                continue

            path = safe_unicode(item[pathkey].lstrip('/')).encode('ascii')
            obj = traverse(self.context, path, None)
            if obj is None:
                # path doesn't exist
                yield item
                continue

            if not getattr(aq_base(obj), '_setProperty', False):
                yield item
                continue

            # Bugfix > Set exclude_from_nav (Plone 5) if excludeFromNav
            # (Plone 4) is True
            try:
                if item['excludeFromNav']:
                    obj.exclude_from_nav = True
            except:
                pass

            # Bugfix > set start & end date in Event object Plone 4 > Plone 5
            # Convert all datetime timezone in UTC+0 to avoid hours change
            try:
                start = item['startDate']
                start = parser.parse(start).replace(
                    tzinfo=pytz.timezone('UTC'))
                end = item['endDate']
                end = parser.parse(end).replace(tzinfo=pytz.timezone('UTC'))
                if start and end:
                    obj.start = start
                    obj.end = end
            except:
                pass

            # Bugfix > effective_date and expiration_date field. If keys
            # doesn't exists (e.g. effective_date in Plone 4) or if var is
            # in CamelCase (e.g. expirationDate in Plone 4)
            keys = item.keys()
            if 'effectiveDate' in keys:
                # Bugfix > Convert string (<type 'unicode'>) in DateTime
                # object
                effective_date = item['effectiveDate']
                if effective_date:
                    effective_date = DateTime(effective_date)
                    obj.effective_date = effective_date
            if not 'effective_date' in keys and not 'effectiveDate' in keys:
                # Bugfix > Convert string (<type 'unicode'>) in DateTime
                # object
                creation_date = item['creation_date']
                if creation_date:
                    creation_date = DateTime(creation_date)
                    obj.effective_date = creation_date
            if 'expirationDate' in keys:
                # Bugfix > Convert string (<type 'unicode'>) in DateTime
                # object
                expiration_date = item['expirationDate']
                if expiration_date:
                    expiration_date = DateTime(expiration_date)
                    obj.expiration_date = expiration_date

            # Bugfix > Convert Lineage child site in Subsite Dexterity object
            # Need to create a new Dexterity object called - Sub Site
            # (subsite)
            portal_types = self.context.portal_types.listContentTypes()
            if item['_type'] == 'Folder':
                if 'collective.lineage.interfaces.IChildSite' in item[
                        '_directly_provided']:
                    dxt_obj_id = 'subsite'
                    if dxt_obj_id in portal_types:
                        obj.portal_type = dxt_obj_id
                    else:
                        logger.error(
                            "Unable to import a Lineage child site. Please add a new Dexterity Folder type with id 'subsite' and select 1. Folder Addable Constrains 2. Layout support 3. Navigation root in Behavior tab "
                        )
                        # NOTE(review): bare `raise` with no active
                        # exception raises a confusing secondary error;
                        # consider raising an explicit exception type.
                        raise

            for pid, pvalue, ptype in item[propertieskey]:
                if getattr(aq_base(obj), pid, None) is not None:
                    # if object have a attribute equal to property, do
                    # nothing
                    continue
                # Bugfix > plone default_page must be a string, got
                # (<type 'unicode'>)
                if pid == 'default_page':
                    pvalue = str(pvalue)
                try:
                    if obj.hasProperty(pid):
                        obj._updateProperty(pid, pvalue)
                    else:
                        obj._setProperty(pid, pvalue, ptype)
                except ConflictError:
                    raise
                except Exception as e:
                    raise Exception('Failed to set property "%s" type "%s"'
                                    ' to "%s" at object %s. ERROR: %s'
                                    % (pid, ptype, pvalue, str(obj), str(e)))

            logger.info("object creation %s" % (obj.absolute_url_path()))
            yield item
class XlsSource(object):
    """Transmogrifier source section that reads repository definitions
    from Excel (.xlsx) files in a directory and yields one item per row.

    Files are processed in reverse-sorted filename order; each file's
    basename becomes the repository id. Row 0 of each sheet becomes the
    repository root item, all other rows become repository folders.
    """

    classProvides(ISectionBlueprint)
    implements(ISection)

    def __init__(self, transmogrifier, name, options, previous):
        self.previous = previous
        self.options = options

        self.path = resolvePackageReferenceOrFile(options['directory'])
        if self.path is None or not os.path.isdir(self.path):
            # Fix: message previously read "does not exists".
            raise IOError('Directory does not exist: {}'.format(self.path))

    def __iter__(self):
        for item in self.previous:
            yield item

        files = sorted(os.listdir(self.path))
        for repo_num, filename in enumerate(reversed(files)):
            if not self.is_parsable(filename):
                continue

            xls_path = os.path.join(self.path, filename)
            if not os.path.isfile(xls_path):
                continue

            # Zope convention: a local named __traceback_info__ is shown
            # in rendered tracebacks.  Fix: the original assigned through
            # locals(), which has no effect inside a CPython function, so
            # the traceback info was never actually set.
            __traceback_info__ = xls_path

            repository_id, extension = os.path.splitext(filename)
            keys, sheet_data = self.read_excel_file(xls_path)
            for rownum, row in enumerate(sheet_data):
                yield self.process_row(row, rownum, keys, repository_id)

    def is_parsable(self, filename):
        """Return whether we should attempt to parse the file based on its
        filename.

        Microsoft Office creates temp files in the same directory and
        prefixes them with a tilde; hidden dotfiles are skipped too.
        """
        if filename.startswith('.') or filename.startswith('~'):
            return False
        if not filename.endswith('.xlsx'):
            return False
        return True

    def read_excel_file(self, xls_path):
        """Return (column keys, data rows) from the first sheet."""
        tables = xlrd_xls2array(xls_path)
        repository_table = tables[0]
        sheet_data = repository_table['sheet_data']

        # clean up table
        sheet_data = sheet_data[4:]  # remove human readable stuff
        keys = sheet_data[0]
        del sheet_data[0]
        return keys, sheet_data

    def process_row(self, row, rownum, keys, repository_id):
        """Convert one sheet row into a pipeline item dict.

        :raises Exception: if a reference number is not a string.
        """
        data = {}
        # repofolder or reporoot: the first data row is the root.
        if rownum == 0:
            data['_type'] = u'opengever.repository.repositoryroot'
        else:
            data['_type'] = u'opengever.repository.repositoryfolder'
        data['_repo_root_id'] = repository_id

        for colnum, cell in enumerate(row):
            key = keys[colnum]
            # Ignore columns without a header.
            if key in (None, '', u''):
                continue

            # Blank cells for these vocabulary fields mean "no override".
            if key in (
                    'classification',
                    'privacy_layer',
                    'public_trial',
                    'retention_period',
                    'custody_period',
                    'archival_value',
            ) and cell in (None, '', u''):
                continue

            if key == 'reference_number' and not isinstance(cell, basestring):
                raise Exception(
                    "Reference number has to be string: %s" % cell)

            if key in ('valid_from', 'valid_until') and cell in ('', u''):
                cell = None

            if key == 'addable_dossier_types':
                # Comma-separated list, whitespace-insensitive.
                cell = cell.replace(' ', '').split(',')
                cell = [t for t in cell if not t == '']

            if key in MAPPED_FIELDS.keys():
                mapping = MAPPED_FIELDS[key]
                # Data is already a valid term
                if cell in mapping.values():
                    continue
                cell = mapping[cell]

            data[key] = cell
        return data
class ReportSection(object):
    """Create import reports for the current OGGBundle.
    """

    classProvides(ISectionBlueprint)
    implements(ISection)

    def __init__(self, transmogrifier, name, options, previous):
        self.previous = previous
        self.context = transmogrifier.context
        self.bundle = IAnnotations(transmogrifier)[BUNDLE_KEY]
        self.report_dir = None

    def __iter__(self):
        # Pass every upstream item through untouched; reporting only
        # starts once the whole pipeline has been consumed.
        for upstream_item in self.previous:
            yield upstream_item

        log.info("Creating import reports...")
        self.report_dir = self.create_report_dir()

        bundle = self.bundle
        self.store_as_json(bundle.errors, 'errors.json')
        self.store_as_json(bundle.stats, 'stats.json')

        bundle.report_data = DataCollector(bundle)()

        self.build_ascii_summary(bundle)
        self.build_xlsx_main_report(bundle)
        self.build_xlsx_validation_report(bundle)

    def create_report_dir(self):
        """Create a directory to store all import report files.

        In a real invocation, this will be created inside the instance's
        var/ directory (no git-pollution, variable data where it belongs).
        During tests, a temporary directory will be created.
        """
        stamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
        dirname = 'import-report-%s' % stamp
        try:
            target = os.path.join(PathFinder().var, dirname)
            try:
                os.makedirs(target)
            except OSError:
                # Already exists
                pass
        except RuntimeError:
            # During tests
            target = tempfile.mkdtemp(prefix=dirname)
        return target

    def store_as_json(self, data, filename):
        """Store raw migration stats or errors as JSON files in report dir.
        """
        destination = os.path.join(self.report_dir, filename)
        with open(destination, 'w') as fileobj:
            advancedjson.dump(
                data, fileobj,
                sort_keys=True, indent=4, separators=(',', ': '))
        log.info('Stored %s' % destination)

    def build_ascii_summary(self, bundle):
        # Human-readable summary, written straight to the log.
        log.info('\n\n%s\n' % ASCIISummaryBuilder(bundle).build())

    def build_xlsx_main_report(self, bundle):
        target = os.path.join(self.report_dir, 'main-report.xlsx')
        XLSXMainReportBuilder(bundle).build_and_save(target)

    def build_xlsx_validation_report(self, bundle):
        target = os.path.join(self.report_dir, 'validation-report.xlsx')
        XLSXValidationReportBuilder(bundle).build_and_save(target)
class RosettaUploadJob(BranchJobDerived):
    """A Job that uploads translation files to Rosetta."""

    implements(IRosettaUploadJob)
    classProvides(IRosettaUploadJobSource)

    class_job_type = BranchJobType.ROSETTA_UPLOAD

    task_queue = 'bzrsyncd_job'

    config = config.IRosettaUploadJobSource

    def __init__(self, branch_job):
        super(RosettaUploadJob, self).__init__(branch_job)

        # Filled in by _init_translation_file_lists at run time.
        self.template_file_names = []
        self.template_files_changed = []
        self.translation_file_names = []
        self.translation_files_changed = []

    @staticmethod
    def getMetadata(from_revision_id, force_translations_upload):
        return {
            'from_revision_id': from_revision_id,
            'force_translations_upload': force_translations_upload,
        }

    @property
    def from_revision_id(self):
        return self.metadata['from_revision_id']

    @property
    def force_translations_upload(self):
        return self.metadata['force_translations_upload']

    @classmethod
    def providesTranslationFiles(cls, branch):
        """See `IRosettaUploadJobSource`."""
        productseries = getUtility(
            IProductSeriesSet).findByTranslationsImportBranch(branch)
        return not productseries.is_empty()

    @classmethod
    def create(cls, branch, from_revision_id,
               force_translations_upload=False):
        """See `IRosettaUploadJobSource`."""
        if branch is None:
            return None

        if from_revision_id is None:
            from_revision_id = NULL_REVISION

        if force_translations_upload or cls.providesTranslationFiles(branch):
            metadata = cls.getMetadata(from_revision_id,
                                       force_translations_upload)
            branch_job = BranchJob(
                branch, BranchJobType.ROSETTA_UPLOAD, metadata)
            job = cls(branch_job)
            job.celeryRunOnCommit()
            return job
        else:
            # Nothing to upload for this branch.
            return None

    def _iter_all_lists(self):
        """Iterate through all the file lists.

        File names and files are stored in different lists according to
        their type (template or translation). But some operations need
        to be performed on both lists.

        This generator yields a pair of lists, one containing all file
        names for the given type, the other containing all file names
        *and* content of the changed files.
        """
        yield (self.template_file_names, self.template_files_changed)
        yield (self.translation_file_names, self.translation_files_changed)

    def _iter_lists_and_uploaders(self, productseries):
        """Iterate through all files for a productseries.

        File names and files are stored in different lists according to
        their type (template or translation). Which of these are needed
        depends on the configuration of the product series these uploads
        are for.

        This generator checks the configuration of the series and produces
        the a lists of lists and a person object. The first list contains
        all file names or the given type, the second contains all file
        names *and* content of the changed files. The person is who is to
        be credited as the importer of these files and will vary depending
        on the file type.
        """
        if (productseries.translations_autoimport_mode in
            (TranslationsBranchImportMode.IMPORT_TEMPLATES,
             TranslationsBranchImportMode.IMPORT_TRANSLATIONS)
            or self.force_translations_upload):
            # Templates are imported in both autoimport modes.
            yield (self.template_file_names,
                   self.template_files_changed,
                   self._uploader_person_pot(productseries))

        if (productseries.translations_autoimport_mode ==
            TranslationsBranchImportMode.IMPORT_TRANSLATIONS
            or self.force_translations_upload):
            # Translations only when explicitly enabled (or forced).
            yield (self.translation_file_names,
                   self.translation_files_changed,
                   self._uploader_person_po(productseries))

    @property
    def file_names(self):
        """A contatenation of all lists of filenames."""
        return self.template_file_names + self.translation_file_names

    def _init_translation_file_lists(self):
        """Initialize the member variables that hold the information about
        the relevant files.

        The information is collected from the branch tree and stored in the
        following member variables:
        * file_names is a dictionary of two lists ('pot', 'po') of file
          names that are POT or PO files respectively. This includes all
          files, changed or unchanged.
        * changed_files is a dictionary of two lists ('pot', 'po') of
          tuples of (file_name, file_content) of all changed files that
          are POT or PO files respectively.
        """
        bzrbranch = self.branch.getBzrBranch()
        from_tree = bzrbranch.repository.revision_tree(
            self.from_revision_id)
        to_tree = bzrbranch.repository.revision_tree(
            self.branch.last_scanned_id)

        importer = TranslationImporter()

        to_tree.lock_read()
        try:
            # First pass: classify every file in the target tree.
            for dir, files in to_tree.walkdirs():
                for afile in files:
                    file_path, file_name, file_type = afile[:3]
                    if file_type != 'file':
                        continue
                    if importer.isHidden(file_path):
                        continue
                    if importer.isTemplateName(file_name):
                        append_to = self.template_file_names
                    elif importer.isTranslationName(file_name):
                        append_to = self.translation_file_names
                    else:
                        continue
                    append_to.append(file_path)
            from_tree.lock_read()
            try:
                # Second pass: record content of files changed between
                # the two revisions.
                for file_names, changed_files in self._iter_all_lists():
                    for changed_file in to_tree.iter_changes(
                            from_tree, specific_files=file_names):
                        (from_kind, to_kind) = changed_file[6]
                        if to_kind != 'file':
                            continue
                        file_id, (from_path, to_path) = changed_file[:2]
                        changed_files.append(
                            (to_path, to_tree.get_file_text(file_id)))
            finally:
                from_tree.unlock()
        finally:
            to_tree.unlock()

    def _uploader_person_pot(self, series):
        """Determine which person is the uploader for a pot file."""
        # Default uploader is the driver or owner of the series.
        uploader = series.driver
        if uploader is None:
            uploader = series.owner
        return uploader

    def _uploader_person_po(self, series):
        """Determine which person is the uploader for a po file."""
        # For po files, try to determine the author of the latest push.
        uploader = None
        revision = self.branch.getTipRevision()
        if revision is not None and revision.revision_author is not None:
            uploader = revision.revision_author.person
        if uploader is None:
            uploader = self._uploader_person_pot(series)
        return uploader

    def run(self):
        """See `IRosettaUploadJob`."""
        with server(get_ro_server(), no_replace=True):
            # This is not called upon job creation because the branch would
            # neither have been mirrored nor scanned then.
            self._init_translation_file_lists()
            # Get the product series that are connected to this branch and
            # that want to upload translations.
            productseriesset = getUtility(IProductSeriesSet)
            productseries = productseriesset.findByTranslationsImportBranch(
                self.branch, self.force_translations_upload)
            translation_import_queue = getUtility(ITranslationImportQueue)
            for series in productseries:
                approver = TranslationBranchApprover(self.file_names,
                                                     productseries=series)
                for iter_info in self._iter_lists_and_uploaders(series):
                    file_names, changed_files, uploader = iter_info
                    for upload_file_name, upload_file_content in (
                            changed_files):
                        if len(upload_file_content) == 0:
                            continue  # Skip empty files
                        entry = translation_import_queue.addOrUpdateEntry(
                            upload_file_name, upload_file_content,
                            True, uploader, productseries=series)
                        approver.approve(entry)

    @staticmethod
    def iterReady():
        """See `IRosettaUploadJobSource`."""
        jobs = IMasterStore(BranchJob).using(BranchJob, Job, Branch).find(
            (BranchJob),
            And(BranchJob.job_type == BranchJobType.ROSETTA_UPLOAD,
                BranchJob.job == Job.id,
                BranchJob.branch == Branch.id,
                # Only branches whose mirror and scan agree are ready.
                Branch.last_mirrored_id == Branch.last_scanned_id,
                Job.id.is_in(Job.ready_jobs))).order_by(BranchJob.id)
        return (RosettaUploadJob(job) for job in jobs)

    @staticmethod
    def findUnfinishedJobs(branch, since=None):
        """See `IRosettaUploadJobSource`."""
        store = IMasterStore(BranchJob)
        match = And(
            Job.id == BranchJob.jobID,
            BranchJob.branch == branch,
            BranchJob.job_type == BranchJobType.ROSETTA_UPLOAD,
            Job._status != JobStatus.COMPLETED,
            Job._status != JobStatus.FAILED)
        if since is not None:
            match = And(match, Job.date_created > since)
        jobs = store.using(BranchJob, Job).find((BranchJob), match)
        return jobs
class Maildir(object):
    """See `zope.sendmail.interfaces.IMaildir`

    Filesystem mail queue in the Maildir layout: messages live in the
    'new' and 'cur' subdirectories; 'tmp' is used for atomic creation.
    """

    classProvides(IMaildirFactory)
    implements(IMaildir)

    def __init__(self, path, create=False):
        "See `zope.sendmail.interfaces.IMaildirFactory`"
        self.path = path

        def access(path):
            # F_OK: test for existence only.
            return os.access(path, os.F_OK)

        subdir_cur = os.path.join(path, 'cur')
        subdir_new = os.path.join(path, 'new')
        subdir_tmp = os.path.join(path, 'tmp')

        if create and not access(path):
            # Create the complete Maildir skeleton.
            os.mkdir(path)
            os.mkdir(subdir_cur)
            os.mkdir(subdir_new)
            os.mkdir(subdir_tmp)
            maildir = True
        else:
            # Existing directory: all three subdirectories must be present
            # for it to qualify as a Maildir.
            maildir = (os.path.isdir(subdir_cur) and os.path.isdir(subdir_new)
                       and os.path.isdir(subdir_tmp))
        if not maildir:
            raise ValueError('%s is not a Maildir folder' % path)

    def __iter__(self):
        "See `zope.sendmail.interfaces.IMaildir`"
        join = os.path.join
        subdir_cur = join(self.path, 'cur')
        subdir_new = join(self.path, 'new')
        # http://www.qmail.org/man/man5/maildir.html says:
        # "It is a good idea for readers to skip all filenames in new
        # and cur starting with a dot. Other than this, readers
        # should not attempt to parse filenames."
        new_messages = [join(subdir_new, x)
                        for x in os.listdir(subdir_new)
                        if not x.startswith('.')]
        cur_messages = [join(subdir_cur, x)
                        for x in os.listdir(subdir_cur)
                        if not x.startswith('.')]
        # Sort by modification time so earlier messages are sent before
        # later messages during queue processing.
        msgs_sorted = [(m, os.path.getmtime(m))
                       for m in new_messages + cur_messages]
        msgs_sorted.sort(key=lambda x: x[1])
        return iter([m[0] for m in msgs_sorted])

    def newMessage(self):
        "See `zope.sendmail.interfaces.IMaildir`"
        # NOTE: http://www.qmail.org/man/man5/maildir.html says, that the first
        # step of the delivery process should be a chdir. Chdirs and
        # threading do not mix. Is that chdir really necessary?
        join = os.path.join
        subdir_tmp = join(self.path, 'tmp')
        subdir_new = join(self.path, 'new')
        pid = os.getpid()
        host = socket.gethostname()
        randmax = 0x7fffffff
        counter = 0
        while True:
            timestamp = int(time.time())
            # Unique name "time.pid.host.random", per Maildir convention.
            unique = '%d.%d.%s.%d' % (timestamp, pid, host,
                                      random.randrange(randmax))
            filename = join(subdir_tmp, unique)
            try:
                # O_EXCL guarantees we never clobber a concurrent writer's
                # file; a collision surfaces as EEXIST and we retry below.
                fd = os.open(filename,
                             os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0600)
            except OSError, e:
                if e.errno != errno.EEXIST:
                    raise
                # File exists
                counter += 1
                if counter >= 1000:
                    raise RuntimeError("Failed to create unique file name"
                                       " in %s, are we under a DoS attack?"
                                       % subdir_tmp)
                # NOTE: maildir.html (see above) says I should sleep for 2
                time.sleep(0.1)
            else:
                break
        # The writer is expected to move the file from tmp/ to new/ once it
        # has been fully written.
        return MaildirMessageWriter(os.fdopen(fd, 'w'), filename,
                                    join(subdir_new, unique))
class RemoteSource(object): """ """ name = 'collective.jsonmigrator.remotesource' _options = [ ('remote-url', 'http://127.0.0.1:8080'), ('remote-username', 'admin'), ('remote-password', 'admin'), ('local-path', '/Plone'), ('remote-path', '/Plone'), ('remote-catalog-path', '/Plone/portal_catalog'), ('remote-catalog-query', ''), ('remote-crawl-depth', -1), ('remote-skip-path', ''), ] classProvides(ISectionBlueprint) implements(ISection) def __init__(self, transmogrifier, name, options, previous): self.name, self.options, self.previous = name, options, previous self.transmogrifier = transmogrifier self.context = transmogrifier.context for option, default in self._options: setattr(self, option.replace('-', '_'), self.get_option(option, default)) if type(self.remote_crawl_depth) in [str, unicode]: self.remote_crawl_depth = int(self.remote_crawl_depth) if type(self.remote_skip_path) in [str, unicode]: self.remote_skip_path = self.remote_skip_path.split() if self.remote_catalog_query: self.remote_ok_path = self.get_ok_path() if self.remote_path[-1] == '/': self.remote_path = self.remote_path[:-1] if self.local_path[-1] == '/': self.local_path = self.local_path[:-1] # Load cached data from the given file self.cache = resolvePackageReferenceOrFile(options.get('cache', '')) if self.cache and os.path.exists(self.cache): cache_file = open(self.cache, 'rb') cache = pickle.load(cache_file) cache_file.close() setattr(self, MEMOIZE_PROPNAME, cache) def get_ok_path(self): catalog_query = '?catalog_query=%s' % urllib.quote(encodestring(self.remote_catalog_query)) url = urllib2.urlparse.urljoin(self.remote_url, urllib.quote(self.remote_catalog_path)) + catalog_query remote = Urllibrpc(url, self.remote_username, self.remote_password) try: items = remote.get_catalog_results() except UrllibrpcException, e: logger.error("Failed reading url '%s' with error code %s." % (e.url, e.code)) return set([]) return set(simplejson.loads(items))
class Program(object):
    """Page-template program wrapping a Chameleon template.

    Compiled via `cook` and rendered via `__call__`; selects a secure or
    trusted expression-type table depending on the engine in use.
    """

    implements(IPageTemplateProgram)
    classProvides(IPageTemplateEngine)

    # Zope 2 Page Template expressions (restricted evaluation).
    secure_expression_types = {
        'python': UntrustedPythonExpr,
        'string': StringExpr,
        'not': NotExpr,
        'exists': ExistsExpr,
        'path': PathExpr,
        'provider': ProviderExpr,
        'nocall': NocallExpr,
    }

    # Zope 3 Page Template expressions (trusted evaluation).
    expression_types = {
        'python': PythonExpr,
        'string': StringExpr,
        'not': NotExpr,
        'exists': ExistsExpr,
        'path': TrustedPathExpr,
        'provider': ProviderExpr,
        'nocall': NocallExpr,
    }

    extra_builtins = {'modules': ZRPythonExpr._SecureModuleImporter()}

    def __init__(self, template):
        self.template = template

    def __call__(self, context, macros, tal=True, **options):
        """Render the template; with ``tal=False`` return its raw body."""
        if tal is False:
            return self.template.body

        # Keep the original repeat dictionary reachable under
        # 'wrapped_repeat' — the secure Python expression implementation
        # consults it whenever a 'repeat' symbol is found — while Chameleon
        # gets its own RepeatDict under 'repeat'.
        render_vars = context.vars
        render_vars['wrapped_repeat'] = render_vars['repeat']
        render_vars['repeat'] = RepeatDict(context.repeat_vars)

        return self.template.render(**render_vars)

    @classmethod
    def cook(cls, source_file, text, engine, content_type):
        """Compile `text` into a (program, macros) pair.

        For the restricted Zope 2 engine, `<?python ?>` processing
        instructions are stripped and the secure expression table is used.
        """
        if engine is not getEngine():
            exprs = cls.expression_types
        else:
            def strip_pi(pi_match):
                code = pi_match.group(1)
                logger.info(
                    'skipped "<?python%s?>" code block in '
                    'Zope 2 page template object "%s".',
                    code, source_file)
                return ''

            text, stripped = re_match_pi.subn(strip_pi, text)
            if stripped:
                plural = 's' if stripped > 1 else ''
                logger.warning("skipped %d code block%s (not allowed in "
                               "restricted evaluation scope)."
                               % (stripped, plural))
            exprs = cls.secure_expression_types

        # BBB: Support CMFCore's FSPagetemplateFile formatting
        if source_file is not None and source_file.startswith('file:'):
            source_file = source_file[5:]

        compiled = ChameleonPageTemplate(
            text,
            filename=source_file,
            keep_body=True,
            expression_types=exprs,
            encoding='utf-8',
            extra_builtins=cls.extra_builtins,
        )
        return cls(compiled), compiled.macros
class GroupsSource(object): """ >>> from plone.app.vocabularies.tests.base import create_context >>> from plone.app.vocabularies.tests.base import DummyTool >>> context = create_context() >>> tool = DummyTool('acl_users') >>> groups = ('group1', 'group2') >>> def getGroupById(value, default): ... return value in groups and value or default >>> tool.getGroupById = getGroupById >>> def searchGroups(name=None): ... return [dict(groupid=u) for u in groups] >>> tool.searchGroups = searchGroups >>> context.acl_users = tool >>> source = GroupsSource(context) >>> source <plone.app.vocabularies.groups.GroupsSource object at ...> >>> len(source.search('')) 2 >>> len(source.search(u'\xa4')) 2 >>> 'group1' in source, 'noone' in source (True, False) >>> source.get('group1'), source.get('noone') ('group1', None) """ implements(ISource) classProvides(IContextSourceBinder) def __init__(self, context): self.context = context self.users = getToolByName(context, "acl_users") def __contains__(self, value): """Return whether the value is available in this source """ if self.get(value) is None: return False return True def search(self, query): # XXX: For some reason, this doesn't seem to know how to match on # title, only name, and seems to match other random groups if # it's unicode try: name = query.encode('ascii') except UnicodeEncodeError: name = query return [u['groupid'] for u in self.users.searchGroups(name=name)] def get(self, value): return self.users.getGroupById(value, None)
class ReviewRequestedEmailJob(BranchMergeProposalJobDerived): """Send email to the reviewer telling them to review the proposal. Provides class methods to create and retrieve such jobs. """ implements(IReviewRequestedEmailJob) classProvides(IReviewRequestedEmailJobSource) class_job_type = BranchMergeProposalJobType.REVIEW_REQUEST_EMAIL config = config.IBranchMergeProposalJobSource def run(self): """See `IRunnableJob`.""" reason = RecipientReason.forReviewer(self.branch_merge_proposal, True, self.reviewer) mailer = BMPMailer.forReviewRequest(reason, self.branch_merge_proposal, self.requester) mailer.sendAll() @classmethod def create(cls, review_request): """See `IReviewRequestedEmailJobSource`.""" metadata = cls.getMetadata(review_request) bmp = review_request.branch_merge_proposal return cls._create(bmp, metadata) @staticmethod def getMetadata(review_request): return { 'reviewer': review_request.reviewer.name, 'requester': review_request.registrant.name, } @property def reviewer(self): """The person or team who has been asked to review.""" return getUtility(IPersonSet).getByName(self.metadata['reviewer']) @property def requester(self): """The person who requested the review to be done.""" return getUtility(IPersonSet).getByName(self.metadata['requester']) def getOopsVars(self): """See `IRunnableJob`.""" vars = BranchMergeProposalJobDerived.getOopsVars(self) vars.extend([ ('reviewer', self.metadata['reviewer']), ('requester', self.metadata['requester']), ]) return vars def getErrorRecipients(self): """Return a list of email-ids to notify about user errors.""" recipients = [] if self.requester is not None: recipients.append(format_address_for_person(self.requester)) return recipients def getOperationDescription(self): return 'emailing a reviewer requesting a review'
class ManifestExporterSection(object): classProvides(ISectionBlueprint) implements(ISection) def __init__(self, transmogrifier, name, options, previous): self.previous = previous self.context = transmogrifier.context self.entrieskey = defaultMatcher(options, 'entries-key', name, 'entries') self.fileskey = options.get('files-key', '_files').strip() self.doc = minidom.Document() def __iter__(self): for item in self.previous: entrieskey = self.entrieskey(*item.keys())[0] if not entrieskey: yield item continue manifest = self.createManifest(item[entrieskey]) if manifest: item.setdefault('_files', {}) item[self.fileskey]['manifest'] = { 'name': '.objects.xml', 'data': manifest, } yield item def createManifest(self, entries): if not entries: return None doc = self.doc root = doc.createElement('manifest') for obj_id, obj_type in entries: # create record record = doc.createElement('record') # set type attribute attr = doc.createAttribute('type') attr.value = obj_type record.setAttributeNode(attr) # add object id text = doc.createTextNode(obj_id) record.appendChild(text) root.appendChild(record) doc.appendChild(root) try: data = doc.toprettyxml(indent=' ', encoding='utf-8') except UnicodeDecodeError: # all comments are strings encoded in 'utf-8' and they will properly # saved in xml file, but if we explicitly give 'utf-8' encoding # UnicodeDecodeError will be raised when they have non-ascii chars data = doc.toprettyxml(indent=' ') doc.unlink() return data
class A(Odd): classProvides(I1)