Example #1
    def _run(self, data):
        logger.info("Start importing profile: " + data['config'])
        Transmogrifier(self.context)(data['config'])
        logger.info("Stop importing profile: " + data['config'])
        IStatusMessage(self.request).addStatusMessage(
            'Migrated: %s' % data['config'], type='info')
Example #2
    def handleRun(self, action):
        data, errors = self.extractData()
        if errors:
            return False

        logger.info("Start importing profile: " + data['config'])
        Transmogrifier(self.context)(data['config'])
        logger.info("Stop importing profile: " + data['config'])
Example #3
    def handleRun(self, action):
        data, errors = self.extractData()
        if errors:
            return False

        logger.info("Start importing profile: " + data['config'])
        Transmogrifier(self.context)(data['config'])
        logger.info("Stop importing profile: " + data['config'])
    def get_items(self, path, depth=0):
        if path and path[-1] == '/':
            path = path[:-1]
        if self.remote_crawl_depth == -1 or depth <= self.remote_crawl_depth:

            item, subitems = self.get_remote_item(path)

            if item is None:
                logger.warning(':: Skipping -> %s. No remote data.' % path)
                return

            if item.startswith('ERROR'):
                logger.error(
                    "Could not get item '%s' from remote. Got %s." %
                    (path, item))
                return

            try:
                item = json.loads(item)
            except ValueError:
                # json raises ValueError (JSONDecodeError is its Python 3
                # subclass) when the payload is not valid JSON
                logger.error(
                    "Could not decode item from path '%s' as JSON." % path)
                return
            logger.info(':: Crawling %s' % item['_path'])

            # item['_path'] is relative to the domain root; we need it
            # relative to the Plone root
            remote_url = self.remote_url
            _, _, remote_path, _, _, _ = urlparse.urlparse(remote_url)
            item['_path'] = item['_path'][len(remote_path):]
            if item['_path'].startswith('/'):
                item['_path'] = item['_path'][1:]

            if item['_type'] == "Plone Site":
                pass
            else:
                yield item

            if subitems.startswith('ERROR'):
                logger.error(
                    "Could not get subitems for '%s'. Got %s." %
                    (path, subitems))
                return

            for subitem_id in json.loads(subitems):
                subitem_path = path + '/' + subitem_id

                if subitem_path[len(self.remote_path):]\
                        in self.remote_skip_path:
                    logger.info(':: Skipping -> ' + subitem_path)
                    continue

                for subitem in self.get_items(subitem_path, depth + 1):
                    yield subitem
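A generator like get_items is typically driven from the section's __iter__. A minimal sketch, assuming the section keeps the usual self.previous pipeline reference and starts the crawl at self.remote_path (both names taken from the snippet above):

    def __iter__(self):
        # pass through items produced by earlier pipeline sections first,
        # then crawl the remote site starting at the configured root path
        for item in self.previous:
            yield item
        for item in self.get_items(self.remote_path):
            yield item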
Example #5
    def get_items(self, path, depth=0):
        if path and path[-1] == '/':
            path = path[:-1]
        if self.remote_crawl_depth == -1 or depth <= self.remote_crawl_depth:

            item, subitems = self.get_remote_item(path)

            if item is None:
                logger.warning(':: Skipping -> %s. No remote data.' % path)
                return

            if item.startswith(b'ERROR'):
                logger.error("Could not get item '%s' from remote. Got %s." %
                             (path, item))
                return

            try:
                item = json.loads(item)
            except json.JSONDecodeError:
                logger.error("Could not decode item from path '%s' as JSON." %
                             path)
                return
            logger.info(':: Crawling %s' % item['_path'])

            # item['_path'] is relative to the domain root; we need it
            # relative to the Plone root
            remote_url = self.remote_url
            _, _, remote_path, _, _, _ = urllib.parse.urlparse(remote_url)
            item['_path'] = item['_path'][len(remote_path):]
            if item['_path'].startswith('/'):
                item['_path'] = item['_path'][1:]

            if item['_type'] == "Plone Site":
                pass
            else:
                yield item

            if subitems.startswith(b'ERROR'):
                logger.error("Could not get subitems for '%s'. Got %s." %
                             (path, subitems))
                return

            for subitem_id in json.loads(subitems):
                subitem_path = path + '/' + subitem_id

                if subitem_path[len(self.remote_path):]\
                        in self.remote_skip_path:
                    logger.info(':: Skipping -> ' + subitem_path)
                    continue

                for subitem in self.get_items(subitem_path, depth + 1):
                    yield subitem
Example #6
    def handleRun(self, action):
        data, errors = self.extractData()
        if errors:
            return False

        logger.info("Start importing profile: " + data["config"])
        rs_options = {}
        # build remotesource options: form field names use underscores,
        # pipeline option names use dashes
        for fieldname, value in data.items():
            optname = fieldname.replace("_", "-")
            if isinstance(value, list):
                rs_options[optname] = "\n".join(value)
            else:
                rs_options[optname] = str(value)
            # logger.info( "%s(%s)::%s" % (fieldname, optname, rs_options[optname]))

        Transmogrifier(self.context)(data["config"], remotesource=rs_options)
        logger.info("Stop importing profile: " + data["config"])
Example #7
    def get_items(self, path, depth=0):
        if path and path[-1] == '/':
            path = path[:-1]

        if self.remote_crawl_depth == -1 or depth <= self.remote_crawl_depth:

            item, subitems = self.get_remote_item(path)

            if item is None:
                logger.warning(':: Skipping -> %s. No remote data.' % path)
                return

            if item.startswith('ERROR'):
                logger.error("Could not get item '%s' from remote. Got %s." % (path, item))
                return

            item = simplejson.loads(item)
            logger.info(':: Crawling %s' % item['_path'])

            if self.local_path:
                item['_path'] = self.local_path + item['_path'][len(self.remote_path):] 
            # item['_path'] is relative to the domain root; we need it relative to the Plone root
#            remote_url = self.remote_url
#            _,_,remote_path,_,_,_ = urlparse.urlparse(remote_url)
#            item['_path'] = item['_path'][len(remote_path):]
#            if item['_path'].startswith('/'):
#                item['_path'] = item['_path'][1:]

            if item['_type'] == "Plone Site":
                pass
            else:
                yield item

            if subitems.startswith('ERROR'):
                logger.error("Could not get subitems for '%s'. Got %s." % (path, subitems))
                return

            for subitem_id in simplejson.loads(subitems):
                subitem_path = path + '/' + subitem_id

                if subitem_path[len(self.remote_path):] in self.remote_skip_path:
                    logger.info(':: Skipping -> ' + subitem_path)
                    continue

                if self.remote_catalog_query:            
                    if subitem_path not in self.remote_ok_path:
                        logger.info(':: Skipping (2) -> ' + subitem_path)
                        continue

                for subitem in self.get_items(subitem_path, depth+1):

                    yield subitem
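For clarity, a small worked illustration of the local_path remapping performed above; the concrete paths are made up:

# re-root a crawled path from the remote tree onto the local tree,
# exactly as the slice in get_items does
remote_path = '/Plone/old-site'
local_path = '/Plone/new-site'
item = {'_path': '/Plone/old-site/news/item-1'}

item['_path'] = local_path + item['_path'][len(remote_path):]
assert item['_path'] == '/Plone/new-site/news/item-1'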
Example #8
    def callable():
        scheme, netloc, path, params, query, fragment = urllib.parse.urlparse(
            self.url)
        #if '@' not in netloc:
        #    netloc = '%s:%s@%s' % (self.username, self.password, netloc)
        if path.endswith("/"):
            path = path[:-1]
        path = path + '/' + item
        url = urllib.parse.urlunparse(
            (scheme, netloc, path, params, query, fragment))
        done = False
        TRIES = 0
        while not done:
            try:
                req = urllib.request.Request(url)
                credentials = ('%s:%s' % (self.username, self.password))
                encoded_credentials = base64.b64encode(
                    credentials.encode('ascii'))
                req.add_header(
                    'Authorization',
                    'Basic %s' % encoded_credentials.decode("ascii"))
                # open the prepared request so the Authorization header is
                # actually sent (opening the bare url would drop it)
                f = urllib.request.urlopen(req)
                done = True
            except Exception as e:
                logger.info(f"Exception {e} at {url}")
                TRIES += 1
                if TRIES > MAX_TRIES:
                    # MAX_TRIES is assumed to be defined at module level
                    raise
                else:
                    logger.info("Sleeping...")
                    time.sleep(10.0)
                    logger.info("Trying again.")
        content = f.read()
        if f.getcode() != 200:
            raise UrllibrpcException(f.getcode(), f.geturl())
        f.close()
        return content
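The retry loop above refers to MAX_TRIES and UrllibrpcException, which are not shown. A sketch of what the module-level definitions might look like; only the names appear in the original, the bodies are assumptions:

MAX_TRIES = 5  # give up after this many failed attempts


class UrllibrpcException(Exception):
    """Raised when the remote answers with a non-200 status code."""

    def __init__(self, code, url):
        super().__init__('%s returned status %s' % (url, code))
        self.code = code
        self.url = url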
    def __iter__(self):
        for item in self.previous:
            pathkey = self.pathkey(*item.keys())[0]
            propertieskey = self.propertieskey(*item.keys())[0]

            if not pathkey or not propertieskey or \
               propertieskey not in item:
                # not enough info
                yield item
                continue

            path = safe_unicode(item[pathkey].lstrip('/')).encode('ascii')
            obj = traverse(self.context, path, None)

            if obj is None:
                # path doesn't exist
                yield item
                continue

            if not getattr(aq_base(obj), '_setProperty', False):
                yield item
                continue

            # Bugfix > Set exclude_from_nav (Plone 5) if excludeFromNav (Plone 4) is True
            try:
                if item['excludeFromNav']:
                    obj.exclude_from_nav = True
            except Exception:
                pass

            # Bugfix > set start & end date in Event object Plone 4 > Plone 5
            # Convert all datetimes to UTC to avoid hour shifts
            try:
                start = item['startDate']
                start = parser.parse(start).replace(
                    tzinfo=pytz.timezone('UTC'))
                end = item['endDate']
                end = parser.parse(end).replace(tzinfo=pytz.timezone('UTC'))
                if start and end:
                    obj.start = start
                    obj.end = end
            except Exception:
                pass

            # Bugfix > effective_date and expiration_date fields. Handle keys
            # that don't exist (e.g. effective_date in Plone 4) or that arrive
            # in camelCase (e.g. expirationDate in Plone 4)
            keys = item.keys()
            if 'effectiveDate' in keys:
                # Bugfix > Convert string (<type 'unicode'>) into a DateTime object
                effective_date = item['effectiveDate']
                if effective_date:
                    effective_date = DateTime(effective_date)
                obj.effective_date = effective_date

            if 'effective_date' not in keys and 'effectiveDate' not in keys:
                # Bugfix > Convert string (<type 'unicode'>) into a DateTime object
                creation_date = item['creation_date']
                if creation_date:
                    creation_date = DateTime(creation_date)
                obj.effective_date = creation_date

            if 'expirationDate' in keys:
                # Bugfix > Convert string (<type 'unicode'>) into a DateTime object
                expiration_date = item['expirationDate']
                if expiration_date:
                    expiration_date = DateTime(expiration_date)
                obj.expiration_date = expiration_date

            # Bugfix > Convert a Lineage child site into a Subsite Dexterity object
            # Requires a Dexterity content type with the id 'subsite' (Sub Site)
            portal_types = self.context.portal_types.listContentTypes()
            if item['_type'] == 'Folder':
                if 'collective.lineage.interfaces.IChildSite' in item[
                        '_directly_provided']:

                    dxt_obj_id = 'subsite'

                    if dxt_obj_id in portal_types:
                        obj.portal_type = dxt_obj_id
                    else:
                        logger.error(
                            "Unable to import a Lineage child site. Please add a "
                            "new Dexterity Folder type with id 'subsite' and "
                            "select 1. Folder Addable Constrains 2. Layout "
                            "support 3. Navigation root in the Behavior tab")
                        raise RuntimeError(
                            "Dexterity type with id 'subsite' is not installed")

            for pid, pvalue, ptype in item[propertieskey]:
                if getattr(aq_base(obj), pid, None) is not None:
                    # if the object already has an attribute matching the
                    # property, do nothing
                    continue

                # Bugfix > plone default_page must be a string, got (<type 'unicode'>)
                if pid == 'default_page':
                    pvalue = str(pvalue)

                try:
                    if obj.hasProperty(pid):
                        obj._updateProperty(pid, pvalue)
                    else:
                        obj._setProperty(pid, pvalue, ptype)
                except ConflictError:
                    raise
                except Exception as e:
                    raise Exception('Failed to set property "%s" type "%s"'
                                    ' to "%s" at object %s. ERROR: %s' %
                                    (pid, ptype, pvalue, str(obj), str(e)))

            logger.info("object creation %s" % (obj.absolute_url_path()))

            yield item
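The pathkey and propertieskey matchers used at the top of __iter__ are not defined in the snippet. In collective.transmogrifier blueprints they are conventionally built in __init__ with defaultMatcher; a sketch under that assumption (the class name is hypothetical):

from zope.interface import implementer, provider

from collective.transmogrifier.interfaces import ISection, ISectionBlueprint
from collective.transmogrifier.utils import defaultMatcher


@provider(ISectionBlueprint)
@implementer(ISection)
class PropertiesSection(object):

    def __init__(self, transmogrifier, name, options, previous):
        self.previous = previous
        self.context = transmogrifier.context
        # matchers resolve which item keys hold the path and the properties
        self.pathkey = defaultMatcher(options, 'path-key', name, 'path')
        self.propertieskey = defaultMatcher(
            options, 'properties-key', name, 'properties')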
    def _run(self, data):
        logger.info("Start importing profile: " + data['config'])
        Transmogrifier(self.context)(data['config'])
        logger.info("Stop importing profile: " + data['config'])
        IStatusMessage(self.request).addStatusMessage(
            'Migrated: %s' % data['config'], type='info')
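Finally, the snippets above rely on a few module-level names that the examples never show. The imports commonly look like this (shown for orientation, not copied from any single example):

import logging

from collective.transmogrifier.transmogrifier import Transmogrifier
from Products.statusmessages.interfaces import IStatusMessage

logger = logging.getLogger(__name__)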