Example #1
def _new_user(args):
    """Handle adding a new user to the system.

    If you don't include the required info, it will prompt you for it

    """
    if not args.username:
        args.username = raw_input('username? ')

    if not args.email:
        args.email = raw_input('email address? ')

    if not args.username or not args.email:
        raise Exception('Must supply a username and email address')

    import transaction
    _init_sql(args)
    from bookie.models import DBSession
    sess = DBSession()

    u = User()
    u.username = unicode(args.username)
    passwd = get_random_word(8)
    u.password = passwd
    u.email = unicode(args.email)
    u.activated = True
    u.is_admin = False
    u.api_key = User.gen_api_key()

    print dict(u)
    print passwd

    sess.add(u)
    sess.flush()
    transaction.commit()
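Note that the example commits without guarding against failure; a minimal sketch of the commit-or-abort pattern the `transaction` package expects (the `save_user` helper is illustrative, not part of the project above):

import transaction

def save_user(sess, user):
    # Illustrative helper: commit on success, abort on failure so the
    # shared transaction state is not left dirty for the next request.
    sess.add(user)
    try:
        transaction.commit()
    except Exception:
        transaction.abort()
        raise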
Example #2
 def publish(self):
     """Publish each queued object and empty queue"""
     _return = []
     while self.context:
         try:
             item = self.context.pull()
             transaction.commit()
         except ConflictError: # queue concurrency exception...expected
             if logger.getEffectiveLevel() == logging.DEBUG:
                 logger.exception("ConflictError while publishing queue, " +\
                     "transaction aborted.  This error is an expected " +\
                     "runtime condition and does not necessarily " +\
                     "indicate an application issue")
             transaction.abort()
             """If we plan on reusing this database session we must create a new transaction"""
             self.connection.newTransaction()
             continue # skip to next loop
         # TODO: Add tests for re-queing on publishing errors
         try:
             IPublisher(item).publish()
             _return.append(item)
         except RecoverablePublishingError:
             if item:
                 self.enqueue(item) # add item back into queue for publishing exceptions
             logger.exception("A recoverable publishing error has occured "+\
                              "for queued item %s.  The item will be " +\
                              "added back in the publishing queue." )
     notify(PublisherQueuePublishedEvent(self))
     return _return
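The abort-and-retry loop above can also be written with the transaction manager's built-in retry support; a minimal sketch, assuming the `transaction` package and an illustrative `process(item)` callable:

import transaction

def publish_with_retries(item, attempts=3):
    # Each attempt runs in a fresh transaction; the attempt context
    # manager commits on success and retries on transient errors such
    # as ZODB's ConflictError.
    for attempt in transaction.manager.attempts(attempts):
        with attempt:
            process(item)  # illustrative worker, not defined here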
Example #3
 def test_logged_in_no_skipcaptcha(self):
     self.portal.myfolder._Naaya___Skip_Captcha_Permission = tuple()
     transaction.commit()
     self.browser_do_login('contributor', 'contributor')
     self.browser.go('http://localhost/portal/myfolder/url_add_html')
     self.assertTrue(captcha_in_html(self.browser.get_html()))
     self.browser_do_logout()
Example #4
File: app.py Project: dmdm/PySite
 def add_fixtures(self):
     from pysite.models import DbSession
     from pprint import pprint
     sess = DbSession()
     transaction.begin()
     try:
         # Add in this sequence
         for g in self.__class__.FIXT_GROUPS:
             data = self.fixtures[g]
             print("***", g)
             for it in data:
                 it['owner'] = UNIT_TESTER_UID
                 pprint(it)
                 if g == 'roles':
                     usrmanager.add_role(it)
                 elif g == 'principals':
                     usrmanager.add_principal(it)
                 elif g == 'vmail_domains':
                     vmailmanager.add_domain(it)
                 elif g == 'vmail_mailboxes':
                     vmailmanager.add_mailbox(it)
                 elif g == 'vmail_aliases':
                     vmailmanager.add_alias(it)
                 else:
                     raise Exception("Unknown fixture group: '{0}'".format(
                         g))
         transaction.commit()
     except Exception:
         transaction.abort()
         raise
Example #5
def _import_users(context: IResource, registry: Registry, filename: str):
    users_info = _load_users_info(filename)
    users_info = [_normalize_user_info(u) for u in users_info]
    users = find_service(context, 'principals', 'users')
    groups = find_service(context, 'principals', 'groups')
    for user_info in users_info:
        user_by_name, user_by_email = _locate_user(user_info,
                                                   context,
                                                   registry)
        if user_by_name or user_by_email:
            logger.info('Updating user {} ({})'.format(user_info['name'],
                                                       user_info['email']))
            _update_user(user_by_name, user_by_email, user_info, groups)
        else:
            logger.info('Creating user {}'.format(user_info['name']))
            send_invitation = user_info.get('send_invitation_mail', False)
            activate = not send_invitation
            user = _create_user(user_info, users, registry, groups,
                                activate=activate)
            if send_invitation:
                logger.info('Sending invitation mail to {}'.format(user.name))
                _send_invitation_mail(user, user_info, registry)
            badge_names = user_info.get('badges', [])
            if badge_names:
                logger.info('Assign badge for user {}'.format(user.name))
                badges = _create_badges(user, badge_names, registry)
                _assign_badges(user, badges, registry)
    transaction.commit()
Example #6
    def checkPackWhileReferringObjectChanges(self):
        # Packing should not remove objects referenced by an
        # object that changes during packing.
        db = DB(self._storage)
        try:
            # add some data to be packed
            c = db.open()
            root = c.root()
            child = PersistentMapping()
            root['child'] = child
            transaction.commit()
            expect_oids = [child._p_oid]

            def inject_changes():
                # Change the database just after the list of objects
                # to analyze has been determined.
                child2 = PersistentMapping()
                root['child2'] = child2
                transaction.commit()
                expect_oids.append(child2._p_oid)

            adapter = self._storage._adapter
            adapter.packundo.on_filling_object_refs = inject_changes
            packtime = time.time()
            self._storage.pack(packtime, referencesf)

            # "The on_filling_object_refs hook should have been called once")
            self.assertEqual(len(expect_oids), 2, expect_oids)

            # Both children should still exist.
            self._storage.load(expect_oids[0], '')
            self._storage.load(expect_oids[1], '')
        finally:
            db.close()
Example #7
 def inject_changes():
     # Change the database just after the list of objects
     # to analyze has been determined.
     child2 = PersistentMapping()
     root['child2'] = child2
     transaction.commit()
     expect_oids.append(child2._p_oid)
Example #8
    def checkAutoReconnectOnSync(self):
        # Verify auto-reconnect.
        db = DB(self._storage)
        try:
            c1 = db.open()
            r = c1.root()

            c1._storage._load_conn.close()
            c1._storage.sync()
            # ZODB5 calls sync when a connection is opened. Our monkey
            # patch on a Connection makes sure that works in earlier
            # versions, but we don't have that patch on ZODB5. So test
            # the storage directly. NOTE: The load connection must be
            # open to trigger the actual sync.

            r = c1.root()
            r['alpha'] = 1
            transaction.commit()
            c1.close()

            c1._storage._load_conn.close()
            c1._storage._store_conn.close()

            c2 = db.open()
            self.assertIs(c2, c1)

            r = c2.root()
            self.assertEqual(r['alpha'], 1)
            r['beta'] = PersistentMapping()
            c2.add(r['beta'])
            transaction.commit()
            c2.close()
        finally:
            db.close()
Example #9
    def checkPackBatchLockNoWait(self):
        # Exercise the code in the pack algorithm that attempts to get the
        # commit lock but will sleep if the lock is busy.
        self._storage = self.make_storage(pack_batch_timeout=0)

        adapter = self._storage._adapter
        test_conn, test_cursor = adapter.connmanager.open()

        slept = []
        def sim_sleep(seconds):
            slept.append(seconds)
            adapter.locker.release_commit_lock(test_cursor)
            test_conn.rollback()
            adapter.connmanager.close(test_conn, test_cursor)

        db = DB(self._storage)
        try:
            # add some data to be packed
            c = db.open()
            r = c.root()
            r['alpha'] = PersistentMapping()
            transaction.commit()
            del r['alpha']
            transaction.commit()

            # Pack, with a commit lock held
            now = packtime = time.time()
            while packtime <= now:
                packtime = time.time()
            adapter.locker.hold_commit_lock(test_cursor)
            self._storage.pack(packtime, referencesf, sleep=sim_sleep)

            self.assertTrue(len(slept) > 0)
        finally:
            db.close()
Example #10
    def test_officeconnector_reauth_does_not_checkout(self, browser):
        api.portal.set_registry_record(
            'direct_checkout_and_edit_enabled',
            True,
            interface=IOfficeConnectorSettings)
        transaction.commit()

        # We cannot freeze time due to the test browser being threaded
        oc_url = create_oc_url(
            self.doc1.REQUEST, self.doc1, {'action': 'checkout'})
        decoded_oc_url = jwt.decode(oc_url.split(':')[-1], verify=False)

        redirector_js = browser.login().open(
            self.doc1,
            view='checkout_documents'
            '?_authenticator={}&mode=external&reauth=1'
            .format(createToken()),
            ).css('script.redirector')[0].text

        tokens_from_js = [token for token in redirector_js.split("\'")
                          if 'oc:' in token]

        self.assertEqual(3, len(tokens_from_js))

        parsed_oc_url = tokens_from_js[0]
        decoded_parsed_oc_url = jwt.decode(
            parsed_oc_url.split(':')[-1], verify=False)

        # Take out the timestamps
        del decoded_oc_url['exp']
        del decoded_parsed_oc_url['exp']

        self.assertEqual(decoded_oc_url, decoded_parsed_oc_url)
        self.assertFalse(self.get_manager(self.doc1).is_checked_out_by_current_user())
Example #11
    def checkAutoReconnect(self):
        # Verify auto-reconnect
        db = DB(self._storage)
        try:
            c1 = db.open()
            r = c1.root()
            r['alpha'] = 1
            transaction.commit()
            c1.close()

            c1._storage._load_conn.close()
            c1._storage._store_conn.close()
            # ZODB5 implicitly calls sync
            # immediately when a connection is opened;
            # fake that here for older releases.
            c2 = db.open()
            self.assertIs(c2, c1)
            c2.sync()
            r = c2.root()
            self.assertEqual(r['alpha'], 1)
            r['beta'] = PersistentMapping()
            c2.add(r['beta'])
            transaction.commit()
            c2.close()
        finally:
            db.close()
Example #12
    def check_record_iternext(self):

        db = DB(self._storage)
        conn = db.open()
        conn.root()['abc'] = MinPO('abc')
        conn.root()['xyz'] = MinPO('xyz')
        transaction.commit()

        # Ensure it's all on disk.
        db.close()
        self._storage.close()

        self.open()

        key = None
        for x in (b'\000', b'\001', b'\002'):
            oid, tid, data, next_oid = self._storage.record_iternext(key)
            self.assertEqual(oid, (b'\000' * 7) + x)
            key = next_oid
            expected_data, expected_tid = load_current(self._storage, oid)
            self.assertEqual(expected_data, data)
            self.assertEqual(expected_tid, tid)
            if x == b'\002':
                self.assertEqual(next_oid, None)
            else:
                self.assertNotEqual(next_oid, None)
Example #13
def bootStrapSubscriber(event):
    """The actual subscriber to the bootstrap IDataBaseOpenedEvent

    Boostrap a Zope3 instance given a database object This first checks if the
    root folder exists and has a site manager.  If it exists, nothing else
    is changed.  If no root folder exists, one is added.
    """

    db, connection, root, root_folder = getInformationFromEvent(event)
    root_created = False

    if root_folder is None:
        root_created = True
        # ugh... we depend on the root folder implementation
        root_folder = rootFolder()
        root[ZopePublication.root_name] = root_folder
        if not ISite.providedBy(root_folder):
            site_manager = site.LocalSiteManager(root_folder)
            root_folder.setSiteManager(site_manager)

        transaction.commit()

    connection.close()

    zope.event.notify(interfaces.DatabaseOpenedWithRoot(db))
Example #14
    def __call__(self, name, content_type, data):
        ctr = getToolByName(self.context, 'content_type_registry')
        type_ = ctr.findTypeName(name.lower(), '', '') or 'File'

        # XXX: quick fix for german umlauts
        name = name.decode("utf8")

        normalizer = getUtility(IFileNameNormalizer)
        chooser = INameChooser(self.context)

        # otherwise I get ZPublisher.Conflict ConflictErrors
        # when uploading multiple files
        upload_lock.acquire()
        try:
            # this should fix #8; chooseName runs under the lock so the
            # lock is released even if it raises
            newid = chooser.chooseName(normalizer.normalize(name),
                                       self.context.aq_parent)
            transaction.begin()
            obj = ploneutils._createObjectByType(type_,
                                                 self.context, newid)
            mutator = obj.getPrimaryField().getMutator(obj)
            mutator(data, content_type=content_type)
            obj.setTitle(name)
            obj.reindexObject()

            notify(ObjectInitializedEvent(obj))
            notify(ObjectModifiedEvent(obj))

            transaction.commit()
        finally:
            upload_lock.release()
        return obj
Example #15
    def check_conversion_from_dict_to_btree_data_in_fsIndex(self):
        # To support efficient range searches on its keys as part of
        # implementing a record iteration protocol in FileStorage, we
        # converted the fsIndex class from using a dictionary as its
        # self._data attribute to using an OOBTree in its stead.

        from ZODB.fsIndex import fsIndex
        from BTrees.OOBTree import OOBTree

        # Create some data, and remember the index.
        for i in range(10):
            self._dostore()
        data_dict = dict(self._storage._index._data)

        # Replace the OOBTree with a dictionary and commit it.
        self._storage._index._data = data_dict
        transaction.commit()

        # Save the index.
        self._storage.close()

        # Verify it's converted to fsIndex in memory upon open.
        self.open()
        self.assertTrue(isinstance(self._storage._index, fsIndex))
        self.assertTrue(isinstance(self._storage._index._data, OOBTree))

        # Verify it has the right content.
        new_data_dict = dict(self._storage._index._data)
        self.assertEqual(len(data_dict), len(new_data_dict))

        for k in data_dict:
            old_tree = data_dict[k]
            new_tree = new_data_dict[k]
            self.assertEqual(list(old_tree.items()), list(new_tree.items()))
Example #16
    def test_reindex(self):
        """Verify object reindexing"""
        if not self.redis_running():
            self.fail("requires redis")

        # set up a test page
        context = api.content.create(
            type='Document',
            title='Test Document',
            container=self.portal)
        # api.content.create auto-indexes; modifying the object afterwards does not
        context.title = 'Foobar Document'
        # we need to commit to make the object visible for async
        transaction.commit()
        # verify that our change is not indexed yet
        catalog = api.portal.get_tool('portal_catalog')
        self.assertFalse(context.title in [x.Title for x in catalog()])

        result = tasks.ReindexObject(context, self.request)()
        self.waitfor(result)
        # we need to commit in order to see the other transaction
        transaction.commit()
        # check that our modification was indexed
        self.assertTrue(context.title in [x.Title for x in catalog()])
Example #17
 def delete_restore_single_result(self, result_id):
     try:
         DBSession.query(VMRestoreResult).filter(VMRestoreResult.id == result_id).delete()
         transaction.commit()
         LOGGER.info('VM restore single result is deleted.')
     except Exception as ex:
         LOGGER.error(to_str(ex).replace("'", ''))
Example #18
    def test_model_list(self):
        from ptah.manage.model import ModelModule, ModelView

        content = Content1()
        content.title = u'Content test'

        ptah.cms.Session.add(content)
        ptah.cms.Session.flush()

        rowid = content.__id__
        transaction.commit()

        mod = ModelModule(None, DummyRequest())
        model = mod['content1']

        res = ModelView.__renderer__(model, DummyRequest())
        self.assertIn('value="%s"'%rowid, res.body)

        res = ModelView.__renderer__(model, DummyRequest(
                params={'batch': 0}))
        self.assertIn('value="%s"'%rowid, res.body)

        res = ModelView.__renderer__(model, DummyRequest(
                params={'batch': 'unknown'}))
        self.assertIn('value="%s"'%rowid, res.body)
Example #19
    def test_model_edit_node(self):
        from ptah.manage.model import ModelModule, EditRecord

        content = Content2()
        content.title = u'Content test'

        ptah.cms.Session.add(content)
        ptah.cms.Session.flush()

        rowid = content.__id__
        transaction.commit()

        mod = ModelModule(None, DummyRequest())
        model = mod['content2'][rowid]

        form = EditRecord(
            model, DummyRequest(
                POST=MultiDict({'title': 'Content',
                                'form.buttons.modify': 'Modify'})))
        form.csrf = False
        form.update()

        self.assertIn("Model record has been modified.",
                      form.request.session['msgservice'][0])
        transaction.commit()

        content = ptah.cms.Session.query(Content2) \
            .filter(Content2.__id__ == rowid).first()
        self.assertEqual(content.title, u'Content')
Example #20
def appmaker(zodb_root):
    if 'app_root' not in zodb_root:
        app_root = AppRoot()
        zodb_root['app_root'] = app_root
        import transaction
        transaction.commit()
    return zodb_root['app_root']
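For context, a minimal sketch of driving such an appmaker from a plain ZODB connection (the storage path is illustrative):

import ZODB, ZODB.FileStorage

storage = ZODB.FileStorage.FileStorage('Data.fs')  # illustrative path
db = ZODB.DB(storage)
conn = db.open()
app_root = appmaker(conn.root())  # creates app_root and commits on first run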
Example #21
    def test_error_on_no_remaining_identities(self):
        # Given a user with only one connected identity, produce an error
        #   disabling the user from removing the only remaining identity.
        # Note that we prevent this from happening server side, but
        #   on the client side the user should be given the option to
        #   completely remove the user account.
        from .models import Identity, User
        with transaction.manager:
            # Create the user first...
            user = User()
            DBSession.add(user)
            DBSession.flush()
            user_id = user.id
        # It's easier to do this in two transactions to avoid unrealistic
        #   flushing issues.
        with transaction.manager:
            name = 'Hao'
            identity = Identity(name, name, 'openid')
            identity.user_id = str(user_id)
            DBSession.add(identity)
            DBSession.flush()
            identity_id = identity.id

        request = testing.DummyRequest()
        removed_identity_id = identity_id
        request.matchdict = {'user_id': user_id,
                             'identity_id': removed_identity_id,
                             }

        from .views import delete_user_identity
        from pyramid.httpexceptions import HTTPForbidden
        with self.assertRaises(HTTPForbidden):
            # Forbidden, you can't do this, ever.
            delete_user_identity(request)
        transaction.commit()
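The test leans on `transaction.manager` doubling as a context manager: the block commits on normal exit and aborts if it raises. A minimal sketch (reusing the example's DBSession and Identity):

import transaction

with transaction.manager:  # begins a transaction
    DBSession.add(Identity('Hao', 'Hao', 'openid'))
    DBSession.flush()
# committed on normal exit; an exception inside the block aborts instead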
Example #22
 def registerLocalUser(cls, email='', password='', groups=None):
     from quizsmith.app.models import Groups
     if groups is None:
         groups = [Groups.by(3).first()]
     user = Users(email=email, password=password, groups=groups)
     DBSession.add(user)
     transaction.commit()
Example #23
    def test_statusmap_on_objects_having_inactive_content(self, browser):
        # Create a container which will hold the content used for this test.
        # The container does not play an important role in the test.
        folder = create(Builder('folder').titled('Container'))

        # Create some content used in this test.
        create(Builder('folder')
               .titled('Active Folder')
               .within(folder))
        create(Builder('folder')
               .titled('Inactive Folder')
               .having(effectiveDate=DateTime() + 10)
               .within(folder))

        # A user not having the permission to access inactive content can only
        # change the state of the container and the active folder inside the
        # container, but not the inactive folder inside the container.
        # Thus the inactive folder must not be visible in the statusmap view.
        setRoles(self.portal, TEST_USER_ID, ['Contributor'])
        transaction.commit()
        browser.login().visit(folder, view="@@statusmap")
        self.assertEqual(
            ['Container', 'Active Folder'],
            browser.css('.listing tr td span').text
        )

        # A manager can also change the state of the inactive folder.
        # Thus the inactive folder must be visible in the statusmap view too.
        setRoles(self.portal, TEST_USER_ID, ['Manager'])
        transaction.commit()
        browser.login().visit(folder, view="@@statusmap")
        self.assertEqual(
            ['Container', 'Active Folder', 'Inactive Folder'],
            browser.css('.listing tr td span').text
        )
Example #24
 def activateCondition(self, name=None):
     registry = getUtility(IRegistry)
     settings = registry.forInterface(ISettings)
     settings.enabled = True
     if name:
         settings.activated = set((name,))
     transaction.commit()
Example #25
def _registerPackage(module_, init_func=None):
    """Registers the given python package as a Zope 2 style product
    """

    if not hasattr(module_, '__path__'):
        raise ValueError("Must be a package and the " \
                         "package must be filesystem based")
    
    app = Zope2.app()
    try:
        product = initializeProduct(module_, 
                                    module_.__name__, 
                                    module_.__path__[0],
                                    app)

        product.package_name = module_.__name__

        if init_func is not None:
            newContext = ProductContext(product, app, module_)
            init_func(newContext)
    finally:
        try:
            import transaction
            transaction.commit()
        finally:
            app._p_jar.close()
Example #26
    def test_view(self, browser):
        portal = self.layer['portal']

        self.grant('Manager')

        # Folders and templates
        template_folder_1 = create_testobject(
            portal,
            'opengever.tasktemplates.tasktemplatefolder',
            title='TaskTemplateFolder 1')

        template1 = create_testobject(
            template_folder_1,
            'opengever.tasktemplates.tasktemplate',
            title='TaskTemplate 1',
            text='Test Text',
            preselected=True,
            task_type='unidirectional_by_value',
            issuer='responsible',
            responsible_client='interactive_users',
            deadline=7,
            responsible='current_user', )
        transaction.commit()

        browser.login().open(template1)
        self.assertEquals(['TaskTemplate 1'],
                          browser.css('.documentFirstHeading').text)
Example #27
def get_login_headers(request, user):
    headers = remember(request, user.email)
    user.last_login_date = datetime.now()
    DBSession.add(user)
    DBSession.flush()
    transaction.commit()
    return headers
Example #28
    def test_statusmap_on_inactive_content(self, browser):
        inactive_folder = create(Builder('folder')
                                 .titled('Inactive Folder')
                                 .having(effectiveDate=DateTime() + 10)
                                 .within(self.portal))

        create(Builder('folder')
               .titled('Active Folder')
               .within(inactive_folder))

        # A user not having the permission to access inactive content must
        # be able to change the state of the inactive content itself, i.e.
        # calling the statusmap view on an inactive context.
        setRoles(self.portal, TEST_USER_ID, ['Contributor'])
        transaction.commit()
        browser.login().visit(inactive_folder, view="@@statusmap")
        self.assertEqual(
            ['Inactive Folder', 'Active Folder'],
            browser.css('.listing tr td span').text
        )

        # The same applies to a user having the permission to access inactive
        # content.
        setRoles(self.portal, TEST_USER_ID, ['Manager'])
        transaction.commit()
        browser.login().visit(inactive_folder, view="@@statusmap")
        self.assertEqual(
            ['Inactive Folder', 'Active Folder'],
            browser.css('.listing tr td span').text
        )
Example #29
def checkin_documents_for_user(portal, options):
    """Attempts to check in all documents checked out by a particular user.
    """
    username = options.user

    # Assume security context of user
    user = portal.acl_users.getUser(username)
    user = user.__of__(portal.acl_users)
    newSecurityManager(portal, user)

    catalog = getToolByName(portal, 'portal_catalog')
    docs = catalog(portal_type='opengever.document.document')
    checked_out_docs = [b.getObject() for b in docs if b.checked_out == username]

    for obj in checked_out_docs:
        manager = getMultiAdapter((obj, obj.REQUEST), ICheckinCheckoutManager)
        if not manager.is_checkin_allowed():
            print "WARNING: Checkin not allowed for document %s" % obj.absolute_url()
            print checkin_not_allowed_reason(obj)
        else:
            if not options.dryrun:
                manager.checkin(comment=options.comment)
                print "Checked in document %s" % obj.absolute_url()
            else:
                print "Would checkin document %s" % obj.absolute_url()

    if not options.dryrun:
        transaction.commit()
Example #30
 def registerNonLocalUser(cls, email='', fullname='', groups=None):
     from quizsmith.app.models import Groups
     if not groups:
         groups = [Groups.by(3).first()]
     user = Users(email=email, fullname=fullname, is_local=False, groups=groups)
     DBSession.add(user)
     transaction.commit()
Example #31
def main(argv=sys.argv):
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    print("Beginning update.")
    PopulatedSystem.__table__.drop(engine)
    Listing.__table__.drop(engine)
    Station.__table__.drop(engine)
    Faction.__table__.drop(engine)
    Body.__table__.drop(engine)
    Faction.__table__.create(engine)
    PopulatedSystem.__table__.create(engine)
    Body.__table__.create(engine)
    Station.__table__.create(engine)
    Listing.__table__.create(engine)
    mark_changed(DBSession())
    transaction.commit()

    #
    # Factions
    #
    print("Updating factions...")
    print("Downloading factions.jsonl from EDDB.io...")
    r = requests.get("https://eddb.io/archive/v5/factions.jsonl", stream=True)
    with open('factions.json', 'wb') as f:
        for chunk in r.iter_content(chunk_size=4096):
            if chunk:
                f.write(chunk)
    print("Saved factions.json. Updating...")
    url = str(engine.url) + "::" + Faction.__tablename__
    ds = dshape("var *{  id: ?int64,  name: ?string,  updated_at: ?int64,  government_id: ?int64,  "
                "government: ?string,  allegiance_id: ?int64,  allegiance: ?string,  "
                "state_id: ?int64,  state: ?string, home_system_id: ?int64,  "
                "is_player_faction: ?bool }")
    t = odo('jsonlines://factions.json', url, dshape=ds)
    print("Done! Creating index...")
    DBSession.execute("CREATE INDEX factions_idx ON factions(id)")
    mark_changed(DBSession())
    transaction.commit()
    print("Completed processing factions.")

    #
    # Systems
    #
    print("Downloading systems_recently.csv from EDDB.io...")
    r = requests.get("https://eddb.io/archive/v5/systems_recently.csv", stream=True)
    with open('systems_recently.csv', 'wb') as f:
        for chunk in r.iter_content(chunk_size=4096):
            if chunk:
                f.write(chunk)
    print("Saved systems_recently.csv. Creating temporary table and importing...")
    DBSession.execute("CREATE TEMP TABLE systems_tmp (LIKE systems)")
    url = str(engine.url) + "::systems_tmp"
    ds = dshape("var *{  id: ?int64,  edsm_id: ?int64,  name: ?string,  x: ?float64,  y: ?float64,  "
                "z: ?float64,  population: ?int64,  is_populated: ?bool,  government_id: ?int64,  "
                "government: ?string,  allegiance_id: ?int64,  allegiance: ?string,  "
                "state_id: ?int64,  state: ?string,  security_id: ?float64,  security: ?string,  "
                "primary_economy_id: ?float64,  primary_economy: ?string,  power: ?string,  "
                "power_state: ?string,  power_state_id: ?string,  needs_permit: ?bool,  "
                "updated_at: ?int64,  simbad_ref: ?string,  controlling_minor_faction_id: ?string,  "
                "controlling_minor_faction: ?string,  reserve_type_id: ?float64,  reserve_type: ?string  }")
    t = odo('systems_recently.csv', url, dshape=ds)
    print("Updating systems...")
    DBSession.execute("INSERT INTO systems(id, edsm_id, name, x, y, z, population, is_populated, government_id, "
                      "government, allegiance_id, allegiance, state_id, state, security_id, security, "
                      "primary_economy_id, primary_economy, power, power_state, power_state_id, needs_permit, "
                      "updated_at, simbad_ref, controlling_minor_faction_id, controlling_minor_faction, "
                      "reserve_type_id, reserve_type) SELECT id, edsm_id, name, x, y, z, population, is_populated, "
                      "government_id, government, allegiance_id, allegiance, state_id, state, security_id, security, "
                      "primary_economy_id, primary_economy, power, power_state, power_state_id, needs_permit, "
                      "updated_at, simbad_ref, controlling_minor_faction_id, controlling_minor_faction, "
                      "reserve_type_id, reserve_type from systems_tmp ON CONFLICT DO UPDATE "
                      "SET edsm_id = EXCLUDED.edsm_id, name = EXCLUDED.name, x = EXCLUDED.x, "
                      "y = EXCLUDED.y, z = EXCLUDED.z, population = EXCLUDED.population, "
                      "is_populated = EXCLUDED.population, government_id = EXCLUDED.government_id, "
                      "government = EXCLUDED.government, allegiance_id = EXCLUDED.allegiance_id, "
                      "allegiance = EXCLUDED.allegiance, state_id = EXCLUDED.state_id, "
                      "state = EXCLUDED.state, security_id = EXCLUDED.security_id, security = EXCLUDED.security, "
                      "primary_economy_id = EXCLUDED.primary_economy_id, primary_economy = EXCLUDED.primary_economy, "
                      "power = EXCLUDED.power, power_state = EXCLUDED.power_state, power_state_id = "
                      "EXCLUDED.power_state_id, needs_permit = EXCLUDED.needs_permit, updated_at = "
                      "EXCLUDED.updated_at, simbad_ref = EXCLUDED.simbad_ref,"
                      "controlling_minor_faction_id = EXCLUDED.controlling_minor_faction_id, "
                      "reserve_type_id = EXCLUDED.reserve_type_id, reserve_type = EXCLUDED.reserve_type")
    mark_changed(DBSession())
    transaction.commit()
    print("Done!")

    #
    # Bodies
    #
    print("Downloading bodies.jsonl from EDDB.io...")
    r = requests.get("https://eddb.io/archive/v5/bodies.jsonl", stream=True)
    with open('bodies.json', 'wb') as f:
        for chunk in r.iter_content(chunk_size=4096):
            if chunk:
                f.write(chunk)
    print("Saved bodies.jsonl. Converting JSONL to SQL.")
    DBSession.execute("CREATE TEMP TABLE bodies_tmp (LIKE bodies)")
    url = str(engine.url) + "::bodies_tmp"
    ds = dshape("var *{ id: ?int64, created_at: ?int64, updated_at: ?int64, name: ?string, "
                "system_id: ?int64, group_id: ?int64, group_name: ?string, type_id: ?int64, "
                "type_name: ?string, distance_to_arrival: ?int64, full_spectral_class: ?string, "
                "spectral_class: ?string, spectral_sub_class: ?string, luminosity_class: ?string, "
                "luminosity_sub_class: ?string, surface_temperature: ?int64, is_main_star: ?bool, "
                "age: ?int64, solar_masses: ?float64, solar_radius: ?float64, catalogue_gliese_id : ?string, "
                "catalogue_hipp_id: ?string, catalogue_hd_id: ?string, volcanism_type_id: ?int64, "
                "volcanism_type_name: ?string, atmosphere_type_id: ?int64, atmosphere_type_name: ?string, "
                "terraforming_state_id: ?int64, terraforming_state_name: ?string, earth_masses: ?float64, "
                "radius: ?int64, gravity: ?float64, surface_pressure: ?int64, orbital_period: ?float64, "
                "semi_major_axis: ?float64, orbital_eccentricity: ?float64, orbital_inclination: ?float64, "
                "arg_of_periapsis: ?float64, rotational_period: ?float64, "
                "is_rotational_period_tidally_locked: ?bool, axis_tilt: ?float64, eg_id: ?int64, "
                "belt_moon_masses: ?float64, ring_type_id: ?int64, ring_type_name: ?string, "
                "ring_mass: ?int64, ring_inner_radius: ?float64, ring_outer_radius: ?float64, "
                "rings: ?json, atmosphere_composition: ?json, solid_composition: ?json, "
                "materials: ?json, is_landable: ?bool}")
    t = odo('jsonlines://bodies.json', url, dshape=ds)
    print("Creating indexes...")
    DBSession.execute("CREATE INDEX bodies_idx ON bodies(name text_pattern_ops)")
    mark_changed(DBSession())
    transaction.commit()
    DBSession.execute("CREATE INDEX systemid_idx ON bodies(system_id)")
    mark_changed(DBSession())
    transaction.commit()
    print("Done!")

    #
    # Populated systems
    #
    print("Downloading systems_populated.jsonl from EDDB.io...")
    r = requests.get("https://eddb.io/archive/v5/systems_populated.jsonl", stream=True)
    with open('systems_populated.json', 'wb') as f:
        for chunk in r.iter_content(chunk_size=4096):
            if chunk:
                f.write(chunk)
    print("Saved systems_populated.json. Updating...")
    url = str(engine.url) + "::" + PopulatedSystem.__tablename__
    ds = dshape("var *{  id: ?int64,  edsm_id: ?int64,  name: ?string,  x: ?float64,  y: ?float64,  "
                "z: ?float64,  population: ?int64,  is_populated: ?bool,  government_id: ?int64,  "
                "government: ?string,  allegiance_id: ?int64,  allegiance: ?string,  "
                "state_id: ?int64,  state: ?string,  security_id: ?float64,  security: ?string,  "
                "primary_economy_id: ?float64,  primary_economy: ?string,  power: ?string,  "
                "power_state: ?string,  power_state_id: ?string,  needs_permit: ?int64,  "
                "updated_at: ?int64,  simbad_ref: ?string,  controlling_minor_faction_id: ?string,  "
                "controlling_minor_faction: ?string,  reserve_type_id: ?float64,  reserve_type: ?string,"
                "minor_faction_presences: ?json }")
    t = odo('jsonlines://systems_populated.json', url, dshape=ds)
    print("Done! Uppercasing system names...")
    DBSession.execute("UPDATE populated_systems SET name = UPPER(name)")
    mark_changed(DBSession())
    transaction.commit()
    print("Creating indexes...")
    DBSession.execute("CREATE INDEX index_populated_system_names_trigram ON populated_systems "
                      "USING GIN(name gin_trgm_ops)")
    mark_changed(DBSession())
    transaction.commit()
    DBSession.execute("CREATE INDEX index_populated_system_names_btree ON populated_systems (name)")
    mark_changed(DBSession())
    transaction.commit()
    print("Completed processing populated systems.")

    #
    # Stations
    #
    print("Downloading stations.jsonl from EDDB.io...")
    r = requests.get("https://eddb.io/archive/v5/stations.jsonl", stream=True)
    with open('stations.json', 'wb') as f:
        for chunk in r.iter_content(chunk_size=4096):
            if chunk:
                f.write(chunk)
    print("Saved stations.json. Updating...")
    DBSession.execute("CREATE TEMP TABLE stations_tmp (LIKE stations)")
    url = str(engine.url) + "::stations_tmp"
    ds = dshape("var *{  id: ?int64,  name: ?string,  system_id: ?int64,  updated_at: ?int64,  "
                "max_landing_pad_size: ?string,  distance_to_star: ?int64,  government_id: ?int64,  "
                "government: ?string,  allegiance_id: ?int64,  allegiance: ?string,  "
                "state_id: ?int64,  state: ?string,  type_id: ?int64,  type: ?string,  "
                "has_blackmarket: ?bool,  has_market: ?bool,  has_refuel: ?bool,  "
                "has_repair: ?bool,  has_rearm: ?bool,  has_outfitting: ?bool,  "
                "has_shipyard: ?bool,  has_docking: ?bool,  has_commodities: ?bool,  "
                "import_commodities: ?json,  export_commodities: ?json,  prohibited_commodities: ?json, "
                "economies: ?json, shipyard_updated_at: ?int64, outfitting_updated_at: ?int64, "
                "market_updated_at: ?int64, is_planetary: ?bool, selling_ships: ?json, "
                "selling_modules: ?json, settlement_size_id: ?string, settlement_size: ?int64, "
                "settlement_security_id: ?int64, settlement_security: ?string, body_id: ?int64,"
                "controlling_minor_faction_id: ?int64 }")
    t = odo('jsonlines://stations.json', url, dshape=ds)
    print("Done! Cleaning stations without body references...")
    DBSession.execute("DELETE FROM stations_tmp WHERE body_id NOT IN (SELECT b.id from bodies b)")
    mark_changed(DBSession())
    transaction.commit()
    DBSession.execute("UPDATE stations SET id=t.id, name=t.name, system_id=t.system_id, updated_at=t.updated_at, "
                      "max_landing_pad_size=t.max_landing_pad_size, ")
    DBSession.execute("CREATE INDEX index_stations_systemid_btree ON stations(system_id)")
    mark_changed(DBSession())
    transaction.commit()
    DBSession.execute("CREATE INDEX index_stations_btree ON stations(id)")
    mark_changed(DBSession())
    transaction.commit()
    print("Completed processing stations.")

    #
    # Listings
    #
    print("Downloading listings.csv from EDDB.io...")
    r = requests.get("https://eddb.io/archive/v5/listings.csv", stream=True)
    with open('listings.csv', 'wb') as f:
        for chunk in r.iter_content(chunk_size=4096):
            if chunk:
                f.write(chunk)
    print("Saved listings.csv. Updating...")
    url = str(engine.url) + "::" + Listing.__tablename__
    ds = dshape("var *{  id: ?int64, station_id: ?int64, commodity: ?int64, supply: ?int64, "
                "buy_price: ?int64, sell_price: ?int64, demand: ?int64, collected_at: ?int64 }")
    t = odo('listings.csv', url, dshape=ds)

    print("Creating indexes...")
    DBSession.execute("CREATE INDEX index_listings_stationid_btree ON listings(station_id)")
    mark_changed(DBSession())
    transaction.commit()
    print("Updates complete.")
Example #32
import ZODB, ZODB.FileStorage
import persistent
import transaction

# The snippet begins mid-class; the imports and class statement are
# reconstructed (persistent.Persistent is an assumption, so that ZODB
# tracks attribute changes on stored instances).
class Student(persistent.Persistent):
    studentName = ''
    def setStudentName(self, sName):
        self.studentName = sName
    def getStudentName(self):
        return self.studentName

storage = ZODB.FileStorage.FileStorage('mydata.fs')
db = ZODB.DB(storage)
connection = db.open()
root = connection.root

# saving the data
root.s1 = Student()

# set the data into the node
root.s1.setStudentName("james")

# save the changes!
transaction.commit()
Example #33
def create_defaults(registry, defaults, quiet=False):
    def log(message):
        if not quiet:
            print message

    root = initialize_zodb(registry)

    # clear the domains, deployments and users from the ZODB so they can
    # be reloaded from the JSON - a temporary change while we decide how
    # the json and zodb approaches will relate to each other
    root['deployments'].clear()
    root['domains'].clear()
    root['users'].clear()

    deployments = root['deployments']
    domains = root['domains']
    users = root['users']

    for name, deploy_data in defaults.get('deployments', {}).items():
        if name in deployments:
            log(' · Getting deployment "{}"'.format(name))
        else:
            log(' + Adding deployment "{}"'.format(name))
        deployment = deployments.add(name=name, title=deploy_data['title'])
        for component_data in deploy_data['components']:
            if deployment.get_component(component_data['type'],
                                        name=component_data['name']) is None:
                deployment.add_component(component_data['type'],
                                         component_data['name'],
                                         component_data['title'],
                                         component_data['config'])
                log('   + Added new "{type}" component named "{name}"'.format(
                    **component_data))

    for name, domain_data in defaults.get('domains', {}).items():
        if name in domains:
            log(' · Getting domain "{}"'.format(name))
        else:
            log(' + Adding domain "{}"'.format(name))

        domain = domains.add(name=name, title=domain_data['title'])
        for component_data in domain_data['components']:
            if component_data['name'] not in domain:
                component = deployments[
                    component_data['deployment']].get_component(
                        component_data['type'], name=component_data['name'])
                if component is None:
                    log('   x Missing {type} component named "{name}" on deployment "{deployment}"'
                        .format(**component_data))
                else:
                    domain.assign(component)
                    log('   √ Assigned "{type}" component named "{name}"'.
                        format(**component_data))

    for user_data in defaults.get('users', []):
        username = user_data['username']
        domain = user_data['domain']
        roles = user_data['roles']

        log(' · Setting user roles "{}"'.format(username))

        if domain not in users:
            log('   + Adding user {username} on domain {domain} with roles {roles}'
                .format(**user_data))
            users.add(username, domain, roles)
        else:
            if username not in users[domain]:
                log('   + Adding user {username} on domain {domain} with roles {roles}'
                    .format(**user_data))
                users.add(username, domain, roles)
            else:
                log('   · Setting roles {roles} for user {username} on domain {domain}'
                    .format(**user_data))
                users[domain][username].set_roles(roles)

    transaction.commit()
    registry._temp_zodb_connection.close()
    del registry._temp_zodb_connection
Example #34
def unregisterUtility(context):
    my_utility = getUtility(ITinyMCE)
    context.getSiteManager().unregisterUtility(my_utility, ITinyMCE)
    del my_utility

    transaction.commit()
Example #35
def run(port=1883,
        config=None,
        publish_on_pubrel=True,
        overlapping_single=True,
        dropQoS0=True,
        zero_length_clientids=True,
        topicAliasMaximum=2,
        maximumPacketSize=1000,
        receiveMaximum=2,
        serverKeepAlive=60):
    global logger, broker3, broker5, brokerSN, server
    logger = logging.getLogger('MQTT broker')
    logger.setLevel(logging.INFO)
    logger.addFilter(filter)

    lock = threading.RLock()  # shared lock
    persistence = False
    if persistence:
        connection, sharedData = setup_persistence(
            "sharedData"
        )  # location for data shared between brokers - subscriptions for example
    else:
        sharedData = {}

    broker3 = MQTTV3Brokers(publish_on_pubrel=publish_on_pubrel,
                            overlapping_single=overlapping_single,
                            dropQoS0=dropQoS0,
                            zero_length_clientids=zero_length_clientids,
                            lock=lock,
                            sharedData=sharedData)

    broker5 = MQTTV5Brokers(publish_on_pubrel=publish_on_pubrel,
                            overlapping_single=overlapping_single,
                            dropQoS0=dropQoS0,
                            zero_length_clientids=zero_length_clientids,
                            topicAliasMaximum=topicAliasMaximum,
                            maximumPacketSize=maximumPacketSize,
                            receiveMaximum=receiveMaximum,
                            serverKeepAlive=serverKeepAlive,
                            lock=lock,
                            sharedData=sharedData)

    brokerSN = MQTTSNBrokers(lock=lock, sharedData=sharedData)

    brokers = [broker3, broker5, brokerSN]

    broker3.setBroker5(broker5)
    broker5.setBroker3(broker3)

    brokerSN.setBroker3(broker3)
    brokerSN.setBroker5(broker5)

    servers = []
    UDPListeners.setBroker(brokerSN)
    TCPListeners.setBrokers(broker3, broker5)
    HTTPListeners.setBrokers(broker3, broker5, brokerSN)
    HTTPListeners.setSharedData(lock, sharedData)

    try:
        if config is None:
            TCPBridges.setBroker5(broker5)
            TCPBridges.create(1886)
            servers.append(TCPListeners.create(1883, serve_forever=True))
        else:
            servers_to_create = []
            lineno = 0
            while lineno < len(config):
                curline = config[lineno].strip()
                lineno += 1
                if curline.startswith('#') or len(curline) == 0:
                    continue
                words = curline.split()
                if words[0] == "listener":
                    ca_certs = certfile = keyfile = None
                    cert_reqs = ssl.CERT_REQUIRED
                    bind_address = ""
                    port = 1883
                    TLS = False
                    if len(words) > 1:
                        port = int(words[1])
                    protocol = "mqtt"
                    if len(words) >= 3:
                        bind_address = words[2]
                    if len(words) >= 4:
                        if words[3] in ["mqttsn", "http"]:
                            protocol = words[3]
                    while lineno < len(config) and not config[lineno].strip(
                    ).startswith("listener"):
                        curline = config[lineno].strip()
                        lineno += 1
                        if curline.startswith('#') or len(curline) == 0:
                            continue
                        words = curline.split()
                        if words[0] == "require_certificate":
                            if words[1] == "false":
                                cert_reqs = ssl.CERT_OPTIONAL
                        elif words[0] == "cafile":
                            ca_certs = words[1]
                            TLS = True
                        elif words[0] == "certfile":
                            certfile = words[1]
                            TLS = True
                        elif words[0] == "keyfile":
                            keyfile = words[1]
                            TLS = True
                    if protocol == "mqtt":
                        servers_to_create.append((TCPListeners, {
                            "host": bind_address,
                            "port": port,
                            "TLS": TLS,
                            "cert_reqs": cert_reqs,
                            "ca_certs": ca_certs,
                            "certfile": certfile,
                            "keyfile": keyfile
                        }))
                    elif protocol == "mqttsn":
                        servers_to_create.append((UDPListeners, {
                            "host": bind_address,
                            "port": port
                        }))
                    elif protocol == "http":
                        servers_to_create.append((HTTPListeners, {
                            "host": bind_address,
                            "port": port,
                            "TLS": TLS,
                            "cert_reqs": cert_reqs,
                            "ca_certs": ca_certs,
                            "certfile": certfile,
                            "keyfile": keyfile
                        }))
            servers_to_create[-1][1]["serve_forever"] = True
            for server in servers_to_create:
                servers.append(server[0].create(**server[1]))

    except KeyboardInterrupt:
        pass
    except:
        logger.exception("startBroker")

    # Stop incoming communications
    import socket
    for server in servers:
        try:
            logger.info("Stopping listener %s", str(server))
            server.shutdown()
        except:
            traceback.print_exc()

    logger.info("Shutdown brokers")
    for broker in brokers:
        try:
            logger.info("Stopping broker %s", str(broker))
            broker.shutdown()
        except:
            traceback.print_exc()
    filter.measure()

    logger.debug("Ending sharedData %s", sharedData)
    if persistence:
        sharedData._p_changed = True
        import transaction
        transaction.commit()
        connection.close()
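The `setup_persistence` helper used above is not shown in the snippet; a minimal ZODB-backed sketch of what it might look like (all names and the storage path are assumptions):

import ZODB, ZODB.FileStorage
import transaction
from persistent.mapping import PersistentMapping

def setup_persistence(name):
    # Sketch: open a FileStorage-backed database and return the open
    # connection plus a persistent mapping stored under the given key.
    db = ZODB.DB(ZODB.FileStorage.FileStorage(name + '.fs'))
    connection = db.open()
    root = connection.root()
    if name not in root:
        root[name] = PersistentMapping()
        transaction.commit()
    return connection, root[name]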
Example #36
    def _PackWhileWriting(self, pack_now):
        # A storage should allow some reading and writing during
        # a pack.  This test attempts to exercise locking code
        # in the storage to test that it is safe.  It generates
        # a lot of revisions, so that pack takes a long time.

        db = DB(self._storage)
        conn = db.open()
        root = conn.root()

        for i in range(10):
            root[i] = MinPO(i)
        transaction.commit()

        snooze()
        packt = time.time()

        choices = range(10)
        for dummy in choices:
            for i in choices:
                root[i].value = MinPO(i)
                transaction.commit()

        # How many client threads should we run, and how long should we
        # wait for them to finish?  Hard to say.  Running 4 threads and
        # waiting 30 seconds too often left a thread still alive on Tim's
        # Win98SE box, during ZEO flavors of this test.  Those tend to
        # run one thread at a time to completion, and take about 10 seconds
        # per thread.  There doesn't appear to be a compelling reason to
        # run that many threads.  Running 3 threads and waiting up to a
        # minute seems to work well in practice.  The ZEO tests normally
        # finish faster than that, and the non-ZEO tests very much faster
        # than that.
        NUM_LOOP_TRIP = 50
        timer = ElapsedTimer(time.time())
        threads = [
            ClientThread(db, choices, NUM_LOOP_TRIP, timer, i)
            for i in range(3)
        ]
        for t in threads:
            t.start()

        if pack_now:
            db.pack(time.time())
        else:
            db.pack(packt)

        for t in threads:
            t.join(60)
        liveness = [t.isAlive() for t in threads]
        if True in liveness:
            # They should have finished by now.
            print 'Liveness:', liveness
            # Combine the outcomes, and sort by start time.
            outcomes = []
            for t in threads:
                outcomes.extend(t.outcomes)
            # each outcome list has as many of these as a loop trip got thru:
            #     thread_id
            #     elapsed millis at loop top
            #     elapsed millis at attempt to assign to self.root[index]
            #     index into self.root getting replaced
            #     elapsed millis when outcome known
            #     'OK' or 'Conflict'
            #     True if we got beyond this line, False if it raised an
            #         exception (one possible Conflict cause):
            #             self.root[index].value = MinPO(j)
            def cmp_by_time(a, b):
                return cmp((a[1], a[0]), (b[1], b[0]))

            outcomes.sort(cmp_by_time)
            counts = [0] * 4
            for outcome in outcomes:
                n = len(outcome)
                assert n >= 2
                tid = outcome[0]
                print 'tid:%d top:%5d' % (tid, outcome[1]),
                if n > 2:
                    print 'commit:%5d' % outcome[2],
                    if n > 3:
                        print 'index:%2d' % outcome[3],
                        if n > 4:
                            print 'known:%5d' % outcome[4],
                            if n > 5:
                                print '%8s' % outcome[5],
                                if n > 6:
                                    print 'assigned:%5s' % outcome[6],
                counts[tid] += 1
                if counts[tid] == NUM_LOOP_TRIP:
                    print 'thread %d done' % tid,
                print

            self.fail('a thread is still alive')

        self._sanity_check()
Example #37
def commit_transaction():
    try:
        transaction.commit()
    except Exception as e:
        log.critical(e)
        transaction.abort()
Example #38
def work(app, shoppath):
    plone_root = app.unrestrictedTraverse(str(shoppath.split('/')[0]))
    setSite(plone_root)
    shop = app.unrestrictedTraverse(str(shoppath))
    SAPArticles = getAllItems()
    # ADDING
    mailbody = u"""\
<!DOCTYPE html>
<html>
        <body>
          <h1>Neue Artikel</h1>
            <ul>"""
    print "ADDING OBJECTS"
    newarts = False
    with click.progressbar(SAPArticles) as IteratorSAPArticles:
        for item in IteratorSAPArticles:
            if save_id(item.matnr) not in shop:
                newarts = True
                artikel = getArticle(item.matnr)
                logger.info('ADD SOMETHING TO SHOP')
                shop_item = api.content.create(type="Artikel",
                                               title=artikel.title,
                                               description=item.description,
                                               artikelnummer=item.matnr,
                                               id=save_id(item.matnr),
                                               webcode=webcodehandler(),
                                               container=shop)
                shop_item.medienart = artikel.medienart
                #api.content.transition(obj=shop_item, transition='publish')
                mailbody += u"<li>%s</li>" % artikel.title
                print artikel.title
                transaction.commit()
    mailbody += u"</ul>"
    if not newarts:
        mailbody += u"<p>Es wurden keine neuen Artikel hinzugefügt."
    # DELETE
    mailbody += u"""<h1>Artikel auf Status privat</h1>
                      <ul>"""
    shopset = set(shop.keys())
    sapset = set([save_id(x.matnr) for x in SAPArticles])
    print "DELETEING OBJECTS"
    delarts = False
    with click.progressbar(shopset.difference(sapset)) as delitems:
        for matnr in delitems:
            artpath = '/portal/medienportal/artikel/%s' % matnr
            artikel = api.content.get(path=artpath)
            if artikel:
                titel = artikel.title
                url = artikel.absolute_url().replace(
                    'http://nohost/portal', 'https://medien.bgetem.de')
                review_state = api.content.get_state(obj=artikel)
                if review_state != 'private':
                    delarts = True
                    #api.content.delete(shop[matnr])
                    api.content.transition(shop[matnr], transition='reject')
                    mailbody += u'<li><a href="%s">%s</a> (ID: %s)</li>' % (
                        url, titel, matnr)
                    print 'Artikel soll geloescht werden: %s' % matnr
                    transaction.commit()
    mailbody += u"</ul>"
    if not delarts:
        mailbody += u"<p>Es wurden keine vorhandenen Artikel auf den Status privat gesetzt.</p>"
    mailbody += u"</body></html>"
    fromaddr = "*****@*****.**"
    toaddr = "*****@*****.**"
    msg = MIMEMultipart()
    msg['From'] = fromaddr
    msg['To'] = toaddr
    msg['Subject'] = "Update Medienportal"
    body = mailbody
    msg.attach(MIMEText(body.encode('utf-8'), 'html', 'utf-8'))
    text = msg.as_string()
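    # NOTE: 'server' is assumed to be an smtplib.SMTP connection created elsewhere in this module.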
    server.sendmail(fromaddr, toaddr, text)
Example #39
0
def test_celery__TransactionAwareTask_retry(
        interaction, celery_session_worker):
    """It keeps the interaction on retry."""
    result = get_principal_retry.delay()
    transaction.commit()
    assert [1, 'User'] == result.get()
Example #40
0
 def reload_if_needed(self):
     super(SQLCache, self).reload_if_needed()
     if self.request is None:
         transaction.commit()
         self.db.close()
Example #41
0
    def fCrearInforme( self, 
        theUseCaseQueryResult   =None,
        theCheckPermissions     =True,
        thePermissionsCache     =None, 
        theRolesCache           =None, 
        theParentExecutionRecord=None):
        """Create a new instance of TRAInforme, capturing a summary of translations by languages and detailed report by modules and languages.
        
        """
        
        unExecutionRecord = self.fStartExecution( 'method',  'fCrearInforme', theParentExecutionRecord,  True, { 'log_what': 'details', 'log_when': True, }, ) 
        
        from Products.ModelDDvlPloneTool.ModelDDvlPloneTool_Mutators import cModificationKind_CreateSubElement, cModificationKind_Create
        
        try:
        
            aReport = self.fModelDDvlPloneTool().fModelDDvlPloneTool_Mutators( self).fNewVoidCreateElementReport()
            
            unPermissionsCache = fDictOrNew( thePermissionsCache)
            unRolesCache       = fDictOrNew( theRolesCache)
            
            
            unUseCaseQueryResult = self.fUseCaseAssessment(  
                theUseCaseName          = cUseCase_CreateTRAInforme, 
                theElementsBindings     = { cBoundObject: self,},
                theRulesToCollect       = [ 'languages', 'modules',], 
                thePermissionsCache     = unPermissionsCache, 
                theRolesCache           = unRolesCache, 
                theParentExecutionRecord= unExecutionRecord
            ) 
            if not unUseCaseQueryResult or not unUseCaseQueryResult.get( 'success', False):
                return aReport

            
            unAhoraString = self.fDateTimeNowTextual()
            unAhoraParaId = unAhoraString.replace( ':', '-').replace( ' ', '-')
            unMemberId    = self.fGetMemberId()
            
            
            unTitleInformeACrear = '%s by %s' % ( unAhoraString, unMemberId,)
            unIdInformeACrear    = '%s-%s'    % ( unAhoraParaId, unMemberId,)
      
            unInformeExistente = self.getElementoPorID( unIdInformeACrear)
            unCountIds = 0
            while not ( unInformeExistente == None):
                unCountIds += 1
                unTitleInformeACrear = '%s by %s (%d)' % ( unAhoraString, unMemberId, unCountIds)
                unIdInformeACrear    = '%s-%s-%d'      % ( unAhoraParaId, unMemberId, unCountIds)
                
                unInformeExistente = self.getElementoPorID( unIdInformeACrear)
                
                
            aNewInformeAttrsDict = { 
                'title': unTitleInformeACrear,
            }
            
            unaIdNuevoInforme = self.invokeFactory( cNombreTipoTRAInforme, unIdInformeACrear, **aNewInformeAttrsDict)
            if not unaIdNuevoInforme:
                aReport.update( { 'effect': 'error', 'failure': 'creation_failure', })
                return aReport
                     
            unNuevoInforme = self.getElementoPorID( unaIdNuevoInforme)
            if not unNuevoInforme:
                aReport.update( { 'effect': 'error', 'failure': 'new_element_not_found', })
                return aReport
            
            unNuevoInforme.manage_fixupOwnershipAfterAdd()
          
            unNuevoInforme.pSetPermissions()

            
            unResultadoNuevoInforme = self.fModelDDvlPloneTool().fRetrieveTypeConfig( 
                theTimeProfilingResults     =None,
                theElement                  =unNuevoInforme, 
                theParent                   =None,
                theParentTraversalName      ='',
                theTypeConfig               =None, 
                theAllTypeConfigs           =None, 
                theViewName                 ='', 
                theRetrievalExtents         =[ 'traversals', ],
                theWritePermissions         =None,
                theFeatureFilters           ={ 'attrs': [ 'title',], 'aggregations': [], 'relations': [], 'do_not_recurse_collections': True,}, 
                theInstanceFilters          =None,
                theTranslationsCaches       =None,
                theCheckedPermissionsCache  =unPermissionsCache,
                theAdditionalParams         =None                
            )
            if ( not unResultadoNuevoInforme) or ( unResultadoNuevoInforme.get( 'object', None) == None):
                aReport.update( { 'effect': 'error', 'failure': 'new_element_retrieval_failed', })
                return aReport
            
            
                
        
            unNuevoInforme.fElaborarInforme(                
                theUseCaseQueryResult       =unUseCaseQueryResult,
                theForceEllaboration        =True, 
                theCheckPermissions         =False, 
                thePermissionsCache         =unPermissionsCache, 
                theRolesCache               =unRolesCache, 
                theParentExecutionRecord    =unExecutionRecord,
            )
            
            
            aModelDDvlPloneTool_Mutators = self.fModelDDvlPloneTool().fModelDDvlPloneTool_Mutators( self)
                
            aCreateElementReport = aModelDDvlPloneTool_Mutators.fNewVoidCreateElementReport()
            aCreateElementReport.update( { 'effect': 'created', 'new_object_result': unResultadoNuevoInforme, })
            
            someFieldReports    = aCreateElementReport[ 'field_reports']
            aFieldReportsByName = aCreateElementReport[ 'field_reports_by_name']
            
            aReportForField = { 'attribute_name': 'id',          'effect': 'changed', 'new_value': unaIdNuevoInforme, 'previous_value': '',}
            someFieldReports.append( aReportForField)            
            aFieldReportsByName[ aReportForField[ 'attribute_name']] = aReportForField
            
            aReportForField = { 'attribute_name': 'title',       'effect': 'changed', 'new_value': unTitleInformeACrear,           'previous_value': '',}
            someFieldReports.append( aReportForField)            
            aFieldReportsByName[ aReportForField[ 'attribute_name']] = aReportForField

            aModelDDvlPloneTool_Mutators.pSetAudit_Creation( self,           cModificationKind_CreateSubElement, aCreateElementReport, theUseCounter=True)       
            aModelDDvlPloneTool_Mutators.pSetAudit_Creation( unNuevoInforme, cModificationKind_Create,           aCreateElementReport)       
         
            self.pFlushCachedTemplates()   
            unNuevoInforme.pFlushCachedTemplates()   
                
            unCatalogo = self.getCatalogo()
            if not ( unCatalogo == None):
                unCatalogo.pFlushCachedTemplates()            
                        
            aReport.update( { 'effect': 'created', 'new_object_result': unResultadoNuevoInforme, })

            transaction.commit( )

            logging.getLogger( 'gvSIGi18n').info("COMMIT") 
            
            return aReport
        
        finally:
            unExecutionRecord and unExecutionRecord.pEndExecution()
            unExecutionRecord and unExecutionRecord.pClearLoggedAll()
Example #42
0
    def fCreateNewProgressAndOptionallyHandlerForElement( self, 
        theCreateHandler        =False,
        theInitialElement       =None, 
        theProcessType          ='', 
        theInputParameters      =None,
        theTimestamp            ='',
        theResult               =None,     
        theInitializeLambda     =None,
        theLoopLambda           =None,
        theElementLambda        =None,
        theElementPloneLambda   =None,
        theFinalizeLambda       =None,
        theLockCatalog          =False,
        thePermissionsCache     =None, 
        theRolesCache           =None, 
        theParentExecutionRecord=None,):
 

        unExecutionRecord = self.fStartExecution( 'method',  'fCreateNewProgressAndOptionallyHandlerForElement', theParentExecutionRecord,  False, None, ) 
        
        unPermissionsCache = fDictOrNew( thePermissionsCache)
        unRolesCache       = fDictOrNew( theRolesCache)

    
        try:
 
            unResult = self.fNewVoidCreateProgressHandlerResult()
            
            if theInitialElement == None:
                return unResult
            
            if not theProcessType:
                return unResult
            
            if not ( theProcessType in cTRAProgress_ProcessTypes):
                return unResult

            if not theResult:
                return unResult  
            
            
            unCatalogoRaiz = self.getCatalogo()           
            if unCatalogoRaiz == None:
                return unResult
        
            
            someProgressSupportKinds = self.fProgressSupportKindsForProcessTypeOnTarget( theProcessType, theInitialElement)
            if not someProgressSupportKinds:
                return unResult
            theResult[ 'progress_support_kinds'] = someProgressSupportKinds
            

            someProgressControlParms = self.fNewProgressControlParmsForProcessType( theProcessType, someProgressSupportKinds, theInitialElement)
            if not someProgressControlParms:
                return unResult
            theResult[ 'progress_parameters'] = someProgressControlParms
            
            
            someProgressControlCounters = self.fNewVoidProgressControlCounters( )
            theResult[ 'progress_counters'] = someProgressControlCounters

                
            
            unElementTitle = theInitialElement.Title()
            unElementPath  = theInitialElement.fPhysicalPathString()
            unElementUID   = theInitialElement.UID()
            
            unElementMetaType = 'UnknownType'
            try:
                unElementMetaType = theInitialElement.meta_type
            except:
                unElementMetaType = theInitialElement.__class__.__name__
            if not unElementMetaType:
                unElementMetaType = 'UnknownType'
            
                
            aMemberId = self.fGetMemberId()
   
                
            unNuevoTitle       = '%s on %s by %s at %s' % ( theProcessType, unElementTitle, aMemberId, theTimestamp, )
            unNuevoDescription = 'Progress on %s process\n on element %s\n with path %s\n by %s\n started at %s' % ( theProcessType, unElementTitle, unElementPath.replace( '/', '/ '), aMemberId, theTimestamp, )
            
            unaNuevaId = unNuevoTitle.lower()
            unaNuevaId = unaNuevaId.replace(" ", "-")
            unaNuevaId = unaNuevaId.replace(":", "-")
    
            aPloneTool = self.getPloneUtilsToolForNormalizeString()
            if aPloneTool:
                unaNuevaId = aPloneTool.normalizeString( unaNuevaId)  
            
            unNuevoTitleACrear = unNuevoTitle
            unaNuevaIdACrear   = unaNuevaId
            
            
            
            aNewProgresoAttrsDict = { 
                'title':             unNuevoTitleACrear,
                'description':       unNuevoDescription,
                'tipoProceso':       theProcessType,
                'clasesSoporte':     ' '.join( someProgressSupportKinds),
                'comienzoTipo':      unElementMetaType,
                'comienzoTitulo':    unElementTitle,
                'comienzoUID':       unElementUID,
                'comienzoRuta':      unElementPath,
                'usuarioInformador': aMemberId,
                'estadoProceso':     cTRAProgreso_EstadoProceso_Inactivo,
                'haComenzado':       False,
                'haCompletadoConExito': False,
                'fechaComienzoProceso': None,
                'fechaUltimoInformeProgreso': None,
            }
            
               
            unProgresoExistente = self.getElementoPorID( unaNuevaIdACrear)
            unCountIds = 0
            while not ( unProgresoExistente == None):
                unCountIds += 1
                unNuevoTitleACrear = '%s-%d' % ( unNuevoTitle, unCountIds)
                unaNuevaIdACrear = '%s-%d'   % ( unaNuevaId, unCountIds)
                
                unProgresoExistente = self.getElementoPorID( unaNuevaIdACrear)
                
                
            aNewProgresoAttrsDict.update( { 
                'title':         unNuevoTitleACrear,
            })
            
            unaIdNuevoProgreso = self.invokeFactory( cNombreTipoTRAProgreso, unaNuevaIdACrear, **aNewProgresoAttrsDict)
            if not unaIdNuevoProgreso:
                return unResult
                     
            unNuevoProgreso = self.getElementoPorID( unaIdNuevoProgreso)
            if  unNuevoProgreso == None:
                return unResult

            
            unNuevoProgreso.manage_fixupOwnershipAfterAdd()
          
            unNuevoProgreso.pSetPermissions()
            
            
            
            unNuevoProgreso.pSetParametrosEntrada( theInputParameters)
            unNuevoProgreso.pSetDatosResultado(    theResult)
            unNuevoProgreso.pSetParametrosControl( someProgressControlParms)
            unNuevoProgreso.pSetContadoresControl( someProgressControlCounters)
            
                        
            transaction.commit()
            
            unCatalogoRaiz.pFlushCachedTemplates_All()

            if cTRAProgress_LogLongLivedProcess:                
                aLogger = logging.getLogger( 'gvSIGi18n')
                aLogger.info( '\n\nCreated %s %s (%s) UID=%s\n' % ( unNuevoProgreso.meta_type, unNuevoProgreso.Title(), unNuevoProgreso.fPhysicalPathString(), unNuevoProgreso.UID(),))
                
                
              
                
                
            if not theCreateHandler:    
                
                unResult.update( {
                    'success':                          True,
                    'condition':                        '',
                    'progress_element':                 unNuevoProgreso,
                })
                return unResult
                
            
            
            
            
            aProgressHandler = TRAProgressHandler( 
                theInitialElement, 
                unNuevoProgreso, 
                theInputParameters,
                someProgressControlParms, 
                someProgressControlCounters, 
                theResult,
                theInitializeLambda,
                theLoopLambda,
                theElementLambda,
                theElementPloneLambda,
                theFinalizeLambda,
                theLockCatalog,
                theTimestamp,
            )              
            if not aProgressHandler:
                return unResult
            
            aProgressHandlerKey = aProgressHandler.fKey()
            if not aProgressHandlerKey:
                return unResult
            
            if not unNuevoProgreso.fRegisterProgressHandler( aProgressHandler):
                return unResult
            
            unResult.update( {
                'success':                          True,
                'condition':                        '',
                'progress_handler_key':             aProgressHandlerKey,
                'progress_handler':                 aProgressHandler,
                'progress_element':                 unNuevoProgreso,
            })
            
            return unResult
   
                    
        finally:
            unExecutionRecord and unExecutionRecord.pEndExecution()
Example #43
0
 def commit(self, note):
     transaction.get().note(note)
     transaction.commit()
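Pairing transaction.get().note() with commit, as above, attaches a human-readable description to the transaction record; ZODB keeps it in the storage's transaction metadata and shows it in undo and history listings. A standalone sketch of the idiom (the note text is made up):

import transaction

def commit_with_note(note):
    # Attach a description to the pending transaction before committing;
    # it ends up in the transaction metadata recorded by the storage.
    transaction.get().note(note)
    transaction.commit()

commit_with_note(u'nightly import: refreshed article catalog')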
Example #44
0
def test_celery__TransactionAwareTask____call____4(
        celery_session_worker, interaction):
    """It propagates the task_id to the worker."""
    job = get_task_id.apply_async(task_id='my-nice-task-id')
    transaction.commit()
    assert 'my-nice-task-id' == job.get()
Example #45
0
 def testFlushQueueOnCommit(self):
     self.queue.index('foo')
     commit()
     self.assertEqual(self.queue.getState(), [])
     self.assertEqual(self.queue.processed, [(INDEX, 'foo', None)])
     self.assertEqual(self.queue.state, 'finished')
Example #46
0
 def dequeueMessage(self, activity_tool, processing_node,
                    node_family_id_list):
     message_list, group_method_id, uid_to_duplicate_uid_list_dict = \
       self.getProcessableMessageList(activity_tool, processing_node,
         node_family_id_list)
     if message_list:
         # Remove group_id parameter from group_method_id
         group_method_id = group_method_id.split('\0')[0]
         if group_method_id != "":
             method = activity_tool.invokeGroup
             args = (group_method_id, message_list, self.__class__.__name__,
                     hasattr(self, 'generateMessageUID'))
             activity_runtime_environment = ActivityRuntimeEnvironment(
                 None,
                 priority=min(x.line.priority for x in message_list),
             )
         else:
             method = activity_tool.invoke
             message, = message_list
             args = message_list
             activity_runtime_environment = ActivityRuntimeEnvironment(
                 message)
         # Commit right before executing messages.
         # MySQL transactions do not start at exactly the same time as ZODB
         # transactions, but slightly later, so the selected messages might
         # reference objects that are not yet available - or only available
         # in an old version - through the ZODB connector.
         # Therefore, commit all connectors now that everything needed has
         # been selected from MySQL, to get a fresh view of ZODB objects.
         transaction.commit()
         transaction.begin()
         # Try to invoke
         try:
             # Refer Timeout.activity_timeout instead of
             #   from Products.ERP5Type.Timeout import activity_timeout
             # so that we can override the value in the Timeout namespace in unit tests.
             offset = Timeout.activity_timeout
             with activity_runtime_environment, Deadline(offset):
                 method(*args)
             # Abort if at least 1 message failed. On next tic, only those that
             # succeeded will be selected because their at_date won't have been
             # increased.
             for m in message_list:
                 if m.getExecutionState() == MESSAGE_NOT_EXECUTED:
                     raise _DequeueMessageException
             transaction.commit()
         except:
             exc_info = sys.exc_info()
             if exc_info[1] is not _DequeueMessageException:
                 self._log(
                     WARNING,
                     'Exception raised when invoking messages (uid, path, method_id) %r'
                     % [(m.uid, m.object_path, m.method_id)
                        for m in message_list])
                 for m in message_list:
                     m.setExecutionState(MESSAGE_NOT_EXECUTED,
                                         exc_info,
                                         log=False)
             self._abort()
             exc_info = message_list[0].exc_info
             if exc_info:
                 try:
                     # Register it again.
                     with activity_runtime_environment:
                         cancel = message.on_error_callback(*exc_info)
                     del exc_info, message.exc_info
                     transaction.commit()
                     if cancel:
                         message.setExecutionState(MESSAGE_EXECUTED)
                 except:
                     self._log(
                         WARNING,
                         'Exception raised when processing error callbacks')
                     message.setExecutionState(MESSAGE_NOT_EXECUTED)
                     self._abort()
         self.finalizeMessageExecution(activity_tool, message_list,
                                       uid_to_duplicate_uid_list_dict)
     transaction.commit()
     return not message_list
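The commit-then-begin pair in the middle of dequeueMessage is the usual way to refresh the ZODB view mid-request: commit what was selected so far, then open a fresh transaction before invoking anything. A minimal sketch of that idiom, with process_one standing in for the real message invocation:

import transaction

def run_messages(messages, process_one):
    # End the selection transaction and start a fresh one so the worker
    # sees the current state of the objects it is about to touch.
    transaction.commit()
    transaction.begin()
    try:
        for message in messages:
            process_one(message)
        transaction.commit()
    except Exception:
        # Roll everything back; the messages remain queued for a later run.
        transaction.abort()
        raise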
Example #47
0
def uploadComments(comments):
    """Upload batch comment data directly into Fluidinfo.

    This function is copied from fluiddb/scripts/dataset.py and modified to
    allow an explicit about value for comments to be given and to drop the
    passing of the importer name and url.

    @param comments: A C{list} of comment C{dict}s, each with keys:
        about: The C{unicode} string that the comment is about.
        text: The C{unicode} text of the comment.
        when: A C{datetime.datetime} instance or C{None} if the
            current time should be used.
        username: The user's C{unicode} name in Fluidinfo.
    """
    if not dryRun:
        # import transaction and CommentAPI here to make sure no other code
        # in this file can possibly create comments.
        from fluiddb.security.comment import CommentAPI
        import transaction

    countByUser = defaultdict(int)
    nComments = len(comments)
    nUploaded = 0
    batchSize = 100
    print 'Uploading %d new comments.' % nComments

    while comments:
        # NOTE: Be careful here.  An obvious refactoring, at first glance,
        # is to move the logic to get the user and create the comment API
        # outside the loop, but doing so will cause the user object to be
        # used across transaction boundaries, which we don't want to do.
        # It's important that the database interactions for each batch
        # processed here are managed in a single transaction.
        thisBatch = comments[0:min(len(comments), batchSize)]
        try:
            user = getUser(u'fluidinfo.com')
            if user is None:
                raise Exception('Could not find fluidinfo.com user!')
            api = CommentAPI(user)
            for comment in thisBatch:
                # An explicit about value must be in a list and should
                # be lowercased (if not a URL).
                print 'IMPORT %d: %r' % (nUploaded, comment)
                about = [lowercaseAbout(comment['about'])]
                text = comment['text']
                if text is not None:
                    text = text.strip()
                if text:
                    countByUser[comment['username']] += 1
                    if not dryRun:
                        api.create(text, comment['username'], about=about,
                                   when=comment['when'])
                else:
                    print 'Skipped comment with invalid text: %r' % (comment,)
                nUploaded += 1
            if not dryRun:
                transaction.commit()
            print '%d of %d comments imported.' % (nUploaded, nComments)
        except:
            if not dryRun:
                transaction.abort()
            raise
        else:
            comments = comments[len(thisBatch):]

    print 'Number of comments added by user:'
    for user in countByUser:
        print user, countByUser[user]
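uploadComments generalizes to a common pattern: mutate a bounded slice of work, commit once per slice, abort the whole slice on any error, and only advance past a slice once it has been committed. A minimal sketch (save_item is hypothetical):

import transaction

def upload_in_batches(items, save_item, batch_size=100):
    while items:
        batch = items[:batch_size]
        try:
            for item in batch:
                save_item(item)
            transaction.commit()   # one commit per batch keeps transactions short
        except Exception:
            transaction.abort()    # nothing from the failed batch is persisted
            raise
        else:
            items = items[batch_size:]  # advance only after a successful commit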
Example #48
0
    def setUp(self):
        super(TestBatchingSiteRoot, self).setUp()

        for i in range(5):
            self._create_doc(self.portal, i)
        transaction.commit()
Example #49
0
 def setUpPloneSite(self, portal):
     quickInstallProduct(portal, 'collective.stripe')
     self.applyProfile(portal, 'collective.stripe:default')
     transaction.commit()
Example #50
0
    def setup_method(self, _):
        # Always see the diff
        # https://docs.python.org/2/library/unittest.html#unittest.TestCase.maxDiff
        self.maxDiff = None

        from c2cgeoportal_commons.models import DBSession
        from c2cgeoportal_commons.models.main import (
            Theme,
            LayerGroup,
            Interface,
            OGCServer,
            LayerWMS,
            LayerWMTS,
        )

        main = Interface(name="main")

        ogc_server_internal = create_default_ogcserver()
        ogc_server_external = OGCServer(name="__test_ogc_server_external",
                                        url="http://wms.geo.admin.ch/",
                                        image_type="image/jpeg")

        layer_internal_wms = LayerWMS(name="__test_layer_internal_wms",
                                      public=True)
        layer_internal_wms.layer = "__test_layer_internal_wms"
        layer_internal_wms.interfaces = [main]
        layer_internal_wms.ogc_server = ogc_server_internal

        layer_external_wms = LayerWMS(name="__test_layer_external_wms",
                                      layer="ch.swisstopo.dreiecksvermaschung",
                                      public=True)
        layer_external_wms.interfaces = [main]
        layer_external_wms.ogc_server = ogc_server_external

        layer_wmts = LayerWMTS(name="__test_layer_wmts", public=True)
        layer_wmts.url = "http://example.com/1.0.0/WMTSCapabilities.xml"
        layer_wmts.layer = "map"
        layer_wmts.interfaces = [main]

        layer_group_1 = LayerGroup(name="__test_layer_group_1")
        layer_group_1.children = [layer_internal_wms]

        layer_group_2 = LayerGroup(name="__test_layer_group_2")
        layer_group_2.children = [layer_external_wms]

        layer_group_3 = LayerGroup(name="__test_layer_group_3")
        layer_group_3.children = [layer_wmts]

        layer_group_4 = LayerGroup(name="__test_layer_group_4")
        layer_group_4.children = [layer_group_1, layer_group_2]

        layer_group_5 = LayerGroup(name="__test_layer_group_5")
        layer_group_5.children = [layer_group_1, layer_group_3]

        layer_group_6 = LayerGroup(name="__test_layer_group_6")
        layer_group_6.children = [layer_internal_wms]

        layer_group_7 = LayerGroup(name="__test_layer_group_7")
        layer_group_7.children = [layer_group_1, layer_group_6]

        layer_group_8 = LayerGroup(name="__test_layer_group_8")
        layer_group_8.children = [layer_group_2, layer_group_6]

        theme = Theme(name="__test_theme")
        theme.interfaces = [main]
        theme.children = [
            layer_group_1,
            layer_group_2,
            layer_group_3,
            layer_group_4,
            layer_group_5,
            layer_group_7,
            layer_group_8,
        ]

        DBSession.add(theme)

        transaction.commit()
Example #51
0
    def test_delete_group(self):
        response = self.api_session.delete('/@groups/ploneteam')
        transaction.commit()

        self.assertEqual(response.status_code, 204)
        self.assertEqual(None, self.gtool.getGroupById('ploneteam'))
Example #52
0
    def initialize(self, rpchost, rpcport, sync_all, ipc):
        if ipc:
            eth = EthIpc()
        else:
            eth = EthJsonRpc(rpchost, rpcport)

        if self.last_block:
            blockNum = self.last_block
            print("Resuming synchronization from block " + str(blockNum))
        else:
            blockNum = eth.eth_blockNumber()
            print("Starting synchronization from latest block: " +
                  str(blockNum))

        while (blockNum > 0):

            if not blockNum % 1000:
                print("Processing block " + str(blockNum) + ", " +
                      str(len(self.contracts.keys())) +
                      " unique contracts in database")

            block = eth.eth_getBlockByNumber(blockNum)

            for tx in block['transactions']:

                if not tx['to']:

                    receipt = eth.eth_getTransactionReceipt(tx['hash'])

                    if receipt is not None:

                        contract_address = receipt['contractAddress']

                        contract_code = eth.eth_getCode(contract_address)
                        contract_balance = eth.eth_getBalance(contract_address)

                        if not contract_balance and not sync_all:
                            # skip contracts with zero balance (disable with --sync-all)
                            continue

                        code = ETHContract(contract_code, tx['input'])

                        m = hashlib.md5()
                        m.update(contract_code.encode('UTF-8'))
                        contract_hash = m.digest()

                        try:
                            self.contracts[contract_hash]
                        except KeyError:
                            self.contracts[contract_hash] = code
                            m = InstanceList()
                            self.instance_lists[contract_hash] = m

                        self.instance_lists[contract_hash].add(
                            contract_address, contract_balance)

                        transaction.commit()

            self.last_block = blockNum
            blockNum -= 1
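Committing once per stored contract, as above, turns a long synchronization into a series of small checkpoints: self.last_block survives a crash, so the scan can resume where it stopped. A stripped-down sketch of that checkpointing idea (process_block and the persistent state mapping are stand-ins):

import transaction

def sync_blocks(block_numbers, process_block, state):
    for block_num in block_numbers:
        process_block(block_num)
        state['last_block'] = block_num  # resume point if the process dies
        transaction.commit()             # checkpoint after each unit of work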
Example #53
0
    def process(self, params):
        if request.method not in HTTP_VERBS:
            # Just to be sure that we don't get some crappy http verb we don't expect
            raise BadRequest

        cfg = Config.getInstance()
        forced_conflicts = cfg.getForceConflicts()
        max_retries = cfg.getMaxRetries()
        profile = cfg.getProfile()
        profile_name, res, textLog = '', '', []

        self._startTime = datetime.now()

        # clear the context
        ContextManager.destroy()
        ContextManager.set('currentRH', self)
        g.rh = self

        # redirect to HTTPS if necessary
        if self._checkHttpsRedirect():
            return self._responseUtil.make_redirect()

        if self.EVENT_FEATURE is not None:
            self._check_event_feature()

        DBMgr.getInstance().startRequest()
        textLog.append("%s : Database request started" %
                       (datetime.now() - self._startTime))
        Logger.get('requestHandler').info(
            u'Request started: %s %s [IP=%s] [PID=%s]', request.method,
            request.relative_url, request.remote_addr, os.getpid())

        is_error_response = False
        try:
            for i, retry in enumerate(transaction.attempts(max_retries)):
                with retry:
                    if i > 0:
                        signals.before_retry.send()

                    try:
                        profile_name, res = self._process_retry(
                            params, i, profile, forced_conflicts)
                        signals.after_process.send()
                        if i < forced_conflicts:  # raise conflict error if enabled to easily handle conflict error case
                            raise ConflictError
                        if self.commit:
                            transaction.commit()
                        else:
                            transaction.abort()
                        DBMgr.getInstance().endRequest(commit=False)
                        break
                    except (ConflictError, POSKeyError):
                        transaction.abort()
                        import traceback
                        # only log conflict if it wasn't forced
                        if i >= forced_conflicts:
                            Logger.get('requestHandler').warning(
                                'Database conflict')
                    except ClientDisconnected:
                        transaction.abort()
                        Logger.get('requestHandler').warning(
                            'Client disconnected')
                        time.sleep(i)
                    except DatabaseError:
                        handle_sqlalchemy_database_error()
                        break
            self._process_success()
        except Exception as e:
            transaction.abort()
            res = self._getMethodByExceptionName(e)(e)
            if isinstance(e, HTTPException) and e.response is not None:
                res = e.response
            is_error_response = True

        totalTime = (datetime.now() - self._startTime)
        textLog.append('{} : Request ended'.format(totalTime))

        # log request timing
        if profile and os.path.isfile(profile_name):
            rep = Config.getInstance().getTempDir()
            stats = pstats.Stats(profile_name)
            stats.strip_dirs()
            stats.sort_stats('cumulative', 'time', 'calls')
            stats.dump_stats(os.path.join(rep, 'IndicoRequestProfile.log'))
            output = StringIO.StringIO()
            sys.stdout = output
            stats.print_stats(100)
            sys.stdout = sys.__stdout__
            s = output.getvalue()
            f = file(os.path.join(rep, 'IndicoRequest.log'), 'a+')
            f.write('--------------------------------\n')
            f.write('URL     : {}\n'.format(request.url))
            f.write('{} : start request\n'.format(self._startTime))
            f.write('params:{}'.format(params))
            f.write('\n'.join(textLog))
            f.write(s)
            f.write('--------------------------------\n\n')
            f.close()
        if profile and profile_name and os.path.exists(profile_name):
            os.remove(profile_name)

        if self._responseUtil.call:
            return self._responseUtil.make_call()

        if is_error_response and isinstance(
                res, (current_app.response_class, Response)):
            # if we went through error handling code, responseUtil._status has been changed
            # so make_response() would fail
            return res

        # If no processing was needed, return an empty string to avoid erroneous
        # output, especially with getVars breaking the JS files.
        if not self._doProcess or res is None:
            return self._responseUtil.make_empty()

        return self._responseUtil.make_response(res)
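The retry loop above drives transaction.attempts(), which yields context managers that absorb retryable errors (such as ZODB's ConflictError) until the attempt budget runs out, then let them propagate. A minimal sketch of that machinery in isolation, assuming do_work performs the actual ZODB mutations:

import transaction
from ZODB.POSException import ConflictError

def run_with_retries(do_work, max_retries=3):
    for attempt in transaction.attempts(max_retries):
        with attempt:
            try:
                result = do_work()
                transaction.commit()
                return result
            except ConflictError:
                # Abort and re-raise; the attempt context manager swallows
                # the conflict and retries until the budget is exhausted.
                transaction.abort()
                raise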
Example #54
0
    def test_delete_non_existing_group(self):
        response = self.api_session.delete('/@groups/non-existing-group')
        transaction.commit()

        self.assertEqual(response.status_code, 404)
Example #55
0
 def tearDown(self):
     self.portal.manage_delObjects(ids=FOLDER['id'])
     transaction.commit()
Example #56
0
    def test_anon_only(self):

        # Add folder content
        setRoles(self.portal, TEST_USER_ID, ('Manager', ))
        self.portal.invokeFactory('Folder', 'f1')
        self.portal['f1'].title = u"Folder one"
        self.portal['f1'].description = u"Folder one description"
        self.portal['f1'].reindexObject()

        # Add page content
        self.portal['f1'].invokeFactory('Document', 'd1')
        self.portal['f1']['d1'].title = u"Document one"
        self.portal['f1']['d1'].description = u"Document one description"
        testText = "Testing... body one"
        self.portal['f1']['d1'].text = RichTextValue(testText, 'text/plain',
                                                     'text/html')
        self.portal['f1']['d1'].reindexObject()

        # Publish the folder and page
        self.portal.portal_workflow.doActionFor(self.portal['f1'], 'publish')
        self.portal.portal_workflow.doActionFor(self.portal['f1']['d1'],
                                                'publish')

        # Set pages to have weak caching and test anonymous

        self.cacheSettings.operationMapping = {
            'plone.content.itemView': 'plone.app.caching.weakCaching'
        }
        transaction.commit()

        # View the page as anonymous
        browser = Browser(self.app)
        browser.open(self.portal['f1']['d1'].absolute_url())
        self.assertEqual('plone.content.itemView',
                         browser.headers['X-Cache-Rule'])
        self.assertEqual('plone.app.caching.weakCaching',
                         browser.headers['X-Cache-Operation'])
        self.assertTrue(testText in browser.contents)
        self.assertEqual('max-age=0, must-revalidate, private',
                         browser.headers['Cache-Control'])

        # Set pages to have moderate caching so that we can see the difference
        # between logged in and anonymous

        self.cacheSettings.operationMapping = {
            'plone.content.itemView': 'plone.app.caching.moderateCaching'
        }
        self.registry['plone.app.caching.moderateCaching.smaxage'] = 60
        self.registry['plone.app.caching.moderateCaching.vary'] = 'X-Anonymous'
        self.registry['plone.app.caching.moderateCaching.anonOnly'] = True

        transaction.commit()

        # View the page as anonymous
        browser = Browser(self.app)
        browser.open(self.portal['f1']['d1'].absolute_url())
        self.assertEqual('plone.content.itemView',
                         browser.headers['X-Cache-Rule'])
        self.assertEqual('plone.app.caching.moderateCaching',
                         browser.headers['X-Cache-Operation'])
        self.assertTrue(testText in browser.contents)
        self.assertEqual('max-age=0, s-maxage=60, must-revalidate',
                         browser.headers['Cache-Control'])
        self.assertEqual('X-Anonymous', browser.headers['Vary'])
        self.assertFalse('Etag' in browser.headers)

        # View the page as logged-in
        browser = Browser(self.app)
        browser.addHeader(
            'Authorization', 'Basic %s:%s' % (
                TEST_USER_NAME,
                TEST_USER_PASSWORD,
            ))
        browser.open(self.portal['f1']['d1'].absolute_url())
        self.assertEqual('plone.content.itemView',
                         browser.headers['X-Cache-Rule'])
        self.assertEqual('plone.app.caching.moderateCaching',
                         browser.headers['X-Cache-Operation'])
        self.assertTrue(testText in browser.contents)
        self.assertEqual('max-age=0, must-revalidate, private',
                         browser.headers['Cache-Control'])
        self.assertTrue('Etag' in browser.headers)

        # Set pages to have strong caching so that we can see the difference
        # between logged in and anonymous

        self.cacheSettings.operationMapping = {
            'plone.content.itemView': 'plone.app.caching.strongCaching'
        }
        self.registry['plone.app.caching.strongCaching.vary'] = 'X-Anonymous'
        self.registry['plone.app.caching.strongCaching.anonOnly'] = True
        transaction.commit()

        # View the page as anonymous
        browser = Browser(self.app)
        browser.open(self.portal['f1']['d1'].absolute_url())
        self.assertEqual('plone.content.itemView',
                         browser.headers['X-Cache-Rule'])
        self.assertEqual('plone.app.caching.strongCaching',
                         browser.headers['X-Cache-Operation'])
        self.assertTrue(testText in browser.contents)
        self.assertEqual('max-age=86400, proxy-revalidate, public',
                         browser.headers['Cache-Control'])
        self.assertEqual('X-Anonymous', browser.headers['Vary'])
        self.assertFalse('Etag' in browser.headers)

        # View the page as logged-in
        browser = Browser(self.app)
        browser.addHeader(
            'Authorization', 'Basic %s:%s' % (
                TEST_USER_NAME,
                TEST_USER_PASSWORD,
            ))
        browser.open(self.portal['f1']['d1'].absolute_url())
        self.assertEqual('plone.content.itemView',
                         browser.headers['X-Cache-Rule'])
        self.assertEqual('plone.app.caching.strongCaching',
                         browser.headers['X-Cache-Operation'])
        self.assertTrue(testText in browser.contents)
        self.assertEqual('max-age=0, must-revalidate, private',
                         browser.headers['Cache-Control'])
        self.assertTrue('Etag' in browser.headers)

        # Check an edge case that has had a problem in the past:
        # setting strongCaching maxage to zero.

        self.registry['plone.app.caching.strongCaching.maxage'] = 0
        self.registry['plone.app.caching.strongCaching.smaxage'] = 60
        transaction.commit()

        # View the page as anonymous
        browser = Browser(self.app)
        browser.open(self.portal['f1']['d1'].absolute_url())
        self.assertEqual('max-age=0, s-maxage=60, must-revalidate',
                         browser.headers['Cache-Control'])
Example #57
0
    def test_dump(self):
        """ Can dump all data to json format """
        user1 = make_user('user1', 'user1', True)
        user2 = make_user('user2', 'user2', False)
        user3 = make_user('user3', 'user3', False)
        user3.admin = True
        self.db.add_all([user1, user2, user3])
        transaction.commit()
        self.access.set_allow_register(False)
        self.access.create_group('g1')
        self.access.create_group('g2')
        self.access.edit_user_group('user2', 'g1', True)
        self.access.edit_user_group('user2', 'g2', True)
        self.access.edit_user_group('user3', 'g2', True)
        self.access.edit_user_permission('pkg1', 'user2', 'read', True)
        self.access.edit_user_permission('pkg2', 'user3', 'read', True)
        self.access.edit_user_permission('pkg2', 'user3', 'write', True)
        self.access.edit_group_permission('pkg1', 'g1', 'read', True)
        self.access.edit_group_permission('pkg2', 'g2', 'read', True)
        self.access.edit_group_permission('pkg2', 'g2', 'write', True)

        data = self.access.dump()

        self.assertFalse(data['allow_register'])

        # users
        self.assertEqual(len(data['users']), 2)
        for user in data['users']:
            self.assertTrue(
                pwd_context.verify(user['username'], user['password']))
            self.assertFalse(user['admin'] ^ (user['username'] == 'user3'))

        # pending users
        self.assertEqual(len(data['pending_users']), 1)
        user = data['pending_users'][0]
        self.assertTrue(pwd_context.verify(user['username'], user['password']))

        # groups
        self.assertEqual(len(data['groups']), 2)
        self.assertItemsEqual(data['groups']['g1'], ['user2'])
        self.assertItemsEqual(data['groups']['g2'], ['user2', 'user3'])

        # user package perms
        self.assertEqual(
            data['packages']['users'], {
                'pkg1': {
                    'user2': ['read'],
                },
                'pkg2': {
                    'user3': ['read', 'write'],
                },
            })

        # group package perms
        self.assertEqual(data['packages']['groups'], {
            'pkg1': {
                'g1': ['read'],
            },
            'pkg2': {
                'g2': ['read', 'write'],
            },
        })
Example #58
0
 def tearDown(cls):
     app = ZopeTestCase.app()
     app._delObject('site')
     transaction.commit()
     ZopeTestCase.close(app)
Example #59
0
def demo(context):

    if context.readDataFile('plonesocial.suite_demo.txt') is None:
        return

    portal = site = context.getSite()
    avatar_path = os.path.join(context._profile_path, 'avatars')

    # create users
    users = []
    for file_name in os.listdir(avatar_path):
        userid = str(file_name.split('.')[0])
        users.append(userid)
        properties = dict(
            fullname=" ".join([x.capitalize() for x in userid.split("_")]),
            location=random.choice(("New York", "Chicago", "San Francisco",
                                    "Paris", "Amsterdam", "Zurich")),
            description=" ".join(loremipsum.get_sentences(2)))
        try:
            api.user.create(email='*****@*****.**' % userid,
                            username=userid,
                            password='******',
                            properties=properties)
        except ValueError:
            user = api.user.get(username=userid)
            user.setMemberProperties(properties)

        portrait = context.openDataFile(file_name, 'avatars')
        scaled, mimetype = scale_image(portrait)
        portrait = Image(id=userid, file=scaled, title='')
        memberdata = getToolByName(site, 'portal_memberdata')
        memberdata._setPortrait(portrait, userid)

    # setup social network
    graph = queryUtility(INetworkGraph)
    graph.clear()
    testusers = ['clare_presler', 'kurt_silvio']
    graph.set_follow(testusers[1], testusers[0])
    # give clare some extra followers
    for fan in ['christian_stoner', 'guy_hachey', 'jamie_jacko']:
        graph.set_follow(fan, testusers[0])
    # fully random followers
    for i in xrange(100):
        followee = random.choice(users)
        follower = random.choice(users)
        if followee in testusers or follower in testusers \
                or followee == follower:
            continue
        else:
            graph.set_follow(follower, followee)

    # setup publicly accessible folder and document
    portal.invokeFactory('Folder', 'public', title=u"Public Folder")
    public = portal['public']
    public.invokeFactory('Document', 'd1', title=u"Public Document")

    # create and fill a local IMicroblogContext workspace
    workspace_users = [
        'clare_presler', 'dollie_nocera', 'esmeralda_claassen',
        'pearlie_whitby'
    ]
    if 'workspace' not in portal:
        portal.invokeFactory('Folder', 'workspace', title=u"Secure Workspace")
        # enable local microblog
        directlyProvides(portal.workspace, IMicroblogContext)
        # in testing we don't have the 'normal' default workflow
        workflowTool = getToolByName(portal, 'portal_workflow')
        if workflowTool.getInfoFor(portal.workspace,
                                   'review_state') != 'private':
            workflowTool.doActionFor(portal.workspace, 'hide')
        # share workspace with some users
        for userid in workspace_users:
            api.user.grant_roles(username=userid,
                                 obj=portal.workspace,
                                 roles=['Contributor', 'Reader', 'Editor'])
        # update object_provides + workflow state + sharing indexes
        portal.workspace.reindexObject()

    # microblog random loremipsum
    # prepare microblog
    microblog = queryUtility(IMicroblogTool)
    microblog.clear()  # wipe all
    tags = ("hr", "marketing", "fail", "innovation", "learning", "easy",
            "newbiz", "conference", "help", "checkthisout")
    for i in xrange(100):
        # select random user
        userid = random.choice(users)
        # generate text
        text = " ".join(loremipsum.get_sentences(3))
        if random.choice((True, False)):
            text += " #%s" % random.choice(tags)
        if userid in workspace_users:
            # workspace
            text += ' #girlspace'
            status = StatusUpdate(text, context=portal.workspace)
        else:
            # global
            status = StatusUpdate(text)
        status.userid = userid
        status.creator = " ".join([x.capitalize() for x in userid.split("_")])
        # distribute most over last week
        if i < 90:
            offset_time = random.random() * 3600 * 24 * 7
            status.id -= int(offset_time * 1e6)
            status.date = DateTime(time.time() - offset_time)
        microblog.add(status)

    # microblog deterministic test content most recent
    # workspace
    t0 = ('Workspaces can have local microblogs and activitystreams. '
          'Local activitystreams show only local status updates. '
          'Microblog updates will show globally only for users who '
          'have the right permissions. This demo has a #girlspace workspace.')
    s0 = StatusUpdate(t0, context=portal.workspace)
    s0.userid = workspace_users[0]  # clare
    s0.creator = " ".join([x.capitalize() for x in s0.userid.split("_")])
    microblog.add(s0)
    # global
    t1 = ('The "My Network" section only shows updates '
          'of people you are following.')
    s1 = StatusUpdate(t1)
    s1.userid = testusers[0]  # clare
    s1.creator = " ".join([x.capitalize() for x in s1.userid.split("_")])
    microblog.add(s1)
    t2 = 'The "Explore" section shows all updates of all people.'
    s2 = StatusUpdate(t2)
    s2.userid = testusers[1]  # kurt
    s2.creator = " ".join([x.capitalize() for x in s2.userid.split("_")])
    microblog.add(s2)
    t3 = 'The #demo hashtag demonstrates that you can filter on topic'
    s3 = StatusUpdate(t3)
    s3.userid = s2.userid  # kurt
    s3.creator = s2.creator
    microblog.add(s3)

    # commit
    microblog.flush_queue()
    transaction.commit()
Example #60
0
 def test_need_admin(self):
     """ If admin doesn't exist, need an admin """
     user = make_user('foo', 'bar', False)
     self.db.add(user)
     transaction.commit()
     self.assertTrue(self.access.need_admin())