Code Example #1
    def test_update_project_returns_extra(self):
        """This tests for backwards-compatibility with an essex/folsom bug.

        Non-indexed attributes were returned in an 'extra' attribute, instead
        of on the entity itself; for consistency and backwards compatibility,
        those attributes should be included twice.

        This behavior is specific to the SQL driver.

        """
        tenant_id = uuid.uuid4().hex
        arbitrary_key = uuid.uuid4().hex
        arbitrary_value = uuid.uuid4().hex
        tenant = {
            "id": tenant_id,
            "name": uuid.uuid4().hex,
            "domain_id": DEFAULT_DOMAIN_ID,
            arbitrary_key: arbitrary_value,
        }
        ref = self.identity_man.create_project({}, tenant_id, tenant)
        self.assertEqual(arbitrary_value, ref[arbitrary_key])
        self.assertIsNone(ref.get("extra"))

        tenant["name"] = uuid.uuid4().hex
        ref = self.identity_api.update_project(tenant_id, tenant)
        self.assertEqual(arbitrary_value, ref[arbitrary_key])
        self.assertEqual(arbitrary_value, ref["extra"][arbitrary_key])
Code Example #2
    def test_facebook_token(self, _get_external_data, get_new_token):
        self.app.test_request_context().push()
        _get_external_data.return_value = FACEBOOK_GRAPH_DATA
        from rockpack.mainsite.services.oauth.api import FacebookUser

        long_lived_fb_token = "fdsuioncf3w8ryl38yb7y4eius"
        get_new_token.return_value = FacebookUser("facebook", long_lived_fb_token, 3600)

        user = self._new_user()
        token = uuid.uuid4().hex
        eu = FacebookUser("facebook", token, 3600)
        eu._user_data = FACEBOOK_GRAPH_DATA.copy()
        ExternalToken.update_token(user, eu)
        self.session.commit()

        e = ExternalToken.query.filter_by(external_token=long_lived_fb_token).one()
        self.assertEquals("facebook", e.external_system)
        self.assertEquals(user.username, e.user_rel.username)

        # test we can overwrite token
        new_token = uuid.uuid4().hex
        eu = FacebookUser("facebook", new_token, 172800)
        eu._user_data = FACEBOOK_GRAPH_DATA.copy()
        ExternalToken.update_token(user, eu)
        self.session.commit()

        e = ExternalToken.query.filter_by(user=user.id)
        self.assertEqual(1, e.count(), "only one token should exist")
        e = e.one()
        self.assertEqual(new_token, e.external_token, "saved token should match new token")
Code Example #3
File: mocks.py Project: shvar/redfs
    def run_bad_case():
        backup_uuid = uuid4()
        restore_uuid = uuid4()

        restore_total = 1000
        backup_total = 1000

        backup_started(backup_uuid)
        restore_started(restore_uuid)

        def _backup_progress(uuid, mult, total):
            backup_running(uuid)
            backup_progress(uuid, int(mult * total), total,
                            100000 * int(mult * total), 100000 * total)

        def _restore_progress(uuid, mult, total):
            restore_progress(uuid, int(mult * total), total,
                             100000 * int(mult * total), 100000 * total)
            restore_running(uuid)

        for i in xrange(1, 11):
            reactor.callLater(2 * i, _backup_progress, backup_uuid,
                              i * 10. / 100.0, backup_total)

            reactor.callLater(2 * i, _restore_progress, restore_uuid,
                              i * 9. / 100.0, restore_total)

        reactor.callLater(30, backup_finished, backup_uuid)
        reactor.callLater(30, restore_failed, restore_uuid)
Code Example #4
def rename_conflicting_fields(json_obj):
    if isinstance(json_obj, dict):
        # iterate over a snapshot, since the renames below mutate json_obj
        for key, value in list(json_obj.items()):
            if key in conflicting_fields:
                # rename
                prefix = 'mongo' + key
                new_key = prefix
                while new_key in json_obj:
                    new_key = prefix + str(uuid.uuid4())

                json_obj[new_key] = value
                del json_obj[key]
            elif '.' in key:
                prefix = key.replace('.', ':')
                new_key = prefix
                while new_key in json_obj:
                    new_key = prefix + str(uuid.uuid4())

                json_obj[new_key] = value
                del json_obj[key]

            rename_conflicting_fields(value)
    elif isinstance(json_obj, list):
        for json_section in json_obj:
            rename_conflicting_fields(json_section)
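The helper above renames two kinds of keys: anything listed in the module-level conflicting_fields collection (not shown in this excerpt) gets a 'mongo' prefix, and dotted keys have '.' replaced with ':' (MongoDB rejects dots in field names); a UUID suffix is appended only when the renamed key would collide. A minimal usage sketch, with an assumed conflicting_fields value:

import uuid

conflicting_fields = {'_id'}  # assumed here; the real collection is defined elsewhere

doc = {'_id': 1, 'a.b': 2, 'nested': [{'c.d': 3}]}
rename_conflicting_fields(doc)
# doc is now {'mongo_id': 1, 'a:b': 2, 'nested': [{'c:d': 3}]}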
Code Example #5
File: bucket.py Project: hiidef/hiitrack-api
 def test_unauthorized(self):
     BUCKETNAME = uuid.uuid4().hex
     DESCRIPTION = uuid.uuid4().hex
     result = yield request(
         "POST",
         "%s/%s" % (self.url, BUCKETNAME),
         username=self.username,
         password=self.password,
         data={"description":DESCRIPTION})
     self.assertEqual(result.code, 201)
     result = yield request(
         "GET",
         "%s/%s" % (self.url, BUCKETNAME),
         username="******",
         password="******")        
     self.assertEqual(result.code, 401)
     result = yield request(
         "GET",
         "%s/%s" % (self.url, BUCKETNAME),
         username=self.username2,
         password=self.password2)        
     self.assertEqual(result.code, 401)
     result = yield request(
         "DELETE",
         "%s/%s" % (self.url, BUCKETNAME),
         username=self.username,
         password=self.password)
     self.assertEqual(result.code, 200)
Code Example #6
def test_pqueue_schedule_message(live_servicebus_config, partitioned_queue):
    client = ServiceBusClient(
        service_namespace=live_servicebus_config['hostname'],
        shared_access_key_name=live_servicebus_config['key_name'],
        shared_access_key_value=live_servicebus_config['access_key'],
        debug=True)
    import uuid
    queue_client = client.get_queue(partitioned_queue)
    enqueue_time = (datetime.now() + timedelta(minutes=2)).replace(microsecond=0)
    with queue_client.get_receiver() as receiver:
        with queue_client.get_sender() as sender:
            content = str(uuid.uuid4())
            message_id = uuid.uuid4()
            message = Message(content)
            message.properties.message_id = message_id
            message.schedule(enqueue_time)
            sender.send(message)

        messages = receiver.fetch_next(timeout=120)
        if messages:
            try:
                data = str(messages[0])
                assert data == content
                assert messages[0].properties.message_id == message_id
                assert messages[0].scheduled_enqueue_time == enqueue_time
                assert messages[0].scheduled_enqueue_time == messages[0].enqueued_time.replace(microsecond=0)
                assert len(messages) == 1
            finally:
                for m in messages:
                    m.complete()
        else:
            raise Exception("Failed to receive schdeduled message.")
Code Example #7
 def test_element_count_validation(self):
     """
     Tests that big collections are detected and raise an exception.
     """
     TestMapModel.create(text_map={str(uuid4()): i for i in range(65535)})
     with self.assertRaises(ValidationError):
         TestMapModel.create(text_map={str(uuid4()): i for i in range(65536)})
Code Example #8
 def testMcas(self):
   mc = GetMemcacheClient()
   key1, key2 = uuid4().hex, uuid4().hex
   mc.set_multi({key1: 'foo', key2: 'bar'})
   item1, item2 = mcas_get(mc, key1), mcas_get(mc, key2)
   self.assertTrue(mcas(mc, [(item1, 'foo2'), (item2, 'bar2')]))
   self.assertEqual(mc.get_multi([key1, key2]), {key1: 'foo2', key2: 'bar2'})
Code Example #9
def test_pqueue_message_time_to_live(live_servicebus_config, partitioned_queue):
    client = ServiceBusClient(
        service_namespace=live_servicebus_config['hostname'],
        shared_access_key_name=live_servicebus_config['key_name'],
        shared_access_key_value=live_servicebus_config['access_key'],
        debug=True)
    import uuid
    queue_client = client.get_queue(partitioned_queue)
    
    with queue_client.get_sender() as sender:
        content = str(uuid.uuid4())
        message_id = uuid.uuid4()
        message = Message(content)
        message.time_to_live = timedelta(seconds=30)
        sender.send(message)

    time.sleep(30)
    with queue_client.get_receiver() as receiver:
        messages = receiver.fetch_next(timeout=10)
    assert not messages

    with queue_client.get_deadletter_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
        count = 0
        for message in receiver:
            print_message(message)
            message.complete()
            count += 1
        assert count == 1
Code Example #10
    def test_federated_domain_scoped_payload(self):
        exp_user_id = 'someNonUuidUserId'
        exp_methods = ['token']
        exp_domain_id = uuid.uuid4().hex
        exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
        exp_audit_ids = [provider.random_urlsafe_str()]
        exp_federated_info = {'group_ids': [{'id': 'someNonUuidGroupId'}],
                              'idp_id': uuid.uuid4().hex,
                              'protocol_id': uuid.uuid4().hex}

        payload = token_formatters.FederatedDomainScopedPayload.assemble(
            exp_user_id, exp_methods, exp_domain_id, exp_expires_at,
            exp_audit_ids, exp_federated_info)

        (user_id, methods, domain_id, expires_at, audit_ids,
         federated_info) = (
            token_formatters.FederatedDomainScopedPayload.disassemble(
                payload))

        self.assertEqual(exp_user_id, user_id)
        self.assertEqual(exp_methods, methods)
        self.assertEqual(exp_domain_id, domain_id)
        self.assertEqual(exp_expires_at, expires_at)
        self.assertEqual(exp_audit_ids, audit_ids)
        self.assertDictEqual(exp_federated_info, federated_info)
Code Example #11
File: test_v3_oauth1.py Project: roopali8/keystone
    def test_consumer_update_normalize_field(self):
        # If a consumer is updated with a field whose name contains ':' or '-',
        # the name is normalized by converting those characters to '_'.
        field1_name = 'some:weird-field'
        field1_orig_value = uuid.uuid4().hex

        extra_fields = {field1_name: field1_orig_value}
        consumer = self._consumer_create(**extra_fields)
        consumer_id = consumer['id']

        field1_new_value = uuid.uuid4().hex

        field2_name = 'weird:some-field'
        field2_value = uuid.uuid4().hex

        update_ref = {field1_name: field1_new_value,
                      field2_name: field2_value}

        update_resp = self.patch(self.CONSUMER_URL + '/%s' % consumer_id,
                                 body={'consumer': update_ref})
        consumer = update_resp.result['consumer']

        normalized_field1_name = 'some_weird_field'
        self.assertEqual(field1_new_value, consumer[normalized_field1_name])

        normalized_field2_name = 'weird_some_field'
        self.assertEqual(field2_value, consumer[normalized_field2_name])
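The normalization the test exercises amounts to replacing ':' and '-' with '_' in extra-field names. A sketch of that rule (not keystone's actual implementation):

def normalize_field_name(name):
    # ':' and '-' become '_', per the comment at the top of the test
    return name.replace(':', '_').replace('-', '_')

assert normalize_field_name('some:weird-field') == 'some_weird_field'
assert normalize_field_name('weird:some-field') == 'weird_some_field'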
Code Example #12
    def test_legacy_padding_validation(self):
        first_value = uuid.uuid4().hex
        second_value = uuid.uuid4().hex
        payload = (first_value, second_value)
        msgpack_payload = msgpack.packb(payload)

        # NOTE(lbragstad): This method preserves the way that keystone used to
        # percent encode the tokens, prior to bug #1491926.
        def legacy_pack(payload):
            tf = token_formatters.TokenFormatter()
            encrypted_payload = tf.crypto.encrypt(payload)

            # the encrypted_payload is returned with padding appended
            self.assertTrue(encrypted_payload.endswith('='))

            # using urllib.parse.quote will percent encode the padding, like
            # keystone did in Kilo.
            percent_encoded_payload = urllib.parse.quote(encrypted_payload)

            # ensure that the padding was actually percent encoded
            self.assertTrue(percent_encoded_payload.endswith('%3D'))
            return percent_encoded_payload

        token_with_legacy_padding = legacy_pack(msgpack_payload)
        tf = token_formatters.TokenFormatter()

        # demonstrate that we can validate a payload that has been percent
        # encoded with the Fernet logic that existed in Kilo
        serialized_payload = tf.unpack(token_with_legacy_padding)
        returned_payload = msgpack.unpackb(serialized_payload)
        self.assertEqual(first_value, returned_payload[0])
        self.assertEqual(second_value, returned_payload[1])
Code Example #13
    def _test_federated_payload_with_ids(self, exp_user_id, exp_group_id):
        exp_methods = ['password']
        exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
        exp_audit_ids = [provider.random_urlsafe_str()]
        exp_federated_info = {'group_ids': [{'id': exp_group_id}],
                              'idp_id': uuid.uuid4().hex,
                              'protocol_id': uuid.uuid4().hex}

        payload = token_formatters.FederatedUnscopedPayload.assemble(
            exp_user_id, exp_methods, exp_expires_at, exp_audit_ids,
            exp_federated_info)

        (user_id, methods, expires_at, audit_ids, federated_info) = (
            token_formatters.FederatedUnscopedPayload.disassemble(payload))

        self.assertEqual(exp_user_id, user_id)
        self.assertEqual(exp_methods, methods)
        self.assertEqual(exp_expires_at, expires_at)
        self.assertEqual(exp_audit_ids, audit_ids)
        self.assertEqual(exp_federated_info['group_ids'][0]['id'],
                         federated_info['group_ids'][0]['id'])
        self.assertEqual(exp_federated_info['idp_id'],
                         federated_info['idp_id'])
        self.assertEqual(exp_federated_info['protocol_id'],
                         federated_info['protocol_id'])
Code Example #14
File: test_xunit.py Project: mitsuhiko/changes
def test_result_generation():
    jobstep = JobStep(
        id=uuid.uuid4(),
        project_id=uuid.uuid4(),
        job_id=uuid.uuid4(),
    )

    fp = StringIO(SAMPLE_XUNIT)

    handler = XunitHandler(jobstep)
    results = handler.get_tests(fp)

    assert len(results) == 2

    r1 = results[0]
    assert type(r1) == TestResult
    assert r1.step == jobstep
    assert r1.package is None
    assert r1.name == 'tests.test_report'
    assert r1.duration == 0.0
    assert r1.result == Result.failed
    assert r1.message == """tests/test_report.py:1: in <module>
>   import mock
E   ImportError: No module named mock"""
    assert r1.owner == 'foo'
    r2 = results[1]
    assert type(r2) == TestResult
    assert r2.step == jobstep
    assert r2.package is None
    assert r2.name == 'tests.test_report.ParseTestResultsTest.test_simple'
    assert r2.duration == 1.65796279907
    assert r2.result == Result.passed
    assert r2.message == ''
    assert r2.reruns == 1
    assert r2.owner is None
Code Example #15
File: test_integration.py Project: tomzhang/autopush
    def test_multiple_delivery_with_multiple_ack(self):
        data = str(uuid.uuid4())
        data2 = str(uuid.uuid4())
        client = yield self.quick_register(use_webpush=True)
        yield client.disconnect()
        ok_(client.channels)
        yield client.send_notification(data=data)
        yield client.send_notification(data=data2)
        yield client.connect()
        yield client.hello()
        result = yield client.get_notification()
        ok_(result != {})
        ok_(result["data"] in map(urlsafe_b64encode, [data, data2]))
        result2 = yield client.get_notification()
        ok_(result2 != {})
        ok_(result2["data"] in map(urlsafe_b64encode, [data, data2]))
        yield client.ack(result2["channelID"], result2["version"])
        yield client.ack(result["channelID"], result["version"])

        yield client.disconnect()
        yield client.connect()
        yield client.hello()
        result = yield client.get_notification()
        eq_(result, None)
        yield self.shut_down(client)
Code Example #16
File: Kernel.py Project: cbecker/Sublime-IJulia
 def execute(self, code):
     if debug:
         execute_request = Msg(["execute_request"],
         {"msg_id": "07033084-5cfd-4812-90a4-e4d24ffb6e3d", 
          "username": str(self.id), 
          "session": "07033084-5cfd-4812-90a4-e4d24ffb6e3d", 
          "msg_type": "execute_request"}, 
          {"code": code, 
           "silent": False, 
           "store_history": False, 
           "user_variables": list(), 
           "user_expressions": {}, 
           "allow_stdin": True}, {})
     else:
         execute_request = Msg(["execute_request"],
         {"msg_id": str(uuid.uuid4()), 
          "username": str(self.id), 
          "session": str(uuid.uuid4()), 
          "msg_type": "execute_request"}, 
          {"code": code, 
           "silent": False, 
           "store_history": False, 
           "user_variables": list(), 
           "user_expressions": {}, 
           "allow_stdin": True}, {})
     ret = self.shell.send(execute_request)
Code Example #17
 def forwards(self, orm):
     for award in orm['badges.Award'].objects.all():
         user = award.user
         try:
             user.identity
             current_identity_hash = user.identity.identity_hash
             new_candidate_identity_hash = u'sha256$' + hashlib.sha256(user.email + user.identity.salt).hexdigest()
             if current_identity_hash != new_candidate_identity_hash:
                 salt = uuid.uuid4().hex[:5]
                 user.identity.salt = salt
                 user.identity.identity_hash = u'sha256$' + hashlib.sha256(user.email + salt).hexdigest()
                 user.identity.save()
         except:
             salt = uuid.uuid4().hex[:5]
             orm['badges.Identity'].objects.create(
                 user=user,
                 identity_hash=u'sha256$' + hashlib.sha256(user.email + salt).hexdigest(),
                 salt=salt
             )
         award.uuid = uuid.uuid1()
         award.identity_hash = award.user.identity.identity_hash
         award.identity_type = award.user.identity.type
         award.identity_hashed = award.user.identity.hashed
         award.identity_salt = award.user.identity.salt
         award.expires = None
         award.save()
Code Example #18
def create_access_id(global_id=None):
    """ Create and return a new AccessID object.

        If 'global_id' is supplied, it should be a GlobalID object to use for
        this access ID.  Otherwise, a new unique GlobalID object will be
        created.
    """
    if global_id is None:
        global_id = create_unique_global_id()

    while True:
        device_id = random_string()

        try:
            access_id = AccessID.objects.get(global_id=global_id,
                                             device_id=device_id)
        except AccessID.DoesNotExist:
            access_id = None

        if access_id is None:
            break

    access_id = AccessID()
    access_id.global_id     = global_id
    access_id.device_id     = device_id
    access_id.timestamp     = timezone.now()
    access_id.access_id     = uuid.uuid4().hex
    access_id.access_secret = uuid.uuid4().hex
    access_id.save()

    return access_id
Code Example #19
File: models.py Project: kepinq/MyJobs
    def get_file_name(self, filename):
        """
        Ensures that a file name is unique before uploading.
        The PRMAttachment instance requires an extra attribute,
        partner (a Partner instance) to be set in order to create the
        file name.

        """
        filename, extension = path.splitext(filename)
        filename = '.'.join([sub(r'[\W]', '', filename),
                             sub(r'[\W]', '', extension)])

        # If the uploaded file only contains invalid characters the end
        # result will be a file named "."
        if not filename or filename == '.':
            filename = 'unnamed_file'

        uid = uuid4()
        if self.partner:
            partner = self.partner.pk
            owner = self.partner.owner.pk
        else:
            partner = owner = 'none'
        path_addon = "mypartners/%s/%s/%s" % (owner, partner, uid)
        name = "%s/%s" % (path_addon, filename)

        # Make sure that in the unlikely event that a filepath/uid/filename
        # combination isn't actually unique a new unique id
        # is generated.
        while default_storage.exists(name):
            uid = uuid4()
            path_addon = "mypartners/%s/%s/%s" % (owner, partner, uid)
            name = "%s/%s" % (path_addon, filename)

        return name
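Per the docstring, callers must set the partner attribute on the instance before uploading; a hypothetical usage sketch (the PRMAttachment construction and the partner object are assumed):

attachment = PRMAttachment()            # hypothetical instance
attachment.partner = some_partner       # a Partner instance; required for the path
name = attachment.get_file_name('quarterly report!.pdf')
# -> 'mypartners/<owner_pk>/<partner_pk>/<uuid>/quarterlyreport.pdf'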
Code Example #20
def get_vw_nvalues(model_run_uuid):
    """
    Given a model run uuid that contains the lookup table and ESRI .asc with
    vegetation codes, return an ascii file that has the n-values properly
    assigned
    """
    vwc = default_vw_client()

    records = vwc.dataset_search(model_run_uuid=model_run_uuid).records

    downloads = [r['downloads'][0] for r in records]

    asc_url = filter(lambda d: d.keys().pop() == 'ascii',
                     downloads).pop()['ascii']

    xlsx_url = filter(lambda d: d.keys().pop() == 'xlsx',
                      downloads).pop()['xlsx']

    asc_path = 'tmp_' + str(uuid4()) + '.asc'
    vwc.download(asc_url, asc_path)

    xlsx_path = 'tmp_' + str(uuid4()) + '.xlsx'
    vwc.download(xlsx_url, xlsx_path)

    asc_nvals = vegcode_to_nvalue(asc_path, xlsx_path)

    os.remove(asc_path)
    os.remove(xlsx_path)

    return asc_nvals
Code Example #21
File: base.py Project: magictour/trove
    def setUp(self):
        # Basic setup and mock/fake structures for testing only
        super(TestDatastoreBase, self).setUp()
        util.init_db()
        self.rand_id = str(uuid.uuid4())
        self.ds_name = "my-test-datastore" + self.rand_id
        self.ds_version = "my-test-version" + self.rand_id
        self.capability_name = "root_on_create" + self.rand_id
        self.capability_desc = "Enables root on create"
        self.capability_enabled = True
        self.datastore_version_id = str(uuid.uuid4())
        self.flavor_id = 1

        datastore_models.update_datastore(self.ds_name, False)
        self.datastore = Datastore.load(self.ds_name)

        datastore_models.update_datastore_version(self.ds_name, self.ds_version, "mysql", "", "", True)
        DatastoreVersionMetadata.add_datastore_version_flavor_association(
            self.ds_name, self.ds_version, [self.flavor_id]
        )

        self.datastore_version = DatastoreVersion.load(self.datastore, self.ds_version)
        self.test_id = self.datastore_version.id

        self.cap1 = Capability.create(self.capability_name, self.capability_desc, True)
        self.cap2 = Capability.create("require_volume" + self.rand_id, "Require external volume", True)
        self.cap3 = Capability.create("test_capability" + self.rand_id, "Test capability", False)
Code Example #22
File: test_cinder.py Project: Waynelemars/flocker
    def test_foreign_cluster_volume(self):
        """
        Volumes from other Flocker clusters are not listed.
        """
        block_device_api1 = cinderblockdeviceapi_for_test(
            test_case=self,
            cluster_id=uuid4(),
        )

        flocker_volume1 = block_device_api1.create_volume(
            dataset_id=uuid4(),
            size=REALISTIC_BLOCKDEVICE_SIZE,
        )

        block_device_api2 = cinderblockdeviceapi_for_test(
            test_case=self,
            cluster_id=uuid4(),
        )

        flocker_volume2 = block_device_api2.create_volume(
            dataset_id=uuid4(),
            size=REALISTIC_BLOCKDEVICE_SIZE,
        )

        self.assertEqual(
            ([flocker_volume1], [flocker_volume2]),
            (block_device_api1.list_volumes(),
             block_device_api2.list_volumes())
        )
Code Example #23
    def test_delete_index(self):
        mother_case_id = uuid.uuid4().hex
        _submit_case_block(
            True, mother_case_id, user_id='user1', owner_id='owner1', case_type='mother',
            case_name='mother', date_modified=datetime.utcnow()
        )

        child_case_id = uuid.uuid4().hex
        _submit_case_block(
            True, child_case_id, user_id='user1', owner_id='owner1', case_type='child',
            case_name='child', date_modified=datetime.utcnow(), index={
                'mom': ('mother', mother_case_id)
            }
        )

        case = self.casedb.get_case(child_case_id)
        self.assertEqual(len(case.indices), 1)

        _submit_case_block(
            False, child_case_id, user_id='user1', date_modified=datetime.utcnow(), index={
                'mom': ('mother', '')
            }
        )
        case = self.casedb.get_case(child_case_id)
        self.assertEqual(len(case.indices), 0)
Code Example #24
File: test_exception.py Project: cyan-cliqr/keystone
 def test_validation_error(self):
     target = uuid.uuid4().hex
     attribute = uuid.uuid4().hex
     e = exception.ValidationError(target=target, attribute=attribute)
     self.assertValidJsonRendering(e)
     self.assertIn(target, six.text_type(e))
     self.assertIn(attribute, six.text_type(e))
Code Example #25
    def test_update_index(self):
        mother_case_id = uuid.uuid4().hex
        _submit_case_block(
            True, mother_case_id, user_id='user1', owner_id='owner1', case_type='mother',
            case_name='mother', date_modified=datetime.utcnow()
        )

        child_case_id = uuid.uuid4().hex
        _submit_case_block(
            True, child_case_id, user_id='user1', owner_id='owner1', case_type='child',
            case_name='child', date_modified=datetime.utcnow(), index={
                'mom': ('mother', mother_case_id)
            }
        )

        case = self.casedb.get_case(child_case_id)
        self.assertEqual(case.indices[0].identifier, 'mom')

        _submit_case_block(
            False, child_case_id, user_id='user1', date_modified=datetime.utcnow(), index={
                'mom': ('other_mother', mother_case_id)
            }
        )
        case = self.casedb.get_case(child_case_id)
        self.assertEqual(case.indices[0].referenced_type, 'other_mother')
Code Example #26
File: common.py Project: windskyer/k_nova
def vios_to_vios_auth(source, dest, conn_info):
    """Context allowing for SSH between VIOS partitions

    This context will build an SSH key on the source host, put the key
    into the authorized_keys on the destination host, and make the
    private key file name available within the context.
    The key files and key inserted into authorized_keys will be
    removed when the context exits.

    :param source: source IP or DNS name
    :param dest: destination IP or DNS name
    :param conn_info: dictionary object with SSH connection
                      information for both hosts
    """
    KEY_BASE_NAME = "os-%s" % uuid.uuid4().hex
    keypair_uuid = uuid.uuid4()
    src_conn_obj = ssh_connect(conn_info)

    dest_conn_info = Connection(dest, conn_info.username,
                                       conn_info.password)
    dest_conn_obj = ssh_connect(dest_conn_info)

    def run_command(conn_obj, cmd):
        stdout, stderr = processutils.ssh_execute(conn_obj, cmd)
        return stdout.strip().splitlines()

    def build_keypair_on_source():
        mkkey = ('ssh-keygen -f %s -N "" -C %s' %
                    (KEY_BASE_NAME, keypair_uuid.hex))
        ssh_command_as_root(src_conn_obj, mkkey)

        chown_key = ('chown %s %s*' % (conn_info.username, KEY_BASE_NAME))
        ssh_command_as_root(src_conn_obj, chown_key)

        cat_key = ('cat %s.pub' % KEY_BASE_NAME)
        pubkey = run_command(src_conn_obj, cat_key)

        return pubkey[0]

    def cleanup_key_on_source():
        rmkey = 'rm %s*' % KEY_BASE_NAME
        run_command(src_conn_obj, rmkey)

    def insert_into_authorized_keys(public_key):
        echo_key = 'echo "%s" >> .ssh/authorized_keys' % public_key
        ssh_command_as_root(dest_conn_obj, echo_key)

    def remove_from_authorized_keys():
        rmkey = ('sed /%s/d .ssh/authorized_keys > .ssh/authorized_keys' %
                 keypair_uuid.hex)
        ssh_command_as_root(dest_conn_obj, rmkey)

    public_key = build_keypair_on_source()
    insert_into_authorized_keys(public_key)

    try:
        yield KEY_BASE_NAME
    finally:
        remove_from_authorized_keys()
        cleanup_key_on_source()
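Because the function body yields, it is presumably wrapped with contextlib.contextmanager in the original module (the decorator is not part of this excerpt). A hypothetical usage sketch, with made-up hosts and a made-up migration step:

conn_info = Connection('vios1.example.com', 'padmin', 'secret')  # assumed credentials
with vios_to_vios_auth('vios1.example.com', 'vios2.example.com', conn_info) as key_name:
    # while inside the block, the private key file `key_name` exists on the
    # source host and its public half sits in the destination's authorized_keys
    run_migration_over_ssh(key_name)  # hypothetical caller-supplied step
# on exit, the key files and the authorized_keys entry are removed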
Code Example #27
    def test_dup_domain(self):
        openstack_driver = FakeExtensionManager.get_extension_objects("vnc_cfg_api.resync")[0]
        orig_ks_domains_list = openstack_driver._ks_domains_list
        orig_ks_domain_get = openstack_driver._ks_domain_get
        try:
            openstack_driver._ks_domains_list = openstack_driver._ksv3_domains_list
            openstack_driver._ks_domain_get = openstack_driver._ksv3_domain_get
            logger.info('Creating first domain in "keystone"')
            dom_id = str(uuid.uuid4())
            dom_name = self.id()
            test_case.get_keystone_client().domains.add_domain(dom_id, dom_name)
            dom_obj = self._vnc_lib.domain_read(id=dom_id)
            self.assertThat(dom_obj.name, Equals(dom_name))

            logger.info('Creating second domain with same name diff id in "keystone"')
            new_dom_id = str(uuid.uuid4())
            test_case.get_keystone_client().domains.add_domain(new_dom_id, dom_name)
            new_dom_obj = self._vnc_lib.domain_read(id=new_dom_id)
            self.assertThat(new_dom_obj.name, Not(Equals(dom_name)))
            self.assertThat(new_dom_obj.name, Contains(dom_name))

            self._vnc_lib.domain_delete(id=dom_id)
            self._vnc_lib.domain_delete(id=new_dom_id)
        finally:
            openstack_driver._ks_domains_list = orig_ks_domains_list
            openstack_driver._ks_domain_get = orig_ks_domain_get
Code Example #28
    def test_notifier(self):
        self.config(notification_driver=['log'])

        transport = _FakeTransport(self.conf)

        notifier = messaging.Notifier(transport, 'test.localhost')

        message_id = uuid.uuid4()
        self.mox.StubOutWithMock(uuid, 'uuid4')
        uuid.uuid4().AndReturn(message_id)

        timeutils.set_time_override()

        message = {
            'message_id': str(message_id),
            'publisher_id': 'test.localhost',
            'event_type': 'test.notify',
            'priority': 'INFO',
            'payload': 'bar',
            'timestamp': str(timeutils.utcnow.override_time),
        }

        logger = self.mox.CreateMockAnything()

        self.mox.StubOutWithMock(logging, 'getLogger')
        logging.getLogger('oslo.messaging.notification.test.notify').\
            AndReturn(logger)

        logger.info(jsonutils.dumps(message))

        self.mox.ReplayAll()

        notifier.info({}, 'test.notify', 'bar')
Code Example #29
File: omexml.py Project: ilia-kats/python-bioformats
 def set_image_count(self, value):
     '''Add or remove image nodes as needed'''
     assert value > 0
     root = self.root_node
     if self.image_count > value:
         image_nodes = root.find(qn(self.ns['ome'], "Image"))
         for image_node in image_nodes[value:]:
             root.remove(image_node)
     while self.image_count < value:
         new_image = self.Image(ElementTree.SubElement(root, qn(self.ns['ome'], "Image")))
         new_image.ID = str(uuid.uuid4())
         new_image.Name = "default.png"
         new_image.AcquiredDate = xsd_now()
         new_pixels = self.Pixels(
             ElementTree.SubElement(new_image.node, qn(self.ns['ome'], "Pixels")))
         new_pixels.ID = str(uuid.uuid4())
         new_pixels.DimensionOrder = DO_XYCTZ
         new_pixels.PixelType = PT_UINT8
         new_pixels.SizeC = 1
         new_pixels.SizeT = 1
         new_pixels.SizeX = 512
         new_pixels.SizeY = 512
         new_pixels.SizeZ = 1
         new_channel = self.Channel(
             ElementTree.SubElement(new_pixels.node, qn(self.ns['ome'], "Channel")))
         new_channel.ID = "Channel%d:0" % self.image_count
         new_channel.Name = new_channel.ID
         new_channel.SamplesPerPixel = 1
Code Example #30
    def test_serializer(self):
        transport = _FakeTransport(self.conf)

        serializer = msg_serializer.NoOpSerializer()

        notifier = messaging.Notifier(transport,
                                      'test.localhost',
                                      driver='test',
                                      topic='test',
                                      serializer=serializer)

        message_id = uuid.uuid4()
        self.mox.StubOutWithMock(uuid, 'uuid4')
        uuid.uuid4().AndReturn(message_id)

        timeutils.set_time_override()

        self.mox.StubOutWithMock(serializer, 'serialize_entity')
        serializer.serialize_entity({}, 'bar').AndReturn('sbar')

        self.mox.ReplayAll()

        notifier.info({}, 'test.notify', 'bar')

        message = {
            'message_id': str(message_id),
            'publisher_id': 'test.localhost',
            'event_type': 'test.notify',
            'priority': 'INFO',
            'payload': 'sbar',
            'timestamp': str(timeutils.utcnow.override_time),
        }

        self.assertEqual(_impl_test.NOTIFICATIONS, [({}, message, 'INFO')])
Code Example #31
def _generate_scriptrun_id() -> str:
    """Randomly generate a unique ID for a script execution."""
    return str(uuid.uuid4())
Code Example #32
def random_string():
    return str(uuid4())
Code Example #33
def run_script(request):
    """
    Tags: scripts
    ---
    Start a script job to run the script.
    READ permission required on cloud.
    RUN_SCRIPT permission required on machine.
    RUN permission required on script.
    ---
    script_id:
      in: path
      required: true
      type: string
    machine_uuid:
      required: true
      type: string
    params:
      type: string
    su:
      type: boolean
    env:
      type: string
    job_id:
      type: string
    """
    script_id = request.matchdict['script_id']
    params = params_from_request(request)
    script_params = params.get('params', '')
    su = params.get('su', False)
    env = params.get('env')
    job_id = params.get('job_id')
    if not job_id:
        job = 'run_script'
        job_id = uuid.uuid4().hex
    else:
        job = None
    if isinstance(env, dict):
        env = json.dumps(env)

    auth_context = auth_context_from_request(request)
    if 'machine_uuid' in params:
        machine_uuid = params.get('machine_uuid')
        if not machine_uuid:
            raise RequiredParameterMissingError('machine_uuid')

        try:
            machine = Machine.objects.get(id=machine_uuid,
                                          state__ne='terminated')
            # used by logging_view_decorator
            request.environ['machine_id'] = machine.machine_id
            request.environ['cloud_id'] = machine.cloud.id
        except me.DoesNotExist:
            raise NotFoundError("Machine %s doesn't exist" % machine_uuid)
        cloud_id = machine.cloud.id
    else:
        # this will be deprecated, kept for backwards compatibility
        cloud_id = params.get('cloud_id')
        machine_id = params.get('machine_id')

        for key in ('cloud_id', 'machine_id'):
            if key not in params:
                raise RequiredParameterMissingError(key)
        try:
            machine = Machine.objects.get(cloud=cloud_id,
                                          machine_id=machine_id,
                                          state__ne='terminated')
            # used by logging_view_decorator
            request.environ['machine_uuid'] = machine.id
        except me.DoesNotExist:
            raise NotFoundError("Machine %s doesn't exist" % machine_id)

    # SEC require permission READ on cloud
    auth_context.check_perm("cloud", "read", cloud_id)
    # SEC require permission RUN_SCRIPT on machine
    auth_context.check_perm("machine", "run_script", machine.id)
    # SEC require permission RUN on script
    auth_context.check_perm('script', 'run', script_id)
    try:
        script = Script.objects.get(owner=auth_context.owner,
                                    id=script_id,
                                    deleted=None)
    except me.DoesNotExist:
        raise NotFoundError('Script id not found')
    job_id = job_id or uuid.uuid4().hex
    tasks.run_script.delay(auth_context.owner.id,
                           script.id,
                           machine.id,
                           params=script_params,
                           env=env,
                           su=su,
                           job_id=job_id,
                           job=job)
    return {'job_id': job_id, 'job': job}
Code Example #34
import redis
import uuid
import time
def wait_for_sync(mconn,sconn):
    identifier=str(uuid.uuid4())
    # sorted set 'sync:wait': member = identifier, score = time.time()
    mconn.zadd('sync:wait',identifier,time.time())

    # wait for the slave to finish syncing with the master
    while sconn.info()['master_link_status'] != 'up':
        time.sleep(0.001)

    # wait for the slave to receive the data update
    while not sconn.zscore('sync:wait',identifier):
        time.sleep(0.001)

    deadline=time.time()+1.01
    while time.time() < deadline:
        # check whether the update has been synced to disk
        if sconn.info()['aof_pending_bio_fsync']==0:
            break
        time.sleep(0.001)

    mconn.zrem('sync:wait',identifier)
    mconn.zremrangebyscore('sync:wait',0,time.time()-900)

if __name__=="__main__":
    print("uuid = ",uuid.uuid4())
Code Example #35
 def post_processor(self):
     self.data.name = '{0}_{1}'.format(self.uid, uuid4())
Code Example #36
def user(user_manager, cognito_client):
    user_id, username = str(uuid.uuid4()), str(uuid.uuid4())[:8]
    cognito_client.create_verified_user_pool_entry(user_id, username,
                                                   f'{username}@real.app')
    yield user_manager.create_cognito_only_user(user_id, username)
Code Example #37
File: test_processor.py Project: hr004/rasa-1
async def test_handle_message_with_session_start(
    default_channel: CollectingOutputChannel,
    default_processor: MessageProcessor,
    monkeypatch: MonkeyPatch,
):
    sender_id = uuid.uuid4().hex

    entity = "name"
    slot_1 = {entity: "Core"}
    await default_processor.handle_message(
        UserMessage(f"/greet{json.dumps(slot_1)}", default_channel, sender_id)
    )

    assert default_channel.latest_output() == {
        "recipient_id": sender_id,
        "text": "hey there Core!",
    }

    # patch processor so a session start is triggered
    monkeypatch.setattr(default_processor, "_has_session_expired", lambda _: True)

    slot_2 = {entity: "post-session start hello"}
    # handle a new message
    await default_processor.handle_message(
        UserMessage(f"/greet{json.dumps(slot_2)}", default_channel, sender_id)
    )

    tracker = default_processor.tracker_store.get_or_create_tracker(sender_id)

    # make sure the sequence of events is as expected
    assert list(tracker.events) == [
        ActionExecuted(ACTION_SESSION_START_NAME),
        SessionStarted(),
        ActionExecuted(ACTION_LISTEN_NAME),
        UserUttered(
            f"/greet{json.dumps(slot_1)}",
            {"name": "greet", "confidence": 1.0},
            [{"entity": entity, "start": 6, "end": 22, "value": "Core"}],
        ),
        SlotSet(entity, slot_1[entity]),
        ActionExecuted("utter_greet"),
        BotUttered("hey there Core!", metadata={"template_name": "utter_greet"}),
        ActionExecuted(ACTION_LISTEN_NAME),
        ActionExecuted(ACTION_SESSION_START_NAME),
        SessionStarted(),
        # the initial SlotSet is reapplied after the SessionStarted sequence
        SlotSet(entity, slot_1[entity]),
        ActionExecuted(ACTION_LISTEN_NAME),
        UserUttered(
            f"/greet{json.dumps(slot_2)}",
            {"name": "greet", "confidence": 1.0},
            [
                {
                    "entity": entity,
                    "start": 6,
                    "end": 42,
                    "value": "post-session start hello",
                }
            ],
        ),
        SlotSet(entity, slot_2[entity]),
        ActionExecuted(ACTION_LISTEN_NAME),
    ]
Code Example #38
    def __init__(
        self,
        ioloop: tornado.ioloop.IOLoop,
        session_data: SessionData,
        uploaded_file_manager: UploadedFileManager,
        message_enqueued_callback: Optional[Callable[[], None]],
        local_sources_watcher: LocalSourcesWatcher,
    ):
        """Initialize the AppSession.

        Parameters
        ----------
        ioloop : tornado.ioloop.IOLoop
            The Tornado IOLoop that we're running within.

        session_data : SessionData
            Object storing parameters related to running a script

        uploaded_file_manager : UploadedFileManager
            The server's UploadedFileManager.

        message_enqueued_callback : Callable[[], None]
            After enqueuing a message, this callable notification will be invoked.

        local_sources_watcher: LocalSourcesWatcher
            The file watcher that lets the session know local files have changed.

        """
        # Each AppSession has a unique string ID.
        self.id = str(uuid.uuid4())

        self._ioloop = ioloop
        self._session_data = session_data
        self._uploaded_file_mgr = uploaded_file_manager
        self._message_enqueued_callback = message_enqueued_callback

        self._state = AppSessionState.APP_NOT_RUNNING

        # Need to remember the client state here because when a script reruns
        # due to the source code changing we need to pass in the previous client state.
        self._client_state = ClientState()

        self._local_sources_watcher = local_sources_watcher
        self._local_sources_watcher.register_file_change_callback(
            self._on_source_file_changed
        )
        self._stop_config_listener = config.on_config_parsed(
            self._on_source_file_changed, force_connect=True
        )

        # The script should rerun when the `secrets.toml` file has been changed.
        secrets._file_change_listener.connect(self._on_secrets_file_changed)

        self._run_on_save = config.get_option("server.runOnSave")

        # The ScriptRequestQueue is the means by which we communicate
        # with the active ScriptRunner.
        self._script_request_queue = ScriptRequestQueue()

        self._scriptrunner: Optional[ScriptRunner] = None

        # This needs to be lazily imported to avoid a dependency cycle.
        from streamlit.state import SessionState

        self._session_state = SessionState()

        LOGGER.debug("AppSession initialized (id=%s)", self.id)
Code Example #39
 def __init__(self, email, password, _id=None):
     self.email = email
     self.password = password
     self._id = uuid.uuid4().hex if _id is None else _id
Code Example #40
File: sse-c.py Project: benoitMariaux/aws-memo
import boto3
import uuid
import os

BUCKET = "sse-c-demo-"+str(uuid.uuid4())

s3 = boto3.client('s3')

# Create bucket
s3.create_bucket(Bucket=BUCKET)

# Create a file
FILE = "/tmp/sse-c-file"
OBJECT_KEY=os.path.basename(FILE)

file_object = open(FILE, "w")
for i in range(3):
    file_object.write("Line %d\n" % (i+1))

file_object.close()

# Generate a random 32-byte (256-bit) key
SECRET_KEY = os.urandom(32)

s3.put_object(
    Bucket=BUCKET,
    Key=OBJECT_KEY,
    Body=open(FILE, "r").read(),
    SSECustomerKey=SECRET_KEY,
    SSECustomerAlgorithm='AES256'
)
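With SSE-C, S3 never stores the key, so every subsequent read must present the same key and algorithm; a GET without them is rejected. A sketch of reading the object back:

obj = s3.get_object(
    Bucket=BUCKET,
    Key=OBJECT_KEY,
    SSECustomerKey=SECRET_KEY,
    SSECustomerAlgorithm='AES256',
)
print(obj['Body'].read().decode())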
Code Example #41
 def test_cinder_add(self):
     fake_volume = mock.MagicMock(id=str(uuid.uuid4()),
                                  status='available',
                                  size=1)
     volume_file = six.BytesIO()
     self._test_cinder_add(fake_volume, volume_file)
Code Example #42
def test_temp():
    db = temp()
    uid = str(uuid.uuid4())
    db.insert('start', {'uid': uid, 'time': 0})
    db.insert('stop', {'uid': str(uuid.uuid4()), 'time': 1, 'run_start': uid})
    db[-1]
Code Example #43
File: file_packager.py Project: nageshlop/proxy-1
def main():
    data_files = []
    export_name = 'Module'
    leading = ''
    has_preloaded = False
    plugins = []
    jsoutput = None
    from_emcc = False
    force = True
    # If set to True, IndexedDB (IDBFS in library_idbfs.js) is used to locally
    # cache VFS XHR so that subsequent page loads can read the data from the
    # offline cache instead.
    use_preload_cache = False
    indexeddb_name = 'EM_PRELOAD_CACHE'
    # If set to True, the blob received from XHR is moved to the Emscripten HEAP,
    # optimizing for mmap() performance (if ALLOW_MEMORY_GROWTH=0).
    # If set to False, the XHR blob is kept intact, and fread()s etc. are performed
    # directly to that data. This optimizes for minimal memory usage and fread()
    # performance.
    heap_copy = True
    # If set to True, the package metadata is stored separately from js-output
    # file which makes js-output file immutable to the package content changes.
    # If set to False, the package metadata is stored inside the js-output file
    # which makes js-output file to mutate on each invocation of this packager tool.
    separate_metadata = False
    lz4 = False
    use_preload_plugins = False

    for arg in sys.argv[2:]:
        if arg == '--preload':
            has_preloaded = True
            leading = 'preload'
        elif arg == '--embed':
            leading = 'embed'
        elif arg == '--exclude':
            leading = 'exclude'
        elif arg == '--no-force':
            force = False
            leading = ''
        elif arg == '--use-preload-cache':
            use_preload_cache = True
            leading = ''
        elif arg.startswith('--indexedDB-name'):
            indexeddb_name = arg.split('=', 1)[1] if '=' in arg else None
            leading = ''
        elif arg == '--no-heap-copy':
            heap_copy = False
            leading = ''
        elif arg == '--separate-metadata':
            separate_metadata = True
            leading = ''
        elif arg == '--lz4':
            lz4 = True
            leading = ''
        elif arg == '--use-preload-plugins':
            use_preload_plugins = True
            leading = ''
        elif arg.startswith('--js-output'):
            jsoutput = arg.split('=', 1)[1] if '=' in arg else None
            leading = ''
        elif arg.startswith('--export-name'):
            if '=' in arg:
                export_name = arg.split('=', 1)[1]
            leading = ''
        elif arg.startswith('--from-emcc'):
            from_emcc = True
            leading = ''
        elif arg.startswith('--plugin'):
            with open(arg.split('=', 1)[1]) as f:
                plugin = f.read()
            eval(plugin)  # should append itself to plugins
            leading = ''
        elif leading == 'preload' or leading == 'embed':
            mode = leading
            # position of @ if we're doing 'src@dst'. '__' is used to keep the index
            # same with the original if they escaped with '@@'.
            at_position = arg.replace('@@', '__').find('@')
            # '@@' in input string means there is an actual @ character, a single '@'
            # means the 'src@dst' notation.
            uses_at_notation = (at_position != -1)

            if uses_at_notation:
                srcpath = arg[0:at_position].replace('@@',
                                                     '@')  # split around the @
                dstpath = arg[at_position + 1:].replace('@@', '@')
            else:
                # Use source path as destination path.
                srcpath = dstpath = arg.replace('@@', '@')
            if os.path.isfile(srcpath) or os.path.isdir(srcpath):
                data_files.append({
                    'srcpath': srcpath,
                    'dstpath': dstpath,
                    'mode': mode,
                    'explicit_dst_path': uses_at_notation
                })
            else:
                print('Warning: ' + arg + ' does not exist, ignoring.',
                      file=sys.stderr)
        elif leading == 'exclude':
            excluded_patterns.append(arg)
        else:
            print('Unknown parameter:', arg, file=sys.stderr)
            sys.exit(1)

    if (not force) and not data_files:
        has_preloaded = False
    if not has_preloaded or jsoutput is None:
        assert not separate_metadata, (
            'cannot separate-metadata without both --preloaded files '
            'and a specified --js-output')

    if not from_emcc:
        print(
            'Remember to build the main file with  -s FORCE_FILESYSTEM=1  '
            'so that it includes support for loading this file package',
            file=sys.stderr)

    ret = ''
    # emcc.py will add this to the output itself, so it is only needed for
    # standalone calls
    if not from_emcc:
        ret = '''
  var Module = typeof %(EXPORT_NAME)s !== 'undefined' ? %(EXPORT_NAME)s : {};
  ''' % {
            "EXPORT_NAME": export_name
        }

    ret += '''
  if (!Module.expectedDataFileDownloads) {
    Module.expectedDataFileDownloads = 0;
    Module.finishedDataFileDownloads = 0;
  }
  Module.expectedDataFileDownloads++;
  (function() {
   var loadPackage = function(metadata) {
  '''

    code = '''
      function assert(check, msg) {
        if (!check) throw msg + new Error().stack;
      }
  '''

    new_data_files = []
    for file_ in data_files:
        if not should_ignore(file_['srcpath']):
            if os.path.isdir(file_['srcpath']):
                add(file_['mode'], file_['srcpath'], file_['dstpath'])
            else:
                new_data_files.append(file_)
    data_files = [
        file_ for file_ in new_data_files
        if not os.path.isdir(file_['srcpath'])
    ]
    if len(data_files) == 0:
        print('Nothing to do!', file=sys.stderr)
        sys.exit(1)

    # Absolutize paths, and check that they make sense
    # os.getcwd() always returns the hard path with any symbolic links resolved,
    # even if we cd'd into a symbolic link.
    curr_abspath = os.path.abspath(os.getcwd())

    for file_ in data_files:
        if not file_['explicit_dst_path']:
            # This file was not defined with src@dst, so we inferred the destination
            # from the source. In that case, we require that the destination not be
            # under the current location
            path = file_['dstpath']
            # Use os.path.realpath to resolve any symbolic links to hard paths,
            # to match the structure in curr_abspath.
            abspath = os.path.realpath(os.path.abspath(path))
            if DEBUG:
                print(path, abspath, curr_abspath, file=sys.stderr)
            if not abspath.startswith(curr_abspath):
                print(
                    'Error: Embedding "%s" which is below the current directory '
                    '"%s". This is invalid since the current directory becomes the '
                    'root that the generated code will see' %
                    (path, curr_abspath),
                    file=sys.stderr)
                sys.exit(1)
            file_['dstpath'] = abspath[len(curr_abspath) + 1:]
            if os.path.isabs(path):
                print(
                    'Warning: Embedding an absolute file/directory name "%s" to the '
                    'virtual filesystem. The file will be made available in the '
                    'relative path "%s". You can use the explicit syntax '
                    '--preload-file srcpath@dstpath to explicitly specify the target '
                    'location the absolute source path should be directed to.'
                    % (path, file_['dstpath']),
                    file=sys.stderr)

    for file_ in data_files:
        # name in the filesystem, native and emulated
        file_['dstpath'] = file_['dstpath'].replace(os.path.sep, '/')
        # If user has submitted a directory name as the destination but omitted
        # the destination filename, use the filename from source file
        if file_['dstpath'].endswith('/'):
            file_['dstpath'] = file_['dstpath'] + os.path.basename(
                file_['srcpath'])
        # make destination path always relative to the root
        file_['dstpath'] = posixpath.normpath(
            os.path.join('/', file_['dstpath']))
        if DEBUG:
            print('Packaging file "%s" to VFS in path "%s".' %
                  (file_['srcpath'], file_['dstpath']),
                  file=sys.stderr)

    # Remove duplicates (can occur naively, for example preload dir/, preload dir/subdir/)
    seen = {}

    def was_seen(name):
        if seen.get(name):
            return True
        seen[name] = 1
        return False

    data_files = [
        file_ for file_ in data_files if not was_seen(file_['dstpath'])
    ]

    if AV_WORKAROUND:
        random.shuffle(data_files)

    # Apply plugins
    for file_ in data_files:
        for plugin in plugins:
            plugin(file_)

    metadata = {'files': []}

    # Set up folders
    partial_dirs = []
    for file_ in data_files:
        dirname = os.path.dirname(file_['dstpath'])
        dirname = dirname.lstrip(
            '/')  # absolute paths start with '/', remove that
        if dirname != '':
            parts = dirname.split('/')
            for i in range(len(parts)):
                partial = '/'.join(parts[:i + 1])
                if partial not in partial_dirs:
                    code += (
                        '''Module['FS_createPath']('/%s', '%s', true, true);\n'''
                        % ('/'.join(parts[:i]), parts[i]))
                    partial_dirs.append(partial)

    if has_preloaded:
        # Bundle all datafiles into one archive. Avoids doing lots of simultaneous
        # XHRs which has overhead.
        start = 0
        with open(data_target, 'wb') as data:
            for file_ in data_files:
                file_['data_start'] = start
                with open(file_['srcpath'], 'rb') as f:
                    curr = f.read()
                file_['data_end'] = start + len(curr)
                if AV_WORKAROUND:
                    curr += '\x00'
                start += len(curr)
                data.write(curr)

        # TODO: sha256sum on data_target
        if start > 256 * 1024 * 1024:
            print(
                'warning: file packager is creating an asset bundle of %d MB. '
                'this is very large, and browsers might have trouble loading it. '
                'see https://hacks.mozilla.org/2015/02/synchronous-execution-and-filesystem-access-in-emscripten/'
                % (start / (1024 * 1024)),
                file=sys.stderr)

        create_preloaded = '''
          Module['FS_createPreloadedFile'](this.name, null, byteArray, true, true, function() {
            Module['removeRunDependency']('fp ' + that.name);
          }, function() {
            if (that.audio) {
              Module['removeRunDependency']('fp ' + that.name); // workaround for chromium bug 124926 (still no audio with this, but at least we don't hang)
            } else {
              err('Preloading file ' + that.name + ' failed');
            }
          }, false, true); // canOwn this data in the filesystem, it is a slide into the heap that will never change
  '''
        create_data = '''
          Module['FS_createDataFile'](this.name, null, byteArray, true, true, true); // canOwn this data in the filesystem, it is a slide into the heap that will never change
          Module['removeRunDependency']('fp ' + that.name);
  '''

        # Data requests - for getting a block of data out of the big archive - have
        # a similar API to XHRs
        code += '''
      function DataRequest(start, end, audio) {
        this.start = start;
        this.end = end;
        this.audio = audio;
      }
      DataRequest.prototype = {
        requests: {},
        open: function(mode, name) {
          this.name = name;
          this.requests[name] = this;
          Module['addRunDependency']('fp ' + this.name);
        },
        send: function() {},
        onload: function() {
          var byteArray = this.byteArray.subarray(this.start, this.end);
          this.finish(byteArray);
        },
        finish: function(byteArray) {
          var that = this;
  %s
          this.requests[this.name] = null;
        }
      };
  %s
    ''' % (create_preloaded if use_preload_plugins else create_data, '''
          var files = metadata['files'];
          for (var i = 0; i < files.length; ++i) {
            new DataRequest(files[i]['start'], files[i]['end'], files[i]['audio']).open('GET', files[i]['filename']);
          }
  ''' if not lz4 else '')

    counter = 0
    for file_ in data_files:
        filename = file_['dstpath']
        dirname = os.path.dirname(filename)
        basename = os.path.basename(filename)
        if file_['mode'] == 'embed':
            # Embed
            with open(file_['srcpath'], 'rb') as f:
                data = list(bytearray(f.read()))
            code += '''var fileData%d = [];\n''' % counter
            if data:
                parts = []
                chunk_size = 10240
                start = 0
                while start < len(data):
                    parts.append(
                        '''fileData%d.push.apply(fileData%d, %s);\n''' %
                        (counter, counter, str(
                            data[start:start + chunk_size])))
                    start += chunk_size
                code += ''.join(parts)
            code += (
                '''Module['FS_createDataFile']('%s', '%s', fileData%d, true, true, false);\n'''
                % (dirname, basename, counter))
            counter += 1
        elif file_['mode'] == 'preload':
            # Preload
            counter += 1
            metadata['files'].append({
                'filename': file_['dstpath'],
                'start': file_['data_start'],
                'end': file_['data_end'],
                'audio': 1 if filename[-4:] in AUDIO_SUFFIXES else 0,
            })
        else:
            assert 0, 'unknown file mode: %s' % file_['mode']

    if has_preloaded:
        if not lz4:
            # Get the big archive and split it up
            if heap_copy:
                use_data = '''
          // copy the entire loaded file into a spot in the heap. Files will refer to slices in that. They cannot be freed though
          // (we may be allocating before malloc is ready, during startup).
          var ptr = Module['getMemory'](byteArray.length);
          Module['HEAPU8'].set(byteArray, ptr);
          DataRequest.prototype.byteArray = Module['HEAPU8'].subarray(ptr, ptr+byteArray.length);
    '''
            else:
                use_data = '''
          // Reuse the bytearray from the XHR as the source for file reads.
          DataRequest.prototype.byteArray = byteArray;
    '''
            use_data += '''
            var files = metadata['files'];
            for (var i = 0; i < files.length; ++i) {
              DataRequest.prototype.requests[files[i].filename].onload();
            }
      '''
            use_data += (
                "          Module['removeRunDependency']('datafile_%s');\n" %
                shared.JS.escape_for_js_string(data_target))

        else:
            # LZ4FS usage
            temp = data_target + '.orig'
            shutil.move(data_target, temp)
            meta = run_js(shared.path_from_root('tools', 'lz4-compress.js'),
                          shared.NODE_JS, [
                              shared.path_from_root('src', 'mini-lz4.js'),
                              temp, data_target
                          ],
                          stdout=PIPE)
            os.unlink(temp)
            use_data = '''
            var compressedData = %s;
            compressedData['data'] = byteArray;
            assert(typeof LZ4 === 'object', 'LZ4 not present - was your app built with  -s LZ4=1  ?');
            LZ4.loadPackage({ 'metadata': metadata, 'compressedData': compressedData });
            Module['removeRunDependency']('datafile_%s');
      ''' % (meta, shared.JS.escape_for_js_string(data_target))

        package_uuid = uuid.uuid4()
        package_name = data_target
        remote_package_size = os.path.getsize(package_name)
        remote_package_name = os.path.basename(package_name)
        ret += r'''
      var PACKAGE_PATH;
      if (typeof window === 'object') {
        PACKAGE_PATH = window['encodeURIComponent'](window.location.pathname.toString().substring(0, window.location.pathname.toString().lastIndexOf('/')) + '/');
      } else if (typeof location !== 'undefined') {
        // worker
        PACKAGE_PATH = encodeURIComponent(location.pathname.toString().substring(0, location.pathname.toString().lastIndexOf('/')) + '/');
      } else {
        throw 'using preloaded data can only be done on a web page or in a web worker';
      }
      var PACKAGE_NAME = '%s';
      var REMOTE_PACKAGE_BASE = '%s';
      if (typeof Module['locateFilePackage'] === 'function' && !Module['locateFile']) {
        Module['locateFile'] = Module['locateFilePackage'];
        err('warning: you defined Module.locateFilePackage, that has been renamed to Module.locateFile (using your locateFilePackage for now)');
      }
      var REMOTE_PACKAGE_NAME = Module['locateFile'] ? Module['locateFile'](REMOTE_PACKAGE_BASE, '') : REMOTE_PACKAGE_BASE;
    ''' % (shared.JS.escape_for_js_string(data_target),
           shared.JS.escape_for_js_string(remote_package_name))
        metadata['remote_package_size'] = remote_package_size
        metadata['package_uuid'] = str(package_uuid)
        ret += '''
      var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];
      var PACKAGE_UUID = metadata['package_uuid'];
    '''

        if use_preload_cache:
            code += r'''
        var indexedDB = window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB || window.msIndexedDB;
        var IDB_RO = "readonly";
        var IDB_RW = "readwrite";
        var DB_NAME = "''' + indexeddb_name + '''";
        var DB_VERSION = 1;
        var METADATA_STORE_NAME = 'METADATA';
        var PACKAGE_STORE_NAME = 'PACKAGES';
        function openDatabase(callback, errback) {
          try {
            var openRequest = indexedDB.open(DB_NAME, DB_VERSION);
          } catch (e) {
            return errback(e);
          }
          openRequest.onupgradeneeded = function(event) {
            var db = event.target.result;

            if(db.objectStoreNames.contains(PACKAGE_STORE_NAME)) {
              db.deleteObjectStore(PACKAGE_STORE_NAME);
            }
            var packages = db.createObjectStore(PACKAGE_STORE_NAME);

            if(db.objectStoreNames.contains(METADATA_STORE_NAME)) {
              db.deleteObjectStore(METADATA_STORE_NAME);
            }
            var metadata = db.createObjectStore(METADATA_STORE_NAME);
          };
          openRequest.onsuccess = function(event) {
            var db = event.target.result;
            callback(db);
          };
          openRequest.onerror = function(error) {
            errback(error);
          };
        };

        // This is needed as Chromium limits the size of a single entry in IndexedDB
        // https://cs.chromium.org/chromium/src/content/renderer/indexed_db/webidbdatabase_impl.cc?type=cs&sq=package:chromium&g=0&l=177
        // https://cs.chromium.org/chromium/src/out/Debug/gen/third_party/blink/public/mojom/indexeddb/indexeddb.mojom.h?type=cs&sq=package:chromium&g=0&l=60
        // We set the chunk size to 64MB to stay well below the limit
        var CHUNK_SIZE = 64 * 1024 * 1024;
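        // e.g. a 150 MB package would be stored as Math.ceil(150 / 64) = 3 chunks,
        // each at most 64 MB.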

        function cacheRemotePackage(
          db,
          packageName,
          packageData,
          packageMeta,
          callback,
          errback
        ) {
          var transactionPackages = db.transaction([PACKAGE_STORE_NAME], IDB_RW);
          var packages = transactionPackages.objectStore(PACKAGE_STORE_NAME);
          var chunkSliceStart = 0;
          var nextChunkSliceStart = 0;
          var chunkCount = Math.ceil(packageData.byteLength / CHUNK_SIZE);
          var finishedChunks = 0;
          for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
            nextChunkSliceStart += CHUNK_SIZE;
            var putPackageRequest = packages.put(
              packageData.slice(chunkSliceStart, nextChunkSliceStart),
              'package/' + packageName + '/' + chunkId
            );
            chunkSliceStart = nextChunkSliceStart;
            putPackageRequest.onsuccess = function(event) {
              finishedChunks++;
              if (finishedChunks == chunkCount) {
                var transaction_metadata = db.transaction(
                  [METADATA_STORE_NAME],
                  IDB_RW
                );
                var metadata = transaction_metadata.objectStore(METADATA_STORE_NAME);
                var putMetadataRequest = metadata.put(
                  {
                    'uuid': packageMeta.uuid,
                    'chunkCount': chunkCount
                  },
                  'metadata/' + packageName
                );
                putMetadataRequest.onsuccess = function(event) {
                  callback(packageData);
                };
                putMetadataRequest.onerror = function(error) {
                  errback(error);
                };
              }
            };
            putPackageRequest.onerror = function(error) {
              errback(error);
            };
          }
        }

        /* Check if there's a cached package, and if so whether it's the latest available */
        function checkCachedPackage(db, packageName, callback, errback) {
          var transaction = db.transaction([METADATA_STORE_NAME], IDB_RO);
          var metadata = transaction.objectStore(METADATA_STORE_NAME);
          var getRequest = metadata.get('metadata/' + packageName);
          getRequest.onsuccess = function(event) {
            var result = event.target.result;
            if (!result) {
              return callback(false, null);
            } else {
              return callback(PACKAGE_UUID === result['uuid'], result);
            }
          };
          getRequest.onerror = function(error) {
            errback(error);
          };
        }

        function fetchCachedPackage(db, packageName, metadata, callback, errback) {
          var transaction = db.transaction([PACKAGE_STORE_NAME], IDB_RO);
          var packages = transaction.objectStore(PACKAGE_STORE_NAME);

          var chunksDone = 0;
          var totalSize = 0;
          var chunkCount = metadata['chunkCount'];
          var chunks = []; // filled via push() below; new Array(chunkCount) would leave holes ahead of the pushed entries

          for (var chunkId = 0; chunkId < chunkCount; chunkId++) {
            var getRequest = packages.get('package/' + packageName + '/' + chunkId);
            getRequest.onsuccess = function(event) {
              // If there's only 1 chunk, there's nothing to concatenate it with so we can just return it now
              if (chunkCount == 1) {
                callback(event.target.result);
              } else {
                chunksDone++;
                totalSize += event.target.result.byteLength;
                chunks.push(event.target.result);
                if (chunksDone == chunkCount) {
                  if (chunksDone == 1) {
                    callback(event.target.result);
                  } else {
                    var tempTyped = new Uint8Array(totalSize);
                    var byteOffset = 0;
                    for (var chunkId in chunks) {
                      var buffer = chunks[chunkId];
                      tempTyped.set(new Uint8Array(buffer), byteOffset);
                      byteOffset += buffer.byteLength;
                      buffer = undefined;
                    }
                    chunks = undefined;
                    callback(tempTyped.buffer);
                    tempTyped = undefined;
                  }
                }
              }
            };
            getRequest.onerror = function(error) {
              errback(error);
            };
          }
        }
      '''

        ret += r'''
      function fetchRemotePackage(packageName, packageSize, callback, errback) {
        var xhr = new XMLHttpRequest();
        xhr.open('GET', packageName, true);
        xhr.responseType = 'arraybuffer';
        xhr.onprogress = function(event) {
          var url = packageName;
          var size = packageSize;
          if (event.total) size = event.total;
          if (event.loaded) {
            if (!xhr.addedTotal) {
              xhr.addedTotal = true;
              if (!Module.dataFileDownloads) Module.dataFileDownloads = {};
              Module.dataFileDownloads[url] = {
                loaded: event.loaded,
                total: size
              };
            } else {
              Module.dataFileDownloads[url].loaded = event.loaded;
            }
            var total = 0;
            var loaded = 0;
            var num = 0;
            for (var download in Module.dataFileDownloads) {
              var data = Module.dataFileDownloads[download];
              total += data.total;
              loaded += data.loaded;
              num++;
            }
            total = Math.ceil(total * Module.expectedDataFileDownloads/num);
            if (Module['setStatus']) Module['setStatus']('Downloading data... (' + loaded + '/' + total + ')');
          } else if (!Module.dataFileDownloads) {
            if (Module['setStatus']) Module['setStatus']('Downloading data...');
          }
        };
        xhr.onerror = function(event) {
          throw new Error("NetworkError for: " + packageName);
        }
        xhr.onload = function(event) {
          if (xhr.status == 200 || xhr.status == 304 || xhr.status == 206 || (xhr.status == 0 && xhr.response)) { // file URLs can return 0
            var packageData = xhr.response;
            callback(packageData);
          } else {
            throw new Error(xhr.statusText + " : " + xhr.responseURL);
          }
        };
        xhr.send(null);
      };

      function handleError(error) {
        console.error('package error:', error);
      };
    '''

        code += r'''
      function processPackageData(arrayBuffer) {
        Module.finishedDataFileDownloads++;
        assert(arrayBuffer, 'Loading data file failed.');
        assert(arrayBuffer instanceof ArrayBuffer, 'bad input to processPackageData');
        var byteArray = new Uint8Array(arrayBuffer);
        var curr;
        %s
      };
      Module['addRunDependency']('datafile_%s');
    ''' % (use_data, shared.JS.escape_for_js_string(data_target))
        # use basename because from the browser's point of view,
        # we need to find the datafile in the same dir as the html file

        code += r'''
      if (!Module.preloadResults) Module.preloadResults = {};
    '''

        if use_preload_cache:
            code += r'''
        function preloadFallback(error) {
          console.error(error);
          console.error('falling back to default preload behavior');
          fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, processPackageData, handleError);
        };

        openDatabase(
          function(db) {
            checkCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME,
              function(useCached, metadata) {
                Module.preloadResults[PACKAGE_NAME] = {fromCache: useCached};
                if (useCached) {
                  fetchCachedPackage(db, PACKAGE_PATH + PACKAGE_NAME, metadata, processPackageData, preloadFallback);
                } else {
                  fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE,
                    function(packageData) {
                      cacheRemotePackage(db, PACKAGE_PATH + PACKAGE_NAME, packageData, {uuid:PACKAGE_UUID}, processPackageData,
                        function(error) {
                          console.error(error);
                          processPackageData(packageData);
                        });
                    }
                  , preloadFallback);
                }
              }
            , preloadFallback);
          }
        , preloadFallback);

        if (Module['setStatus']) Module['setStatus']('Downloading...');
      '''
        else:
            # Not using preload cache, so we might as well start the xhr ASAP,
            # potentially before JS parsing of the main codebase if it's after us.
            # Only tricky bit is the fetch is async, but also when runWithFS is called
            # is async, so we handle both orderings.
            ret += r'''
        var fetchedCallback = null;
        var fetched = Module['getPreloadedPackage'] ? Module['getPreloadedPackage'](REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE) : null;

        if (!fetched) fetchRemotePackage(REMOTE_PACKAGE_NAME, REMOTE_PACKAGE_SIZE, function(data) {
          if (fetchedCallback) {
            fetchedCallback(data);
            fetchedCallback = null;
          } else {
            fetched = data;
          }
        }, handleError);
      '''

            code += r'''
        Module.preloadResults[PACKAGE_NAME] = {fromCache: false};
        if (fetched) {
          processPackageData(fetched);
          fetched = null;
        } else {
          fetchedCallback = processPackageData;
        }
      '''

    ret += '''
    function runWithFS() {
  '''
    ret += code
    ret += '''
    }
    if (Module['calledRun']) {
      runWithFS();
    } else {
      if (!Module['preRun']) Module['preRun'] = [];
      Module["preRun"].push(runWithFS); // FS is not initialized yet, wait for it
    }
  '''

    if separate_metadata:
        _metadata_template = '''
    Module['removeRunDependency']('%(metadata_file)s');
   }

   function runMetaWithFS() {
    Module['addRunDependency']('%(metadata_file)s');
    var REMOTE_METADATA_NAME = Module['locateFile'] ? Module['locateFile']('%(metadata_file)s', '') : '%(metadata_file)s';
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
     if (xhr.readyState === 4 && xhr.status === 200) {
       loadPackage(JSON.parse(xhr.responseText));
     }
    }
    xhr.open('GET', REMOTE_METADATA_NAME, true);
    xhr.overrideMimeType('application/json');
    xhr.send(null);
   }

   if (Module['calledRun']) {
    runMetaWithFS();
   } else {
    if (!Module['preRun']) Module['preRun'] = [];
    Module["preRun"].push(runMetaWithFS);
   }
  ''' % {
            'metadata_file': os.path.basename(jsoutput + '.metadata')
        }

    else:
        _metadata_template = '''
   }
   loadPackage(%s);
  ''' % json.dumps(metadata)

    ret += '''%s
  })();
  ''' % _metadata_template

    if force or len(data_files):
        if jsoutput is None:
            print(ret)
        else:
            # Overwrite the old jsoutput file (if it exists) only when its
            # content differs from the newly generated one; otherwise leave the
            # file untouched, preserving its old timestamp.
            if os.path.isfile(jsoutput):
                with open(jsoutput) as f:
                    old = f.read()
                if old != ret:
                    with open(jsoutput, 'w') as f:
                        f.write(ret)
            else:
                with open(jsoutput, 'w') as f:
                    f.write(ret)
            if separate_metadata:
                with open(jsoutput + '.metadata', 'w') as f:
                    json.dump(metadata, f, separators=(',', ':'))

    return 0
Code Example #44
def get_file_path(instance, filename):
    ext = filename.split('.')[-1]
    filename = "%s.%s" % (uuid.uuid4(), ext)
    return os.path.join('projects/', filename)
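
A minimal sketch of how a helper like this is typically wired up in Django, where upload_to may be a callable taking (instance, filename); the Project model here is hypothetical:

from django.db import models

class Project(models.Model):
    # Hypothetical model for illustration: Django calls get_file_path with
    # (instance, filename) and stores the returned relative path.
    attachment = models.FileField(upload_to=get_file_path)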
Code Example #45
 def _generate_session(self, user):
     _session = uuid4().hex
     self.SESSION[_session] = USERS.get(user)
     return _session
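
A matching lookup helper, assuming the same in-memory SESSION mapping as above (a hypothetical sketch, not part of the original class):

 def _get_session_user(self, session_token):
     # Resolve an opaque session token back to its user; returns None for
     # unknown tokens, mirroring dict.get semantics.
     return self.SESSION.get(session_token)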
Code Example #46
    def _test_open_cinder_volume(self,
                                 open_mode,
                                 attach_mode,
                                 error,
                                 multipath_supported=False,
                                 enforce_multipath=False,
                                 encrypted_nfs=False):
        self.config(cinder_mount_point_base=None)
        fake_volume = mock.MagicMock(id=str(uuid.uuid4()), status='available')
        fake_volumes = FakeObject(get=lambda id: fake_volume,
                                  detach=mock.Mock())
        fake_client = FakeObject(volumes=fake_volumes)
        _, fake_dev_path = tempfile.mkstemp(dir=self.test_dir)
        fake_devinfo = {'path': fake_dev_path}
        fake_connector = FakeObject(
            connect_volume=mock.Mock(return_value=fake_devinfo),
            disconnect_volume=mock.Mock())

        @contextlib.contextmanager
        def fake_chown(path, backend=None):
            yield

        def do_open():
            with self.store._open_cinder_volume(fake_client, fake_volume,
                                                open_mode):
                if error:
                    raise error

        def fake_factory(protocol, root_helper, **kwargs):
            self.assertEqual(fake_volume.initialize_connection.return_value,
                             kwargs['conn'])
            return fake_connector

        root_helper = "sudo glance-rootwrap /etc/glance/rootwrap.conf"
        with mock.patch.object(cinder.Store,
                               '_wait_volume_status',
                               return_value=fake_volume), \
                mock.patch.object(cinder.Store, 'temporary_chown',
                                  side_effect=fake_chown), \
                mock.patch.object(cinder.Store, 'get_root_helper',
                                  return_value=root_helper), \
                mock.patch.object(connector.InitiatorConnector, 'factory',
                                  side_effect=fake_factory):

            with mock.patch.object(connector,
                                   'get_connector_properties') as mock_conn:
                if error:
                    self.assertRaises(error, do_open)
                elif encrypted_nfs:
                    fake_volume.initialize_connection.return_value = {
                        'driver_volume_type': 'nfs'
                    }
                    fake_volume.encrypted = True
                    try:
                        with self.store._open_cinder_volume(
                                fake_client, fake_volume, open_mode):
                            pass
                    except exceptions.BackendException:
                        self.assertEqual(1, fake_volume.unreserve.call_count)
                        self.assertEqual(1, fake_volume.delete.call_count)
                else:
                    do_open()

                if not encrypted_nfs:
                    mock_conn.assert_called_once_with(root_helper,
                                                      socket.gethostname(),
                                                      multipath_supported,
                                                      enforce_multipath)
                    fake_connector.connect_volume.assert_called_once_with(
                        mock.ANY)
                    fake_connector.disconnect_volume.assert_called_once_with(
                        mock.ANY, fake_devinfo)
                    fake_volume.attach.assert_called_once_with(
                        None,
                        'glance_store',
                        attach_mode,
                        host_name=socket.gethostname())
                    fake_volumes.detach.assert_called_once_with(fake_volume)
Code Example #47
import sys
try:
    metaID = sys.argv[1]
except Exception:
    metaID = None
import json
import uuid
taskParamMap = {}
taskParamMap['taskName'] = str(uuid.uuid4())
taskParamMap['userName'] = '******'
taskParamMap['vo'] = 'atlas'
taskParamMap['taskPriority'] = 100
taskParamMap['architecture'] = 'i686-slc5-gcc43-opt'
taskParamMap['transUses'] = 'Atlas-17.2.7'
taskParamMap['transHome'] = 'AtlasProduction-17.2.8.10'
taskParamMap['transPath'] = 'Reco_trf.py'
taskParamMap['processingType'] = 'reco'
taskParamMap['prodSourceLabel'] = 'test'
taskParamMap['taskType'] = 'prod'
taskParamMap['workingGroup'] = 'AP_Higgs'
taskParamMap['coreCount'] = 1
taskParamMap['cloud'] = 'US'
logDatasetName = 'panda.jeditest.log.{0}'.format(uuid.uuid4())
taskParamMap['log'] = {'dataset': logDatasetName,
                       'type':'template',
                       'param_type':'log',
                       'token':'ATLASDATADISK',
                       'value':'{0}.${{SN}}.log.tgz'.format(logDatasetName)}
outDatasetName = 'panda.jeditest.NTUP_EMBLLDN.{0}'.format(uuid.uuid4())
taskParamMap['jobParameters'] = [
    {'type':'template',
Code Example #48
    def test(self):
        sleep(0.5)
        self.logger.log("************************************")
        sleep(0.5)
        self.logger.log('Sign Up initialized', 'yellow')
        sleep(0.5)
        self.logger.log("************************************", 'green')
        mail_var = uuid.uuid4().hex
        name = 'name'
        surname = 'surname'
        email = '*****@*****.**'
        newmail = "mail_" + mail_var + "@mail.com"
        pass1 = 'abcd'
        pass2 = "testpass"
        flag_err = 0

        flag = 0
        while flag == 0:
            try:
                self.driver.find_element_by_id('tvPass')
                flag = 1
            except NoSuchElementException:
                try:
                    self.driver.find_element_by_id("tvLogin")
                    flag = 2
                except NoSuchElementException:
                    try:
                        self.driver.find_element_by_id("drawer_indicator")
                        flag = 3
                    except NoSuchElementException:
                        sleep(1)
                        self.logger.log("Wait")
                        flag = 0
        if flag == 1:
            self.logger.log("Pressing Pass")
            self.driver.find_element_by_id('tvPass').click()

        if flag == 2:
            sleep(1)

        if flag == 3:
            sleep(1)

        self.logger.log("Pressing Sign Up")
        self.driver.find_element_by_id('tvSingUp').click()
        self.logger.log("Inserting name '" + name + "'")
        self.driver.find_element_by_id('etName').send_keys(name)
        self.logger.log("Inserting surname '" + surname + "'")
        self.driver.find_element_by_id('etSurname').send_keys(surname)
        self.logger.log("Inserting email '" + email + "'")
        self.driver.find_element_by_id('etEmail').send_keys(email)
        self.logger.log("Inserting password '" + pass1 + "'")

        self.driver.find_element_by_id('etPassword').send_keys(pass1)
        self.driver.back()
        self.logger.log("Clicking button to create account")
        self.logger.log("ERROR is expected")
        self.driver.find_element_by_id('btnCreateAccount').click()
        try:
            self.driver.find_element_by_id("tvPositive")
            flag = 1
        except NoSuchElementException:
            flag = 0

        if flag == 1:
            self.logger.log("Error for unchecked agreement shown")
            self.appium_worker.screenshot("Error_unchecked_agreement")
            self.driver.find_element_by_id("tvPositive").click()
            sleep(1)
        else:
            self.logger.log("Error of agreement was expected")
            self.logger.log("Quit driver. Test Unsuccessful")
            self.driver.quit()
        self.logger.log("Selecting agreement button")
        self.driver.find_element_by_id("tvReadAgreement").click()
        sleep(1)
        self.logger.log("Clicking OK")
        flag = 0
        while flag == 0:
            try:
                self.driver.find_element_by_id("tvPositive")
                flag = 1
            except NoSuchElementException:
                sleep(1)
                flag = 0
        self.driver.find_element_by_id("tvPositive").click()
        self.logger.log("Clicking to read agreement")
        self.driver.find_element_by_id("cbReadAgreement").click()
        self.logger.log("Entering phone number '" + "5544984171'")
        self.driver.find_element_by_id('etPhoneNumber').send_keys("5544984171")
        self.driver.back()
        self.logger.log("Clicking button to create account")
        self.driver.find_element_by_id('btnCreateAccount').click()
        self.logger.log("ERROR for sign-up field is expected")
        try:
            self.driver.find_element_by_id("tvPositive")
            flag = 1
        except NoSuchElementException:
            flag = 0
        if flag == 1:
            self.logger.log("Error for sign up fields is shown")
            self.appium_worker.screenshot("Error_fields")
            self.driver.find_element_by_id("tvPositive").click()

        else:
            self.logger.log("Error was expected")
            flag_err += 1
            self.logger.log("Quit driver. Test Unsuccessful")
            self.driver.quit()
        self.logger.log("Adding new password '" + pass2 + "'")
        self.driver.find_element_by_id("etPassword").clear()
        self.driver.find_element_by_id("etPassword").clear()
        self.driver.find_element_by_id('etPassword').send_keys(pass2)
        self.driver.back()
        self.logger.log("Adding birthday")
        self.driver.find_element_by_id("rlBirthday").click()
        self.driver.find_element_by_id("date_picker_year").click()
        self.logger.log("Clicking DONE")
        self.driver.find_element_by_id("done").click()
        self.logger.log("Clicking button to create account")
        self.driver.find_element_by_id('btnCreateAccount').click()
        flag = 0
        while flag == 0:
            try:
                self.driver.find_element_by_id("tvPositive")
                flag = 0
            except NoSuchElementException:
                flag = 1
            if flag == 0:
                self.logger.log("Email is used")
                self.appium_worker.screenshot("Used_email")
                self.logger.log("Clicking OK")
                self.driver.find_element_by_id("tvPositive").click()
                self.logger.log("Change email to '" + newmail + "'")
                self.driver.find_element_by_id("etEmail").clear()
                self.driver.find_element_by_id("etEmail").clear()
                self.driver.find_element_by_id("etEmail").send_keys(newmail)
                self.driver.back()
                self.logger.log("Clicking button to create account")
                self.driver.find_element_by_id('btnCreateAccount').click()
                sleep(3)
            if flag == 1:
                self.logger.log("Email not used")
                sleep(2)
        self.logger.log("******************************")
        sleep(0.5)
        self.appium_worker.bekle_android("ivRegAmountInfo")
        self.logger.log("Showing page '" + codecs.encode(
            self.driver.find_element_by_id("tvHeader").text, 'utf-8') + "'")
        self.logger.log("Clicking Detailed Information")
        self.driver.find_element_by_id("tvDetailedInfo").click()
        self.logger.log("Showing detailed information")
        sleep(1)
        self.logger.log("Clicking OK")
        self.driver.find_element_by_id("tvPositive").click()
        self.logger.log("Clicking to show registration payment information")
        self.driver.find_element_by_id("ivRegAmountInfo").click()
        try:
            self.driver.find_element_by_id("tvPositive")
            flag = 1
        except NoSuchElementException:
            flag = 0
            self.logger.log(
                "Registration fee information was expected to be shown")
            self.logger.log("ERROR!!")
            flag_err += 1
        if flag == 1:
            sleep(2)
            self.driver.find_element_by_id("tvPositive").click()
        self.logger.log("Clicking to show membership type info")
        self.driver.find_element_by_id("ivMembershipType").click()
        try:
            self.driver.find_element_by_id("tvPositive")
            flag = 1
        except NoSuchElementException:
            flag = 0
            self.logger.log("Membership type information was expected")
            self.logger.log("ERROR!!")
            flag_err += 1
        if flag == 1:
            sleep(2)
            self.driver.find_element_by_id("tvPositive").click()
        self.logger.log("Change membership type")
        self.driver.find_element_by_id("llYearly").click()
        self.logger.log("Inserting promotion code '" + "REGISTERTEST'")
        self.driver.find_element_by_id("etPromotionCode").send_keys(
            'REGISTERTEST')
        self.driver.back()
        self.logger.log("Clicking Aply")
        self.driver.find_element_by_id("tvApplyPromotionCode").click()
        sleep(1)
        try:
            self.driver.find_element_by_id("tvPositive")
            flag = 1
        except NoSuchElementException:
            flag = 0
        if flag == 1:
            self.logger.log("Promotion code incorrect")
            self.appium_worker.screenshot("Promotion_code_incorrect")
            self.logger.log("Clicking Ok")
            self.driver.find_element_by_id("tvPositive").click()
            self.logger.log("Entering new promotion code '" +
                            "REGISTERTEST062016'")
            self.driver.find_element_by_id("etPromotionCode").clear()
            self.driver.find_element_by_id("etPromotionCode").clear()
            self.driver.find_element_by_id("etPromotionCode").send_keys(
                'REGISTERTEST062016')
            self.driver.back()
            self.logger.log("Clicking Apply button")
            self.driver.find_element_by_id("tvApplyPromotionCode").click()
        else:
            self.logger.log("Promotion code correct")
            # check for a correct promotion code
        self.logger.log("Clicking to enter payment information")
        self.driver.find_element_by_id("tvPaymentInfo").click()
        self.logger.log("Scrolling down")
        element2 = self.driver.find_element_by_class_name(
            "android.widget.RelativeLayout")
        touch_actions = TouchActions(self.driver)
        touch_actions.flick_element(element2, 0, -700, 100).perform()
        touch_actions.perform()
        sleep(3)
        # self.driver.find_element_by_id("etCardHolderName").send_keys("cardholder")
        self.logger.log("Adding card holder surname '" + "Surname'")

        self.driver.find_element_by_id("etCardHolderSurName").send_keys(
            "Surname" + "\n")
        surname = self.driver.find_element_by_id("etCardHolderSurName").text
        self.logger.log("Surname entered : " + str(surname))
        self.driver.find_element_by_id("etCardNumber").send_keys(
            "4090700214269159")
        card = self.driver.find_element_by_id("etCardNumber").text
        self.logger.log("Card number entered : " + str(card))
        self.driver.find_element_by_id("tvExpireDate").click()
        self.driver.find_elements_by_id("numberpicker_input")[1].send_keys(
            "2017")
        self.driver.find_elements_by_id("numberpicker_input")[0].click()
        element2 = self.driver.find_elements_by_id("numberpicker_input")[1]
        touch_actions = TouchActions(self.driver)
        touch_actions.flick_element(element2, 0, -200, 100).perform()
        touch_actions.perform()
        self.driver.find_element_by_id("btnOk").click()
        expire = self.driver.find_element_by_id("tvExpireDate").text
        self.logger.log("Expiring date of card : " + str(expire))

        self.driver.find_element_by_id("ivCCV").click()
        try:
            self.driver.find_element_by_id("tvPositive")
            flag = 1
        except NoSuchElementException:
            flag = 0
        if flag == 1:
            self.logger.log("Information of CCV")
            self.driver.find_element_by_id("tvPositive").click()
        else:
            self.logger.log("CCV information was expected to be shown")
            self.logger.log("ERROR!!")
            flag_err += 1
        self.logger.log("Adding CCV number '494'")
        self.driver.find_element_by_id("etCCV").send_keys("494")
        self.driver.back()
        self.logger.log("Clicking finalize")
        self.driver.find_element_by_id("btnFinalize").click()
        sleep(0.5)
        self.logger.log("Error for missing card information is shown")
        self.appium_worker.screenshot("Missing_card_info_1")
        try:
            self.driver.find_element_by_id("etCardHolderName")
            flag = 1
        except NoSuchElementException:
            flag = 0
        if flag == 1:
            self.logger.log("Adding card holder name 'Test'")
            self.driver.find_element_by_id("etCardHolderName").send_keys(
                "Test")
            self.driver.back()
            self.logger.log("Clicking finalize")
            self.driver.find_element_by_id("btnFinalize").click()
        else:
            self.logger.log(
                "Error message for payment information was expected")
            self.logger.log("ERROR!!")
            flag_err += 1

        #flag = 0
        #while flag == 0:
        #    self.logger.log("Wait")
        #    try:
        #        self.driver.find_element_by_id("tvPositive")
        #        flag = 1
        #    except NoSuchElementException:
        #        flag = 0
#
#if flag == 1:
#    self.logger.log("Connection time out")
#    self.driver.find_element_by_id("tvPositive").click()
#    self.driver.back()
#else:
#    flag1 = 0
#    while flag1 == 0:
#        try:
#            self.driver.find_element_by_id("tvUserDetail")
#            flag1 = 1
#        except NoSuchElementException:
#            sleep(1)
#            flag1 = 0
#    self.appium_worker.screenshot("Registration_completed")
#    m1 = self.driver.find_element_by_id("tvUserDetail").text
#    self.logger.log("User details: " + m1)
#    self.driver.find_element_by_id("tvStartShopping").click()
#
#    self.logger.log("Clicking HOME button")
#    self.driver.find_element_by_id("ivSubmenuHome").click()
#    sleep(1)
#    self.logger.log("Clicking left sub-menu")
#    self.driver.find_element_by_id("drawer_indicator").click()
#    self.logger.log("Scrolling down")
#    element2 = self.driver.find_element_by_class_name("android.widget.RelativeLayout")
#    touch_actions = TouchActions(self.driver)
#    touch_actions.flick_element(element2, 0, -1000, 100).perform()
#    touch_actions.perform()
#    sleep(1)
#    self.logger.log("Clicking Exit")
#    self.driver.find_elements_by_class_name("android.widget.LinearLayout")[19].click()
#    sleep(2)
#self.driver.back()
        self.logger.log("TEST FINISHED", "green", 1, 2,
                        self.appium_worker.logger.getFileName())

        if flag_err == 0:
            self.logger.log("Test finished without errors \n")
        else:
            self.logger.log("Test finished with  %d errors \n" % flag_err)
Code Example #49
File: MDXView.py Project: zPat/TM1py
from TM1py import TM1pyQueries as TM1, TM1pyLogin, MDXView
import uuid

# establish connection to TM1 Server
login = TM1pyLogin.native('admin', 'apple')
tm1 = TM1(ip='', port=8001, login=login, ssl=False)

# random text
random_string = str(uuid.uuid4())

# create mdx view
mdx = "SELECT {([plan_version].[FY 2003 Budget], [plan_department].[105], [plan_chart_of_accounts].[61030], " \
      "[plan_exchange_rates].[local], [plan_source].[goal] , [plan_time].[Jan-2004]) } on COLUMNS," \
      "{[plan_business_unit].[10110]} on ROWS FROM [plan_BudgetPlan]"
mdx_view = MDXView(cube_name='plan_BudgetPlan', view_name='TM1py_' + random_string, MDX=mdx)

# create mdx view on TM1 Server
tm1.create_view(view=mdx_view)

# get view content
content = tm1.get_view_content(cube_name=mdx_view.cube, view_name=mdx_view.name)
print(content)

# logout
tm1.logout()

Code Example #50
File: test_account.py Project: skorotkiewicz/ieddit
 def setUp(self):
     app.testing = True
     self.app = app.test_client()
     self.getNewID = lambda: str(uuid4()).split('-')[3]
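
Note that str(uuid4()).split('-')[3] keeps only the fourth dash-delimited group of the UUID, i.e. 4 hex digits, so duplicate IDs become likely once a test run creates more than a few hundred of them. If that matters, a longer slice of the hex form is an easy substitute (hypothetical variant):

     self.getNewID = lambda: uuid4().hex[:8]  # 8 hex digits instead of 4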
Code Example #51
def randomSubject():
    return uuid.uuid4().urn
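
For reference, the urn attribute is simply the RFC 4122 URN rendering of the same UUID:

import uuid

u = uuid.uuid4()
print(u.urn)   # e.g. urn:uuid:1b4e28ba-2fa1-4d3a-9ad4-9b6f6acbe8c2
print(str(u))  # the same value without the 'urn:uuid:' prefix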
Code Example #52
def randomID():
    return str(uuid.uuid4())
Code Example #53
 def create_request_elements(cls, request_type, credentials, url, params=None, headers=None,
                             body='', method='GET', verifier='', callback=''):
     """
     Creates |oauth1| request elements.
     """
     
     params = params or {}
     headers = headers or {}
     
     consumer_key = credentials.consumer_key or ''
     consumer_secret = credentials.consumer_secret or ''
     token = credentials.token or ''
     token_secret = credentials.token_secret or ''
     
     # separate url base and query parameters
     url, base_params = cls._split_url(url)
     
     # add extracted params to future params
     params.update(dict(base_params))
     
     if request_type == cls.USER_AUTHORIZATION_REQUEST_TYPE:
         # no need for signature
         if token:
             params['oauth_token'] = token
         else:
             raise OAuth1Error('Credentials with valid token are required to create User Authorization URL!')
     else:
         # signature needed
         if request_type == cls.REQUEST_TOKEN_REQUEST_TYPE:
             # Request Token URL
             if consumer_key and consumer_secret and callback:
                 params['oauth_consumer_key'] = consumer_key
                 params['oauth_callback'] = callback
             else:
                 raise OAuth1Error('Credentials with valid consumer_key, consumer_secret and '
                                   'callback are required to create Request Token URL!')
             
         elif request_type == cls.ACCESS_TOKEN_REQUEST_TYPE:
             # Access Token URL
             if consumer_key and consumer_secret and token and verifier:
                 params['oauth_token'] = token
                 params['oauth_consumer_key'] = consumer_key
                 params['oauth_verifier'] = verifier
             else:
                 raise OAuth1Error('Credentials with valid consumer_key, consumer_secret, token '
                                   'and argument verifier are required to create Access Token URL!')
             
         elif request_type == cls.PROTECTED_RESOURCE_REQUEST_TYPE:
             # Protected Resources URL
             if consumer_key and consumer_secret and token and token_secret:
                 params['oauth_token'] = token
                 params['oauth_consumer_key'] = consumer_key
             else:
                 raise OAuth1Error('Credentials with valid consumer_key, consumer_secret, token and '
                                   'token_secret are required to create Protected Resources URL!')
         
         # Sign request.
         # http://oauth.net/core/1.0a/#anchor13
         
         # Prepare parameters for signature base string
         # http://oauth.net/core/1.0a/#rfc.section.9.1
         params['oauth_signature_method'] = cls._signature_generator.method
         params['oauth_timestamp'] = str(int(time.time()))
         params['oauth_nonce'] = cls.csrf_generator(uuid.uuid4())
         params['oauth_version'] = '1.0'
         
         # add signature to params
         params['oauth_signature'] = cls._signature_generator.create_signature(method, url, params, consumer_secret, token_secret)
     
     request_elements = core.RequestElements(url, method, params, headers, body)
     
     return cls._x_request_elements_filter(request_type, request_elements, credentials)
Code Example #54
import requests
import json
from uuid import uuid4

url = "http://localhost/api/form/upload"

DEPOSIT_ID = str(uuid4())

querystring = {
    "resumableChunkNumber": "1",
    "resumableTotalChunks": "1",
    "deposit_id": DEPOSIT_ID
}

with open('test360.zip', 'rb') as f:
    files = {'file': f}
    response = requests.request("POST", url, files=files, params=querystring)

print(response.text)
url = "http://localhost/api/form/submit"
payload_dict = {
    'media_type':
    'video',
    'id':
    DEPOSIT_ID,
    'form': [{
        'id': 'object_title',
        'value': 'Test 360 video'
    }, {
        'id': 'media_type',
Code Example #55
 def test_factory_defaults(self):
     owner_id = uuid.uuid4().hex
     factory = CaseFactory(case_defaults={'owner_id': owner_id})
     case = factory.create_case()
     self.assertEqual(owner_id, case.owner_id)
Code Example #56
 def test_override_defaults(self):
     owner_id = uuid.uuid4().hex
     factory = CaseFactory(case_defaults={'owner_id': owner_id})
     case = factory.create_case(owner_id='notthedefault')
     self.assertEqual('notthedefault', case.owner_id)
Code Example #57
    def _copy_from_img_service(self, context, volume, image_service, image_id):
        """Copies from the image service using copy offload."""
        LOG.debug("Trying copy from image service using copy offload.")
        image_loc = image_service.get_location(context, image_id)
        locations = self._construct_image_nfs_url(image_loc)
        src_ip = None
        selected_loc = None
        # this will match the first location that has a valid IP on cluster
        for location in locations:
            conn, dr = self._check_get_nfs_path_segs(location)
            if conn:
                try:
                    src_ip = self._get_ip_verify_on_cluster(conn.split(':')[0])
                    selected_loc = location
                    break
                except exception.NotFound:
                    pass
        if src_ip is None:
            raise exception.NotFound(_("Source host details not found."))
        (__, ___, img_file) = selected_loc.rpartition('/')
        src_path = os.path.join(dr, img_file)
        dst_ip = self._get_ip_verify_on_cluster(self._get_host_ip(
            volume['id']))
        # tmp file is required to deal with img formats
        tmp_img_file = six.text_type(uuid.uuid4())
        col_path = self.configuration.netapp_copyoffload_tool_path
        img_info = image_service.show(context, image_id)
        dst_share = self._get_provider_location(volume['id'])
        self._check_share_can_hold_size(dst_share, img_info['size'])
        run_as_root = self._execute_as_root

        dst_dir = self._get_mount_point_for_share(dst_share)
        dst_img_local = os.path.join(dst_dir, tmp_img_file)
        try:
            # If src and dst share not equal
            if (('%s:%s' % (src_ip, dr)) !=
                ('%s:%s' % (dst_ip, self._get_export_path(volume['id'])))):
                dst_img_serv_path = os.path.join(
                    self._get_export_path(volume['id']), tmp_img_file)
                # Always run copy offload as regular user, it's sufficient
                # and rootwrap doesn't allow copy offload to run as root
                # anyways.
                self._execute(col_path,
                              src_ip,
                              dst_ip,
                              src_path,
                              dst_img_serv_path,
                              run_as_root=False,
                              check_exit_code=0)
            else:
                self._clone_file_dst_exists(dst_share, img_file, tmp_img_file)
            self._discover_file_till_timeout(dst_img_local, timeout=120)
            LOG.debug('Copied image %(img)s to tmp file %(tmp)s.', {
                'img': image_id,
                'tmp': tmp_img_file
            })
            dst_img_cache_local = os.path.join(dst_dir,
                                               'img-cache-%s' % image_id)
            if img_info['disk_format'] == 'raw':
                LOG.debug('Image is raw %s.', image_id)
                self._clone_file_dst_exists(dst_share,
                                            tmp_img_file,
                                            volume['name'],
                                            dest_exists=True)
                self._move_nfs_file(dst_img_local, dst_img_cache_local)
                LOG.debug('Copied raw image %(img)s to volume %(vol)s.', {
                    'img': image_id,
                    'vol': volume['id']
                })
            else:
                LOG.debug('Image will be converted to raw %s.', image_id)
                img_conv = six.text_type(uuid.uuid4())
                dst_img_conv_local = os.path.join(dst_dir, img_conv)

                # Checking against image size which is approximate check
                self._check_share_can_hold_size(dst_share, img_info['size'])
                try:
                    image_utils.convert_image(dst_img_local,
                                              dst_img_conv_local,
                                              'raw',
                                              run_as_root=run_as_root)
                    data = image_utils.qemu_img_info(dst_img_conv_local,
                                                     run_as_root=run_as_root)
                    if data.file_format != "raw":
                        raise exception.InvalidResults(
                            _("Converted to raw, but format is now %s.") %
                            data.file_format)
                    else:
                        self._clone_file_dst_exists(dst_share,
                                                    img_conv,
                                                    volume['name'],
                                                    dest_exists=True)
                        self._move_nfs_file(dst_img_conv_local,
                                            dst_img_cache_local)
                        LOG.debug(
                            'Copied locally converted raw image'
                            ' %(img)s to volume %(vol)s.', {
                                'img': image_id,
                                'vol': volume['id']
                            })
                finally:
                    if os.path.exists(dst_img_conv_local):
                        self._delete_file_at_path(dst_img_conv_local)
            self._post_clone_image(volume)
        finally:
            if os.path.exists(dst_img_local):
                self._delete_file_at_path(dst_img_local)
Code Example #58
 def test_domain(self):
     domain = uuid.uuid4().hex
     factory = CaseFactory(domain=domain)
     case = factory.create_case()
     self.assertEqual(domain, case.domain)
Code Example #59
parser.add_argument('--epochs', type=int, default=164, help='number of epochs to train (default: 164)')
parser.add_argument('--lr', type=float, default=0.001, help='learning rate (default: 1e-3)')
parser.add_argument('--lambda_', type=float, default=0.25, help='OLE loss weight \\lambda (default: 0.25)')
parser.add_argument('--gpu', default='0', help='index of gpus to use')
parser.add_argument('--ngpu', type=int, default=2, help='number of gpus to use')
parser.add_argument('--seed', type=int, default=117, help='random seed (default: 117)')
parser.add_argument('--num_samples', type=int, default=500, help='number of training samples per class to use')
parser.add_argument('--data_augment', type=int, default=1, help='use data augmentation, 1: yes (default), 0: no')
parser.add_argument('--validation', type=int, default=0, help='run validation on 10%% of training set 0: no (default), 1: yes')
parser.add_argument('--log_interval', type=int, default=125,  help='how many batches to wait before logging training status')
parser.add_argument('--test_interval', type=int, default=5,  help='how many epochs to wait before another test')
parser.add_argument('--logdir', default='log/default', help='folder to save to the log')
parser.add_argument('--decreasing_lr', default='81,122', help='decreasing strategy')
args = parser.parse_args()

foldername = 'results/wd_%s_batch_%s_channel_%s_samples_%s/' % (str(args.wd), str(args.batch_size), str(args.channel), str(args.num_samples)) + str(uuid.uuid4())


args.logdir = os.path.join(os.path.dirname(__file__), foldername)
misc.logger.init(args.logdir, 'train_log')
print = misc.logger.info

# select gpu
args.gpu = misc.auto_select_gpu(utility_bound=0, num_gpu=args.ngpu, selected_gpus=args.gpu)
args.ngpu = len(args.gpu)

# logger
misc.ensure_dir(args.logdir)
print("=================FLAGS==================")
for k, v in args.__dict__.items():
    print('{}: {}'.format(k, v))
Code Example #60
File: order.py Project: Brahim820/odoo
class sale_order(osv.osv):
    _inherit = 'sale.order'

    def _get_total(self, cr, uid, ids, name, arg, context=None):
        res = {}
        for order in self.browse(cr, uid, ids, context=context):
            total = 0.0
            for line in order.order_line:
                total += line.price_subtotal + line.price_unit * ((line.discount or 0.0) / 100.0) * line.product_uom_qty
            res[order.id] = total
        return res

    _columns = {
        'access_token': fields.char('Security Token', required=True, copy=False),
        'template_id': fields.many2one('sale.quote.template', 'Quotation Template', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}),
        'website_description': fields.html('Description', translate=True),
        'options' : fields.one2many('sale.order.option', 'order_id', 'Optional Products Lines', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, copy=True),
        'amount_undiscounted': fields.function(_get_total, string='Amount Before Discount', type="float", digits=0),
        'quote_viewed': fields.boolean('Quotation Viewed'),
        'require_payment': fields.selection([
            (0, 'Not mandatory on website quote validation'),
            (1, 'Immediate after website order validation')
            ], 'Payment', help="Require immediate payment by the customer when validating the order from the website quote"),
    }

    def _get_template_id(self, cr, uid, context=None):
        try:
            template_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'website_quote', 'website_quote_template_default')[1]
        except ValueError:
            template_id = False
        return template_id

    _defaults = {
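        # The access_token default is evaluated per record at creation time, so
        # each order gets a fresh, unguessable token for its public
        # /quote/<id>/<token> URL (see open_quotation below).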
        'access_token': lambda self, cr, uid, ctx={}: str(uuid.uuid4()),
        'template_id' : _get_template_id,
    }

    def open_quotation(self, cr, uid, quote_id, context=None):
        quote = self.browse(cr, uid, quote_id[0], context=context)
        self.write(cr, uid, quote_id[0], {'quote_viewed': True}, context=context)
        return {
            'type': 'ir.actions.act_url',
            'target': 'self',
            'url': '/quote/%s/%s' % (quote.id, quote.access_token)
        }

    def onchange_template_id(self, cr, uid, ids, template_id, partner=False, fiscal_position_id=False, pricelist_id=False, context=None):
        if not template_id:
            return {}

        if partner:
            context = dict(context or {})
            context['lang'] = self.pool['res.partner'].browse(cr, uid, partner, context).lang

        pricelist_obj = self.pool['product.pricelist']

        lines = [(5,)]
        quote_template = self.pool.get('sale.quote.template').browse(cr, uid, template_id, context=context)
        for line in quote_template.quote_line:
            res = self.pool.get('sale.order.line').product_id_change(cr, uid, False,
                False, line.product_id.id, line.product_uom_qty, line.product_uom_id.id, line.product_uom_qty,
                line.product_uom_id.id, line.name, partner, False, True, time.strftime('%Y-%m-%d'),
                False, fiscal_position_id, True, context)
            data = res.get('value', {})
            if pricelist_id:
                uom_context = context.copy()
                uom_context['uom'] = line.product_uom_id.id
                price = pricelist_obj.price_get(cr, uid, [pricelist_id], line.product_id.id, 1, context=uom_context)[pricelist_id]
            else:
                price = line.price_unit

            if 'tax_id' in data:
                data['tax_id'] = [(6, 0, data['tax_id'])]
            else:
                fpos = (fiscal_position_id and self.pool['account.fiscal.position'].browse(cr, uid, fiscal_position_id)) or False
                taxes = fpos.map_tax(line.product_id.product_tmpl_id.taxes_id).ids if fpos else line.product_id.product_tmpl_id.taxes_id.ids
                data['tax_id'] = [(6, 0, taxes)]
            data.update({
                'name': line.name,
                'price_unit': price,
                'discount': line.discount,
                'product_uom_qty': line.product_uom_qty,
                'product_id': line.product_id.id,
                'product_uom': line.product_uom_id.id,
                'website_description': line.website_description,
                'state': 'draft',
            })
            lines.append((0, 0, data))
        options = []
        for option in quote_template.options:
            if pricelist_id:
                uom_context = context.copy()
                uom_context['uom'] = option.uom_id.id
                price = pricelist_obj.price_get(cr, uid, [pricelist_id], option.product_id.id, 1, context=uom_context)[pricelist_id]
            else:
                price = option.price_unit
            options.append((0, 0, {
                'product_id': option.product_id.id,
                'name': option.name,
                'quantity': option.quantity,
                'uom_id': option.uom_id.id,
                'price_unit': price,
                'discount': option.discount,
                'website_description': option.website_description,
            }))
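        # validity date: today plus the template's number_of_days, if set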
        date = False
        if quote_template.number_of_days > 0:
            date = (datetime.datetime.now() + datetime.timedelta(quote_template.number_of_days)).strftime("%Y-%m-%d")
        data = {
            'order_line': lines,
            'website_description': quote_template.website_description,
            'options': options,
            'validity_date': date,
            'require_payment': quote_template.require_payment
        }
        if quote_template.note:
            data['note'] = quote_template.note
        return {'value': data}

    def recommended_products(self, cr, uid, ids, context=None):
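        """Collect the recommended products of every product currently on
        the order (suggested products for the online quote)."""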
        order_line = self.browse(cr, uid, ids[0], context=context).order_line
        product_pool = self.pool.get('product.product')
        products = []
        for line in order_line:
            products += line.product_id.product_tmpl_id.recommended_products(context=context)
        return products

    def get_access_action(self, cr, uid, ids, context=None):
        """ Override method that generated the link to access the document. Instead
        of the classic form view, redirect to the online quote if exists. """
        quote = self.browse(cr, uid, ids[0], context=context)
        if not quote.template_id:
            return super(sale_order, self).get_access_action(cr, uid, ids, context=context)
        return {
            'type': 'ir.actions.act_url',
            'url': '/quote/%s' % quote.id,
            'target': 'self',
            'res_id': quote.id,
        }

    def action_quotation_send(self, cr, uid, ids, context=None):
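        """Preselect the website_quote mail template in the email composer
        when the order was built from a quote template."""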
        action = super(sale_order, self).action_quotation_send(cr, uid, ids, context=context)
        ir_model_data = self.pool.get('ir.model.data')
        quote_template_id = self.read(cr, uid, ids, ['template_id'], context=context)[0]['template_id']
        if quote_template_id:
            try:
                template_id = ir_model_data.get_object_reference(cr, uid, 'website_quote', 'email_template_edi_sale')[1]
            except ValueError:
                pass
            else:
                action['context'].update({
                    'default_template_id': template_id,
                    'default_use_template': True
                })

        return action

    def _confirm_online_quote(self, cr, uid, order_id, tx, context=None):
        """ Payment callback: validate the order and write tx details in chatter """
        order = self.browse(cr, uid, order_id, context=context)

        # create draft invoice if transaction is ok
        if tx and tx.state == 'done':
            if order.state in ['draft', 'sent']:
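                # confirm as superuser: the payment callback may be triggered
                # by a customer who has no right to confirm the order himself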
                self.action_confirm(cr, SUPERUSER_ID, order.id, context=context)
            message = _('Order paid by %s. Transaction: %s. Amount: %s.') % (tx.partner_id.name, tx.acquirer_reference, tx.amount)
            self.message_post(cr, uid, order_id, body=message, context=context)
            return True
        return False

    def create(self, cr, uid, values, context=None):
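        # no template given: fall back to the default quote template and
        # merge its onchange values (explicit values take precedence)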
        if not values.get('template_id'):
            defaults = self.default_get(cr, uid, ['template_id'], context=context)
            template_values = self.onchange_template_id(cr, uid, [], defaults.get('template_id'), partner=values.get('partner_id'), fiscal_position_id=values.get('fiscal_position'), context=context).get('value', {})
            values = dict(template_values, **values)
        return super(sale_order, self).create(cr, uid, values, context=context)