Example #1
0
def upload_custom_socketenvironment_file_to(instance, filename):
    """Return the storage path for a custom socket-environment file.

    The uploaded file's original name is discarded for uniqueness/safety;
    only its (lower-cased, truncated) extension is kept and a freshly
    generated random key is used as the basename, scoped under the current
    instance's storage prefix.
    """
    _, ext = os.path.splitext(filename)
    # Fix: the template previously contained a literal "(unknown)" where the
    # {filename} placeholder belonged, so the generated key passed below as
    # `filename` was silently ignored and every upload mapped to the same path.
    return '{instance_prefix}/env/{filename}{ext}'.format(
        instance_prefix=get_current_instance().get_storage_prefix(),
        filename=generate_key(),
        ext=ext.lower()[:16]  # extensions longer than 16 would be kinda strange
    )
    def process_task(self, task_class, object_pk, instance_pk, payload_key, meta_key,
                     trace_pk, template_name=None, **kwargs):
        """
        Queue a script task and block until its result is published.

        A unique result channel is subscribed to *before* the task is queued
        so the result message cannot be missed; the subscription is always
        released in the ``finally`` clause.

        Raises RequestTimeout if no result arrives within
        ``settings.WEBHOOK_MAX_TIMEOUT`` seconds.
        """

        channel = TASK_RESULT_KEY_TEMPLATE.format(key=generate_key())
        result_queue = self.subscribe(channel)

        try:
            # Dispatch the task; explicit kwargs first, caller-supplied
            # kwargs may override them.
            task_params = dict(
                result_key=channel,
                instance_pk=instance_pk,
                payload_key=payload_key,
                meta_key=meta_key,
                trace_pk=trace_pk,
                template_name=template_name,
            )
            task_params.update(kwargs)
            import_class(task_class).apply_async(args=[object_pk], kwargs=task_params)

            try:
                # Block until the script publishes its result.
                return result_queue.get(timeout=settings.WEBHOOK_MAX_TIMEOUT)
            except Empty:
                logger.warning("Timeout during processing %s(pk=%s) in Instance[pk=%s]",
                               task_class, object_pk, instance_pk)
                raise RequestTimeout('Script took too much time.')
        finally:
            self.unsubscribe(channel)
Example #3
0
 def publish_codebox_spec(cls, instance_pk, incentive_pk, spec):
     """Serialize *spec* to JSON, store it in redis under a unique key
     (expiring after SPEC_TIMEOUT), and return that key."""
     key = SPEC_TEMPLATE.format(instance_pk=instance_pk,
                                incentive_pk=incentive_pk,
                                spec_id=generate_key())
     redis.set(key, json.dumps(spec), SPEC_TIMEOUT)
     return key
Example #4
0
def upload_hosting_file_to(instance, filename):
    """Return the storage path for a hosting file.

    The uploaded file's original name is discarded; only its (lower-cased,
    truncated) extension is kept and a freshly generated random key becomes
    the basename, under the current instance's prefix and the hosting's id.
    """
    _, ext = os.path.splitext(filename)
    # Fix: the template previously contained a literal "(unknown)" where the
    # {filename} placeholder belonged, so the generated key passed below as
    # `filename` was silently ignored and every upload mapped to the same path.
    return '{instance_prefix}/{hosting_id}h/{filename}{ext}'.format(
        instance_prefix=get_current_instance().get_storage_prefix(),
        hosting_id=instance.hosting_id,
        filename=generate_key(),
        ext=ext.lower()[:16]  # extensions longer than 16 would be kinda strange
    )
Example #5
0
    def get_response(self, request):
        """Return a JSON response with the first change from the channel
        subscription, or an empty 204 response when nothing arrived."""
        messages = list(
            self.process_channel_subscribe(request.environ, generate_key()))
        if not messages:
            return JSONResponse(status=status.HTTP_204_NO_CONTENT)

        first = messages[0]
        response = JSONResponse(first)
        # Expose the change id so clients can resume from this point.
        response['X-Last-Id'] = self.extract_change_id(first)
        return response
    def subscribe_to_channel(self, last_id=1, mock_args=None, maxsize=1):
        """Run process_channel_subscribe against a bogus channel with a mocked
        Event, returning (collected content, the mocked ``wait``)."""
        if not mock_args:
            mock_args = {'return_value': True}

        wait_mock = mock.Mock(**mock_args)
        with mock.patch('apps.async_tasks.handlers.Event',
                        mock.Mock()) as event_cls_mock:
            # Every Event() instance created by the handler shares this wait.
            event_cls_mock().wait = wait_mock
            environ = {
                'LAST_ID': last_id,
                'STREAM_CHANNEL': 'boguschannel'
            }
            content = list(
                self.handler.process_channel_subscribe(environ,
                                                       generate_key(),
                                                       maxsize=maxsize))
            return content, wait_mock
 def test_ws_handler_calls_subscribe(self, subscribe_mock):
     """ws_handler should subscribe the client and push data through it."""
     fake_client = mock.Mock(id=generate_key())
     self.handler.ws_handler(mock.MagicMock(), fake_client)
     self.assertTrue(subscribe_mock.called)
     fake_client.send.assert_called_with('abc')
 def generate_key(self):
     # Delegates to the module-level generate_key helper (not recursion:
     # the bare name resolves to the global function, not this method),
     # requesting a parity-tagged key.
     return generate_key(parity=True)
Example #9
0
 def generate_key(self):
     # Delegates to the module-level generate_key helper (not recursion:
     # the bare name resolves to the global function, not this method).
     return generate_key()
def restore_filename(restore, filename):
    """Build the storage path for a restore archive.

    The *filename* argument is ignored; a random key is generated instead,
    namespaced under the restore owner's id.
    """
    path_template = 'restores/{}/{}.zip'
    return path_template.format(restore.owner_id, generate_key())
Example #11
0
    def restore_to_new_schema(self, storage, instance, partial=False):
        """Restore a backup into a throwaway Instance, then swap it with the
        target *instance*.

        A new Instance is created, migrated to the backup's stored migration
        targets (if any), filled with the restored data, and forwarded to the
        current project schema state. On success, schema names and storage
        prefixes are swapped between the new and the original instance; the
        ``finally`` clause then deletes ``new_instance``, which after the swap
        holds the ORIGINAL schema/files — i.e. cleanup of the replaced data.
        On failure the same delete disposes of the partially restored copy.
        """
        from apps.instances.models import Instance
        from .executor import BackupMigrationExecutor

        db = get_instance_db(instance)
        con = connections[db]

        # Migration targets captured at backup time; empty when the backup
        # was taken without stored migrations.
        stored_targets = self.get_stored_migration_targets(storage)

        # NOTE(review): "{self.id}" below is a literal inside a %-format
        # string, so the schema name contains the text "{self.id}" verbatim —
        # looks like a missing f-prefix or leftover placeholder; confirm
        # intended schema naming before changing.
        new_instance = Instance(owner=instance.owner,
                                name="_%s" % generate_key(),
                                schema_name="%s_{self.id}_%s" %
                                (instance.id, instance.name),
                                database=instance.database)

        # If there are no stored migrations sync_schema on create
        try:
            new_instance.save(sync_schema=not stored_targets)
            new_instance.storage_prefix = "%s_%s" % (instance.pk,
                                                     new_instance.pk)
            new_instance.save()

            apps = None
            if stored_targets:
                with instance_context(new_instance):
                    # Bring the fresh schema up to the backup's recorded state
                    # so restored rows match the expected table layout.
                    executor = BackupMigrationExecutor(con)
                    state = executor.migrate(stored_targets)
                    apps = state.apps

                    if partial:
                        signals.post_tenant_migrate.send(
                            sender=new_instance,
                            tenant=new_instance,
                            using=con.alias,
                            created=True,
                            partial=True,
                        )

            models_sorted = self.calculate_sorted(apps)
            self.restore_to_instance(storage,
                                     new_instance,
                                     models_sorted,
                                     apps,
                                     partial=partial)

            # Upgrade schema to current version
            # migrate will detect that this instance is already created
            # and will forward to current project state
            # it will not fire post_migrate signals
            # and it will fire post_tenant_migrate with schema_created=False
            # Initial data will be fed from migrations (a common way how we do it in project)
            if stored_targets:
                call_command('migrate',
                             shared=False,
                             schema_name=new_instance.schema_name,
                             interactive=False,
                             verbosity=settings.SCHEMA_MIGRATIONS_VERBOSITY)

            # swap prefixes. When new_instance is deleted, old instance files will also be deleted.
            instance.storage_prefix, new_instance.storage_prefix = (
                new_instance.get_storage_prefix(),
                instance.get_storage_prefix())
            # swap schemas
            instance.schema_name, new_instance.schema_name = (
                new_instance.schema_name, instance.schema_name)
            instance.save()
            new_instance.save()
        finally:
            new_instance.delete()
def create_unique_key(apps, schema_editor):
    """Data migration: assign a fresh unique key to every existing Socket."""
    socket_model = apps.get_model('sockets.Socket')
    # iterator() streams rows instead of loading the whole table into memory.
    for sock in socket_model.objects.all().iterator():
        sock.key = generate_key()
        sock.save(update_fields=('key',))
 def generate_key(self):
     # Delegates to the module-level generate_key helper (not recursion:
     # the bare name resolves to the global function, not this method),
     # explicitly requesting a key without parity tagging.
     return generate_key(parity=False)