Example #1
0
def create_restore_job(sid):
    """Create and start a background job that runs a restore task.

    Args:
        sid: Server ID

    Returns:
        A JSON response with the new job id on success, otherwise an
        error response produced by one of the helper steps.
    """
    # Parse and validate the request payload.
    is_error, errmsg, data, _file = _get_create_req_data()
    if is_error:
        return errmsg

    # Resolve the server and make sure we have a live connection.
    is_error, errmsg, driver, manager, conn, \
        connected, server = _connect_server(sid)
    if is_error:
        return errmsg

    # Locate the pg_restore utility for this server.
    utility = manager.utility('restore')
    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    args = _set_args_param_values(data, manager, server, driver, conn, _file)

    try:
        restore_file = data['file']
        if hasattr(restore_file, 'encode'):
            restore_file = restore_file.encode('utf-8')

        process = BatchProcess(
            desc=RestoreMessage(server.id,
                                restore_file,
                                *args,
                                database=data['database']),
            cmd=utility,
            args=args)
        manager.export_password_env(process.id)

        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            timeout_env = {'PGCONNECT_TIMEOUT': str(manager.connect_timeout)}
            process.set_env_variables(server, env=timeout_env)
        else:
            process.set_env_variables(server)

        process.start()
        job_id = process.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))

    # Return response
    return make_json_response(data={
        'job_id': job_id,
        'desc': process.desc.message,
        'Success': 1
    })
Example #2
0
    def runTest(self, current_user_mock, current_user, db_mock,
                popen_mock, get_server_details_mock):
        """
        Build a backup BatchProcess and verify that the BackupMessage
        description registered with the database round-trips its type,
        file, database and rendered command line.

        The mock parameters are presumably injected by @patch decorators
        above this method -- confirm against the class definition.
        """
        with self.app.app_context():
            # Fake a logged-in user on both the real and patched objects.
            current_user.id = 1
            current_user_mock.id = 1
            # Behave as if running in server (web) mode, not the desktop
            # runtime.
            current_app.PGADMIN_RUNTIME = False

            def db_session_add_mock(j):
                # Invoked when BatchProcess registers its row via
                # db.session.add(); j.desc holds the serialized process
                # description, so all assertions happen here.
                cmd_obj = loads(j.desc)
                self.assertTrue(isinstance(cmd_obj, IProcessDesc))
                self.assertEqual(cmd_obj.backup_type,
                                 self.class_params['type'])
                self.assertEqual(cmd_obj.bfile, self.class_params['bfile'])
                self.assertEqual(cmd_obj.database,
                                 self.class_params['database'])
                # Expected command line as rendered by BackupMessage.
                self.assertEqual(cmd_obj.cmd,
                                 ' --file "backup_file" '
                                 '--host "{0}" '
                                 '--port "{1}" '
                                 '--username "{2}" '
                                 '--no-password '
                                 '--database "{3}"'.format(
                                     self.class_params['host'],
                                     self.class_params['port'],
                                     self.class_params['username'],
                                     self.class_params['database']
                                 ))

            # Route session.add through the asserting hook above.
            db_mock.session.add.side_effect = db_session_add_mock
            db_mock.session.commit = MagicMock(return_value=True)

            # (name, host, port) tuple consumed by the message renderer.
            get_server_details_mock.return_value = \
                self.class_params['name'], \
                self.class_params['host'], \
                self.class_params['port']

            backup_obj = BackupMessage(
                self.class_params['type'],
                self.class_params['sid'],
                self.class_params['bfile'],
                *self.class_params['args'],
                **{'database': self.class_params['database']}
            )

            # Creating the BatchProcess triggers db_session_add_mock.
            p = BatchProcess(
                desc=backup_obj,
                cmd=self.class_params['cmd'],
                args=self.class_params['args']
            )

            # Check that _create_process has been called
            self.assertTrue(db_mock.session.add.called)

            # Check start method
            self._check_start(popen_mock, p, backup_obj)

            # Check list method
            self._check_list(p, backup_obj)
Example #3
0
    def runTest(self, current_user_mock, current_user, db_mock, popen_mock,
                get_server_name_mock, pref_module):
        """
        Build a restore BatchProcess and verify that the RestoreMessage
        description registered with the database round-trips its file
        name and rendered command line.

        The mock parameters are presumably injected by @patch decorators
        above this method -- confirm against the class definition.
        """
        with self.app.app_context():
            # Fake a logged-in user on both the real and patched objects.
            current_user.id = 1
            current_user_mock.id = 1
            # Behave as if running in server (web) mode.
            current_app.PGADMIN_RUNTIME = False

            def db_session_add_mock(j):
                # Invoked when BatchProcess registers its row via
                # db.session.add(); j.desc holds the serialized process
                # description, so the assertions happen here.
                cmd_obj = loads(j.desc)
                self.assertTrue(isinstance(cmd_obj, IProcessDesc))
                self.assertEqual(cmd_obj.bfile, self.class_params['bfile'])
                # Expected command line as rendered by RestoreMessage.
                self.assertEqual(
                    cmd_obj.cmd, ' --file "restore_file" '
                    '--host "{0}" '
                    '--port "{1}" '
                    '--username "{2}" '
                    '--no-password '
                    '--database "{3}"'.format(self.class_params['host'],
                                              self.class_params['port'],
                                              self.class_params['username'],
                                              self.class_params['database']))

            # Any preference lookup performed during the test returns 5.
            pref_module.return_value.preference.return_value.get. \
                return_value = 5

            # Display name used by the message renderer.
            get_server_name_mock.return_value = "{0} ({1}:{2})" \
                .format(
                    self.class_params['name'],
                    self.class_params['host'],
                    self.class_params['port'])

            # Route session.add through the asserting hook above.
            db_mock.session.add.side_effect = db_session_add_mock
            db_mock.session.commit = MagicMock(return_value=True)

            restore_obj = RestoreMessage(self.class_params['sid'],
                                         self.class_params['bfile'],
                                         *self.class_params['args'])

            # Creating the BatchProcess triggers db_session_add_mock.
            p = BatchProcess(desc=restore_obj,
                             cmd=self.class_params['cmd'],
                             args=self.class_params['args'])

            # Check that _create_process has been called
            self.assertTrue(db_mock.session.add.called)

            # Check start method
            self._check_start(popen_mock, p, restore_obj)

            # Check list method
            self._check_list(p, restore_obj)
Example #4
0
    def runTest(self, current_user_mock, server_mock, db_mock,
                current_app_mock, popen_mock):
        """
        Build a maintenance BatchProcess and verify that the Message
        description registered with the database round-trips its query,
        message text and data payload.

        The mock parameters are presumably injected by @patch decorators
        above this method -- confirm against the class definition.
        """
        # Fake a logged-in user; behave as if in server (web) mode.
        current_user_mock.id = 1
        current_app_mock.PGADMIN_RUNTIME = False

        class TestMockServer():
            # Minimal stand-in for a Server model row.
            def __init__(self, name, host, port):
                self.name = name
                self.host = host
                self.port = port

        def db_session_add_mock(j):
            # Invoked when BatchProcess registers its row via
            # db.session.add(); j.desc holds the serialized process
            # description, so the assertions happen here.
            # Python < 2.7 needs an explicit str() before unpickling.
            if sys.version_info < (2, 7):
                cmd_obj = loads(str(j.desc))
            else:
                cmd_obj = loads(j.desc)
            self.assertTrue(isinstance(cmd_obj, IProcessDesc))
            self.assertEqual(cmd_obj.query, self.class_params['cmd'])
            self.assertEqual(cmd_obj.message, self.expected_msg)
            self.assertEqual(cmd_obj.data, self.class_params['data'])

        # Server.query.filter_by(...).first() returns the fake server.
        # NOTE(review): the name field is populated from 'username' --
        # confirm this is intentional for the expected message text.
        mock_obj = TestMockServer(self.class_params['username'],
                                  self.class_params['host'],
                                  self.class_params['port'])
        mock_result = server_mock.query.filter_by.return_value
        mock_result.first.return_value = mock_obj

        # Route session.add through the asserting hook above.
        db_mock.session.add.side_effect = db_session_add_mock
        db_mock.session.commit = MagicMock(return_value=True)

        maintenance_obj = Message(
            self.class_params['sid'],
            self.class_params['data'],
            self.class_params['cmd']
        )

        # Creating the BatchProcess triggers db_session_add_mock.
        p = BatchProcess(
            desc=maintenance_obj,
            cmd=self.class_params['cmd'],
            args=self.class_params['args']
        )

        # Check that _create_process has been called
        self.assertTrue(db_mock.session.add.called)

        # Check start method
        self._check_start(popen_mock, p, maintenance_obj)

        # Check list method
        self._check_list(p, maintenance_obj)
    def runTest(self, current_user_mock, current_user, db_mock,
                current_app_mock, popen_mock, get_server_details_mock):
        """
        Build a restore BatchProcess and verify that the RestoreMessage
        description registered with the database round-trips its file
        name and rendered command line.

        The mock parameters are presumably injected by @patch decorators
        above this method -- confirm against the class definition.
        """
        # Fake a logged-in user; behave as if in server (web) mode.
        current_user.id = 1
        current_user_mock.id = 1
        current_app_mock.PGADMIN_RUNTIME = False

        def db_session_add_mock(j):
            # Invoked when BatchProcess registers its row via
            # db.session.add(); j.desc holds the serialized process
            # description, so the assertions happen here.
            # Python < 2.7 needs an explicit str() before unpickling.
            if sys.version_info < (2, 7):
                cmd_obj = loads(str(j.desc))
            else:
                cmd_obj = loads(j.desc)
            self.assertTrue(isinstance(cmd_obj, IProcessDesc))
            self.assertEqual(cmd_obj.bfile, self.class_params['bfile'])
            # Expected command line as rendered by RestoreMessage.
            self.assertEqual(
                cmd_obj.cmd, ' --file "restore_file" '
                '--host "{0}" '
                '--port "{1}" '
                '--username "{2}" '
                '--no-password '
                '--database "{3}"'.format(self.class_params['host'],
                                          self.class_params['port'],
                                          self.class_params['username'],
                                          self.class_params['database']))

        # (name, host, port) tuple consumed by the message renderer.
        get_server_details_mock.return_value = \
            self.class_params['name'],\
            self.class_params['host'],\
            self.class_params['port']

        # Route session.add through the asserting hook above.
        db_mock.session.add.side_effect = db_session_add_mock

        restore_obj = RestoreMessage(self.class_params['sid'],
                                     self.class_params['bfile'],
                                     *self.class_params['args'])

        # Creating the BatchProcess triggers db_session_add_mock.
        p = BatchProcess(desc=restore_obj,
                         cmd=self.class_params['cmd'],
                         args=self.class_params['args'])

        # Check that _create_process has been called
        self.assertTrue(db_mock.session.add.called)

        # Check start method
        # NOTE(review): unlike the sibling tests, the description object
        # is not passed as a third argument -- confirm _check_start's
        # signature in this test class.
        self._check_start(popen_mock, p)

        # Check list method
        self._check_list(p, restore_obj)
Example #6
0
def deploy_on_rds(data):
    """Deploy a Postgres instance on AWS RDS via the pgacloud helper.

    Args:
        data: dict describing the deployment. Expected keys include
            'cloud', 'secret' (region and AWS credentials),
            'instance_details' and 'db_details'.

    Returns:
        (True, process, {'label': name, 'sid': server_id}) on success,
        or (False, None, error_message) on failure.
    """
    _cmd = 'python'
    _cmd_script = '{0}/pgacloud/pgacloud.py'.format(root)
    # Fix: dropped the unused `from subprocess import Popen, PIPE` and the
    # redundant `_label = None` pre-assignment.
    _label = data['instance_details']['name']

    args = [
        _cmd_script,
        data['cloud'],
        '--region',
        str(data['secret']['region']),
        'create-instance',
        '--name',
        data['instance_details']['name'],
        '--db-name',
        data['db_details']['db_name'],
        '--db-username',
        data['db_details']['db_username'],
        '--db-port',
        str(data['db_details']['db_port']),
        '--db-version',
        str(data['instance_details']['db_version']),
        '--instance-type',
        data['instance_details']['instance_type'],
        '--storage-type',
        data['instance_details']['storage_type'],
        '--storage-size',
        str(data['instance_details']['storage_size']),
        '--public-ip',
        str(data['instance_details']['public_ip']),
    ]

    # Provisioned IOPS only apply to the io1 storage type.
    if data['instance_details']['storage_type'] == 'io1':
        args.append('--storage-iops')
        args.append(str(data['instance_details']['storage_IOPS']))

    _cmd_msg = '{0} {1} {2}'.format(_cmd, _cmd_script, ' '.join(args))
    try:
        # Register the server record up-front; cloud_status=-1 marks it
        # as "deployment in progress".
        sid = _create_server({
            'gid': data['db_details']['gid'],
            'name': data['instance_details']['name'],
            'db': data['db_details']['db_name'],
            'username': data['db_details']['db_username'],
            'port': data['db_details']['db_port'],
            'cloud_status': -1
        })

        p = BatchProcess(desc=CloudProcessDesc(
            sid, _cmd_msg, data['cloud'], data['instance_details']['name']),
                         cmd=_cmd,
                         args=args)

        # AWS credentials are handed to the child process through its
        # environment, never via the command line.
        env = dict()
        env['AWS_ACCESS_KEY_ID'] = data['secret']['access_key']
        env['AWS_SECRET_ACCESS_KEY'] = data['secret']['secret_access_key']

        if 'session_token' in data['secret'] and\
                data['secret']['session_token'] is not None:
            env['AWS_SESSION_TOKEN'] = data['secret']['session_token']

        if 'db_password' in data['db_details']:
            env['AWS_DATABASE_PASSWORD'] = data['db_details']['db_password']

        p.set_env_variables(None, env=env)
        p.update_server_id(p.id, sid)
        p.start()

        return True, p, {'label': _label, 'sid': sid}
    except Exception as e:
        current_app.logger.exception(e)
        return False, None, str(e)
Example #7
0
def create_backup_objects_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for backup task
        (Backup Database(s)/Schema(s)/Table(s))

    Returns:
        None
    """

    # Fix: json.loads() lost its 'encoding' argument in Python 3.9
    # (TypeError when passed); bytes input is decoded as UTF-8
    # automatically.
    data = json.loads(request.data)
    backup_obj_type = data.get('type', 'objects')

    try:
        # Directory-format backups target a directory rather than a file.
        backup_file = filename_with_file_manager_path(
            data['file'], (data.get('format', '') != 'directory'))
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = get_server(sid)

    if server is None:
        return make_json_response(
            success=0, errormsg=_("Could not find the specified server."))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0, errormsg=_("Please connect to the server first."))

    # pg_dump backs up individual objects; pg_dumpall handles server-wide
    # and globals-only backups.
    utility = manager.utility('backup') if backup_obj_type == 'objects' \
        else manager.utility('backup_server')

    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    args = _get_args_params_values(data, conn, backup_obj_type, backup_file,
                                   server, manager)

    # The escaped copy is what actually gets executed; the raw args feed
    # the human-readable process description.
    escaped_args = [escape_dquotes_process_arg(arg) for arg in args]
    try:
        bfile = data['file'].encode('utf-8') \
            if hasattr(data['file'], 'encode') else data['file']
        if backup_obj_type == 'objects':
            # The database name is the trailing positional argument.
            args.append(data['database'])
            escaped_args.append(data['database'])
            p = BatchProcess(desc=BackupMessage(BACKUP.OBJECT,
                                                server.id,
                                                bfile,
                                                *args,
                                                database=data['database']),
                             cmd=utility,
                             args=escaped_args)
        else:
            p = BatchProcess(desc=BackupMessage(
                BACKUP.SERVER if backup_obj_type != 'globals' else
                BACKUP.GLOBALS, server.id, bfile, *args),
                             cmd=utility,
                             args=escaped_args)

        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))

    # Return response
    return make_json_response(data={
        'job_id': jid,
        'desc': p.desc.message,
        'Success': 1
    })
Example #8
0
def create_import_export_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for import and export table data functionality

    Returns:
        None
    """
    # Fix: json.loads() lost its 'encoding' argument in Python 3.9
    # (TypeError when passed); bytes input is decoded as UTF-8
    # automatically.
    if request.form:
        data = json.loads(request.form['data'])
    else:
        data = json.loads(request.data)

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()

    if server is None:
        return bad_request(errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return bad_request(errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')
    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        try:
            # For import the file must already exist; for export only the
            # directory has to be valid.
            _file = filename_with_file_manager_path(data['filename'],
                                                    data['is_import'])
        except Exception as e:
            return bad_request(errormsg=str(e))

        if not _file:
            return bad_request(errormsg=_('Please specify a valid file'))

        # psql expects forward slashes even on Windows.
        if IS_WIN:
            _file = _file.replace('\\', '/')

        data['filename'] = _file
    else:
        return bad_request(errormsg=_('Please specify a valid file'))

    # Get required and ignored column list
    icols = _get_ignored_column_list(data, driver, conn)
    cols = _get_required_column_list(data, driver, conn)

    # Create the COPY FROM/TO  from template
    query = render_template('import_export/sql/cmd.sql',
                            conn=conn,
                            data=data,
                            columns=cols,
                            ignore_column_list=icols)

    args = ['--command', query]

    try:
        p = BatchProcess(desc=IEMessage(sid, data['schema'], data['table'],
                                        data['database'], storage_dir, utility,
                                        *args),
                         cmd=utility,
                         args=args)
        manager.export_password_env(p.id)

        # psql picks up connection parameters from the environment.
        env = dict()
        env['PGHOST'] = server.host
        env['PGPORT'] = str(server.port)
        env['PGUSER'] = server.username
        env['PGDATABASE'] = data['database']
        p.set_env_variables(server, env=env)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return bad_request(errormsg=str(e))

    # Return response
    return make_json_response(data={'job_id': jid, 'success': 1})
Example #9
0
def create_restore_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for restore task

    Returns:
        None
    """
    # Fix: json.loads() lost its 'encoding' argument in Python 3.9
    # (TypeError when passed); bytes input is decoded as UTF-8
    # automatically.
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    try:
        _file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    if _file is None:
        return make_json_response(success=0,
                                  errormsg=_("File couldn't be found!"))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()

    if server is None:
        return make_json_response(
            success=0, errormsg=_("Could not find the specified server."))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver

    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0, errormsg=_("Please connect to the server first."))

    utility = manager.utility('restore')

    args = []

    if 'list' in data:
        args.append('--list')
    else:

        def set_param(key, param):
            # Append a boolean flag when data[key] is truthy.
            if key in data and data[key]:
                args.append(param)
                return True
            return False

        def set_value(key, param, value):
            # Append "param data[key]" when value is True, otherwise
            # "param value"; a falsy value is a no-op.
            if key in data:
                if value:
                    if value is True and data[key]:
                        args.append(param)
                        args.append(data[key])
                    else:
                        args.append(param)
                        args.append(value)
                    return True
            return False

        def set_multiple(key, param, with_schema=True):
            # Append "param schema.object" pairs (or bare names when
            # with_schema is False) for each entry under data[key].
            if key in data:
                if len(data[key]) > 0:
                    if with_schema:
                        # TODO:// This is temporary
                        # Once object tree is implemented then we will use
                        # list of tuples 'else' part
                        if isinstance(data[key], list):
                            s, t = data[key]
                            args.extend([
                                param,
                                driver.qtIdent(conn, s) + '.' +
                                driver.qtIdent(conn, t)
                            ])
                        else:
                            for s, o in data[key]:
                                args.extend([
                                    param,
                                    driver.qtIdent(conn, s) + '.' +
                                    driver.qtIdent(conn, o)
                                ])
                    else:
                        for o in data[key]:
                            args.extend([param, o])
                    return True
            return False

        args.extend([
            '--host', server.host, '--port',
            str(server.port), '--username', server.username, '--no-password'
        ])

        set_value('role', '--role', True)
        set_value('database', '--dbname', True)

        if data['format'] == 'directory':
            args.extend(['--format=d'])

        # Fix: these section switches are plain boolean flags;
        # set_value(..., False) was a silent no-op, so use set_param.
        set_param('pre_data', '--section=pre-data')
        set_param('data', '--section=data')
        set_param('post_data', '--section=post-data')

        if not set_param('only_data', '--data-only'):
            set_param('dns_owner', '--no-owner')
            # Fix: keys had trailing spaces ('dns_privilege ' etc.) and
            # could never match the request payload.
            set_param('dns_privilege', '--no-privileges')
            set_param('dns_tablespace', '--no-tablespaces')

        if not set_param('only_schema', '--schema-only'):
            set_param('disable_trigger', '--disable-triggers')

        set_param('include_create_database', '--create')
        set_param('clean', '--clean')
        set_param('single_transaction', '--single-transaction')
        set_param('no_data_fail_table', '--no-data-for-failed-tables')
        set_param('use_set_session_auth', '--use-set-session-authorization')
        set_param('exit_on_error', '--exit-on-error')

        set_value('no_of_jobs', '--jobs', True)
        set_param('verbose', '--verbose')

        set_multiple('schemas', '--schema', False)
        set_multiple('tables', '--table', False)
        set_multiple('functions', '--function', False)
        set_multiple('triggers', '--trigger', False)
        set_multiple('trigger_funcs', '--function', False)
        set_multiple('indexes', '--index', False)

    # The backup file/directory is always the final positional argument.
    args.append(fs_short_path(_file))

    try:
        p = BatchProcess(desc=RestoreMessage(
            sid, data['file'].encode('utf-8') if hasattr(
                data['file'], 'encode') else data['file'], *args),
                         cmd=utility,
                         args=args)
        manager.export_password_env(p.id)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))
    # Return response
    return make_json_response(data={'job_id': jid, 'Success': 1})
Example #10
0
def create_restore_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for restore task

    Returns:
        None
    """
    # Fix: json.loads() lost its 'encoding' argument in Python 3.9
    # (TypeError when passed); bytes input is decoded as UTF-8
    # automatically.
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    try:
        _file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    if _file is None:
        return make_json_response(
            status=410,
            success=0,
            errormsg=_("File could not be found.")
        )

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver

    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('restore')

    args = []

    if 'list' in data:
        args.append('--list')
    else:
        def set_param(key, param):
            # Append a boolean flag when data[key] is truthy.
            if key in data and data[key]:
                args.append(param)
                return True
            return False

        def set_value(key, param, default_value=None):
            # Append "param data[key]", falling back to default_value
            # when the key is missing/empty and a default is given.
            if key in data and data[key] is not None and data[key] != '':
                args.append(param)
                args.append(data[key])
            elif default_value is not None:
                args.append(param)
                args.append(default_value)

        def set_multiple(key, param, with_schema=True):
            # Append "param schema.object" pairs (or bare names when
            # with_schema is False) for each entry under data[key].
            if key in data:
                if len(data[key]) > 0:
                    if with_schema:
                        # TODO:// This is temporary
                        # Once object tree is implemented then we will use
                        # list of tuples 'else' part
                        if isinstance(data[key], list):
                            s, t = data[key]
                            args.extend([
                                param,
                                driver.qtIdent(
                                    conn, s
                                ) + '.' + driver.qtIdent(conn, t)
                            ])
                        else:
                            for s, o in data[key]:
                                args.extend([
                                    param,
                                    driver.qtIdent(
                                        conn, s
                                    ) + '.' + driver.qtIdent(conn, o)
                                ])
                    else:
                        for o in data[key]:
                            args.extend([param, o])
                    return True
            return False

        # When an SSH tunnel is in use, connect through the local
        # forwarded endpoint instead of the server's real host/port.
        args.extend([
            '--host',
            manager.local_bind_host if manager.use_ssh_tunnel else server.host,
            '--port',
            str(manager.local_bind_port) if manager.use_ssh_tunnel
            else str(server.port),
            '--username', server.username, '--no-password'
        ])

        set_value('role', '--role')
        set_value('database', '--dbname')

        if data['format'] == 'directory':
            args.extend(['--format=d'])

        set_param('pre_data', '--section=pre-data')
        set_param('data', '--section=data')
        set_param('post_data', '--section=post-data')

        if not set_param('only_data', '--data-only'):
            set_param('dns_owner', '--no-owner')
            set_param('dns_privilege', '--no-privileges')
            set_param('dns_tablespace', '--no-tablespaces')

        if not set_param('only_schema', '--schema-only'):
            set_param('disable_trigger', '--disable-triggers')

        set_param('include_create_database', '--create')
        set_param('clean', '--clean')
        set_param('single_transaction', '--single-transaction')
        set_param('no_data_fail_table', '--no-data-for-failed-tables')
        set_param('use_set_session_auth', '--use-set-session-authorization')
        set_param('exit_on_error', '--exit-on-error')

        # --no-comments is only supported by pg_restore 11+.
        if manager.version >= 110000:
            set_param('no_comments', '--no-comments')

        set_value('no_of_jobs', '--jobs')
        set_param('verbose', '--verbose')

        set_multiple('schemas', '--schema', False)
        set_multiple('tables', '--table', False)
        set_multiple('functions', '--function', False)
        set_multiple('triggers', '--trigger', False)
        set_multiple('trigger_funcs', '--function', False)
        set_multiple('indexes', '--index', False)

    # The backup file/directory is always the final positional argument.
    args.append(fs_short_path(_file))

    try:
        p = BatchProcess(
            desc=RestoreMessage(
                sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode'
                ) else data['file'],
                *args
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )
    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
Example #11
0
def create_import_export_job(sid):
    """
    Create a background job that runs the psql COPY FROM/TO command to
    import or export table data for the given server.

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the new job id on success, or an error
        response otherwise.
    """
    # The payload arrives either as a form field named 'data' or as a raw
    # JSON request body.
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data.decode())

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()

    if server is None:
        return make_json_response(success=0,
                                  errormsg=_("Couldn't find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0, errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')

    # Get the storage path from preference
    # NOTE(review): get_storage_directory() may return None when no storage
    # directory is configured; os.path.join(None, ...) below would then
    # raise TypeError — TODO confirm callers guarantee a storage dir here.
    storage_dir = get_storage_directory()

    if 'filename' in data:
        if os.name == 'nt':
            # Normalise to Windows separators, then double the backslashes
            # so the path survives embedding in the COPY command text.
            data['filename'] = data['filename'].replace('/', '\\')
            if storage_dir:
                storage_dir = storage_dir.replace('/', '\\')
            data['filename'] = data['filename'].replace('\\', '\\\\')
            data['filename'] = os.path.join(storage_dir,
                                            data['filename'].lstrip('/'))
        else:
            data['filename'] = os.path.join(storage_dir,
                                            data['filename'].lstrip('/'))
    else:
        return make_json_response(data={
            'status': False,
            'info': 'Please specify a valid file'
        })

    # cols / icols are rendered as "(a, b, ...)" strings, or left as None
    # when the corresponding list is empty or absent from the payload.
    cols = None
    icols = None

    if data['icolumns']:
        ignore_cols = data['icolumns']

        # format the ignore column list required as per copy command
        # requirement
        if ignore_cols and len(ignore_cols) > 0:
            for col in ignore_cols:
                if icols:
                    icols += ', '
                else:
                    icols = '('
                icols += driver.qtIdent(conn, col)
            icols += ')'

    # format the column import/export list required as per copy command
    # requirement
    if data['columns']:
        columns = data['columns']
        if columns and len(columns) > 0:
            for col in columns:
                if cols:
                    cols += ', '
                else:
                    cols = '('
                cols += driver.qtIdent(conn, col)
            cols += ')'

    # Create the COPY FROM/TO  from template
    query = render_template('import_export/sql/cmd.sql',
                            conn=conn,
                            data=data,
                            columns=cols,
                            ignore_column_list=icols)

    args = [
        '--host', server.host, '--port',
        str(server.port), '--username', server.username, '--dbname',
        driver.qtIdent(conn, data['database']), '--command', query
    ]

    try:
        p = BatchProcess(desc=Message(sid, data['schema'], data['table'],
                                      data['database'], storage_dir),
                         cmd=utility,
                         args=args)
        manager.export_password_env(p.id)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))

    # Return response
    return make_json_response(data={'job_id': jid, 'success': 1})
示例#12
0
def create_maintenance_job(sid, did):
    """
    Create a background job that executes the maintenance (vacuum)
    command for the given database.

    Args:
        sid: Server ID
        did: Database ID

    Returns:
        A JSON response carrying the new job id, or an error response.
    """
    # The payload arrives either as a form field named 'data' or as a
    # raw JSON request body.
    if request.form:
        payload = dict(request.form)
        data = json.loads(payload['data'][0])
    else:
        data = json.loads(request.data.decode())

    # Pick the first non-empty index-like entry, in priority order.
    index_name = next(
        (data[key]
         for key in ('primary_key', 'unique_constraint', 'index')
         if data.get(key)),
        None
    )

    # Look up the server record (hostname, port, roles etc).
    server = Server.query.filter_by(id=sid).first()
    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Couldn't find the given server")
        )

    # Obtain the driver connection for this server's metadata.
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    if not conn.connected():
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('sql')

    # Render the SQL for the vacuum operation from its template.
    query = render_template(
        'maintenance/sql/command.sql', conn=conn, data=data,
        index_name=index_name
    )

    cmd_args = [
        '--host', server.host,
        '--port', str(server.port),
        '--username', server.username,
        '--dbname', driver.qtIdent(conn, data['database']),
        '--command', query,
    ]

    try:
        process = BatchProcess(
            desc=Message(sid, data, query),
            cmd=utility, args=cmd_args
        )
        manager.export_password_env(process.id)
        process.start()
        job_id = process.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Report the new job back to the caller.
    return make_json_response(
        data={'job_id': job_id, 'status': True,
              'info': 'Maintenance job created.'}
    )
示例#13
0
def create_maintenance_job(sid, did):
    """
    Create a background job that executes the maintenance (vacuum)
    command for the given database.

    Args:
        sid: Server ID
        did: Database ID

    Returns:
        A JSON response carrying the new job id, or an error response.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        # Fix: json.loads() no longer accepts an 'encoding' argument
        # (removed in Python 3.9); JSON text is UTF-8 by specification.
        data = json.loads(data['data'][0])
    else:
        # request.data is bytes; json.loads() accepts bytes directly
        # (UTF-8/16/32 auto-detected) since Python 3.6.
        data = json.loads(request.data)

    index_name = None

    # Pick the index-like object to operate on, in priority order.
    if 'primary_key' in data and data['primary_key']:
        index_name = data['primary_key']
    elif 'unique_constraint' in data and data['unique_constraint']:
        index_name = data['unique_constraint']
    elif 'index' in data and data['index']:
        index_name = data['index']

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()

    if server is None:
        return make_json_response(
            success=0, errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0, errormsg=_("Please connect to the server first."))

    utility = manager.utility('sql')

    # Create the command for the vacuum operation
    query = render_template('maintenance/sql/command.sql',
                            conn=conn,
                            data=data,
                            index_name=index_name)

    # When an SSH tunnel is in use, psql must connect to the local end of
    # the tunnel rather than to the server's own host/port.
    args = [
        '--host',
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port)
        if manager.use_ssh_tunnel else str(server.port), '--username',
        server.username, '--dbname', data['database'], '--command', query
    ]

    try:
        p = BatchProcess(desc=Message(sid, data, query),
                         cmd=utility,
                         args=args)
        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))

    # Return response
    return make_json_response(data={
        'job_id': jid,
        'status': True,
        'info': 'Maintenance job created.'
    })
示例#14
0
def create_import_export_job(sid):
    """
    Create a background job that runs the psql COPY FROM/TO command to
    import or export table data for the given server.

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the new job id on success, or an error
        response otherwise.
    """
    # The payload arrives either as a form field named 'data' or as a
    # raw JSON request body.
    if request.form:
        payload = dict(request.form)
        data = json.loads(payload['data'][0])
    else:
        data = json.loads(request.data.decode())

    # Look up the server record (hostname, port, roles etc).
    server = Server.query.filter_by(id=sid).first()
    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Couldn't find the given server")
        )

    # Obtain the driver connection for this server's metadata.
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    if not conn.connected():
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first...")
        )

    # Resolve the psql utility path for this server.
    utility = manager.utility('sql')

    # Resolve the configured storage directory.
    storage_dir = get_storage_directory()

    if 'filename' not in data:
        return make_json_response(
            data={'status': False, 'info': 'Please specify a valid file'}
        )

    filename = data['filename']
    if os.name == 'nt':
        # Normalise to Windows separators, then double the backslashes so
        # the path survives embedding in the COPY command text.
        filename = filename.replace('/', '\\')
        if storage_dir:
            storage_dir = storage_dir.replace('/', '\\')
        filename = filename.replace('\\', '\\\\')
    data['filename'] = os.path.join(storage_dir, filename.lstrip('/'))

    def _quoted_column_list(names):
        # Render "(a, b, ...)" with each identifier quoted as the copy
        # command requires, or None when there is nothing to render.
        if names and len(names) > 0:
            return '(' + ', '.join(
                driver.qtIdent(conn, name) for name in names) + ')'
        return None

    # Ignore-column list and import/export column list for the template.
    icols = _quoted_column_list(data['icolumns'])
    cols = _quoted_column_list(data['columns'])

    # Build the COPY FROM/TO statement from its template.
    query = render_template(
        'import_export/sql/cmd.sql',
        conn=conn,
        data=data,
        columns=cols,
        ignore_column_list=icols
    )

    cmd_args = [
        '--host', server.host, '--port', str(server.port),
        '--username', server.username, '--dbname',
        driver.qtIdent(conn, data['database']),
        '--command', query
    ]

    try:
        process = BatchProcess(
            desc=Message(
                sid,
                data['schema'],
                data['table'],
                data['database'],
                storage_dir
            ),
            cmd=utility, args=cmd_args
        )
        manager.export_password_env(process.id)
        process.start()
        job_id = process.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Report the new job back to the caller.
    return make_json_response(
        data={'job_id': job_id, 'success': 1}
    )
示例#15
0
def create_import_export_job(sid):
    """
    Create a background job that runs the psql COPY FROM/TO command to
    import or export table data for the given server.

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the new job id on success, or an error
        response otherwise.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        # Fix: json.loads() no longer accepts an 'encoding' argument
        # (removed in Python 3.9); JSON text is UTF-8 by specification.
        data = json.loads(data['data'][0])
    else:
        # request.data is bytes; json.loads() accepts bytes directly
        # (UTF-8/16/32 auto-detected) since Python 3.6.
        data = json.loads(request.data)

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid).first()

    if server is None:
        return bad_request(errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return bad_request(errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        try:
            # Resolve the user-supplied filename against the managed
            # storage directory (validates import vs. export access).
            _file = filename_with_file_manager_path(
                data['filename'], data['is_import'])
        except Exception as e:
            return bad_request(errormsg=str(e))

        if not _file:
            return bad_request(errormsg=_('Please specify a valid file'))

        if IS_WIN:
            # psql accepts forward slashes on Windows as well.
            _file = _file.replace('\\', '/')

        data['filename'] = _file
    else:
        return bad_request(errormsg=_('Please specify a valid file'))

    # cols / icols are rendered as "(a, b, ...)" strings, or left as None
    # when the corresponding list is empty or absent from the payload.
    cols = None
    icols = None

    if data['icolumns']:
        ignore_cols = data['icolumns']

        # format the ignore column list required as per copy command
        # requirement
        if ignore_cols and len(ignore_cols) > 0:
            for col in ignore_cols:
                if icols:
                    icols += ', '
                else:
                    icols = '('
                icols += driver.qtIdent(conn, col)
            icols += ')'

    # format the column import/export list required as per copy command
    # requirement
    if data['columns']:
        columns = data['columns']
        if columns and len(columns) > 0:
            for col in columns:
                if cols:
                    cols += ', '
                else:
                    cols = '('
                cols += driver.qtIdent(conn, col)
            cols += ')'

    # Create the COPY FROM/TO  from template
    query = render_template(
        'import_export/sql/cmd.sql',
        conn=conn,
        data=data,
        columns=cols,
        ignore_column_list=icols
    )

    args = ['--command', query]

    try:
        p = BatchProcess(
            desc=IEMessage(
                sid,
                data['schema'],
                data['table'],
                data['database'],
                storage_dir,
                utility, *args
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)

        # Connection parameters are passed via the libpq environment
        # rather than on the command line.
        env = dict()
        env['PGHOST'] = server.host
        env['PGPORT'] = str(server.port)
        env['PGUSER'] = server.username
        env['PGDATABASE'] = data['database']
        p.set_env_variables(server, env=env)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return bad_request(errormsg=str(e))

    # Return response
    return make_json_response(
        data={'job_id': jid, 'success': 1}
    )
示例#16
0
def deploy_on_cloud():
    """
    Deploy a new PostgreSQL instance on a cloud provider (AWS) via the
    pgacloud helper script, registering a placeholder server node that is
    updated once provisioning finishes.

    Returns:
        A JSON response describing the placeholder server tree node, or
        an error response.
    """
    # Fix: json.loads() no longer accepts an 'encoding' argument (removed
    # in Python 3.9); request.data is bytes and json.loads() accepts
    # bytes directly since Python 3.6.
    data = json.loads(request.data)
    from subprocess import Popen, PIPE
    _cmd = 'python'
    _cmd_script = '{0}/pgacloud/pgacloud.py'.format(root)

    args = [
        _cmd_script,
        '--debug',
        data['cloud'],
        '--region',
        str(data['secret']['aws_region']),
        'create-instance',
        '--name',
        data['instance_details']['aws_name'],
        '--db-name',
        data['db_details']['aws_db_name'],
        '--db-username',
        data['db_details']['aws_db_username'],
        '--db-port',
        str(data['db_details']['aws_db_port']),
        '--db-version',
        str(data['instance_details']['aws_db_version']),
        '--instance-type',
        data['instance_details']['aws_instance_type'],
        '--storage-type',
        data['instance_details']['aws_storage_type'],
        '--storage-size',
        str(data['instance_details']['aws_storage_size']),
        '--public-ip',
        str(data['instance_details']['aws_public_ip']),
    ]

    # Provisioned IOPS only apply to the io1 storage type.
    if data['instance_details']['aws_storage_type'] == 'io1':
        args.append('--storage-iops')
        args.append(str(data['instance_details']['aws_storage_IOPS']))

    _cmd_msg = '{0} {1} {2}'.format(_cmd, _cmd_script, ' '.join(args))
    try:
        # Register the server immediately with cloud_status=-1 (deploying)
        # so the tree shows the node while provisioning runs.
        sid = _create_server({
            'gid': data['db_details']['gid'],
            'name': data['instance_details']['aws_name'],
            'db': data['db_details']['aws_db_name'],
            'username': data['db_details']['aws_db_username'],
            'port': data['db_details']['aws_db_port'],
            'cloud_status': -1
        })

        p = BatchProcess(desc=CloudProcessDesc(
            sid, _cmd_msg, data['cloud'],
            data['instance_details']['aws_name']),
                         cmd=_cmd,
                         args=args)

        # Credentials are passed to pgacloud via the environment, never
        # on the command line.
        env = dict()
        env['AWS_ACCESS_KEY_ID'] = data['secret']['aws_access_key']
        env['AWS_SECRET_ACCESS_KEY'] = data['secret']['aws_secret_access_key']

        if 'aws_session_token' in data['secret'] and\
                data['secret']['aws_session_token'] is not None:
            env['AWS_SESSION_TOKEN'] = data['secret']['aws_session_token']

        if 'aws_db_password' in data['db_details']:
            env['AWS_DATABASE_PASSWORD'] = data['db_details'][
                'aws_db_password']

        p.set_env_variables(None, env=env)
        p.update_server_id(p.id, sid)
        p.start()

    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))

    # Return response
    return make_json_response(success=1,
                              data={
                                  'job_id': 1,
                                  'node': {
                                      '_id': sid,
                                      '_pid': data['db_details']['gid'],
                                      'connected': False,
                                      '_type': 'server',
                                      'icon': 'icon-server-cloud-deploy',
                                      'id': 'server_{}'.format(sid),
                                      'inode': True,
                                      'label':
                                      data['instance_details']['aws_name'],
                                      'server_type': 'pg',
                                      'module': 'pgadmin.node.server',
                                      'cloud_status': -1
                                  }
                              })
示例#17
0
def create_backup_objects_job(sid):
    """
    Create a background job that runs pg_dump to back up database
    objects (Backup Database(s)/Schema(s)/Table(s)).

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the new job id, or an error response.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        # Fix: json.loads() no longer accepts an 'encoding' argument
        # (removed in Python 3.9); JSON text is UTF-8 by specification.
        data = json.loads(data['data'][0])
    else:
        # request.data is bytes; json.loads() accepts bytes directly
        # (UTF-8/16/32 auto-detected) since Python 3.6.
        data = json.loads(request.data)

    # Remove ratio from data in case of empty string
    if 'ratio' in data and data['ratio'] == '':
        data.pop("ratio")

    try:
        backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid, user_id=current_user.id
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('backup')
    args = [
        '--file',
        backup_file,
        '--host',
        server.host,
        '--port',
        str(server.port),
        '--username',
        server.username,
        '--no-password'
    ]

    def set_param(key, param):
        # Append a flag when the corresponding option is truthy in data.
        if key in data and data[key]:
            args.append(param)

    def set_value(key, param, value):
        # Append "param value"; value=True means "take the value from
        # data[key]", any other truthy value is used verbatim.
        if key in data:
            if value:
                if value is True and data[key]:
                    args.append(param)
                    args.append(data[key])
                else:
                    args.append(param)
                    args.append(value)

    set_param('verbose', '--verbose')
    set_param('dqoute', '--quote-all-identifiers')
    set_value('role', '--role', True)
    if data['format'] is not None:
        if data['format'] == 'custom':
            args.extend(['--format=c'])

            set_param('blobs', '--blobs')
            set_value('ratio', '--compress', True)

        elif data['format'] == 'tar':
            args.extend(['--format=t'])

            set_param('blobs', '--blobs')

        elif data['format'] == 'plain':
            args.extend(['--format=p'])
            if 'only_data' in data and data['only_data']:
                args.append('--data-only')
                set_param('disable_trigger', '--disable-triggers')
            else:
                set_param('only_schema', '--schema-only')
                set_param('dns_owner', '--no-owner')
                set_param('include_create_database', '--create')
                set_param('include_drop_database', '--clean')
        elif data['format'] == 'directory':
            args.extend(['--format=d'])

    set_param('pre_data', '--section=pre-data')
    set_param('data', '--section=data')
    set_param('post_data', '--section=post-data')
    set_param('dns_privilege', '--no-privileges')
    set_param('dns_tablespace', '--no-tablespaces')
    set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
    set_param('use_insert_commands', '--inserts')
    set_param('use_column_inserts', '--column-inserts')
    set_param('disable_quoting', '--disable-dollar-quoting')
    set_param('with_oids', '--oids')
    set_param('use_set_session_auth', '--use-set-session-authorization')

    set_value('encoding', '--encoding', True)
    set_value('no_of_jobs', '--jobs', True)

    # Restrict the dump to the selected schemas and tables, if any.
    for s in data['schemas']:
        args.extend(['--schema', s])

    for s, t in data['tables']:
        args.extend([
            '--table', driver.qtIdent(conn, s, t)
        ])

    args.append(data['database'])

    try:
        p = BatchProcess(
            desc=BackupMessage(
                BACKUP.OBJECT, sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode'
                ) else data['file'],
                *args,
                database=data['database']
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        p.set_env_variables(server)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
示例#18
0
def create_backup_job(sid):
    """
    Create a background job that runs pg_dumpall to back up the whole
    server or only its globals (Backup Server/Globals).

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the new job id, or an error response.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        # Fix: json.loads() no longer accepts an 'encoding' argument
        # (removed in Python 3.9); JSON text is UTF-8 by specification.
        data = json.loads(data['data'][0])
    else:
        # request.data is bytes; json.loads() accepts bytes directly
        # (UTF-8/16/32 auto-detected) since Python 3.6.
        data = json.loads(request.data)

    try:
        backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid, user_id=current_user.id
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('backup_server')

    args = [
        '--file',
        backup_file,
        '--host',
        server.host,
        '--port',
        str(server.port),
        '--username',
        server.username,
        '--no-password',
        '--database',
        server.maintenance_db
    ]

    if 'role' in data and data['role']:
        args.append('--role')
        args.append(data['role'])
    if 'verbose' in data and data['verbose']:
        args.append('--verbose')
    if 'dqoute' in data and data['dqoute']:
        args.append('--quote-all-identifiers')
    if data['type'] == 'global':
        # Dump only global objects (roles, tablespaces), no databases.
        args.append('--globals-only')

    try:
        p = BatchProcess(
            desc=BackupMessage(
                BACKUP.SERVER if data['type'] != 'global' else BACKUP.GLOBALS,
                sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode'
                ) else data['file'],
                *args
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        p.set_env_variables(server)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )
    # Return response
    return make_json_response(
        data={'job_id': jid, 'success': 1}
    )
示例#19
0
def create_restore_job(sid):
    """
    Create a background job that runs pg_restore for the given server.

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the new job id, or an error response.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data.decode())

    # Resolve the backup file against the managed storage directory.
    backup_file = filename_with_file_manager_path(data['file'])

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver

    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('restore')

    args = []

    if 'list' in data:
        # Only list the archive's contents; no restore options apply.
        args.append('--list')
    else:
        def set_param(key, param):
            # Append a flag when the option is truthy; report whether the
            # flag was added so callers can chain mutually-exclusive
            # options.
            if key in data and data[key]:
                args.append(param)
                return True
            return False

        def set_value(key, param, value):
            # Append "param value"; value=True means "take the value from
            # data[key]", any other truthy value is used verbatim.
            if key in data:
                if value:
                    if value is True and data[key]:
                        args.append(param)
                        args.append(data[key])
                    else:
                        args.append(param)
                        args.append(value)
                    return True
            return False

        def set_multiple(key, param, with_schema=True):
            # Append one "param value" pair per selected object; with
            # with_schema the value is a qualified "schema"."object"
            # identifier.
            if key in data:
                if len(data[key]) > 0:
                    if with_schema:
                        # TODO:// This is temporary
                        # Once object tree is implemented then we will use
                        # list of tuples 'else' part
                        if isinstance(data[key], list):
                            s, t = data[key]
                            args.extend([
                                param,
                                driver.qtIdent(
                                    conn, s
                                ) + '.' + driver.qtIdent(conn, t)
                            ])
                        else:
                            for s, o in data[key]:
                                args.extend([
                                    param,
                                    driver.qtIdent(
                                        conn, s
                                    ) + '.' + driver.qtIdent(conn, o)
                                ])
                    else:
                        for o in data[key]:
                            args.extend([param, o])
                    return True
            return False

        # Fix: the port must be passed as a string — subprocess argument
        # lists require str, and every sibling function uses
        # str(server.port).
        args.extend([
            '--host', server.host, '--port', str(server.port),
            '--username', server.username, '--no-password'
        ])

        set_value('role', '--role', True)
        set_value('database', '--dbname', True)

        if data['format'] == 'directory':
            args.extend(['--format=d'])

        set_value('pre_data', '--section=pre-data', False)
        set_value('data', '--section=data', False)
        set_value('post_data', '--section=post-data', False)

        # Fix: the option keys below previously carried trailing spaces
        # ('dns_privilege ', 'no_data_fail_table ',
        # 'use_set_session_auth '), so they never matched the keys in
        # data and the corresponding pg_restore flags were never emitted.
        if not set_param('only_data', '--data-only'):
            set_param('dns_owner', '--no-owner')
            set_param('dns_privilege', '--no-privileges')
            set_param('dns_tablespace', '--no-tablespaces')

        if not set_param('only_schema', '--schema-only'):
            set_param('disable_trigger', '--disable-triggers')

        set_param('include_create_database', '--create')
        set_param('clean', '--clean')
        set_param('single_transaction', '--single-transaction')
        set_param('no_data_fail_table', '--no-data-for-failed-tables')
        set_param('use_set_session_auth', '--use-set-session-authorization')
        set_param('exit_on_error', '--exit-on-error')

        set_value('no_of_jobs', '--jobs', True)
        set_param('verbose', '--verbose')

        set_multiple('schemas', '--schema', False)
        set_multiple('tables', '--table')
        set_multiple('functions', '--function')
        set_multiple('triggers', '--trigger')
        set_multiple('trigger_funcs', '--function')
        set_multiple('indexes', '--index')

    args.append(backup_file)

    try:
        p = BatchProcess(
            desc=RestoreMessage(sid, data['file']),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )
    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
示例#20
0
def create_backup_job(sid):
    """
    Create a new background job for a server/globals backup
    (runs the 'backup_server' utility, i.e. pg_dumpall).

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the new job id on success, or an
        error response otherwise.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        # The 'encoding' keyword of json.loads() was deprecated in
        # Python 3.1 and removed in 3.9; the payload is decoded as
        # UTF-8 automatically.
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    try:
        backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid, user_id=current_user.id
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('backup_server')

    args = [
        '--file',
        backup_file,
        '--host',
        # When an SSH tunnel is in use, connect through its local end.
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port) if manager.use_ssh_tunnel
        else str(server.port),
        '--username',
        server.username,
        '--no-password',
        '--database',
        server.maintenance_db
    ]

    if 'role' in data and data['role']:
        args.append('--role')
        args.append(data['role'])
    if 'verbose' in data and data['verbose']:
        args.append('--verbose')
    if 'dqoute' in data and data['dqoute']:
        args.append('--quote-all-identifiers')
    if data['type'] == 'global':
        args.append('--globals-only')

    try:
        p = BatchProcess(
            desc=BackupMessage(
                BACKUP.SERVER if data['type'] != 'global' else BACKUP.GLOBALS,
                sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode'
                ) else data['file'],
                *args
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )
    # Return response
    return make_json_response(
        data={'job_id': jid, 'success': 1}
    )
示例#21
0
    def runTest(self, current_user_mock, current_user, db_mock,
                popen_mock, get_server_details_mock):
        """Verify that BatchProcess serializes the import/export process
        description (IEMessage) correctly and that its start/list
        behavior works against mocked db/session/popen objects."""
        with self.app.app_context():
            # Fake a logged-in user on both the real and mocked handles.
            current_user.id = 1
            current_user_mock.id = 1
            current_app.PGADMIN_RUNTIME = False

            def db_session_add_mock(j):
                # Intercept db.session.add() to inspect the serialized
                # process description before it would be persisted.
                cmd_obj = loads(j.desc)
                self.assertTrue(isinstance(cmd_obj, IProcessDesc))

                self.assertEqual(cmd_obj.bfile, self.params['filename'])
                self.assertEqual(cmd_obj.database,
                                 self.class_params['database'])

                # Expected command string: each argument is wrapped in
                # double quotes, with a backslash escape prepended to the
                # formatted argument and to the quote character.
                command = ' "' + self.class_params['args'][0] + '"' + \
                          ' "' + '\\' + self.class_params['args'][1].format(
                              self.params['schema'],
                              self.params['table'],
                              self.params['columns'][0],
                              self.params['columns'][1],
                              self.params['filename'],
                              '\\' + self.params['quote']
                ) + '"'
                self.assertEqual(cmd_obj._cmd, command)

            db_mock.session.add.side_effect = db_session_add_mock
            db_mock.session.commit = MagicMock(return_value=True)

            get_server_details_mock.return_value = \
                self.class_params['name'], \
                self.class_params['host'], \
                self.class_params['port']

            # Build the (unescaped) argument string passed to IEMessage.
            args = self.class_params['args'][1].format(
                self.params['schema'],
                self.params['table'],
                self.params['columns'][0],
                self.params['columns'][1],
                self.params['filename'],
                self.params['quote']
            )

            import_export_obj = IEMessage(
                *[self.class_params['args'][0], args],
                **{
                    'sid': self.class_params['sid'],
                    'schema': self.params['schema'],
                    'table': self.params['table'],
                    'is_import': self.params['is_import'],
                    'database': self.params['database'],
                    'filename': self.params['filename'],
                    'storage': self.params['storage'],
                }
            )

            # Constructing BatchProcess triggers db_session_add_mock above.
            p = BatchProcess(
                desc=import_export_obj,
                cmd=self.class_params['cmd'],
                args=args
            )

            # Check that _create_process has been called
            self.assertTrue(db_mock.session.add.called)

            # Check start method
            self._check_start(popen_mock, p, import_export_obj)

            # Check list method
            self._check_list(p, import_export_obj)
示例#22
0
def create_backup_objects_job(sid):
    """
    Create a new background job for backing up database objects
    (Backup Database(s)/Schema(s)/Table(s)) via pg_dump.

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the new job id, or an error response.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        # json.loads() no longer accepts an 'encoding' argument
        # (deprecated since Python 3.1, removed in 3.9).
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    # Remove ratio from data in case of empty string
    if 'ratio' in data and data['ratio'] == '':
        data.pop("ratio")

    try:
        # A directory-format dump targets a directory rather than a
        # plain file, so skip the "is a file" validation in that case.
        if data['format'] == 'directory':
            backup_file = filename_with_file_manager_path(data['file'], False)
        else:
            backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid, user_id=current_user.id
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('backup')
    args = [
        '--file',
        backup_file,
        '--host',
        # When an SSH tunnel is in use, connect through its local end.
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port) if manager.use_ssh_tunnel
        else str(server.port),
        '--username',
        server.username,
        '--no-password'
    ]

    def set_param(key, param):
        # Append a bare flag when the option is present and truthy.
        if key in data and data[key]:
            args.append(param)

    def set_value(key, param, value):
        # Append a "param value" pair. When 'value' is literally True,
        # the user-supplied data[key] is used (and skipped entirely when
        # empty); otherwise 'value' itself is appended. Previously a
        # falsy data[key] caused the Python object True to be appended
        # into the command arguments.
        if key in data:
            if value is True:
                if data[key]:
                    args.append(param)
                    args.append(data[key])
            elif value:
                args.append(param)
                args.append(value)

    set_param('verbose', '--verbose')
    set_param('dqoute', '--quote-all-identifiers')
    set_value('role', '--role', True)
    if data['format'] is not None:
        if data['format'] == 'custom':
            args.extend(['--format=c'])

            set_param('blobs', '--blobs')
            set_value('ratio', '--compress', True)

        elif data['format'] == 'tar':
            args.extend(['--format=t'])

            set_param('blobs', '--blobs')

        elif data['format'] == 'plain':
            args.extend(['--format=p'])
            if 'only_data' in data and data['only_data']:
                args.append('--data-only')
                set_param('disable_trigger', '--disable-triggers')
            else:
                set_param('only_schema', '--schema-only')
                set_param('dns_owner', '--no-owner')
                set_param('include_create_database', '--create')
                set_param('include_drop_database', '--clean')
        elif data['format'] == 'directory':
            args.extend(['--format=d'])

    set_param('pre_data', '--section=pre-data')
    set_param('data', '--section=data')
    set_param('post_data', '--section=post-data')
    set_param('dns_privilege', '--no-privileges')
    set_param('dns_tablespace', '--no-tablespaces')
    set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
    set_param('use_insert_commands', '--inserts')
    set_param('use_column_inserts', '--column-inserts')
    set_param('disable_quoting', '--disable-dollar-quoting')
    set_param('with_oids', '--oids')
    set_param('use_set_session_auth', '--use-set-session-authorization')

    set_value('encoding', '--encoding', True)
    set_value('no_of_jobs', '--jobs', True)

    for s in data['schemas']:
        args.extend(['--schema', s])

    for s, t in data['tables']:
        args.extend([
            '--table', driver.qtIdent(conn, s, t)
        ])

    # The database name is a positional argument and must come last.
    args.append(data['database'])

    try:
        p = BatchProcess(
            desc=BackupMessage(
                BACKUP.OBJECT, sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode'
                ) else data['file'],
                *args,
                database=data['database']
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
示例#23
0
def create_backup_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for backup task (Backup Server/Globals)

    Returns:
        None
    """
    # Decode the request payload (form-encoded or raw JSON body).
    if request.form:
        form = dict(request.form)
        data = json.loads(form['data'][0])
    else:
        data = json.loads(request.data.decode())

    backup_file = filename_with_file_manager_path(data['file'])

    # Look up the target server belonging to the current user.
    server = Server.query.filter_by(
        id=sid, user_id=current_user.id
    ).first()
    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # A live connection is required before launching the utility.
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    if not conn.connected():
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('backup_server')

    # Mandatory connection/output arguments.
    args = [
        '--file', backup_file,
        '--host', server.host,
        '--port', str(server.port),
        '--username', server.username,
        '--no-password',
        '--database', driver.qtIdent(conn, server.maintenance_db),
    ]

    # Optional flags driven by the request payload.
    if data.get('role'):
        args.extend(['--role', data['role']])
    if data.get('verbose'):
        args.append('--verbose')
    if data.get('dqoute'):
        args.append('--quote-all-identifiers')
    if data['type'] == 'global':
        args.append('--globals-only')

    try:
        backup_type = BACKUP.GLOBALS if data['type'] == 'global' \
            else BACKUP.SERVER
        job = BatchProcess(
            desc=BackupMessage(backup_type, sid, data['file']),
            cmd=utility, args=args
        )
        manager.export_password_env(job.id)
        job.start()
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': job.id, 'success': 1}
    )
示例#24
0
def create_backup_objects_job(sid):
    """
    Create a new background job for backing up database objects
    (Backup Database(s)/Schema(s)/Table(s)), or the whole server /
    globals when data['type'] says so.

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the new job id, or an error response.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        # json.loads() no longer accepts an 'encoding' argument
        # (deprecated since Python 3.1, removed in 3.9).
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    backup_obj_type = 'objects'
    if 'type' in data:
        backup_obj_type = data['type']

    try:
        # A directory-format dump targets a directory rather than a
        # plain file, so skip the "is a file" validation in that case.
        if 'format' in data and data['format'] == 'directory':
            backup_file = filename_with_file_manager_path(data['file'], False)
        else:
            backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid, user_id=current_user.id
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    # 'backup' (pg_dump) for object-level backups, 'backup_server'
    # (pg_dumpall) for server/globals backups.
    utility = manager.utility('backup') if backup_obj_type == 'objects' \
        else manager.utility('backup_server')

    args = [
        '--file',
        backup_file,
        '--host',
        # When an SSH tunnel is in use, connect through its local end.
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port) if manager.use_ssh_tunnel
        else str(server.port),
        '--username',
        server.username,
        '--no-password'
    ]

    if backup_obj_type != 'objects':
        args.append('--database')
        args.append(server.maintenance_db)

    if backup_obj_type == 'globals':
        args.append('--globals-only')

    def set_param(key, param):
        # Append a bare flag when the option is present and truthy.
        if key in data and data[key]:
            args.append(param)

    def set_value(key, param, default_value=None):
        # Append a "param value" pair using data[key] when it is set and
        # non-empty, otherwise the supplied default (if any).
        if key in data and data[key] is not None and data[key] != '':
            args.append(param)
            args.append(data[key])
        elif default_value is not None:
            args.append(param)
            args.append(default_value)

    set_param('verbose', '--verbose')
    set_param('dqoute', '--quote-all-identifiers')
    set_value('role', '--role')

    if backup_obj_type == 'objects' and \
            'format' in data and data['format'] is not None:
        if data['format'] == 'custom':
            args.extend(['--format=c'])
            set_param('blobs', '--blobs')
            set_value('ratio', '--compress')
        elif data['format'] == 'tar':
            args.extend(['--format=t'])
            set_param('blobs', '--blobs')
        elif data['format'] == 'plain':
            args.extend(['--format=p'])
        elif data['format'] == 'directory':
            args.extend(['--format=d'])

    if 'only_data' in data and data['only_data']:
        set_param('only_data', '--data-only')
        # --disable-triggers is only meaningful for plain-text dumps.
        if 'format' in data and data['format'] == 'plain':
            set_param('disable_trigger', '--disable-triggers')
    elif 'only_schema' in data and data['only_schema']:
        set_param('only_schema', '--schema-only')

    set_param('dns_owner', '--no-owner')
    set_param('include_create_database', '--create')
    set_param('include_drop_database', '--clean')
    set_param('pre_data', '--section=pre-data')
    set_param('data', '--section=data')
    set_param('post_data', '--section=post-data')
    set_param('dns_privilege', '--no-privileges')
    set_param('dns_tablespace', '--no-tablespaces')
    set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
    set_param('use_insert_commands', '--inserts')
    set_param('use_column_inserts', '--column-inserts')
    set_param('disable_quoting', '--disable-dollar-quoting')
    set_param('with_oids', '--oids')
    set_param('use_set_session_auth', '--use-set-session-authorization')

    # These pg_dump options exist only on PostgreSQL 11 and above.
    if manager.version >= 110000:
        set_param('no_comments', '--no-comments')
        set_param('load_via_partition_root', '--load-via-partition-root')

    set_value('encoding', '--encoding')
    set_value('no_of_jobs', '--jobs')

    if 'schemas' in data:
        for s in data['schemas']:
            args.extend(['--schema', s])

    if 'tables' in data:
        for s, t in data['tables']:
            args.extend([
                '--table', driver.qtIdent(conn, s, t)
            ])

    try:
        if backup_obj_type == 'objects':
            # The database name is a positional argument for pg_dump.
            args.append(data['database'])
            p = BatchProcess(
                desc=BackupMessage(
                    BACKUP.OBJECT, sid,
                    data['file'].encode('utf-8') if hasattr(
                        data['file'], 'encode'
                    ) else data['file'],
                    *args,
                    database=data['database']
                ),
                cmd=utility, args=args
            )
        else:
            p = BatchProcess(
                desc=BackupMessage(
                    BACKUP.SERVER if backup_obj_type != 'globals'
                    else BACKUP.GLOBALS,
                    sid,
                    data['file'].encode('utf-8') if hasattr(
                        data['file'], 'encode'
                    ) else data['file'],
                    *args
                ),
                cmd=utility, args=args
            )

        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
示例#25
0
def deploy_on_azure(data):
    """Deploy the Postgres instance on Azure.

    Registers a placeholder server entry, then launches pgacloud.py as a
    background BatchProcess, passing Azure credentials via environment
    variables.

    Returns:
        (True, process, {'label', 'sid'}) when the job was launched, or
        (False, None, error message) on failure.
    """
    _cmd = 'python'
    _cmd_script = '{0}/pgacloud/pgacloud.py'.format(root)
    _label = data['instance_details']['name']

    # Translate the boolean HA flag into the string values the
    # pgacloud script expects on its command line.
    if 'high_availability' in data['instance_details']:
        if data['instance_details']['high_availability']:
            data['instance_details']['high_availability'] = "ZoneRedundant"
        else:
            data['instance_details']['high_availability'] = "Disabled"

    args = [
        _cmd_script, 'azure', '--region',
        str(data['instance_details']['region']), '--resource-group',
        data['instance_details']['resource_group'], 'create-instance',
        '--name', data['instance_details']['name'], '--db-username',
        data['db_details']['db_username'], '--db-major-version',
        str(data['instance_details']['db_version']), '--instance_tier_type',
        data['instance_details']['db_instance_class'], '--instance-type',
        data['instance_details']['instance_type'], '--storage-size',
        str(data['instance_details']['storage_size']), '--public-ips',
        str(data['instance_details']['public_ips']), '--availability-zone',
        str(data['instance_details']['availability_zone']),
        '--high-availability', data['instance_details']['high_availability']
    ]

    # Human-readable command line, used only for the process description.
    _cmd_msg = '{0} {1} {2}'.format(_cmd, _cmd_script, ' '.join(args))
    try:
        # Register a placeholder server entry; cloud_status -1 appears
        # to mark it as "deployment in progress" — TODO confirm against
        # the _create_server implementation.
        sid = _create_server({
            'gid': data['db_details']['gid'],
            'name': data['instance_details']['name'],
            'db': 'postgres',
            'username': data['db_details']['db_username'],
            'port': 5432,
            'cloud_status': -1
        })

        p = BatchProcess(desc=CloudProcessDesc(
            sid, _cmd_msg, data['cloud'], data['instance_details']['name']),
                         cmd=_cmd,
                         args=args)

        env = dict()

        # Azure credentials and token-cache details are handed to the
        # child process through environment variables.
        azure = session['azure']['azure_obj']
        env['AZURE_SUBSCRIPTION_ID'] = azure.subscription_id
        env['AUTH_TYPE'] = data['secret']['auth_type']
        env['AZURE_CRED_CACHE_NAME'] = azure.azure_cache_name
        env['AZURE_CRED_CACHE_LOCATION'] = azure.azure_cache_location
        if azure.authentication_record_json is not None:
            env['AUTHENTICATION_RECORD_JSON'] = \
                azure.authentication_record_json
            env['AZURE_TENANT_ID'] = data['secret']['azure_tenant_id']

        if 'db_password' in data['db_details']:
            env['AZURE_DATABASE_PASSWORD'] = data['db_details']['db_password']

        p.set_env_variables(None, env=env)
        p.update_server_id(p.id, sid)
        p.start()

        # add pid: cache file dict in session['azure_cache_files_list']
        if 'azure_cache_files_list' in session and \
                session['azure_cache_files_list'] is not None:
            session['azure_cache_files_list'][p.id] = azure.azure_cache_name
        else:
            session['azure_cache_files_list'] = {p.id: azure.azure_cache_name}
        del session['azure']['azure_cache_file_name']
        return True, p, {'label': _label, 'sid': sid}
    except Exception as e:
        current_app.logger.exception(e)
        return False, None, str(e)
    finally:
        # The cached Azure object is dropped from the session on both
        # the success and failure paths.
        del session['azure']['azure_obj']
示例#26
0
def create_import_export_job(sid):
    """
    Create a new background job for the import/export table data
    functionality (runs the SQL utility with a generated COPY command).

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the new job id, or an error response.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        # json.loads() no longer accepts an 'encoding' argument
        # (deprecated since Python 3.1, removed in 3.9).
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()

    if server is None:
        return bad_request(errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return bad_request(errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        try:
            _file = filename_with_file_manager_path(data['filename'],
                                                    data['is_import'])
        except Exception as e:
            return bad_request(errormsg=str(e))

        if not _file:
            return bad_request(errormsg=_('Please specify a valid file'))

        if IS_WIN:
            _file = _file.replace('\\', '/')

        data['filename'] = _file
    else:
        return bad_request(errormsg=_('Please specify a valid file'))

    # Format the column lists as "(col1, col2, ...)" with each
    # identifier quoted, as required by the COPY command template.
    icols = None
    if data['icolumns']:
        ignore_cols = data['icolumns']
        if ignore_cols:
            icols = '(' + ', '.join(
                driver.qtIdent(conn, col) for col in ignore_cols) + ')'

    cols = None
    if data['columns']:
        columns = data['columns']
        if columns:
            cols = '(' + ', '.join(
                driver.qtIdent(conn, col) for col in columns) + ')'

    # Create the COPY FROM/TO  from template
    query = render_template('import_export/sql/cmd.sql',
                            conn=conn,
                            data=data,
                            columns=cols,
                            ignore_column_list=icols)

    args = ['--command', query]

    try:
        p = BatchProcess(desc=IEMessage(sid, data['schema'], data['table'],
                                        data['database'], storage_dir, utility,
                                        *args),
                         cmd=utility,
                         args=args)
        manager.export_password_env(p.id)

        def export_pg_env(env):
            # Connection parameters for the utility are passed through
            # the environment rather than the command line.
            env['PGHOST'] = server.host
            env['PGPORT'] = str(server.port)
            env['PGUSER'] = server.username
            env['PGDATABASE'] = data['database']

        p.start(export_pg_env)
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return bad_request(errormsg=str(e))

    # Return response
    return make_json_response(data={'job_id': jid, 'success': 1})
示例#27
0
def deploy_on_biganimal(data):
    """Deploy Postgres instance on BigAnimal.

    Registers a placeholder server entry, then launches pgacloud.py as a
    background BatchProcess with the BigAnimal access token passed via
    the environment.

    Returns:
        (True, process, {'label', 'sid'}) when the job was launched, or
        (False, None, error message) on failure.
    """
    _cmd = 'python'
    _cmd_script = '{0}/pgacloud/pgacloud.py'.format(root)
    _label = data['instance_details']['name']
    # The pgacloud CLI takes '1'/'0' strings for the private-network flag.
    _private_network = '1' if str(
        data['instance_details']['cloud_type']) == 'private' else '0'
    # instance_size is encoded as "<something>||<instance type>".
    _instance_size = data['instance_details']['instance_size'].split('||')[1]

    # Single node by default; with high availability the node count is
    # the replica count plus the primary.
    cluster_arch = SINGLE_CLUSTER_ARCH
    nodes = 1

    if data['db_details']['high_availability']:
        cluster_arch = HA_CLUSTER_ARCH
        nodes = int(data['db_details']['replicas']) + nodes

    args = [
        _cmd_script, data['cloud'], 'create-instance', '--name',
        data['instance_details']['name'], '--region',
        str(data['instance_details']['region']), '--db-type',
        str(data['db_details']['database_type']), '--db-version',
        str(data['db_details']['postgres_version']), '--volume-type',
        str(data['instance_details']['volume_type']), '--volume-properties',
        str(data['instance_details']['volume_properties']), '--instance-type',
        str(_instance_size), '--private-network', _private_network,
        '--cluster-arch', cluster_arch, '--nodes',
        str(nodes)
    ]

    if 'biganimal_public_ip' in data['instance_details']:
        args.append('--public-ip')
        args.append(str(data['instance_details']['biganimal_public_ip']))

    # Human-readable command line, used only for the process description.
    _cmd_msg = '{0} {1} {2}'.format(_cmd, _cmd_script, ' '.join(args))
    try:
        # Register a placeholder server entry; cloud_status -1 appears
        # to mark it as "deployment in progress" — TODO confirm against
        # the _create_server implementation.
        # NOTE(review): the username literal below looks scrubbed/masked
        # in this copy of the source — verify against the original.
        sid = _create_server({
            'gid': data['db_details']['gid'],
            'name': data['instance_details']['name'],
            'db': 'edb_admin',
            'username': '******',
            'port': 5432,
            'cloud_status': -1
        })

        p = BatchProcess(desc=CloudProcessDesc(
            sid, _cmd_msg, data['cloud'], data['instance_details']['name']),
                         cmd=_cmd,
                         args=args)

        env = dict()
        # Credentials are handed to the child process via environment
        # variables, never on the command line.
        biganimal_obj = pickle.loads(session['biganimal']['provider_obj'])
        env['BIGANIMAL_ACCESS_KEY'] = biganimal_obj.access_token

        if 'password' in data['db_details']:
            env['BIGANIMAL_DATABASE_PASSWORD'] = data['db_details']['password']

        p.set_env_variables(None, env=env)
        p.update_server_id(p.id, sid)
        p.start()

        return True, p, {'label': _label, 'sid': sid}

    except Exception as e:
        current_app.logger.exception(e)
        return False, None, str(e)