Example #1
0
def create_backup_objects_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for backup task
        (Backup Database(s)/Schema(s)/Table(s))

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        # json.loads handles UTF-8 input on its own; the 'encoding' keyword
        # is ignored on Python 3 and was removed in Python 3.9.
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    # Remove ratio from data in case of empty string
    if 'ratio' in data and data['ratio'] == '':
        data.pop("ratio")

    try:
        backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid, user_id=current_user.id
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('backup')
    args = [
        '--file',
        backup_file,
        '--host',
        server.host,
        '--port',
        str(server.port),
        '--username',
        server.username,
        '--no-password'
    ]

    def set_param(key, param):
        if key in data and data[key]:
            args.append(param)

    def set_value(key, param, value):
        if key in data:
            if value:
                if value is True and data[key]:
                    args.append(param)
                    args.append(data[key])
                else:
                    args.append(param)
                    args.append(value)

    set_param('verbose', '--verbose')
    set_param('dqoute', '--quote-all-identifiers')
    set_value('role', '--role', True)
    if data['format'] is not None:
        if data['format'] == 'custom':
            args.extend(['--format=c'])

            set_param('blobs', '--blobs')
            set_value('ratio', '--compress', True)

        elif data['format'] == 'tar':
            args.extend(['--format=t'])

            set_param('blobs', '--blobs')

        elif data['format'] == 'plain':
            args.extend(['--format=p'])
            if 'only_data' in data and data['only_data']:
                args.append('--data-only')
                set_param('disable_trigger', '--disable-triggers')
            else:
                set_param('only_schema', '--schema-only')
                set_param('dns_owner', '--no-owner')
                set_param('include_create_database', '--create')
                set_param('include_drop_database', '--clean')
        elif data['format'] == 'directory':
            args.extend(['--format=d'])

    set_param('pre_data', '--section=pre-data')
    set_param('data', '--section=data')
    set_param('post_data', '--section=post-data')
    set_param('dns_privilege', '--no-privileges')
    set_param('dns_tablespace', '--no-tablespaces')
    set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
    set_param('use_insert_commands', '--inserts')
    set_param('use_column_inserts', '--column-inserts')
    set_param('disable_quoting', '--disable-dollar-quoting')
    set_param('with_oids', '--oids')
    set_param('use_set_session_auth', '--use-set-session-authorization')

    set_value('encoding', '--encoding', True)
    set_value('no_of_jobs', '--jobs', True)

    for s in data['schemas']:
        args.extend(['--schema', s])

    for s, t in data['tables']:
        args.extend([
            '--table', driver.qtIdent(conn, s, t)
        ])

    args.append(data['database'])

    try:
        p = BatchProcess(
            desc=BackupMessage(
                BACKUP.OBJECT, sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode'
                ) else data['file'],
                *args,
                database=data['database']
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        p.set_env_variables(server)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
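
The nested set_param/set_value helpers in Example #1 close over `data` and `args` to translate request options into pg_dump-style flags. A minimal standalone sketch of that pattern follows, with a made-up payload (only the key names mirror the example; this is illustrative, not pgAdmin code):

def build_flag_args(data):
    """Sketch of the closure pattern used above; payload values are assumed."""
    args = ['--no-password']

    def set_param(key, param):
        # Bare flag: appended only when the payload marks the option truthy.
        if key in data and data[key]:
            args.append(param)

    def set_value(key, param, value):
        # Valued flag: value=True means "take the value from the payload".
        if key in data:
            if value:
                if value is True and data[key]:
                    args.append(param)
                    args.append(data[key])
                else:
                    args.append(param)
                    args.append(value)

    set_param('verbose', '--verbose')
    set_value('role', '--role', True)
    set_value('ratio', '--compress', True)
    return args


if __name__ == '__main__':
    print(build_flag_args({'verbose': True, 'role': 'postgres', 'ratio': '5'}))
    # ['--no-password', '--verbose', '--role', 'postgres', '--compress', '5']
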
Example #2
0
def create_maintenance_job(sid, did):
    """
    Args:
        sid: Server ID
        did: Database ID

        Creates a new job for maintenance vacuum operation

    Returns:
        None
    """
    if request.form:
        data = json.loads(request.form['data'])
    else:
        data = json.loads(request.data)

    index_name = None

    if 'primary_key' in data and data['primary_key']:
        index_name = data['primary_key']
    elif 'unique_constraint' in data and data['unique_constraint']:
        index_name = data['unique_constraint']
    elif 'index' in data and data['index']:
        index_name = data['index']

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()

    if server is None:
        return make_json_response(
            success=0, errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0, errormsg=_("Please connect to the server first."))

    utility = manager.utility('sql')
    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    # Create the command for the vacuum operation
    query = render_template('maintenance/sql/command.sql',
                            conn=conn,
                            data=data,
                            index_name=index_name)

    args = [
        '--host',
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port)
        if manager.use_ssh_tunnel else str(server.port), '--username',
        server.username, '--dbname', data['database'], '--command', query
    ]

    try:
        p = BatchProcess(desc=Message(sid, data, query),
                         cmd=utility,
                         args=args)
        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))

    # Return response
    return make_json_response(data={
        'job_id': jid,
        'status': True,
        'info': _('Maintenance job created.')
    })
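
Example #2 (like several of the later examples) routes the connection through the SSH tunnel's local bind address when a tunnel is in use, and falls back to the server's own host and port otherwise. A small sketch of that selection with stand-in objects (SimpleNamespace here is only a placeholder for the real manager and server):

from types import SimpleNamespace


def connection_endpoint(manager, server):
    # Prefer the tunnel's local bind address when an SSH tunnel is active.
    host = manager.local_bind_host if manager.use_ssh_tunnel else server.host
    port = manager.local_bind_port if manager.use_ssh_tunnel else server.port
    return host, str(port)


manager = SimpleNamespace(use_ssh_tunnel=True,
                          local_bind_host='127.0.0.1', local_bind_port=54321)
server = SimpleNamespace(host='db.example.com', port=5432)
print(connection_endpoint(manager, server))  # ('127.0.0.1', '54321')
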
Example #3
0
def create_maintenance_job(sid, did):
    """
    Args:
        sid: Server ID
        did: Database ID

        Creates a new job for maintenance vacuum operation

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    index_name = None

    if 'primary_key' in data and data['primary_key']:
        index_name = data['primary_key']
    elif 'unique_constraint' in data and data['unique_constraint']:
        index_name = data['unique_constraint']
    elif 'index' in data and data['index']:
        index_name = data['index']

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the given server")
        )

    # To fetch MetaData for the server
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('sql')

    # Create the command for the vacuum operation
    query = render_template(
        'maintenance/sql/command.sql', conn=conn, data=data,
        index_name=index_name
    )

    args = [
        '--host', server.host, '--port', str(server.port),
        '--username', server.username, '--dbname',
        data['database'],
        '--command', query
    ]

    try:
        p = BatchProcess(
            desc=Message(sid, data, query),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        p.set_env_variables(server)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'status': True,
              'info': 'Maintenance job created.'}
    )
Example #4
0
def create_backup_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for backup task (Backup Server/Globals)

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    try:
        backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid, user_id=current_user.id
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('backup_server')

    args = [
        '--file',
        backup_file,
        '--host',
        server.host,
        '--port',
        str(server.port),
        '--username',
        server.username,
        '--no-password',
        '--database',
        server.maintenance_db
    ]

    if 'role' in data and data['role']:
        args.append('--role')
        args.append(data['role'])
    if 'verbose' in data and data['verbose']:
        args.append('--verbose')
    if 'dqoute' in data and data['dqoute']:
        args.append('--quote-all-identifiers')
    if data['type'] == 'global':
        args.append('--globals-only')

    try:
        p = BatchProcess(
            desc=BackupMessage(
                BACKUP.SERVER if data['type'] != 'global' else BACKUP.GLOBALS,
                sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode'
                ) else data['file'],
                *args
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        p.set_env_variables(server)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )
    # Return response
    return make_json_response(
        data={'job_id': jid, 'success': 1}
    )
Example #5
0
def create_import_export_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for import and export table data functionality

    Returns:
        None
    """
    if request.form:
        data = json.loads(request.form['data'])
    else:
        data = json.loads(request.data)

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid).first()

    if server is None:
        return bad_request(errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return bad_request(errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')
    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(
            success=0,
            errormsg=ret_val
        )

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        try:
            _file = filename_with_file_manager_path(
                data['filename'], data['is_import'])
        except Exception as e:
            return bad_request(errormsg=str(e))

        if not _file:
            return bad_request(errormsg=_('Please specify a valid file'))
        elif IS_WIN:
            _file = _file.replace('\\', '/')

        data['filename'] = _file
    else:
        return bad_request(errormsg=_('Please specify a valid file'))

    # Get required and ignored column list
    icols = _get_ignored_column_list(data, driver, conn)
    cols = _get_required_column_list(data, driver, conn)

    # Create the COPY FROM/TO statement from the template
    query = render_template(
        'import_export/sql/cmd.sql',
        conn=conn,
        data=data,
        columns=cols,
        ignore_column_list=icols
    )

    args = ['--command', query]

    try:

        io_params = {
            'sid': sid,
            'schema': data['schema'],
            'table': data['table'],
            'database': data['database'],
            'is_import': data['is_import'],
            'filename': data['filename'],
            'storage': storage_dir,
            'utility': utility
        }

        p = BatchProcess(
            desc=IEMessage(
                *args,
                **io_params
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)

        env = dict()

        env['PGHOST'] = \
            manager.local_bind_host if manager.use_ssh_tunnel else server.host
        env['PGPORT'] = \
            str(manager.local_bind_port) if manager.use_ssh_tunnel else str(
                server.port)
        env['PGUSER'] = server.username
        env['PGDATABASE'] = data['database']
        p.set_env_variables(server, env=env)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return bad_request(errormsg=str(e))

    # Return response
    return make_json_response(
        data={'job_id': jid, 'success': 1}
    )
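
Example #5 hands the connection details to the psql utility through libpq environment variables (PGHOST, PGPORT, PGUSER, PGDATABASE) rather than command-line flags, so the COPY statement is the only argument. A sketch of that environment construction with a stand-in server object, omitting the SSH-tunnel branch shown earlier:

from types import SimpleNamespace


def libpq_env(server, database):
    # libpq reads these variables, so psql needs no --host/--port/--username flags.
    return {
        'PGHOST': server.host,
        'PGPORT': str(server.port),
        'PGUSER': server.username,
        'PGDATABASE': database,
    }


server = SimpleNamespace(host='db.example.com', port=5432, username='postgres')
print(libpq_env(server, 'sales'))
# {'PGHOST': 'db.example.com', 'PGPORT': '5432', 'PGUSER': 'postgres', 'PGDATABASE': 'sales'}
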
Example #6
0
def create_restore_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for restore task

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    try:
        _file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    if _file is None:
        return make_json_response(
            success=0,
            errormsg=_("File could not be found.")
        )

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver

    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('restore')

    args = []

    if 'list' in data:
        args.append('--list')
    else:
        def set_param(key, param):
            if key in data and data[key]:
                args.append(param)
                return True
            return False

        def set_value(key, param, value):
            if key in data:
                if value:
                    if value is True and data[key]:
                        args.append(param)
                        args.append(data[key])
                    else:
                        args.append(param)
                        args.append(value)
                    return True
            return False

        def set_multiple(key, param, with_schema=True):
            if key in data:
                if len(data[key]) > 0:
                    if with_schema:
                        # TODO: This is temporary. Once the object tree is
                        # implemented, the list-of-tuples 'else' branch below
                        # will always be used.
                        if isinstance(data[key], list):
                            s, t = data[key]
                            args.extend([
                                param,
                                driver.qtIdent(
                                    conn, s
                                ) + '.' + driver.qtIdent(conn, t)
                            ])
                        else:
                            for s, o in data[key]:
                                args.extend([
                                    param,
                                    driver.qtIdent(
                                        conn, s
                                    ) + '.' + driver.qtIdent(conn, o)
                                ])
                    else:
                        for o in data[key]:
                            args.extend([param, o])
                    return True
            return False

        args.extend([
            '--host', server.host, '--port', str(server.port),
            '--username', server.username, '--no-password'
        ])

        set_value('role', '--role', True)
        set_value('database', '--dbname', True)

        if data['format'] == 'directory':
            args.extend(['--format=d'])

        # These are simple on/off flags, so use set_param; calling set_value
        # with a False third argument would never append anything.
        set_param('pre_data', '--section=pre-data')
        set_param('data', '--section=data')
        set_param('post_data', '--section=post-data')

        if not set_param('only_data', '--data-only'):
            set_param('dns_owner', '--no-owner')
            set_param('dns_privilege', '--no-privileges')
            set_param('dns_tablespace', '--no-tablespaces')

        if not set_param('only_schema', '--schema-only'):
            set_param('disable_trigger', '--disable-triggers')

        set_param('include_create_database', '--create')
        set_param('clean', '--clean')
        set_param('single_transaction', '--single-transaction')
        set_param('no_data_fail_table', '--no-data-for-failed-tables')
        set_param('use_set_session_auth', '--use-set-session-authorization')
        set_param('exit_on_error', '--exit-on-error')

        set_value('no_of_jobs', '--jobs', True)
        set_param('verbose', '--verbose')

        set_multiple('schemas', '--schema', False)
        set_multiple('tables', '--table', False)
        set_multiple('functions', '--function', False)
        set_multiple('triggers', '--trigger', False)
        set_multiple('trigger_funcs', '--function', False)
        set_multiple('indexes', '--index', False)

    args.append(fs_short_path(_file))

    try:
        p = BatchProcess(
            desc=RestoreMessage(
                sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode'
                ) else data['file'],
                *args
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )
    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
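
Example #6's set_multiple helper repeats the same flag once per selected object and, when asked, schema-qualifies each name via the driver's qtIdent. The sketch below uses a simplified quote_ident stand-in (hypothetical; the real quoting lives in the pgAdmin driver):

def quote_ident(name):
    # Simplified stand-in for driver.qtIdent: double-quote and escape quotes.
    return '"' + name.replace('"', '""') + '"'


def set_multiple(args, values, param, with_schema=True):
    # Repeat the flag once per object, schema-qualified when requested.
    for item in values:
        if with_schema:
            schema, obj = item
            args.extend([param, quote_ident(schema) + '.' + quote_ident(obj)])
        else:
            args.extend([param, item])
    return args


print(set_multiple([], [('public', 'orders'), ('sales', 'q1')], '--table'))
# ['--table', '"public"."orders"', '--table', '"sales"."q1"']
print(set_multiple([], ['public', 'audit'], '--schema', with_schema=False))
# ['--schema', 'public', '--schema', 'audit']
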
Example #7
0
def create_backup_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for backup task (Backup Server/Globals)

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    try:
        backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid, user_id=current_user.id).first()

    if server is None:
        return make_json_response(
            success=0, errormsg=_("Could not find the specified server."))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0, errormsg=_("Please connect to the server first."))

    utility = manager.utility('backup_server')

    args = [
        '--file', backup_file, '--host',
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port)
        if manager.use_ssh_tunnel else str(server.port), '--username',
        server.username, '--no-password', '--database', server.maintenance_db
    ]

    if 'role' in data and data['role']:
        args.append('--role')
        args.append(data['role'])
    if 'verbose' in data and data['verbose']:
        args.append('--verbose')
    if 'dqoute' in data and data['dqoute']:
        args.append('--quote-all-identifiers')
    if data['type'] == 'global':
        args.append('--globals-only')

    try:
        p = BatchProcess(desc=BackupMessage(
            BACKUP.SERVER if data['type'] != 'global' else BACKUP.GLOBALS,
            sid, data['file'].encode('utf-8') if hasattr(
                data['file'], 'encode') else data['file'], *args),
                         cmd=utility,
                         args=args)
        manager.export_password_env(p.id)
        p.set_env_variables(server)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))
    # Return response
    return make_json_response(data={'job_id': jid, 'success': 1})
Example #8
0
def create_backup_objects_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for backup task
        (Backup Database(s)/Schema(s)/Table(s))

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    backup_obj_type = 'objects'
    if 'type' in data:
        backup_obj_type = data['type']

    try:
        if 'format' in data and data['format'] == 'directory':
            backup_file = filename_with_file_manager_path(data['file'], False)
        else:
            backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid, user_id=current_user.id
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('backup') if backup_obj_type == 'objects' \
        else manager.utility('backup_server')

    args = [
        '--file',
        backup_file,
        '--host',
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port) if manager.use_ssh_tunnel
        else str(server.port),
        '--username',
        server.username,
        '--no-password'
    ]

    if backup_obj_type != 'objects':
        args.append('--database')
        args.append(server.maintenance_db)

    if backup_obj_type == 'globals':
        args.append('--globals-only')

    def set_param(key, param):
        if key in data and data[key]:
            args.append(param)

    def set_value(key, param, default_value=None):
        if key in data and data[key] is not None and data[key] != '':
            args.append(param)
            args.append(data[key])
        elif default_value is not None:
            args.append(param)
            args.append(default_value)

    set_param('verbose', '--verbose')
    set_param('dqoute', '--quote-all-identifiers')
    set_value('role', '--role')

    if backup_obj_type == 'objects' and \
            'format' in data and data['format'] is not None:
        if data['format'] == 'custom':
            args.extend(['--format=c'])
            set_param('blobs', '--blobs')
            set_value('ratio', '--compress')
        elif data['format'] == 'tar':
            args.extend(['--format=t'])
            set_param('blobs', '--blobs')
        elif data['format'] == 'plain':
            args.extend(['--format=p'])
        elif data['format'] == 'directory':
            args.extend(['--format=d'])

    if 'only_data' in data and data['only_data']:
        set_param('only_data', '--data-only')
        if 'format' in data and data['format'] == 'plain':
            set_param('disable_trigger', '--disable-triggers')
    elif 'only_schema' in data and data['only_schema']:
        set_param('only_schema', '--schema-only')

    set_param('dns_owner', '--no-owner')
    set_param('include_create_database', '--create')
    set_param('include_drop_database', '--clean')
    set_param('pre_data', '--section=pre-data')
    set_param('data', '--section=data')
    set_param('post_data', '--section=post-data')
    set_param('dns_privilege', '--no-privileges')
    set_param('dns_tablespace', '--no-tablespaces')
    set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
    set_param('use_insert_commands', '--inserts')
    set_param('use_column_inserts', '--column-inserts')
    set_param('disable_quoting', '--disable-dollar-quoting')
    set_param('with_oids', '--oids')
    set_param('use_set_session_auth', '--use-set-session-authorization')

    if manager.version >= 110000:
        set_param('no_comments', '--no-comments')
        set_param('load_via_partition_root', '--load-via-partition-root')

    set_value('encoding', '--encoding')
    set_value('no_of_jobs', '--jobs')

    if 'schemas' in data:
        for s in data['schemas']:
            args.extend(['--schema', s])

    if 'tables' in data:
        for s, t in data['tables']:
            args.extend([
                '--table', driver.qtIdent(conn, s, t)
            ])

    try:
        if backup_obj_type == 'objects':
            args.append(data['database'])
            p = BatchProcess(
                desc=BackupMessage(
                    BACKUP.OBJECT, sid,
                    data['file'].encode('utf-8') if hasattr(
                        data['file'], 'encode'
                    ) else data['file'],
                    *args,
                    database=data['database']
                ),
                cmd=utility, args=args
            )
        else:
            p = BatchProcess(
                desc=BackupMessage(
                    BACKUP.SERVER if backup_obj_type != 'globals'
                    else BACKUP.GLOBALS,
                    sid,
                    data['file'].encode('utf-8') if hasattr(
                        data['file'], 'encode'
                    ) else data['file'],
                    *args
                ),
                cmd=utility, args=args
            )

        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
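
Example #8 folds the per-database and server-wide paths into one function: backup_obj_type selects the utility (pg_dump for 'objects', pg_dumpall otherwise) and the extra --database/--globals-only arguments. A condensed sketch of that dispatch; the literal utility names stand in for manager.utility(...):

def backup_command(backup_obj_type, maintenance_db, database=None):
    # 'objects' backs up a single database; 'server'/'globals' use pg_dumpall.
    utility = 'pg_dump' if backup_obj_type == 'objects' else 'pg_dumpall'
    args = []
    if backup_obj_type != 'objects':
        args += ['--database', maintenance_db]
    if backup_obj_type == 'globals':
        args.append('--globals-only')
    if backup_obj_type == 'objects':
        args.append(database)  # the target database is the last argument
    return utility, args


print(backup_command('objects', 'postgres', database='sales'))
# ('pg_dump', ['sales'])
print(backup_command('globals', 'postgres'))
# ('pg_dumpall', ['--database', 'postgres', '--globals-only'])
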
Example #9
0
def create_restore_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for restore task

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data.decode())

    backup_file = filename_with_file_manager_path(data['file'])

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver

    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('restore')

    args = []

    if 'list' in data:
        args.append('--list')
    else:
        def set_param(key, param):
            if key in data and data[key]:
                args.append(param)
                return True
            return False

        def set_value(key, param, value):
            if key in data:
                if value:
                    if value is True and data[key]:
                        args.append(param)
                        args.append(data[key])
                    else:
                        args.append(param)
                        args.append(value)
                    return True
            return False

        def set_multiple(key, param, with_schema=True):
            if key in data:
                if len(data[key]) > 0:
                    if with_schema:
                        # TODO: This is temporary. Once the object tree is
                        # implemented, the list-of-tuples 'else' branch below
                        # will always be used.
                        if isinstance(data[key], list):
                            s, t = data[key]
                            args.extend([
                                param,
                                driver.qtIdent(
                                    conn, s
                                ) + '.' + driver.qtIdent(conn, t)
                            ])
                        else:
                            for s, o in data[key]:
                                args.extend([
                                    param,
                                    driver.qtIdent(
                                        conn, s
                                    ) + '.' + driver.qtIdent(conn, o)
                                ])
                    else:
                        for o in data[key]:
                            args.extend([param, o])
                    return True
            return False

        args.extend([
            # Subprocess arguments must be strings, so cast the port.
            '--host', server.host, '--port', str(server.port),
            '--username', server.username, '--no-password'
        ])

        set_value('role', '--role', True)
        set_value('database', '--dbname', True)

        if data['format'] == 'directory':
            args.extend(['--format=d'])

        # These are simple on/off flags, so use set_param; calling set_value
        # with a False third argument would never append anything.
        set_param('pre_data', '--section=pre-data')
        set_param('data', '--section=data')
        set_param('post_data', '--section=post-data')

        if not set_param('only_data', '--data-only'):
            set_param('dns_owner', '--no-owner')
            set_param('dns_privilege', '--no-privileges')
            set_param('dns_tablespace', '--no-tablespaces')

        if not set_param('only_schema', '--schema-only'):
            set_param('disable_trigger', '--disable-triggers')

        set_param('include_create_database', '--create')
        set_param('clean', '--clean')
        set_param('single_transaction', '--single-transaction')
        set_param('no_data_fail_table', '--no-data-for-failed-tables')
        set_param('use_set_session_auth', '--use-set-session-authorization')
        set_param('exit_on_error', '--exit-on-error')

        set_value('no_of_jobs', '--jobs', True)
        set_param('verbose', '--verbose')

        set_multiple('schemas', '--schema', False)
        set_multiple('tables', '--table')
        set_multiple('functions', '--function')
        set_multiple('triggers', '--trigger')
        set_multiple('trigger_funcs', '--function')
        set_multiple('indexes', '--index')

    args.append(backup_file)

    try:
        p = BatchProcess(
            desc=RestoreMessage(sid, data['file']),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )
    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
Example #10
0
def create_backup_objects_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for backup task
        (Backup Database(s)/Schema(s)/Table(s))

    Returns:
        None
    """

    data = json.loads(request.data)
    backup_obj_type = data.get('type', 'objects')

    try:
        backup_file = filename_with_file_manager_path(
            data['file'], (data.get('format', '') != 'directory'))
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid, user_id=current_user.id).first()

    if server is None:
        return make_json_response(
            success=0, errormsg=_("Could not find the specified server."))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0, errormsg=_("Please connect to the server first."))

    utility = manager.utility('backup') if backup_obj_type == 'objects' \
        else manager.utility('backup_server')

    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    args = _get_args_params_values(data, conn, backup_obj_type, backup_file,
                                   server, manager)

    escaped_args = [escape_dquotes_process_arg(arg) for arg in args]
    try:
        bfile = data['file'].encode('utf-8') \
            if hasattr(data['file'], 'encode') else data['file']
        if backup_obj_type == 'objects':
            args.append(data['database'])
            escaped_args.append(data['database'])
            p = BatchProcess(desc=BackupMessage(BACKUP.OBJECT,
                                                sid,
                                                bfile,
                                                *args,
                                                database=data['database']),
                             cmd=utility,
                             args=escaped_args)
        else:
            p = BatchProcess(desc=BackupMessage(
                BACKUP.SERVER if backup_obj_type != 'globals' else
                BACKUP.GLOBALS, sid, bfile, *args),
                             cmd=utility,
                             args=escaped_args)

        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))

    # Return response
    return make_json_response(data={'job_id': jid, 'Success': 1})
Example #11
0
def deploy_on_azure(data):
    """Deploy the Postgres instance on Azure."""
    _cmd = 'python'
    _cmd_script = '{0}/pgacloud/pgacloud.py'.format(root)
    _label = data['instance_details']['name']

    if 'high_availability' in data['instance_details']:
        if data['instance_details']['high_availability']:
            data['instance_details']['high_availability'] = "ZoneRedundant"
        else:
            data['instance_details']['high_availability'] = "Disabled"

    args = [
        _cmd_script, 'azure', '--region',
        str(data['instance_details']['region']), '--resource-group',
        data['instance_details']['resource_group'], 'create-instance',
        '--name', data['instance_details']['name'], '--db-username',
        data['db_details']['db_username'], '--db-major-version',
        str(data['instance_details']['db_version']), '--instance_tier_type',
        data['instance_details']['db_instance_class'], '--instance-type',
        data['instance_details']['instance_type'], '--storage-size',
        str(data['instance_details']['storage_size']), '--public-ips',
        str(data['instance_details']['public_ips']), '--availability-zone',
        str(data['instance_details']['availability_zone']),
        '--high-availability', data['instance_details']['high_availability']
    ]

    _cmd_msg = '{0} {1} {2}'.format(_cmd, _cmd_script, ' '.join(args))
    try:
        sid = _create_server({
            'gid': data['db_details']['gid'],
            'name': data['instance_details']['name'],
            'db': 'postgres',
            'username': data['db_details']['db_username'],
            'port': 5432,
            'cloud_status': -1
        })

        p = BatchProcess(desc=CloudProcessDesc(
            sid, _cmd_msg, data['cloud'], data['instance_details']['name']),
                         cmd=_cmd,
                         args=args)

        env = dict()

        azure = session['azure']['azure_obj']
        env['AZURE_SUBSCRIPTION_ID'] = azure.subscription_id
        env['AUTH_TYPE'] = data['secret']['auth_type']
        env['AZURE_CRED_CACHE_NAME'] = azure.azure_cache_name
        env['AZURE_CRED_CACHE_LOCATION'] = azure.azure_cache_location
        if azure.authentication_record_json is not None:
            env['AUTHENTICATION_RECORD_JSON'] = \
                azure.authentication_record_json
            env['AZURE_TENANT_ID'] = data['secret']['azure_tenant_id']

        if 'db_password' in data['db_details']:
            env['AZURE_DATABASE_PASSWORD'] = data['db_details']['db_password']

        p.set_env_variables(None, env=env)
        p.update_server_id(p.id, sid)
        p.start()

        # Map the process id to its cache file in session['azure_cache_files_list']
        if 'azure_cache_files_list' in session and \
                session['azure_cache_files_list'] is not None:
            session['azure_cache_files_list'][p.id] = azure.azure_cache_name
        else:
            session['azure_cache_files_list'] = {p.id: azure.azure_cache_name}
        del session['azure']['azure_cache_file_name']
        return True, p, {'label': _label, 'sid': sid}
    except Exception as e:
        current_app.logger.exception(e)
        return False, None, str(e)
    finally:
        del session['azure']['azure_obj']
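
Before building the pgacloud argument list, Example #11 rewrites the boolean high_availability field into the string value the CLI expects. A one-function sketch of that normalisation (field names as above, values assumed):

def normalise_high_availability(instance_details):
    # pgacloud expects a string, not a boolean, for --high-availability.
    if 'high_availability' in instance_details:
        instance_details['high_availability'] = (
            'ZoneRedundant' if instance_details['high_availability']
            else 'Disabled')
    return instance_details


print(normalise_high_availability({'high_availability': True}))
# {'high_availability': 'ZoneRedundant'}
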
Example #12
0
def create_maintenance_job(sid, did):
    """
    Args:
        sid: Server ID
        did: Database ID

        Creates a new job for maintenance vacuum operation

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data.decode())

    index_name = None

    if 'primary_key' in data and data['primary_key']:
        index_name = data['primary_key']
    elif 'unique_constraint' in data and data['unique_constraint']:
        index_name = data['unique_constraint']
    elif 'index' in data and data['index']:
        index_name = data['index']

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Couldn't find the given server")
        )

    # To fetch MetaData for the server
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('sql')

    # Create the command for the vacuum operation
    query = render_template(
        'maintenance/sql/command.sql', conn=conn, data=data,
        index_name=index_name
    )

    args = [
        '--host', server.host, '--port', str(server.port),
        '--username', server.username, '--dbname',
        driver.qtIdent(conn, data['database']),
        '--command', query
    ]

    try:
        p = BatchProcess(
            desc=Message(sid, data, query),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'status': True, 'info': 'Maintenance job created.'}
    )
Example #13
0
def create_import_export_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for import and export table data functionality

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data.decode())

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()

    if server is None:
        return make_json_response(success=0,
                                  errormsg=_("Couldn't find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0, errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        if os.name == 'nt':
            data['filename'] = data['filename'].replace('/', '\\')
            if storage_dir:
                storage_dir = storage_dir.replace('/', '\\')
            data['filename'] = data['filename'].replace('\\', '\\\\')
            data['filename'] = os.path.join(storage_dir,
                                            data['filename'].lstrip('/'))
        else:
            data['filename'] = os.path.join(storage_dir,
                                            data['filename'].lstrip('/'))
    else:
        return make_json_response(data={
            'status': False,
            'info': 'Please specify a valid file'
        })

    cols = None
    icols = None

    if data['icolumns']:
        ignore_cols = data['icolumns']

        # format the ignore column list required as per copy command
        # requirement
        if ignore_cols and len(ignore_cols) > 0:
            for col in ignore_cols:
                if icols:
                    icols += ', '
                else:
                    icols = '('
                icols += driver.qtIdent(conn, col)
            icols += ')'

    # format the column import/export list required as per copy command
    # requirement
    if data['columns']:
        columns = data['columns']
        if columns and len(columns) > 0:
            for col in columns:
                if cols:
                    cols += ', '
                else:
                    cols = '('
                cols += driver.qtIdent(conn, col)
            cols += ')'

    # Create the COPY FROM/TO statement from the template
    query = render_template('import_export/sql/cmd.sql',
                            conn=conn,
                            data=data,
                            columns=cols,
                            ignore_column_list=icols)

    args = [
        '--host', server.host, '--port',
        str(server.port), '--username', server.username, '--dbname',
        driver.qtIdent(conn, data['database']), '--command', query
    ]

    try:
        p = BatchProcess(desc=Message(sid, data['schema'], data['table'],
                                      data['database'], storage_dir),
                         cmd=utility,
                         args=args)
        manager.export_password_env(p.id)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))

    # Return response
    return make_json_response(data={'job_id': jid, 'success': 1})
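
Example #13 and the later import/export examples assemble the optional parenthesized column lists for the COPY command with an accumulator loop. A compact equivalent, again with a simplified stand-in for driver.qtIdent:

def quote_ident(name):
    # Simplified stand-in for driver.qtIdent.
    return '"' + name.replace('"', '""') + '"'


def column_list(columns):
    # Returns e.g. '("id", "name")', or None when no columns were selected.
    if not columns:
        return None
    return '(' + ', '.join(quote_ident(col) for col in columns) + ')'


print(column_list(['id', 'name']))  # ("id", "name")
print(column_list([]))              # None
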
Example #14
0
def create_import_export_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for import and export table data functionality

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()

    if server is None:
        return bad_request(errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return bad_request(errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        try:
            _file = filename_with_file_manager_path(data['filename'],
                                                    data['is_import'])
        except Exception as e:
            return bad_request(errormsg=str(e))

        if not _file:
            return bad_request(errormsg=_('Please specify a valid file'))

        if IS_WIN:
            _file = _file.replace('\\', '/')

        data['filename'] = _file
    else:
        return bad_request(errormsg=_('Please specify a valid file'))

    cols = None
    icols = None

    if data['icolumns']:
        ignore_cols = data['icolumns']

        # format the ignore column list required as per copy command
        # requirement
        if ignore_cols and len(ignore_cols) > 0:
            for col in ignore_cols:
                if icols:
                    icols += ', '
                else:
                    icols = '('
                icols += driver.qtIdent(conn, col)
            icols += ')'

    # format the column import/export list required as per copy command
    # requirement
    if data['columns']:
        columns = data['columns']
        if columns and len(columns) > 0:
            for col in columns:
                if cols:
                    cols += ', '
                else:
                    cols = '('
                cols += driver.qtIdent(conn, col)
            cols += ')'

    # Create the COPY FROM/TO statement from the template
    query = render_template('import_export/sql/cmd.sql',
                            conn=conn,
                            data=data,
                            columns=cols,
                            ignore_column_list=icols)

    args = ['--command', query]

    try:
        p = BatchProcess(desc=IEMessage(sid, data['schema'], data['table'],
                                        data['database'], storage_dir, utility,
                                        *args),
                         cmd=utility,
                         args=args)
        manager.export_password_env(p.id)

        env = dict()
        env['PGHOST'] = server.host
        env['PGPORT'] = str(server.port)
        env['PGUSER'] = server.username
        env['PGDATABASE'] = data['database']
        p.set_env_variables(server, env=env)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return bad_request(errormsg=str(e))

    # Return response
    return make_json_response(data={'job_id': jid, 'success': 1})
Example #15
0
def create_backup_objects_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for backup task
        (Backup Database(s)/Schema(s)/Table(s))

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    # Remove ratio from data in case of empty string
    if 'ratio' in data and data['ratio'] == '':
        data.pop("ratio")

    try:
        backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid, user_id=current_user.id).first()

    if server is None:
        return make_json_response(
            success=0, errormsg=_("Could not find the specified server."))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0, errormsg=_("Please connect to the server first."))

    utility = manager.utility('backup')
    args = [
        '--file', backup_file, '--host',
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port) if manager.use_ssh_tunnel else str(
            server.port), '--username', server.username, '--no-password'
    ]

    def set_param(key, param):
        if key in data and data[key]:
            args.append(param)

    def set_value(key, param, value):
        if key in data:
            if value:
                if value is True and data[key]:
                    args.append(param)
                    args.append(data[key])
                else:
                    args.append(param)
                    args.append(value)

    set_param('verbose', '--verbose')
    set_param('dqoute', '--quote-all-identifiers')
    set_value('role', '--role', True)
    if data['format'] is not None:
        if data['format'] == 'custom':
            args.extend(['--format=c'])

            set_param('blobs', '--blobs')
            set_value('ratio', '--compress', True)

        elif data['format'] == 'tar':
            args.extend(['--format=t'])

            set_param('blobs', '--blobs')

        elif data['format'] == 'plain':
            args.extend(['--format=p'])
            if 'only_data' in data and data['only_data']:
                args.append('--data-only')
                set_param('disable_trigger', '--disable-triggers')
            else:
                set_param('only_schema', '--schema-only')
                set_param('dns_owner', '--no-owner')
                set_param('include_create_database', '--create')
                set_param('include_drop_database', '--clean')
        elif data['format'] == 'directory':
            args.extend(['--format=d'])

    set_param('pre_data', '--section=pre-data')
    set_param('data', '--section=data')
    set_param('post_data', '--section=post-data')
    set_param('dns_privilege', '--no-privileges')
    set_param('dns_tablespace', '--no-tablespaces')
    set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
    set_param('use_insert_commands', '--inserts')
    set_param('use_column_inserts', '--column-inserts')
    set_param('disable_quoting', '--disable-dollar-quoting')
    set_param('with_oids', '--oids')
    set_param('use_set_session_auth', '--use-set-session-authorization')

    set_value('encoding', '--encoding', True)
    set_value('no_of_jobs', '--jobs', True)

    for s in data['schemas']:
        args.extend(['--schema', s])

    for s, t in data['tables']:
        args.extend(['--table', driver.qtIdent(conn, s, t)])

    args.append(data['database'])

    try:
        p = BatchProcess(
            desc=BackupMessage(BACKUP.OBJECT,
                               sid,
                               data['file'].encode('utf-8') if hasattr(
                                   data['file'], 'encode') else data['file'],
                               *args,
                               database=data['database']),
            cmd=utility,
            args=args)
        manager.export_password_env(p.id)
        p.set_env_variables(server)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))

    # Return response
    return make_json_response(data={'job_id': jid, 'Success': 1})
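For reference, the following is a standalone sketch (not pgAdmin code) of how the set_param/set_value helpers above translate a request payload into pg_dump-style flags; the data dict is a made-up payload for illustration only.

# Standalone sketch: how set_param/set_value assemble pg_dump-style flags.
# The data dict is a hypothetical request payload, not pgAdmin's own.
data = {'verbose': True, 'format': 'custom', 'blobs': True, 'ratio': '5'}
args = []

def set_param(key, param):
    # Append a bare flag when the key is present and truthy.
    if key in data and data[key]:
        args.append(param)

def set_value(key, param, value):
    # value=True means "take the flag's value from the payload".
    if key in data:
        if value:
            if value is True and data[key]:
                args.append(param)
                args.append(data[key])
            else:
                args.append(param)
                args.append(value)

set_param('verbose', '--verbose')
if data['format'] == 'custom':
    args.extend(['--format=c'])
    set_param('blobs', '--blobs')
    set_value('ratio', '--compress', True)

print(args)  # ['--verbose', '--format=c', '--blobs', '--compress', '5']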
Example #16
0
def create_import_export_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for import and export table data functionality

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0], encoding='utf-8')
    else:
        data = json.loads(request.data, encoding='utf-8')

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid).first()

    if server is None:
        return bad_request(errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return bad_request(errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        try:
            _file = filename_with_file_manager_path(
                data['filename'], data['is_import'])
        except Exception as e:
            return bad_request(errormsg=str(e))

        if not _file:
            return bad_request(errormsg=_('Please specify a valid file'))

        if IS_WIN:
            _file = _file.replace('\\', '/')

        data['filename'] = _file
    else:
        return bad_request(errormsg=_('Please specify a valid file'))

    cols = None
    icols = None

    if data['icolumns']:
        ignore_cols = data['icolumns']

        # Format the ignore-column list as required by the COPY command
        if ignore_cols and len(ignore_cols) > 0:
            for col in ignore_cols:
                if icols:
                    icols += ', '
                else:
                    icols = '('
                icols += driver.qtIdent(conn, col)
            icols += ')'

    # Format the import/export column list as required by the COPY command
    if data['columns']:
        columns = data['columns']
        if columns and len(columns) > 0:
            for col in columns:
                if cols:
                    cols += ', '
                else:
                    cols = '('
                cols += driver.qtIdent(conn, col)
            cols += ')'

    # Create the COPY FROM/TO command from the template
    query = render_template(
        'import_export/sql/cmd.sql',
        conn=conn,
        data=data,
        columns=cols,
        ignore_column_list=icols
    )

    args = ['--command', query]

    try:
        p = BatchProcess(
            desc=IEMessage(
                sid,
                data['schema'],
                data['table'],
                data['database'],
                storage_dir,
                utility, *args
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)

        env = dict()
        env['PGHOST'] = server.host
        env['PGPORT'] = str(server.port)
        env['PGUSER'] = server.username
        env['PGDATABASE'] = data['database']
        p.set_env_variables(server, env=env)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return bad_request(errormsg=str(e))

    # Return response
    return make_json_response(
        data={'job_id': jid, 'success': 1}
    )
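A minimal sketch of the column-list formatting above, with a simplified quoting helper standing in for pgAdmin's driver.qtIdent:

# Sketch only: quote_ident is a stand-in for driver.qtIdent(conn, col).
def quote_ident(name):
    return '"' + name.replace('"', '""') + '"'

def format_column_list(columns):
    # Mirrors the loop above: returns '("a", "b", ...)' or None when empty.
    cols = None
    if columns and len(columns) > 0:
        for col in columns:
            if cols:
                cols += ', '
            else:
                cols = '('
            cols += quote_ident(col)
        cols += ')'
    return cols

print(format_column_list(['id', 'first name']))  # ("id", "first name")
print(format_column_list([]))                    # None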
Example #17
0
def create_backup_objects_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for backup task
        (Backup Database(s)/Schema(s)/Table(s))

    Returns:
        None
    """
    if request.form:
        data = json.loads(request.form['data'], encoding='utf-8')
    else:
        data = json.loads(request.data, encoding='utf-8')

    backup_obj_type = 'objects'
    if 'type' in data:
        backup_obj_type = data['type']

    try:
        if 'format' in data and data['format'] == 'directory':
            backup_file = filename_with_file_manager_path(data['file'], False)
        else:
            backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid, user_id=current_user.id
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('backup') if backup_obj_type == 'objects' \
        else manager.utility('backup_server')

    ret_val = is_utility_exists(utility)
    if ret_val:
        return make_json_response(
            success=0,
            errormsg=ret_val
        )

    args = [
        '--file',
        backup_file,
        '--host',
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port) if manager.use_ssh_tunnel
        else str(server.port),
        '--username',
        server.username,
        '--no-password'
    ]

    if backup_obj_type != 'objects':
        args.append('--database')
        args.append(server.maintenance_db)

    if backup_obj_type == 'globals':
        args.append('--globals-only')

    def set_param(key, param):
        if key in data and data[key]:
            args.append(param)

    def set_value(key, param, default_value=None):
        if key in data and data[key] is not None and data[key] != '':
            args.append(param)
            args.append(data[key])
        elif default_value is not None:
            args.append(param)
            args.append(default_value)

    set_param('verbose', '--verbose')
    set_param('dqoute', '--quote-all-identifiers')
    set_value('role', '--role')

    if backup_obj_type == 'objects' and \
            'format' in data and data['format'] is not None:
        if data['format'] == 'custom':
            args.extend(['--format=c'])
            set_param('blobs', '--blobs')
            set_value('ratio', '--compress')
        elif data['format'] == 'tar':
            args.extend(['--format=t'])
            set_param('blobs', '--blobs')
        elif data['format'] == 'plain':
            args.extend(['--format=p'])
        elif data['format'] == 'directory':
            args.extend(['--format=d'])

    if 'only_data' in data and data['only_data']:
        set_param('only_data', '--data-only')
        if 'format' in data and data['format'] == 'plain':
            set_param('disable_trigger', '--disable-triggers')
    elif 'only_schema' in data and data['only_schema']:
        set_param('only_schema', '--schema-only')

    set_param('dns_owner', '--no-owner')
    set_param('include_create_database', '--create')
    set_param('include_drop_database', '--clean')
    set_param('pre_data', '--section=pre-data')
    set_param('data', '--section=data')
    set_param('post_data', '--section=post-data')
    set_param('dns_privilege', '--no-privileges')
    set_param('dns_tablespace', '--no-tablespaces')
    set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
    set_param('use_insert_commands', '--inserts')
    set_param('use_column_inserts', '--column-inserts')
    set_param('disable_quoting', '--disable-dollar-quoting')
    set_param('with_oids', '--oids')
    set_param('use_set_session_auth', '--use-set-session-authorization')

    if manager.version >= 110000:
        set_param('no_comments', '--no-comments')
        set_param('load_via_partition_root', '--load-via-partition-root')

    set_value('encoding', '--encoding')
    set_value('no_of_jobs', '--jobs')

    if 'schemas' in data:
        for s in data['schemas']:
            args.extend(['--schema', s])

    if 'tables' in data:
        for s, t in data['tables']:
            args.extend([
                '--table', driver.qtIdent(conn, s, t)
            ])

    try:
        if backup_obj_type == 'objects':
            args.append(data['database'])
            p = BatchProcess(
                desc=BackupMessage(
                    BACKUP.OBJECT, sid,
                    data['file'].encode('utf-8') if hasattr(
                        data['file'], 'encode'
                    ) else data['file'],
                    *args,
                    database=data['database']
                ),
                cmd=utility, args=args
            )
        else:
            p = BatchProcess(
                desc=BackupMessage(
                    BACKUP.SERVER if backup_obj_type != 'globals'
                    else BACKUP.GLOBALS,
                    sid,
                    data['file'].encode('utf-8') if hasattr(
                        data['file'], 'encode'
                    ) else data['file'],
                    *args
                ),
                cmd=utility, args=args
            )

        manager.export_password_env(p.id)
        # If the connection timeout is greater than 0, set the
        # PGCONNECT_TIMEOUT environment variable.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
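To summarize the branching above, here is a small sketch of which utility and extra flags are chosen for each backup type; the mapping of 'backup' and 'backup_server' to pg_dump and pg_dumpall is an assumption about how pgAdmin typically resolves these utilities.

# Sketch (assumption: 'backup' -> pg_dump, 'backup_server' -> pg_dumpall).
def backup_utility_and_flags(backup_obj_type, maintenance_db):
    utility = 'pg_dump' if backup_obj_type == 'objects' else 'pg_dumpall'
    extra = []
    if backup_obj_type != 'objects':
        # Server and globals backups connect through the maintenance DB.
        extra += ['--database', maintenance_db]
    if backup_obj_type == 'globals':
        extra += ['--globals-only']
    return utility, extra

print(backup_utility_and_flags('globals', 'postgres'))
# ('pg_dumpall', ['--database', 'postgres', '--globals-only'])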
Example #18
0
def create_restore_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for restore task

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0], encoding='utf-8')
    else:
        data = json.loads(request.data, encoding='utf-8')

    try:
        _file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    if _file is None:
        return make_json_response(
            status=410,
            success=0,
            errormsg=_("File could not be found.")
        )

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver

    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('restore')

    args = []

    if 'list' in data:
        args.append('--list')
    else:
        def set_param(key, param):
            if key in data and data[key]:
                args.append(param)
                return True
            return False

        def set_value(key, param, default_value=None):
            if key in data and data[key] is not None and data[key] != '':
                args.append(param)
                args.append(data[key])
            elif default_value is not None:
                args.append(param)
                args.append(default_value)

        def set_multiple(key, param, with_schema=True):
            if key in data:
                if len(data[key]) > 0:
                    if with_schema:
                        # TODO: This is temporary. Once the object tree is
                        # implemented, the 'else' branch (list of tuples)
                        # will be used.
                        if isinstance(data[key], list):
                            s, t = data[key]
                            args.extend([
                                param,
                                driver.qtIdent(
                                    conn, s
                                ) + '.' + driver.qtIdent(conn, t)
                            ])
                        else:
                            for s, o in data[key]:
                                args.extend([
                                    param,
                                    driver.qtIdent(
                                        conn, s
                                    ) + '.' + driver.qtIdent(conn, o)
                                ])
                    else:
                        for o in data[key]:
                            args.extend([param, o])
                    return True
            return False

        args.extend([
            '--host',
            manager.local_bind_host if manager.use_ssh_tunnel else server.host,
            '--port',
            str(manager.local_bind_port) if manager.use_ssh_tunnel
            else str(server.port),
            '--username', server.username, '--no-password'
        ])

        set_value('role', '--role')
        set_value('database', '--dbname')

        if data['format'] == 'directory':
            args.extend(['--format=d'])

        set_param('pre_data', '--section=pre-data')
        set_param('data', '--section=data')
        set_param('post_data', '--section=post-data')

        if not set_param('only_data', '--data-only'):
            set_param('dns_owner', '--no-owner')
            set_param('dns_privilege', '--no-privileges')
            set_param('dns_tablespace', '--no-tablespaces')

        if not set_param('only_schema', '--schema-only'):
            set_param('disable_trigger', '--disable-triggers')

        set_param('include_create_database', '--create')
        set_param('clean', '--clean')
        set_param('single_transaction', '--single-transaction')
        set_param('no_data_fail_table', '--no-data-for-failed-tables')
        set_param('use_set_session_auth', '--use-set-session-authorization')
        set_param('exit_on_error', '--exit-on-error')

        if manager.version >= 110000:
            set_param('no_comments', '--no-comments')

        set_value('no_of_jobs', '--jobs')
        set_param('verbose', '--verbose')

        set_multiple('schemas', '--schema', False)
        set_multiple('tables', '--table', False)
        set_multiple('functions', '--function', False)
        set_multiple('triggers', '--trigger', False)
        set_multiple('trigger_funcs', '--function', False)
        set_multiple('indexes', '--index', False)

    args.append(fs_short_path(_file))

    try:
        p = BatchProcess(
            desc=RestoreMessage(
                sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode'
                ) else data['file'],
                *args
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        # If the connection timeout is greater than 0, set the
        # PGCONNECT_TIMEOUT environment variable.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )
    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
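The restore example only ever calls set_multiple with with_schema=False; a reduced sketch of that path, with a hypothetical payload:

# Sketch of set_multiple's with_schema=False path: each list entry becomes
# a repeated flag. The sample payload is hypothetical.
data = {'schemas': ['public', 'audit'], 'tables': []}
args = []

def set_multiple(key, param):
    if key in data and len(data[key]) > 0:
        for o in data[key]:
            args.extend([param, o])
        return True
    return False

set_multiple('schemas', '--schema')
set_multiple('tables', '--table')
print(args)  # ['--schema', 'public', '--schema', 'audit']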
Example #19
0
def create_import_export_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for import and export table data functionality

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data.decode())

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Couldn't find the given server")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first...")
        )

    # Get the utility path from the connection manager
    utility = manager.utility('sql')

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        if os.name == 'nt':
            data['filename'] = data['filename'].replace('/', '\\')
            if storage_dir:
                storage_dir = storage_dir.replace('/', '\\')
            data['filename'] = data['filename'].replace('\\', '\\\\')
            # Leading separators are backslashes at this point; strip them
            # so os.path.join keeps the storage directory prefix.
            data['filename'] = os.path.join(
                storage_dir, data['filename'].lstrip('\\'))
        else:
            data['filename'] = os.path.join(
                storage_dir, data['filename'].lstrip('/'))
    else:
        return make_json_response(
            data={'status': False, 'info': 'Please specify a valid file'}
        )

    cols = None
    icols = None

    if data['icolumns']:
        ignore_cols = data['icolumns']

        # Format the ignore-column list as required by the COPY command
        if ignore_cols and len(ignore_cols) > 0:
            for col in ignore_cols:
                if icols:
                    icols += ', '
                else:
                    icols = '('
                icols += driver.qtIdent(conn, col)
            icols += ')'

    # Format the import/export column list as required by the COPY command
    if data['columns']:
        columns = data['columns']
        if columns and len(columns) > 0:
            for col in columns:
                if cols:
                    cols += ', '
                else:
                    cols = '('
                cols += driver.qtIdent(conn, col)
            cols += ')'

    # Create the COPY FROM/TO command from the template
    query = render_template(
        'import_export/sql/cmd.sql',
        conn=conn,
        data=data,
        columns=cols,
        ignore_column_list=icols
    )

    args = [
        '--host', server.host, '--port', str(server.port),
        '--username', server.username, '--dbname',
        driver.qtIdent(conn, data['database']),
        '--command', query
    ]

    try:
        p = BatchProcess(
            desc=Message(
                sid,
                data['schema'],
                data['table'],
                data['database'],
                storage_dir
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'success': 1}
    )
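Finally, a sketch of the POSIX branch of the filename handling above, using made-up paths to show how the requested file ends up inside the storage directory:

# Sketch with hypothetical paths: the POSIX branch joins the user's file
# name (with any leading '/' stripped) onto the storage directory.
import os

storage_dir = '/var/lib/pgadmin/storage/user'
filename = '/exports/employees.csv'
print(os.path.join(storage_dir, filename.lstrip('/')))
# /var/lib/pgadmin/storage/user/exports/employees.csv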