Example #1
def check_utility_exists(sid):
    """
    Checks whether the utility file exists at the given path.

    Args:
        sid: Server ID
    Returns:
        A JSON response reporting success or an error message
    """
    server = Server.query.filter_by(
        id=sid, user_id=current_user.id
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)

    utility = manager.utility('sql')
    ret_val = is_utility_exists(utility)
    if ret_val:
        return make_json_response(
            success=0,
            errormsg=ret_val
        )

    return make_json_response(success=1)
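The is_utility_exists helper used above is not shown in this collection; in these snippets it returns an error message string when the utility is missing and None otherwise. A minimal sketch of such a check, written here as an illustration rather than pgAdmin's actual implementation:

import os


def is_utility_exists(abs_path):
    # Sketch only: report a problem as a string, or None when the path
    # points at an existing, executable utility.
    if not os.path.exists(abs_path):
        return "'{0}' file not found. Please check the binary path.".format(
            abs_path)
    if not os.access(abs_path, os.X_OK):
        return "'{0}' is not executable.".format(abs_path)
    return None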
Example #2
    def setUp(self):
        if 'default_binary_paths' not in self.server or \
            self.server['default_binary_paths'] is None or \
            self.server['type'] not in self.server['default_binary_paths'] or\
                self.server['default_binary_paths'][self.server['type']] == '':
            self.skipTest(
                "default_binary_paths is not set for the server {0}".format(
                    self.server['name']))

        binary_path = os.path.join(
            self.server['default_binary_paths'][self.server['type']], 'psql')
        retVal = is_utility_exists(binary_path)
        if retVal is not None:
            self.skipTest(retVal)
Example #3
def check_binary_path_or_skip_test(cls, utility_name):
    if 'default_binary_paths' not in cls.server or \
        cls.server['default_binary_paths'] is None or \
        cls.server['type'] not in cls.server['default_binary_paths'] or \
            cls.server['default_binary_paths'][cls.server['type']] == '':
        cls.skipTest(
            "default_binary_paths is not set for the server {0}".format(
                cls.server['name']))

    from pgadmin.utils import is_utility_exists
    binary_path = os.path.join(
        cls.server['default_binary_paths'][cls.server['type']],
        utility_name)
    retVal = is_utility_exists(binary_path)
    if retVal is not None:
        cls.skipTest(retVal)
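The chained not-in / None / empty-string test in the two snippets above guards against a server configuration with no usable binary path. A compact equivalent, written as a hypothetical helper (not part of pgAdmin), could look like:

def binary_path_configured(server):
    # Hypothetical helper: True when a non-empty binary path is configured
    # for this server's type; mirrors the chained check used above.
    paths = server.get('default_binary_paths')
    return bool(paths and paths.get(server['type']))

A test could then skip itself with a single condition, e.g. if not binary_path_configured(cls.server): cls.skipTest(...).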
Example #4
def create_maintenance_job(sid, did):
    """
    Creates a new job for the maintenance (VACUUM) operation.

    Args:
        sid: Server ID
        did: Database ID

    Returns:
        A JSON response containing the new job id
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0], encoding='utf-8')
    else:
        data = json.loads(request.data, encoding='utf-8')

    index_name = None

    if 'primary_key' in data and data['primary_key']:
        index_name = data['primary_key']
    elif 'unique_constraint' in data and data['unique_constraint']:
        index_name = data['unique_constraint']
    elif 'index' in data and data['index']:
        index_name = data['index']

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the given server")
        )

    # To fetch MetaData for the server
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('sql')
    ret_val = is_utility_exists(utility)
    if ret_val:
        return make_json_response(
            success=0,
            errormsg=ret_val
        )

    # Create the command for the vacuum operation
    query = render_template(
        'maintenance/sql/command.sql', conn=conn, data=data,
        index_name=index_name
    )

    args = [
        '--host',
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port) if manager.use_ssh_tunnel
        else str(server.port),
        '--username', server.username, '--dbname',
        data['database'],
        '--command', query
    ]

    try:
        p = BatchProcess(
            desc=Message(sid, data, query),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'status': True,
              'info': 'Maintenance job created.'}
    )
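This example, like the backup and restore examples later on, repeats the same host/port selection: when an SSH tunnel is in use, the tunnel's local bind address is passed to the command-line utility instead of the server's own address. A hypothetical helper capturing that choice (illustrative names, not pgAdmin API):

def connection_endpoint(manager, server):
    # Use the SSH tunnel's local bind address when tunnelling, otherwise
    # the server's own host and port (the port converted to a string for
    # the CLI args).
    if manager.use_ssh_tunnel:
        return manager.local_bind_host, str(manager.local_bind_port)
    return server.host, str(server.port)

The args list would then start with ['--host', host, '--port', port].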
Example #5
def create_backup_objects_job(sid):
    """
    Creates a new job for the backup task
    (Backup Database(s)/Schema(s)/Table(s)).

    Args:
        sid: Server ID

    Returns:
        A JSON response containing the new job id
    """
    if request.form:
        data = json.loads(request.form['data'], encoding='utf-8')
    else:
        data = json.loads(request.data, encoding='utf-8')

    backup_obj_type = 'objects'
    if 'type' in data:
        backup_obj_type = data['type']

    try:
        if 'format' in data and data['format'] == 'directory':
            backup_file = filename_with_file_manager_path(data['file'], False)
        else:
            backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid, user_id=current_user.id).first()

    if server is None:
        return make_json_response(
            success=0, errormsg=_("Could not find the specified server."))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0, errormsg=_("Please connect to the server first."))

    utility = manager.utility('backup') if backup_obj_type == 'objects' \
        else manager.utility('backup_server')

    ret_val = is_utility_exists(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    args = [
        '--file', backup_file, '--host',
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port) if manager.use_ssh_tunnel else str(
            server.port), '--username', server.username, '--no-password'
    ]

    if backup_obj_type != 'objects':
        args.append('--database')
        args.append(server.maintenance_db)

    if backup_obj_type == 'globals':
        args.append('--globals-only')

    def set_param(key, param):
        if key in data and data[key]:
            args.append(param)

    def set_value(key, param, default_value=None):
        if key in data and data[key] is not None and data[key] != '':
            args.append(param)
            args.append(data[key])
        elif default_value is not None:
            args.append(param)
            args.append(default_value)

    set_param('verbose', '--verbose')
    set_param('dqoute', '--quote-all-identifiers')
    set_value('role', '--role')

    if backup_obj_type == 'objects' and \
            'format' in data and data['format'] is not None:
        if data['format'] == 'custom':
            args.extend(['--format=c'])
            set_param('blobs', '--blobs')
            set_value('ratio', '--compress')
        elif data['format'] == 'tar':
            args.extend(['--format=t'])
            set_param('blobs', '--blobs')
        elif data['format'] == 'plain':
            args.extend(['--format=p'])
        elif data['format'] == 'directory':
            args.extend(['--format=d'])

    if 'only_data' in data and data['only_data']:
        set_param('only_data', '--data-only')
        if 'format' in data and data['format'] == 'plain':
            set_param('disable_trigger', '--disable-triggers')
    elif 'only_schema' in data and data['only_schema']:
        set_param('only_schema', '--schema-only')

    set_param('dns_owner', '--no-owner')
    set_param('include_create_database', '--create')
    set_param('include_drop_database', '--clean')
    set_param('pre_data', '--section=pre-data')
    set_param('data', '--section=data')
    set_param('post_data', '--section=post-data')
    set_param('dns_privilege', '--no-privileges')
    set_param('dns_tablespace', '--no-tablespaces')
    set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
    set_param('use_insert_commands', '--inserts')
    set_param('use_column_inserts', '--column-inserts')
    set_param('disable_quoting', '--disable-dollar-quoting')
    set_param('with_oids', '--oids')
    set_param('use_set_session_auth', '--use-set-session-authorization')

    if manager.version >= 110000:
        set_param('no_comments', '--no-comments')
        set_param('load_via_partition_root', '--load-via-partition-root')

    set_value('encoding', '--encoding')
    set_value('no_of_jobs', '--jobs')

    if 'schemas' in data:
        for s in data['schemas']:
            args.extend(['--schema', s])

    if 'tables' in data:
        for s, t in data['tables']:
            args.extend(['--table', driver.qtIdent(conn, s, t)])

    try:
        if backup_obj_type == 'objects':
            args.append(data['database'])
            p = BatchProcess(desc=BackupMessage(
                BACKUP.OBJECT,
                sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode') else data['file'],
                *args,
                database=data['database']),
                             cmd=utility,
                             args=args)
        else:
            p = BatchProcess(desc=BackupMessage(
                BACKUP.SERVER if backup_obj_type != 'globals' else
                BACKUP.GLOBALS, sid, data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode') else data['file'], *args),
                             cmd=utility,
                             args=args)

        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))

    # Return response
    return make_json_response(data={'job_id': jid, 'Success': 1})
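The set_param/set_value closures above translate optional keys in the request payload into pg_dump-style flags: set_param appends a bare flag when the key is truthy, while set_value appends the flag together with its value, or a default when one is given. A self-contained sketch with hypothetical payload data:

data = {'verbose': True, 'role': 'postgres', 'encoding': ''}
args = []


def set_param(key, param):
    # Append a bare flag when the key is present and truthy.
    if key in data and data[key]:
        args.append(param)


def set_value(key, param, default_value=None):
    # Append "flag value" when a non-empty value (or a default) is available.
    if key in data and data[key] is not None and data[key] != '':
        args.extend([param, data[key]])
    elif default_value is not None:
        args.extend([param, default_value])


set_param('verbose', '--verbose')
set_value('role', '--role')
set_value('encoding', '--encoding', 'UTF8')
print(args)  # ['--verbose', '--role', 'postgres', '--encoding', 'UTF8']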
Example #6
def create_import_export_job(sid):
    """
    Creates a new job for the import/export table data functionality.

    Args:
        sid: Server ID

    Returns:
        A JSON response containing the new job id
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0], encoding='utf-8')
    else:
        data = json.loads(request.data, encoding='utf-8')

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()

    if server is None:
        return bad_request(errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return bad_request(errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')
    ret_val = is_utility_exists(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        try:
            _file = filename_with_file_manager_path(data['filename'],
                                                    data['is_import'])
        except Exception as e:
            return bad_request(errormsg=str(e))

        if not _file:
            return bad_request(errormsg=_('Please specify a valid file'))

        if IS_WIN:
            _file = _file.replace('\\', '/')

        data['filename'] = _file
    else:
        return bad_request(errormsg=_('Please specify a valid file'))

    cols = None
    icols = None

    if data['icolumns']:
        ignore_cols = data['icolumns']

        # format the ignore column list required as per copy command
        # requirement
        if ignore_cols and len(ignore_cols) > 0:
            for col in ignore_cols:
                if icols:
                    icols += ', '
                else:
                    icols = '('
                icols += driver.qtIdent(conn, col)
            icols += ')'

    # format the column import/export list required as per copy command
    # requirement
    if data['columns']:
        columns = data['columns']
        if columns and len(columns) > 0:
            for col in columns:
                if cols:
                    cols += ', '
                else:
                    cols = '('
                cols += driver.qtIdent(conn, col)
            cols += ')'

    # Create the COPY FROM/TO  from template
    query = render_template('import_export/sql/cmd.sql',
                            conn=conn,
                            data=data,
                            columns=cols,
                            ignore_column_list=icols)

    args = ['--command', query]

    try:
        p = BatchProcess(desc=IEMessage(sid, data['schema'], data['table'],
                                        data['database'], storage_dir, utility,
                                        *args),
                         cmd=utility,
                         args=args)
        manager.export_password_env(p.id)

        env = dict()
        env['PGHOST'] = server.host
        env['PGPORT'] = str(server.port)
        env['PGUSER'] = server.username
        env['PGDATABASE'] = data['database']
        p.set_env_variables(server, env=env)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return bad_request(errormsg=str(e))

    # Return response
    return make_json_response(data={'job_id': jid, 'success': 1})
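The manual loops that build cols and icols above concatenate quoted identifiers into a parenthesised list for the COPY command. An equivalent, more idiomatic formulation, assuming the same driver.qtIdent quoting helper, would be:

def quoted_column_list(conn, driver, columns):
    # Return "(col1, col2, ...)" with each identifier quoted, or None when
    # no columns were supplied.
    if not columns:
        return None
    return '(' + ', '.join(driver.qtIdent(conn, col) for col in columns) + ')'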
Example #7
def create_restore_job(sid):
    """
    Creates a new job for the restore task.

    Args:
        sid: Server ID

    Returns:
        A JSON response containing the new job id
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0], encoding='utf-8')
    else:
        data = json.loads(request.data, encoding='utf-8')

    try:
        _file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    if _file is None:
        return make_json_response(status=410,
                                  success=0,
                                  errormsg=_("File could not be found."))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()

    if server is None:
        return make_json_response(
            success=0, errormsg=_("Could not find the specified server."))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver

    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0, errormsg=_("Please connect to the server first."))

    utility = manager.utility('restore')
    ret_val = is_utility_exists(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    args = []

    if 'list' in data:
        args.append('--list')
    else:

        def set_param(key, param):
            if key in data and data[key]:
                args.append(param)
                return True
            return False

        def set_value(key, param, default_value=None):
            if key in data and data[key] is not None and data[key] != '':
                args.append(param)
                args.append(data[key])
            elif default_value is not None:
                args.append(param)
                args.append(default_value)

        def set_multiple(key, param, with_schema=True):
            if key in data:
                if len(data[key]) > 0:
                    if with_schema:
                        # TODO:// This is temporary
                        # Once object tree is implemented then we will use
                        # list of tuples 'else' part
                        if isinstance(data[key], list):
                            s, t = data[key]
                            args.extend([
                                param,
                                driver.qtIdent(conn, s) + '.' +
                                driver.qtIdent(conn, t)
                            ])
                        else:
                            for s, o in data[key]:
                                args.extend([
                                    param,
                                    driver.qtIdent(conn, s) + '.' +
                                    driver.qtIdent(conn, o)
                                ])
                    else:
                        for o in data[key]:
                            args.extend([param, o])
                    return True
            return False

        args.extend([
            '--host',
            manager.local_bind_host if manager.use_ssh_tunnel else server.host,
            '--port',
            str(manager.local_bind_port) if manager.use_ssh_tunnel else str(
                server.port), '--username', server.username, '--no-password'
        ])

        set_value('role', '--role')
        set_value('database', '--dbname')

        if data['format'] == 'directory':
            args.extend(['--format=d'])

        set_param('pre_data', '--section=pre-data')
        set_param('data', '--section=data')
        set_param('post_data', '--section=post-data')

        if not set_param('only_data', '--data-only'):
            set_param('dns_owner', '--no-owner')
            set_param('dns_privilege', '--no-privileges')
            set_param('dns_tablespace', '--no-tablespaces')

        if not set_param('only_schema', '--schema-only'):
            set_param('disable_trigger', '--disable-triggers')

        set_param('include_create_database', '--create')
        set_param('clean', '--clean')
        set_param('single_transaction', '--single-transaction')
        set_param('no_data_fail_table', '--no-data-for-failed-tables')
        set_param('use_set_session_auth', '--use-set-session-authorization')
        set_param('exit_on_error', '--exit-on-error')

        if manager.version >= 110000:
            set_param('no_comments', '--no-comments')

        set_value('no_of_jobs', '--jobs')
        set_param('verbose', '--verbose')

        set_multiple('schemas', '--schema', False)
        set_multiple('tables', '--table', False)
        set_multiple('functions', '--function', False)
        set_multiple('triggers', '--trigger', False)
        set_multiple('trigger_funcs', '--function', False)
        set_multiple('indexes', '--index', False)

    args.append(fs_short_path(_file))

    try:
        p = BatchProcess(desc=RestoreMessage(
            sid, data['file'].encode('utf-8') if hasattr(
                data['file'], 'encode') else data['file'], *args),
                         cmd=utility,
                         args=args)
        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))
    # Return response
    return make_json_response(data={'job_id': jid, 'Success': 1})
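The set_multiple helper in this last example supports two shapes of input: with with_schema=True it expects (schema, object) pairs and emits schema-qualified, quoted names via driver.qtIdent, while with with_schema=False it emits each value as-is. All the calls in the snippet pass False, so a hypothetical 'schemas' entry would expand like this:

data = {'schemas': ['public', 'audit']}
args = []
# Mirrors set_multiple('schemas', '--schema', False) from the example above.
for schema in data['schemas']:
    args.extend(['--schema', schema])
print(args)  # ['--schema', 'public', '--schema', 'audit']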