def current_storage_dir(self):
    # Return the directory of this process's backup file relative to the
    # shared storage root (SERVER_MODE only), or None for imports /
    # desktop mode.
    if config.SERVER_MODE:
        file = self.bfile
        try:
            # check if file name is encoded with UTF-8
            file = self.bfile.decode('utf-8')
        except Exception:
            # do nothing if bfile is not encoded.
            pass
        # Resolve to a full path; fall back to the raw name when the
        # helper cannot resolve it.
        path = get_complete_file_path(file)
        path = file if path is None else path
        if IS_WIN:
            # Normalise short (8.3) paths to their real form on Windows.
            path = os.path.realpath(path)
        # Locate the storage root's final component inside the path and
        # keep only the directory part that follows it.
        storage_directory = os.path.basename(get_storage_directory())
        if storage_directory in path:
            start = path.index(storage_directory)
            end = start + (len(storage_directory))
            last_dir = os.path.dirname(path[end:])
        else:
            last_dir = file
        last_dir = replace_path_for_win(last_dir)
        # Imports have no meaningful "current" storage directory.
        return None if hasattr(self, 'is_import') and self.is_import \
            else last_dir
    return None
def filename_with_file_manager_path(_file, create_file=True):
    """Resolve a client-supplied file name to a full path under the
    preference-configured storage directory and return its short-path
    form.

    Args:
        _file: File name returned from client file manager
        create_file: Set flag to False when file creation doesn't
            required

    Returns:
        Filename to use for backup with full path taken from preference
    """
    # Anchor the name inside the storage directory from preferences;
    # otherwise resolve relative names against the document directory.
    pref_dir = get_storage_directory()
    if pref_dir:
        _file = os.path.join(pref_dir, _file.lstrip(u'/').lstrip(u'\\'))
    elif not os.path.isabs(_file):
        _file = os.path.join(document_dir(), _file)

    if create_file:
        # Touch the file to get the short path of the file on windows.
        with open(_file, 'a'):
            pass

    resolved = fs_short_path(_file)

    # fs_short_path() function may return empty path on Windows
    # if directory doesn't exists. In that case we strip the last path
    # component and get the short path.
    if os.name == 'nt' and resolved == '':
        resolved = (fs_short_path(os.path.dirname(_file)) +
                    '\\' + os.path.basename(_file))

    return resolved
def filename_with_file_manager_path(_file, _present=False):
    """Resolve a client-supplied file name against the configured
    storage directory and return its short-path form.

    Args:
        _file: File name returned from client file manager
        _present: When True the file must already exist; None is
            returned if it does not.

    Returns:
        Filename to use for backup with full path taken from preference
    """
    pref_dir = get_storage_directory()
    if pref_dir:
        _file = os.path.join(pref_dir, _file.lstrip(u'/').lstrip(u'\\'))
    elif not os.path.isabs(_file):
        _file = os.path.join(document_dir(), _file)

    if _present:
        # Caller expects an existing file.
        if not os.path.isfile(_file):
            return None
    else:
        # Touch the file to get the short path of the file on windows.
        with open(_file, 'a'):
            pass

    return fs_short_path(_file)
def save_file():
    """
    This function retrieves file_name and data from request.
    and then save the data to the file
    """
    if request.data:
        file_data = json.loads(request.data.decode())

        # retrieve storage directory path
        storage_manager_path = get_storage_directory()

        # generate full path of file
        file_path = unquote(file_data['file_name'])
        if storage_manager_path is not None:
            file_path = os.path.join(
                storage_manager_path,
                unquote(file_data['file_name'].lstrip('/'))
            )

        file_content = file_data['file_content']

        # write to file
        try:
            with open(file_path, 'w') as output_file:
                output_file.write(file_content)
        except IOError as e:
            # Both branches of the original built the same message,
            # so the 'Permission denied' special-case is collapsed.
            return internal_server_error(
                errormsg="Error: {0}".format(e.strerror))
        except Exception as e:
            # Generic exceptions have no 'strerror' attribute; the
            # original raised AttributeError here. Use str(e) instead.
            return internal_server_error(
                errormsg="Error: {0}".format(str(e)))

        return make_json_response(data={
            'status': True,
        })
def load_file():
    """
    This function gets name of file from request data
    reads the data and sends back in reponse
    """
    if request.data:
        # json.loads() lost its 'encoding' keyword in Python 3.9
        # (TypeError on newer runtimes); bytes input is decoded as
        # UTF-8 automatically.
        file_data = json.loads(request.data)

        file_path = unquote(file_data['file_name'])
        if hasattr(str, 'decode'):
            # Python 2 only: normalise the unquoted name to unicode.
            file_path = unquote(
                file_data['file_name']).encode('utf-8').decode('utf-8')

        # retrieve storage directory path
        storage_manager_path = get_storage_directory()

        if storage_manager_path:
            # generate full path of file
            file_path = os.path.join(storage_manager_path,
                                     file_path.lstrip('/').lstrip('\\'))

        (status, err_msg, is_binary,
         is_startswith_bom,
         enc) = Filemanager.check_file_for_bom_and_binary(file_path)

        if not status:
            return internal_server_error(errormsg=gettext(err_msg))

        if is_binary:
            return internal_server_error(
                errormsg=gettext("File type not supported"))

        # Stream the file back with the encoding detected above.
        return Response(read_file_generator(file_path, enc),
                        mimetype='text/plain')
def oidc_login():
    """Finish an OIDC login: record the auth source in the session,
    provision a matching pgAdmin user on first login, refresh the
    pgpass file in the user's storage directory, load the
    preconfigured servers and redirect to the post-login page.

    Returns:
        A redirect response to the post-login destination.
    """
    # Local imports: replacements for the original shell-string
    # os.system() calls (see below).
    import os
    import shutil
    import subprocess

    auth_obj = AuthSourceManager(None, ['oidc'])
    session['_auth_source_manager_obj'] = auth_obj.as_dict()

    oidc_auth_source = get_auth_sources("oidc")

    # Build the internal username from the OIDC subject claim.
    unique_id = "u" + oidc.user_getfield('sub') + "@cyton"
    display_name = oidc.user_getfield('preferred_username')
    email = oidc.user_getfield('email')
    if email is None or email == "None":
        email = unique_id

    user = User.query.filter_by(username=unique_id).first()
    if user is None:
        # First login: provision the pgAdmin account.
        res, user = create_user({
            'username': unique_id,
            'email': email,
            'role': 2,
            'active': True,
            'is_active': True,
            'auth_source': 'oidc'
        })
        user = User.query.filter_by(username=unique_id).first()

    storage_dir = get_storage_directory()

    # Originally done via os.system() with string concatenation, which
    # is vulnerable to shell injection if the storage path or username
    # contains shell metacharacters. Use stdlib calls and argument
    # lists instead; the observable file-system effects are the same.
    pgpass_dest = os.path.join(storage_dir, 'pgpassfile')
    try:
        os.remove(pgpass_dest)  # 'rm -f' semantics: ignore if missing
    except OSError:
        pass
    shutil.copy('/pgadmin4/pgpass/pgpassfile', storage_dir + '/')
    os.chmod(pgpass_dest, 0o600)
    subprocess.call([
        '/usr/local/bin/python', '/pgadmin4/setup.py',
        '--load-servers', environ.get('PGADMIN_SERVER_JSON_FILE'),
        '--user', unique_id,
    ])

    return redirect(get_post_login_redirect())
def save_file():
    """
    This function retrieves file_name and data from request.
    and then save the data to the file
    """
    if request.data:
        # json.loads() lost its 'encoding' keyword in Python 3.9;
        # bytes input is decoded as UTF-8 automatically.
        file_data = json.loads(request.data)

        # retrieve storage directory path
        storage_manager_path = get_storage_directory()

        # generate full path of file
        file_path = unquote(file_data['file_name'])
        if hasattr(str, 'decode'):
            # Python 2 only: normalise the unquoted name to unicode.
            file_path = unquote(
                file_data['file_name']
            ).encode('utf-8').decode('utf-8')

        try:
            Filemanager.check_access_permission(storage_manager_path,
                                                file_path)
        except Exception as e:
            return internal_server_error(errormsg=str(e))

        if storage_manager_path is not None:
            file_path = os.path.join(
                storage_manager_path,
                file_path.lstrip('/').lstrip('\\')
            )

        # Python 2 keeps the str and encodes at write time; Python 3
        # encodes up front.
        if hasattr(str, 'decode'):
            file_content = file_data['file_content']
        else:
            file_content = file_data['file_content'].encode()

        # write to file
        try:
            with open(file_path, 'wb+') as output_file:
                if hasattr(str, 'decode'):
                    output_file.write(file_content.encode('utf-8'))
                else:
                    output_file.write(file_content)
        except IOError as e:
            # Both branches of the original built the same message.
            return internal_server_error(
                errormsg="Error: {0}".format(e.strerror))
        except Exception as e:
            # Generic exceptions have no 'strerror'; use str(e) instead
            # (the original raised AttributeError here).
            return internal_server_error(
                errormsg="Error: {0}".format(str(e)))

        return make_json_response(
            data={
                'status': True,
            }
        )
def load_file():
    """
    This function gets name of file from request data
    reads the data and sends back in reponse
    """
    if request.data:
        file_data = json.loads(request.data.decode())

        # retrieve storage directory path
        storage_manager_path = get_storage_directory()
        if storage_manager_path is None:
            storage_manager_path = ""

        # generate full path of file
        file_path = os.path.join(
            storage_manager_path,
            unquote(file_data['file_name'].lstrip('/'))
        )
        file_data = None

        # Bytes permitted in a "text" file: a few control characters
        # plus everything from 0x20 upward.
        textchars = bytearray(
            [7, 8, 9, 10, 12, 13, 27]) + bytearray(
            range(0x20, 0x7f)) + bytearray(range(0x80, 0x100))

        # A named def instead of a lambda bound to a name (PEP 8).
        def is_binary_string(bytes_data):
            """True when data contains bytes outside the text set."""
            return bool(bytes_data.translate(None, textchars))

        # read file
        try:
            with open(file_path, 'rb') as fileObj:
                is_binary = is_binary_string(fileObj.read(1024))
                if not is_binary:
                    fileObj.seek(0)
                    file_data = fileObj.read()
                else:
                    return internal_server_error(
                        errormsg=gettext("File type not supported")
                    )
        except IOError as e:
            # we don't want to expose real path of file
            # so only show error message. Both branches of the
            # original built the same message.
            return internal_server_error(
                errormsg="Error: {0}".format(e.strerror))
        except Exception as e:
            # Generic exceptions have no 'strerror'; the original
            # raised AttributeError here.
            return internal_server_error(
                errormsg="Error: {0}".format(str(e)))

        return make_json_response(
            data={
                'status': True,
                'result': file_data,
            }
        )
def load_file():
    """
    This function gets name of file from request data
    reads the data and sends back in reponse
    """
    if request.data:
        # json.loads() lost its 'encoding' keyword in Python 3.9;
        # bytes input is decoded as UTF-8 automatically.
        file_data = json.loads(request.data)

        file_path = unquote(file_data['file_name'])
        if hasattr(str, 'decode'):
            # Python 2 only: normalise the unquoted name to unicode.
            file_path = unquote(
                file_data['file_name']).encode('utf-8').decode('utf-8')

        # retrieve storage directory path
        storage_manager_path = get_storage_directory()

        if storage_manager_path:
            # generate full path of file
            file_path = os.path.join(storage_manager_path,
                                     file_path.lstrip('/').lstrip('\\'))

        file_data = None

        # check if file type is text or binary
        textchars = bytearray([7, 8, 9, 10, 12, 13, 27]) + bytearray(
            range(0x20, 0x7f)) + bytearray(range(0x80, 0x100))

        # A named def instead of a lambda bound to a name (PEP 8).
        def is_binary_string(bytes_data):
            """True when data contains bytes outside the text set."""
            return bool(bytes_data.translate(None, textchars))

        # read file
        try:
            with open(file_path, 'rb') as fileObj:
                is_binary = is_binary_string(fileObj.read(1024))
                if not is_binary:
                    fileObj.seek(0)
                    if hasattr(str, 'decode'):
                        # Python 2: decode the bytes to unicode.
                        file_data = fileObj.read().decode('utf-8')
                    else:
                        file_data = fileObj.read()
                else:
                    return internal_server_error(
                        errormsg=gettext("File type not supported"))
        except IOError as e:
            # we don't want to expose real path of file
            # so only show error message. Both original branches built
            # the same message.
            return internal_server_error(
                errormsg="Error: {0}".format(e.strerror))
        except Exception as e:
            # Generic exceptions have no 'strerror'; use str(e).
            return internal_server_error(
                errormsg="Error: {0}".format(str(e)))

        return make_json_response(data={
            'status': True,
            'result': file_data,
        })
def __init__(self, trans_id):
    """Record the transaction id, a canned permission-error payload
    and the configured storage root for this session."""
    self.trans_id = trans_id
    self.patherror = encode_json({
        'Error': gettext('No permission to operate on specified path.'),
        'Code': 0
    })
    self.dir = get_storage_directory()

    # A list means multiple volumes are exposed; use an empty root so
    # incoming paths are taken as-is. (isinstance is False for None,
    # so the original's extra None check is implied.)
    if isinstance(self.dir, list):
        self.dir = ""
def __init__(self, trans_id):
    """Record the transaction id, a canned permission-error payload
    and the configured storage root for this session."""
    self.trans_id = trans_id
    self.patherror = encode_json({
        'Error': gettext('No permission to operate on specified path.'),
        'Code': -1
    })
    self.dir = get_storage_directory()

    # A list means multiple volumes are exposed; use an empty root so
    # incoming paths are taken as-is. (isinstance is False for None,
    # so the original's extra None check is implied.)
    if isinstance(self.dir, list):
        self.dir = ""
def save_file():
    """
    This function retrieves file_name and data from request.
    and then save the data to the file
    """
    if request.data:
        # json.loads() lost its 'encoding' keyword in Python 3.9;
        # bytes input is decoded as UTF-8 automatically.
        file_data = json.loads(request.data)

        # retrieve storage directory path
        storage_manager_path = get_storage_directory()

        # generate full path of file
        file_path = unquote(file_data['file_name'])

        try:
            Filemanager.check_access_permission(storage_manager_path,
                                                file_path)
        except Exception as e:
            return internal_server_error(errormsg=str(e))

        if storage_manager_path is not None:
            file_path = os.path.join(
                storage_manager_path,
                file_path.lstrip('/').lstrip('\\')
            )

        # Get value for encoding if file is already loaded to SQL editor
        def get_file_encoding_of_loaded_file(file_name):
            encoding = 'utf-8'
            for ele in Filemanager.loaded_file_encoding_list:
                if file_name in ele:
                    encoding = ele[file_name]
            return encoding

        enc = get_file_encoding_of_loaded_file(os.path.basename(file_path))

        file_content = file_data['file_content'].encode(enc)

        # write to file
        try:
            with open(file_path, 'wb+') as output_file:
                output_file.write(file_content)
        except IOError as e:
            err_msg = gettext("Error: {0}").format(e.strerror)
            return internal_server_error(errormsg=err_msg)
        except Exception as e:
            # Generic exceptions have no 'strerror'; the original
            # raised AttributeError here. Use str(e) instead.
            err_msg = gettext("Error: {0}").format(str(e))
            return internal_server_error(errormsg=err_msg)

        return make_json_response(
            data={
                'status': True,
            }
        )
def filename_with_file_manager_path(file):
    """
    Args:
        file: File name returned from client file manager

    Returns:
        Filename to use for backup with full path taken from preference
    """
    # Prefix with the preference-configured storage directory when one
    # is set; otherwise pass the name through untouched.
    storage_dir = get_storage_directory()
    return os.path.join(storage_dir, file.lstrip('/')) if storage_dir \
        else file
def check_ssl_fields(self, data):
    """
    This function will allow us to check and set defaults for
    SSL fields

    Args:
        data: Response data

    Returns:
        Flag and Data
    """
    # Hoisted out of the per-field loop below, where the original
    # re-executed 'import os' on every iteration.
    import os

    flag = False

    if 'sslmode' in data and data['sslmode'] in self.SSL_MODES:
        flag = True
        ssl_fields = [
            'sslcert', 'sslkey', 'sslrootcert', 'sslcrl', 'sslcompression'
        ]
        # Required SSL fields for SERVER mode from user
        required_ssl_fields_server_mode = ['sslcert', 'sslkey']

        for field in ssl_fields:
            if field not in data:
                # In Server mode,
                # we will set dummy SSL certificate file path which will
                # prevent using default SSL certificates from web servers
                if config.SERVER_MODE and \
                        field in required_ssl_fields_server_mode:
                    # Set file manager directory from preference
                    storage_dir = get_storage_directory()
                    file_extn = '.key' if field.endswith('key') \
                        else '.crt'
                    dummy_ssl_file = os.path.join(
                        storage_dir, '.postgresql',
                        'postgresql' + file_extn
                    )
                    data[field] = dummy_ssl_file
                # For Desktop mode, we will allow to default
                else:
                    data[field] = None

    return flag, data
def filename_with_file_manager_path(_file):
    """Resolve a client-supplied file name against the configured
    storage directory.

    Args:
        _file: File name returned from client file manager

    Returns:
        Filename to use for backup with full path taken from
        preference, or None when the file does not exist.
    """
    pref_dir = get_storage_directory()

    if pref_dir:
        # Anchor inside the shared storage directory.
        _file = os.path.join(pref_dir, _file.lstrip(u'/').lstrip(u'\\'))
    elif not os.path.isabs(_file):
        _file = os.path.join(document_dir(), _file)

    return fs_short_path(_file) if os.path.isfile(_file) else None
def filename_with_file_manager_path(_file):
    """
    Args:
        _file: File name returned from client file manager

    Returns:
        Filename to use for backup with full path taken from
        preference, or None when the path does not exist at all.
    """
    # Set file manager directory from preference
    storage_dir = get_storage_directory()

    if storage_dir:
        _file = os.path.join(storage_dir, _file.lstrip(u'/').lstrip(u'\\'))
    elif not os.path.isabs(_file):
        _file = os.path.join(document_dir(), _file)

    # The original tested "not isfile and not exists"; a missing path
    # is never a file, so that is equivalent to "not exists" alone
    # (existing directories still fall through to fs_short_path).
    if not os.path.exists(_file):
        return None

    return fs_short_path(_file)
def save_file():
    """
    This function retrieves file_name and data from request.
    and then save the data to the file
    """
    if request.data:
        file_data = json.loads(request.data.decode())

        # retrieve storage directory path
        storage_manager_path = get_storage_directory()

        # generate full path of file
        file_path = unquote(file_data['file_name'])
        if storage_manager_path is not None:
            file_path = os.path.join(
                storage_manager_path,
                unquote(file_data['file_name'].lstrip('/'))
            )

        file_content = file_data['file_content']

        # write to file
        try:
            with open(file_path, 'w') as output_file:
                output_file.write(file_content)
        except IOError as e:
            # Both branches of the original built the same message,
            # so the 'Permission denied' special-case is collapsed.
            return internal_server_error(
                errormsg="Error: {0}".format(e.strerror))
        except Exception as e:
            # Generic exceptions have no 'strerror'; the original
            # raised AttributeError here. Use str(e) instead.
            return internal_server_error(
                errormsg="Error: {0}".format(str(e)))

        return make_json_response(
            data={
                'status': True,
            }
        )
def load_file():
    """
    This function gets name of file from request data
    reads the data and sends back in reponse
    """
    if request.data:
        # json.loads() lost its 'encoding' keyword in Python 3.9
        # (TypeError on newer runtimes); bytes input is decoded as
        # UTF-8 automatically.
        file_data = json.loads(request.data)

        file_path = unquote(file_data['file_name'])
        if hasattr(str, 'decode'):
            # Python 2 only: normalise the unquoted name to unicode.
            file_path = unquote(
                file_data['file_name']
            ).encode('utf-8').decode('utf-8')

        # retrieve storage directory path
        storage_manager_path = get_storage_directory()

        if storage_manager_path:
            # generate full path of file
            file_path = os.path.join(
                storage_manager_path,
                file_path.lstrip('/').lstrip('\\')
            )

        (status, err_msg, is_binary,
         is_startswith_bom,
         enc) = Filemanager.check_file_for_bom_and_binary(
            file_path
        )

        if not status:
            return internal_server_error(
                errormsg=gettext(err_msg)
            )

        if is_binary:
            return internal_server_error(
                errormsg=gettext("File type not supported")
            )

        # Stream the file back with the encoding detected above.
        return Response(read_file_generator(file_path, enc),
                        mimetype='text/plain')
def filename_with_file_manager_path(_file, create_file=True):
    """Resolve a client-supplied file name against the configured
    storage directory and return its short-path form.

    Args:
        _file: File name returned from client file manager
        create_file: Set flag to False when file creation doesn't
            required

    Returns:
        Filename to use for backup with full path taken from preference
    """
    pref_dir = get_storage_directory()
    if pref_dir:
        _file = os.path.join(pref_dir, _file.lstrip(u'/').lstrip(u'\\'))
    elif not os.path.isabs(_file):
        _file = os.path.join(document_dir(), _file)

    if create_file:
        # Touch the file to get the short path of the file on windows.
        open(_file, 'a').close()

    return fs_short_path(_file)
def create_new_transaction(params):
    """
    It will also create a unique transaction id and
    store the information into session variable.
    Args:
        capabilities: Allow/Disallow user to perform
        selection, rename, delete etc.
    """
    # Define configs for dialog types
    # select file, select folder, create mode
    Filemanager.suspend_windows_warning()
    fm_type = params['dialog_type']
    storage_dir = get_storage_directory()

    # It is used in utility js to decide to
    # show or hide select file type options
    show_volumes = isinstance(storage_dir, list) or not storage_dir
    supp_types = allow_upload_files = params['supported_types'] \
        if 'supported_types' in params else []
    if fm_type == 'select_file':
        capabilities = ['select_file', 'rename', 'upload', 'create']
        supp_types = supp_types
        files_only = True
        folders_only = False
        title = "Select File"
    elif fm_type == 'select_folder':
        capabilities = ['select_folder', 'rename', 'create']
        files_only = False
        folders_only = True
        title = "Select Folder"
    elif fm_type == 'create_file':
        capabilities = ['select_file', 'rename', 'create']
        supp_types = supp_types
        files_only = True
        folders_only = False
        title = "Create File"
    elif fm_type == 'storage_dialog':
        capabilities = ['select_folder', 'select_file', 'download',
                        'rename', 'delete', 'upload', 'create']
        supp_types = supp_types
        files_only = True
        folders_only = False
        title = "Storage Manager"
    # NOTE(review): an unrecognised dialog_type leaves capabilities,
    # files_only, folders_only and title unbound (NameError below) —
    # confirm all callers pass one of the four known types.

    # get last visited directory, if not present then traverse in reverse
    # order to find closest parent directory
    last_dir = blueprint.last_directory_visited.get()
    check_dir_exists = False
    if storage_dir is None:
        if last_dir is None:
            last_dir = "/"
        else:
            check_dir_exists = True
    else:
        if last_dir is not None:
            check_dir_exists = True
        else:
            last_dir = u"/"
    if check_dir_exists:
        # Drop a trailing separator before walking up the tree.
        if len(last_dir) > 1 and \
                (last_dir.endswith('/') or last_dir.endswith('\\')):
            last_dir = last_dir[:-1]
        while last_dir:
            # NOTE(review): '+' binds tighter than the conditional, so
            # this evaluates os.path.exists(storage_dir) when
            # storage_dir is not None and os.path.exists(last_dir)
            # otherwise; "(storage_dir or '') + last_dir" looks like
            # the intent — confirm before changing.
            if os.path.exists(
                    storage_dir
                    if storage_dir is not None else '' + last_dir):
                break
            if _platform == 'win32':
                index = max(last_dir.rfind('\\'), last_dir.rfind('/'))
            else:
                index = last_dir.rfind('/')
            last_dir = last_dir[0:index]
        if not last_dir:
            last_dir = u"/"

    # Ensure the root ends with the platform's separator.
    if _platform == 'win32':
        if not (last_dir.endswith('\\') or last_dir.endswith('/')):
            last_dir += u"\\"
    else:
        if not last_dir.endswith('/'):
            last_dir += u"/"

    # create configs using above configs
    configs = {
        # for JS json compatibility
        "fileroot": last_dir.replace('\\', '\\\\'),
        "dialog_type": fm_type,
        "title": title,
        "upload": {
            "multiple": True
        },
        "capabilities": capabilities,
        "security": {
            "uploadPolicy": "",
            "uploadRestrictions": allow_upload_files
        },
        "files_only": files_only,
        "folders_only": folders_only,
        "supported_types": supp_types,
        "platform_type": _platform,
        "show_volumes": show_volumes
    }

    # Create a unique id for the transaction
    trans_id = str(random.randint(1, 9999999))
    if 'fileManagerData' not in session:
        file_manager_data = dict()
    else:
        file_manager_data = session['fileManagerData']

    file_upload_size = blueprint.get_file_size_preference().get()
    configs['upload']['fileSizeLimit'] = file_upload_size
    file_manager_data[trans_id] = configs
    session['fileManagerData'] = file_manager_data
    Filemanager.resume_windows_warning()
    return trans_id
def create_new_transaction(params):
    """
    It will also create a unique transaction id and
    store the information into session variable.
    Args:
        capabilities: Allow/Disallow user to perform
        selection, rename, delete etc.
    """
    # Define configs for dialog types
    # select file, select folder, create mode
    Filemanager.suspend_windows_warning()
    fm_type = params['dialog_type']
    storage_dir = get_storage_directory()

    # It is used in utility js to decide to
    # show or hide select file type options
    show_volumes = isinstance(storage_dir, list) or not storage_dir
    supp_types = allow_upload_files = params['supported_types'] \
        if 'supported_types' in params else []
    if fm_type == 'select_file':
        capabilities = ['select_file', 'rename', 'upload', 'create']
        supp_types = supp_types
        files_only = True
        folders_only = False
        title = "Select File"
    elif fm_type == 'select_folder':
        capabilities = ['select_folder', 'rename', 'create']
        files_only = False
        folders_only = True
        title = "Select Folder"
    elif fm_type == 'create_file':
        capabilities = ['select_file', 'rename', 'create']
        supp_types = supp_types
        files_only = True
        folders_only = False
        title = "Create File"
    elif fm_type == 'storage_dialog':
        capabilities = [
            'select_folder', 'select_file', 'download',
            'rename', 'delete', 'upload', 'create'
        ]
        supp_types = supp_types
        files_only = True
        folders_only = False
        title = "Storage Manager"
    # NOTE(review): an unrecognised dialog_type leaves capabilities,
    # files_only, folders_only and title unbound (NameError below) —
    # confirm all callers pass one of the four known types.

    # get last visited directory, if not present then traverse in reverse
    # order to find closest parent directory
    last_dir = blueprint.last_directory_visited.get()
    check_dir_exists = False
    if storage_dir is None:
        if last_dir is None:
            last_dir = "/"
        else:
            check_dir_exists = True
    else:
        if last_dir is not None:
            check_dir_exists = True
        else:
            last_dir = u"/"
    if check_dir_exists:
        # Drop a trailing separator before walking up the tree.
        if len(last_dir) > 1 and \
                (last_dir.endswith('/') or last_dir.endswith('\\')):
            last_dir = last_dir[:-1]
        while last_dir:
            # NOTE(review): '+' binds tighter than the conditional, so
            # this evaluates os.path.exists(storage_dir) when
            # storage_dir is not None and os.path.exists(last_dir)
            # otherwise; "(storage_dir or '') + last_dir" looks like
            # the intent — confirm before changing.
            if os.path.exists(
                    storage_dir
                    if storage_dir is not None else '' + last_dir):
                break
            if _platform == 'win32':
                index = max(last_dir.rfind('\\'), last_dir.rfind('/'))
            else:
                index = last_dir.rfind('/')
            last_dir = last_dir[0:index]
        if not last_dir:
            last_dir = u"/"

    # Ensure the root ends with the platform's separator.
    if _platform == 'win32':
        if not (last_dir.endswith('\\') or last_dir.endswith('/')):
            last_dir += u"\\"
    else:
        if not last_dir.endswith('/'):
            last_dir += u"/"

    # create configs using above configs
    configs = {
        # for JS json compatibility
        "fileroot": last_dir.replace('\\', '\\\\'),
        "dialog_type": fm_type,
        "title": title,
        "upload": {
            "multiple": True
        },
        "capabilities": capabilities,
        "security": {
            "uploadPolicy": "",
            "uploadRestrictions": allow_upload_files
        },
        "files_only": files_only,
        "folders_only": folders_only,
        "supported_types": supp_types,
        "platform_type": _platform,
        "show_volumes": show_volumes
    }

    # Create a unique id for the transaction
    trans_id = str(random.randint(1, 9999999))
    if 'fileManagerData' not in session:
        file_manager_data = dict()
    else:
        file_manager_data = session['fileManagerData']

    file_upload_size = blueprint.get_file_size_preference().get()
    configs['upload']['fileSizeLimit'] = file_upload_size
    file_manager_data[trans_id] = configs
    session['fileManagerData'] = file_manager_data
    Filemanager.resume_windows_warning()
    return trans_id
def create_import_export_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for import and export table data functionality

    Returns:
        None
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data.decode())

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()
    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Couldn't find the given server")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()
    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first...")
        )

    # Get the utility path from the connection manager
    utility = manager.utility('sql')

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        if os.name == 'nt':
            # Normalise separators and double the backslashes so the
            # path survives later shell/SQL quoting on Windows.
            data['filename'] = data['filename'].replace('/', '\\')
            if storage_dir:
                storage_dir = storage_dir.replace('/', '\\')
            data['filename'] = data['filename'].replace('\\', '\\\\')
            data['filename'] = os.path.join(
                storage_dir, data['filename'].lstrip('/'))
        else:
            data['filename'] = os.path.join(
                storage_dir, data['filename'].lstrip('/'))
    else:
        return make_json_response(data={
            'status': False,
            'info': 'Please specify a valid file'
        })

    cols = None
    icols = None

    if data['icolumns']:
        ignore_cols = data['icolumns']

        # format the ignore column list required as per copy command
        # requirement
        if ignore_cols and len(ignore_cols) > 0:
            for col in ignore_cols:
                if icols:
                    icols += ', '
                else:
                    icols = '('
                icols += driver.qtIdent(conn, col)
            icols += ')'

    # format the column import/export list required as per copy command
    # requirement
    if data['columns']:
        columns = data['columns']
        if columns and len(columns) > 0:
            for col in columns:
                if cols:
                    cols += ', '
                else:
                    cols = '('
                cols += driver.qtIdent(conn, col)
            cols += ')'

    # Create the COPY FROM/TO from template
    query = render_template('import_export/sql/cmd.sql', conn=conn,
                            data=data, columns=cols,
                            ignore_column_list=icols)

    args = [
        '--host', server.host, '--port', str(server.port),
        '--username', server.username, '--dbname',
        driver.qtIdent(conn, data['database']),
        '--command', query
    ]

    try:
        # Run the utility asynchronously as a background batch process.
        p = BatchProcess(desc=Message(sid, data['schema'], data['table'],
                                      data['database'], storage_dir),
                         cmd=utility, args=args)
        manager.export_password_env(p.id)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))

    # Return response
    return make_json_response(data={'job_id': jid, 'success': 1})
def check_file_for_bom_and_binary(filename, enc="utf-8"):
    """
    This utility function will check if file is Binary file
    and/or if it startswith BOM character

    Args:
        filename: File
        enc: Encoding for the file

    Returns:
        Status(Error?), Error message, Binary file flag,
        BOM character flag and Encoding to open file
    """
    status = True
    err_msg = None
    is_startswith_bom = False
    is_binary = False

    # check if file type is text or binary
    text_chars = bytearray([7, 8, 9, 10, 12, 13, 27]) \
        + bytearray(range(0x20, 0x7f)) \
        + bytearray(range(0x80, 0x100))

    def is_binary_string(bytes_data):
        """Checks if string data is binary"""
        return bool(bytes_data.translate(None, text_chars))

    # read the file — only the first 1KB is inspected
    try:
        with open(filename, 'rb') as f:
            file_data = f.read(1024)

            # Check for BOM in file data.
            # NOTE: later matches overwrite 'enc', so a file whose
            # UTF-32 BOM begins with the UTF-16 BOM bytes ends up
            # classified as utf-32 (the more specific match).
            for encoding, boms in \
                    ('utf-8-sig', (codecs.BOM_UTF8,)), \
                    ('utf-16', (codecs.BOM_UTF16_LE,
                                codecs.BOM_UTF16_BE)), \
                    ('utf-32', (codecs.BOM_UTF32_LE,
                                codecs.BOM_UTF32_BE)):
                if any(file_data.startswith(bom) for bom in boms):
                    is_startswith_bom = True
                    enc = encoding

            # Check if string is binary
            is_binary = is_binary_string(file_data)
    except IOError as ex:
        status = False
        # we don't want to expose real path of file
        # so only show error message.
        if ex.strerror == 'Permission denied':
            err_msg = u"Error: {0}".format(ex.strerror)
        else:
            err_msg = u"Error: {0}".format(str(ex))
    except Exception as ex:
        status = False
        err_msg = u"Error: {0}".format(str(ex))

    # Remove root storage path from error message
    # when running in Server mode
    if not status and not current_app.PGADMIN_RUNTIME:
        storage_directory = get_storage_directory()
        if storage_directory:
            err_msg = err_msg.replace(storage_directory, '')

    return status, err_msg, is_binary, is_startswith_bom, enc
def create_import_export_job(sid):
    """
    Args:
        sid: Server ID

        Creates a new job for import and export table data functionality

    Returns:
        None
    """
    # json.loads() lost its 'encoding' keyword in Python 3.9
    # (TypeError on newer runtimes); bytes/str input is decoded as
    # UTF-8 automatically.
    if request.form:
        data = json.loads(request.form['data'])
    else:
        data = json.loads(request.data)

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()
    if server is None:
        return bad_request(errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()
    if not connected:
        return bad_request(
            errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')
    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        try:
            _file = filename_with_file_manager_path(data['filename'],
                                                    data['is_import'])
        except Exception as e:
            return bad_request(errormsg=str(e))

        if not _file:
            return bad_request(errormsg=_('Please specify a valid file'))

        if IS_WIN:
            _file = _file.replace('\\', '/')

        data['filename'] = _file
    else:
        return bad_request(errormsg=_('Please specify a valid file'))

    # Get required and ignored column list
    icols = _get_ignored_column_list(data, driver, conn)
    cols = _get_required_column_list(data, driver, conn)

    # Create the COPY FROM/TO from template
    query = render_template('import_export/sql/cmd.sql', conn=conn,
                            data=data, columns=cols,
                            ignore_column_list=icols)

    args = ['--command', query]

    try:
        # Run the utility asynchronously as a background batch process.
        p = BatchProcess(
            desc=IEMessage(sid, data['schema'], data['table'],
                           data['database'], storage_dir, utility,
                           *args),
            cmd=utility, args=args)
        manager.export_password_env(p.id)
        env = dict()
        env['PGHOST'] = server.host
        env['PGPORT'] = str(server.port)
        env['PGUSER'] = server.username
        env['PGDATABASE'] = data['database']
        p.set_env_variables(server, env=env)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return bad_request(errormsg=str(e))

    # Return response
    return make_json_response(data={'job_id': jid, 'success': 1})
def create_new_transaction(params):
    """
    It will also create a unique transaction id and
    store the information into session variable.
    Args:
        capabilities: Allow/Disallow user to perform
        selection, rename, delete etc.
    """
    # Define configs for dialog types
    # select file, select folder, create mode
    fm_type = params['dialog_type']
    storage_dir = get_storage_directory()

    # It is used in utility js to decide to
    # show or hide select file type options
    show_volumes = isinstance(storage_dir, list) or not storage_dir
    supp_types = allow_upload_files = params['supported_types'] \
        if 'supported_types' in params else []
    # The original contained no-op 'supp_types = supp_types'
    # assignments in the file-based branches; they are removed here.
    if fm_type == 'select_file':
        capabilities = ['select_file', 'rename', 'upload', 'create']
        files_only = True
        folders_only = False
        title = "Select File"
    elif fm_type == 'select_folder':
        capabilities = ['select_folder', 'rename', 'create']
        files_only = False
        folders_only = True
        title = "Select Folder"
    elif fm_type == 'create_file':
        capabilities = ['select_file', 'rename', 'create']
        files_only = True
        folders_only = False
        title = "Create File"
    elif fm_type == 'storage_dialog':
        capabilities = [
            'select_folder', 'select_file', 'download',
            'rename', 'delete', 'upload', 'create'
        ]
        files_only = True
        folders_only = False
        title = "Storage Manager"
    # NOTE(review): an unrecognised dialog_type leaves capabilities,
    # files_only, folders_only and title unbound (NameError below) —
    # confirm all callers pass one of the four known types.

    # create configs using above configs
    configs = {
        "fileroot": "/",
        "dialog_type": fm_type,
        "title": title,
        "upload": {
            "multiple": True
        },
        "capabilities": capabilities,
        "security": {
            "uploadPolicy": "",
            "uploadRestrictions": allow_upload_files
        },
        "files_only": files_only,
        "folders_only": folders_only,
        "supported_types": supp_types,
        "platform_type": _platform,
        "show_volumes": show_volumes
    }

    # Create a unique id for the transaction
    trans_id = str(random.randint(1, 9999999))
    if 'fileManagerData' not in session:
        file_manager_data = dict()
    else:
        file_manager_data = session['fileManagerData']

    file_upload_size = blueprint.get_file_size_preference().get()
    configs['upload']['fileSizeLimit'] = file_upload_size
    file_manager_data[trans_id] = configs
    session['fileManagerData'] = file_manager_data
    return trans_id
def __init__(self, trans_id):
    """Record the transaction id and resolve the storage root."""
    self.trans_id = trans_id
    self.dir = get_storage_directory()

    # A list means multiple volumes are exposed; use an empty root so
    # incoming paths are taken as-is. (isinstance is False for None,
    # so the original's extra None check is implied.)
    if isinstance(self.dir, list):
        self.dir = ""
def _format_column_list(driver, conn, columns):
    # Helper: quote every column name with qtIdent and join them into a
    # single parenthesised list, e.g. '("a", "b")'.  Returns None for an
    # empty/missing list so the SQL template can omit the clause.
    if not columns:
        return None
    return '(' + ', '.join(
        driver.qtIdent(conn, col) for col in columns) + ')'


def create_import_export_job(sid):
    """
    Create a background job that runs the psql COPY command to import or
    export table data for the given server.

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the new job id, or a bad-request
        response when the server, connection or file is invalid.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        # json.loads() lost its 'encoding' argument in Python 3.9 (it
        # had been ignored since 3.1); the payload needs no decode hint.
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data)

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid).first()
    if server is None:
        return bad_request(errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return bad_request(errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        try:
            # Resolve the client-supplied name against the storage
            # directory; data['is_import'] controls whether the file
            # must already exist.
            _file = filename_with_file_manager_path(
                data['filename'], data['is_import'])
        except Exception as e:
            return bad_request(errormsg=str(e))

        if not _file:
            return bad_request(errormsg=_('Please specify a valid file'))

        if IS_WIN:
            _file = _file.replace('\\', '/')

        data['filename'] = _file
    else:
        return bad_request(errormsg=_('Please specify a valid file'))

    # Optional parenthesised column lists for the COPY command.
    icols = _format_column_list(driver, conn, data['icolumns'])
    cols = _format_column_list(driver, conn, data['columns'])

    # Create the COPY FROM/TO from template
    query = render_template(
        'import_export/sql/cmd.sql',
        conn=conn,
        data=data,
        columns=cols,
        ignore_column_list=icols
    )

    args = ['--command', query]

    try:
        p = BatchProcess(
            desc=IEMessage(
                sid,
                data['schema'],
                data['table'],
                data['database'],
                storage_dir,
                utility, *args
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)

        # Connection parameters are handed to the spawned psql process
        # through libpq environment variables, not the command line.
        env = dict()
        env['PGHOST'] = server.host
        env['PGPORT'] = str(server.port)
        env['PGUSER'] = server.username
        env['PGDATABASE'] = data['database']
        p.set_env_variables(server, env=env)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return bad_request(errormsg=str(e))

    # Return response
    return make_json_response(
        data={'job_id': jid, 'success': 1}
    )
def create_new_transaction(params):
    """
    Create a unique file-manager transaction id, build the dialog
    configuration for the requested dialog type and store it in the
    session under 'fileManagerData'.

    Args:
        params: dict with 'dialog_type' (one of the capability_map keys)
            and an optional 'supported_types' list.

    Returns:
        The new transaction id as a string.
    """
    # Define configs for dialog types
    # select file, select folder, create mode
    Filemanager.suspend_windows_warning()

    fm_type = params['dialog_type']
    storage_dir = get_storage_directory()

    # It is used in utitlity js to decide to
    # show or hide select file type options
    show_volumes = isinstance(storage_dir, list) or not storage_dir
    supp_types = allow_upload_files = params.get('supported_types', [])

    # tuples with (capabilities, files_only, folders_only, title)
    capability_map = {
        'select_file': (
            ['select_file', 'rename', 'upload', 'create'], True, False,
            gettext("Select File")
        ),
        'select_folder': (
            ['select_folder', 'rename', 'create'], False, True,
            gettext("Select Folder")
        ),
        'create_file': (
            ['select_file', 'rename', 'create'], True, False,
            gettext("Create File")
        ),
        'storage_dialog': (
            ['select_folder', 'select_file', 'download', 'rename',
             'delete', 'upload', 'create'], True, False,
            gettext("Storage Manager")
        ),
    }
    # NOTE(review): an unknown fm_type raises KeyError here — presumably
    # callers only pass the four known types; confirm.
    capabilities, files_only, folders_only, title = capability_map[fm_type]

    # Using os.path.join to make sure we have trailing '/' or '\'
    homedir = '/' if (config.SERVER_MODE) \
        else os.path.join(os.path.expanduser('~'), '')

    # get last visited directory, if not present then traverse in reverse
    # order to find closest parent directory
    last_dir = blueprint.last_directory_visited.get()
    check_dir_exists = False
    if last_dir is None:
        last_dir = "/"
    else:
        check_dir_exists = True

    # NOTE(review): due to and/or precedence this condition reduces to
    # just `last_dir == "/"`, making the SERVER_MODE test dead code —
    # confirm whether parentheses were intended.
    if not config.SERVER_MODE and last_dir == "/" or last_dir == "/":
        last_dir = homedir

    if check_dir_exists:
        # Walk up from the remembered path to the closest directory
        # that still exists under the storage root.
        last_dir = Filemanager.get_closest_parent(storage_dir, last_dir)

    # create configs using above configs
    configs = {
        # for JS json compatibility
        "fileroot": last_dir.replace('\\', '\\\\'),
        "homedir": homedir.replace('\\', '\\\\'),
        "dialog_type": fm_type,
        "title": title,
        "upload": {
            "multiple": True
        },
        "capabilities": capabilities,
        "security": {
            "uploadPolicy": "",
            "uploadRestrictions": allow_upload_files
        },
        "files_only": files_only,
        "folders_only": folders_only,
        "supported_types": supp_types,
        "platform_type": _platform,
        "show_volumes": show_volumes
    }

    # Create a unique id for the transaction
    trans_id = str(random.randint(1, 9999999))
    if 'fileManagerData' not in session:
        file_manager_data = dict()
    else:
        file_manager_data = session['fileManagerData']

    file_upload_size = blueprint.get_file_size_preference().get()
    configs['upload']['fileSizeLimit'] = file_upload_size
    file_manager_data[trans_id] = configs
    session['fileManagerData'] = file_manager_data

    Filemanager.resume_windows_warning()
    return trans_id
def check_file_for_bom_and_binary(filename, enc="utf-8"): """ This utility function will check if file is Binary file and/or if it startswith BOM character Args: filename: File enc: Encoding for the file Returns: Status(Error?), Error message, Binary file flag, BOM character flag and Encoding to open file """ status = True err_msg = None is_startswith_bom = False is_binary = False # check if file type is text or binary text_chars = bytearray([7, 8, 9, 10, 12, 13, 27]) \ + bytearray(range(0x20, 0x7f)) \ + bytearray(range(0x80, 0x100)) def is_binary_string(bytes_data): """Checks if string data is binary""" return bool( bytes_data.translate(None, text_chars) ) # read the file try: with open(filename, 'rb') as f: file_data = f.read(1024) # Check for BOM in file data for encoding, boms in \ ('utf-8-sig', (codecs.BOM_UTF8,)), \ ('utf-16', (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE)), \ ('utf-32', (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE)): if any(file_data.startswith(bom) for bom in boms): is_startswith_bom = True enc = encoding # Check if string is binary is_binary = is_binary_string(file_data) except IOError as ex: status = False # we don't want to expose real path of file # so only show error message. if ex.strerror == 'Permission denied': err_msg = u"Error: {0}".format(ex.strerror) else: err_msg = u"Error: {0}".format(str(ex)) except Exception as ex: status = False err_msg = u"Error: {0}".format(str(ex)) # Remove root storage path from error message # when running in Server mode if not status and not current_app.PGADMIN_RUNTIME: storage_directory = get_storage_directory() if storage_directory: err_msg = err_msg.replace(storage_directory, '') return status, err_msg, is_binary, is_startswith_bom, enc
def create_new_transaction(params):
    """
    Register a new file-manager transaction.

    Builds the dialog configuration for the requested dialog type,
    generates a unique transaction id and stores the configuration in
    the session under 'fileManagerData'.

    Args:
        params: dict carrying 'dialog_type' and, optionally,
            'supported_types'.

    Returns:
        The generated transaction id as a string.
    """
    dialog_type = params['dialog_type']
    storage_root = get_storage_directory()

    # The client-side utility JS shows the select-file-type options only
    # when no single storage directory is configured.
    show_volumes = isinstance(storage_root, list) or not storage_root

    supported = allow_upload_files = (
        params['supported_types'] if 'supported_types' in params else [])

    # Defaults cover the file-oriented dialogs; the folder dialog
    # overrides them below.
    files_only, folders_only = True, False
    if dialog_type == 'select_file':
        capabilities = ['select_file', 'rename', 'upload', 'create']
        title = "Select File"
    elif dialog_type == 'select_folder':
        capabilities = ['select_folder', 'rename', 'create']
        files_only, folders_only = False, True
        title = "Select Folder"
    elif dialog_type == 'create_file':
        capabilities = ['select_file', 'rename', 'create']
        title = "Create File"
    elif dialog_type == 'storage_dialog':
        capabilities = ['select_folder', 'select_file', 'download',
                        'rename', 'delete', 'upload', 'create']
        title = "Storage Manager"

    configs = {
        "fileroot": "/",
        "dialog_type": dialog_type,
        "title": title,
        "upload": {
            "multiple": True
        },
        "capabilities": capabilities,
        "security": {
            "uploadPolicy": "",
            "uploadRestrictions": allow_upload_files
        },
        "files_only": files_only,
        "folders_only": folders_only,
        "supported_types": supported,
        "platform_type": _platform,
        "show_volumes": show_volumes
    }

    # A random numeric id keys this dialog's data in the session.
    trans_id = str(random.randint(1, 9999999))
    file_manager_data = session.get('fileManagerData', dict())

    file_upload_size = blueprint.get_file_size_preference().get()
    configs['upload']['fileSizeLimit'] = file_upload_size
    file_manager_data[trans_id] = configs
    session['fileManagerData'] = file_manager_data

    return trans_id
def create_import_export_job(sid):
    """
    Create a background job that runs the psql COPY command to import or
    export table data for the server identified by sid.

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the new job id, or a bad-request
        response when the server, connection or file is invalid.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        # NOTE(review): json.loads() dropped the 'encoding' keyword in
        # Python 3.9 (ignored since 3.1) — this raises TypeError there;
        # confirm the supported Python versions.
        data = json.loads(data['data'][0], encoding='utf-8')
    else:
        data = json.loads(request.data, encoding='utf-8')

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()
    if server is None:
        return bad_request(errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return bad_request(errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        try:
            # Resolve the client-supplied name against the storage
            # directory; data['is_import'] controls whether the file
            # must already exist.
            _file = filename_with_file_manager_path(data['filename'],
                                                    data['is_import'])
        except Exception as e:
            return bad_request(errormsg=str(e))

        if not _file:
            return bad_request(errormsg=_('Please specify a valid file'))

        if IS_WIN:
            _file = _file.replace('\\', '/')

        data['filename'] = _file
    else:
        return bad_request(errormsg=_('Please specify a valid file'))

    cols = None
    icols = None

    if data['icolumns']:
        ignore_cols = data['icolumns']

        # format the ignore column list required as per copy command
        # requirement
        if ignore_cols and len(ignore_cols) > 0:
            for col in ignore_cols:
                if icols:
                    icols += ', '
                else:
                    icols = '('
                icols += driver.qtIdent(conn, col)
            icols += ')'

    # format the column import/export list required as per copy command
    # requirement
    if data['columns']:
        columns = data['columns']
        if columns and len(columns) > 0:
            for col in columns:
                if cols:
                    cols += ', '
                else:
                    cols = '('
                cols += driver.qtIdent(conn, col)
            cols += ')'

    # Create the COPY FROM/TO from template
    query = render_template('import_export/sql/cmd.sql',
                            conn=conn,
                            data=data,
                            columns=cols,
                            ignore_column_list=icols)
    args = ['--command', query]

    try:
        p = BatchProcess(desc=IEMessage(sid,
                                        data['schema'],
                                        data['table'],
                                        data['database'],
                                        storage_dir,
                                        utility, *args),
                         cmd=utility, args=args)
        manager.export_password_env(p.id)

        def export_pg_env(env):
            # Populate the libpq connection environment for the
            # spawned psql process.
            env['PGHOST'] = server.host
            env['PGPORT'] = str(server.port)
            env['PGUSER'] = server.username
            env['PGDATABASE'] = data['database']

        p.start(export_pg_env)
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return bad_request(errormsg=str(e))

    # Return response
    return make_json_response(data={'job_id': jid, 'success': 1})
def create_import_export_job(sid):
    """
    Create a background job that runs the psql COPY command to import or
    export table data for the server identified by sid.

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the new job id, or an error JSON
        response when the server, connection or file is invalid.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0])
    else:
        data = json.loads(request.data.decode())

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid).first()
    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Couldn't find the given server")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first...")
        )

    # Get the utility path from the connection manager
    utility = manager.utility('sql')

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        if os.name == 'nt':
            # Normalise the client-supplied path to Windows separators,
            # then double each backslash — presumably to escape the
            # path for the generated COPY command; confirm against the
            # SQL template.
            data['filename'] = data['filename'].replace('/', '\\')
            if storage_dir:
                storage_dir = storage_dir.replace('/', '\\')
            data['filename'] = data['filename'].replace('\\', '\\\\')
            data['filename'] = os.path.join(storage_dir,
                                            data['filename'].lstrip('/'))
        else:
            # NOTE(review): if storage_dir is None here (no storage
            # preference), os.path.join raises TypeError — verify the
            # preference is always set in server mode.
            data['filename'] = os.path.join(storage_dir,
                                            data['filename'].lstrip('/'))
    else:
        return make_json_response(
            data={'status': False, 'info': 'Please specify a valid file'}
        )

    cols = None
    icols = None

    if data['icolumns']:
        ignore_cols = data['icolumns']

        # format the ignore column list required as per copy command
        # requirement
        if ignore_cols and len(ignore_cols) > 0:
            for col in ignore_cols:
                if icols:
                    icols += ', '
                else:
                    icols = '('
                icols += driver.qtIdent(conn, col)
            icols += ')'

    # format the column import/export list required as per copy command
    # requirement
    if data['columns']:
        columns = data['columns']
        if columns and len(columns) > 0:
            for col in columns:
                if cols:
                    cols += ', '
                else:
                    cols = '('
                cols += driver.qtIdent(conn, col)
            cols += ')'

    # Create the COPY FROM/TO from template
    query = render_template(
        'import_export/sql/cmd.sql',
        conn=conn,
        data=data,
        columns=cols,
        ignore_column_list=icols
    )

    # Unlike later revisions, connection parameters are passed on the
    # command line here rather than via environment variables.
    args = [
        '--host', server.host, '--port', str(server.port),
        '--username', server.username, '--dbname',
        driver.qtIdent(conn, data['database']),
        '--command', query
    ]

    try:
        p = BatchProcess(
            desc=Message(
                sid,
                data['schema'],
                data['table'],
                data['database'],
                storage_dir
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'success': 1}
    )