def _parse_robot_data(self, file_path, model):
    """Collect file details, keywords, variables and imports as a dict.

    Keys are ``DBJsonSetting`` attributes; import data is split into
    libraries, resources and variable files.
    """
    norm_path = normalise_path(file_path)
    keywords = self._get_keywords(model)
    variables = self._get_global_variables(model)
    libraries, resources, var_files = self._get_imports(
        model, path.dirname(norm_path), file_path)
    return {
        DBJsonSetting.file_name: path.basename(file_path),
        DBJsonSetting.file_path: norm_path,
        DBJsonSetting.keywords: keywords,
        DBJsonSetting.variables: variables,
        DBJsonSetting.resources: resources,
        DBJsonSetting.libraries: libraries,
        DBJsonSetting.variable_files: var_files,
    }
def _parse_robot_data(self, file_path, model):
    """Gather file info, keywords, variables and imports into one dict."""
    norm = normalise_path(file_path)
    result = {
        DBJsonSetting.file_name: path.basename(file_path),
        DBJsonSetting.file_path: norm,
        DBJsonSetting.keywords: self._get_keywords(model),
        DBJsonSetting.variables: self._get_global_variables(model),
    }
    # Imports are resolved relative to the directory of the parsed file.
    libraries, resources, var_files = self._get_imports(
        model, path.dirname(norm), file_path)
    result[DBJsonSetting.resources] = resources
    result[DBJsonSetting.libraries] = libraries
    result[DBJsonSetting.variable_files] = var_files
    return result
def scan(self, workspace, ext, db_path):
    """Scan and create the database

    ``workspace`` -- root folder where robot data is scanned.
    ``ext`` -- Extension for included files.
    ``db_path`` -- Directory where files are saved.

    Raises EnvironmentError when ``workspace`` does not exist or is not
    a directory.
    """
    if not os.path.exists(workspace):
        raise EnvironmentError('Workspace does not exist: {0}'.format(
            str(workspace)))
    # BUG FIX: the original tested os.path.dirname(workspace), which only
    # checks that the path string has a parent component -- it never
    # verifies the workspace is actually a folder. Use isdir instead.
    if not os.path.isdir(workspace):
        raise EnvironmentError('Workspace must be folder: {0}'.format(
            str(workspace)))
    # Always start from an empty database directory.
    if os.path.exists(db_path):
        shutil.rmtree(db_path)
    os.makedirs(db_path)
    self.add_builtin()
    if self.xml_libraries:
        self.add_xml_libraries(self.xml_libraries)
    for f in finder(workspace, ext):
        self.queue.add(normalise_path(f), None, None)
    # Drain the queue: parse each item, store it, and mark it done even
    # when parsing fails so the loop cannot stall on a bad file.
    while True:
        item = self.get_item()
        if not item:
            return
        logging.info('Creating table for: {0}'.format(item[0]))
        try:
            data = self.parse_all(item)
            self.add_to_queue(data)
            self.put_item_to_db(data, db_path)
        except ValueError:
            logging.warning('Error in: %s', item[0])
        finally:
            self.queue.set(item[0])
def create_view(self, new_view, view_db, index_db):
    """Changes the content of database/view_db/current_view.json

    ``new_view`` -- Path to the open tab in sublime.
    ``view_db`` -- Path to folder where current_view.json is.
    ``index_db`` -- Path in index database folder.

    When user changes between different robot framework data tabs, this
    function changes the context of the database/view_db/current_view.json.
    The current_view.json is used to provide the completions for the
    Sublime on_query_completions API call.
    """
    view_path = path.join(view_db, VIEW_FILE_NAME)
    new_view = normalise_path(new_view)
    index_table = path.join(
        index_db, 'index-{0}'.format(rf_table_name(new_view)))
    index_data = self.get_data(index_table)
    data = {
        DBJsonSetting.variable: index_data[DBJsonSetting.variable],
        VIEW_NAME: new_view,
        # MD5 of the normalised path identifies the view.
        VIEW_MD5: hashlib.md5(new_view.encode('utf-8')).hexdigest(),
        KW_COMPLETION: self.get_keyword_completions(index_data),
    }
    if not path.exists(path.dirname(view_path)):
        mkdir(path.dirname(view_path))
    # BUG FIX: use a context manager so the file handle is closed even if
    # json_dump raises; the original open()/close() pair leaked the handle
    # on error.
    with open(view_path, 'w') as f:
        json_dump(data, f, indent=4)
def scan(self, workspace, ext, db_path):
    """Scan and create the database

    ``workspace`` -- root folder where robot data is scanned.
    ``ext`` -- Extension for included files.
    ``db_path`` -- Directory where files are saved.

    Raises EnvironmentError when ``workspace`` is missing or not a folder.
    """
    if not os.path.exists(workspace):
        raise EnvironmentError(
            'Workspace does not exist: {0}'.format(str(workspace)))
    # BUG FIX: os.path.dirname(workspace) only tests that the path string
    # contains a directory component; it does not check that the workspace
    # itself is a folder. os.path.isdir performs the intended check.
    if not os.path.isdir(workspace):
        raise EnvironmentError(
            'Workspace must be folder: {0}'.format(str(workspace)))
    if os.path.exists(db_path):
        # Rebuild the database directory from scratch.
        shutil.rmtree(db_path)
    os.makedirs(db_path)
    self.add_builtin()
    if self.xml_libraries:
        self.add_xml_libraries(self.xml_libraries)
    for f in finder(workspace, ext):
        self.queue.add(normalise_path(f), None, None)
    while True:
        item = self.get_item()
        if not item:
            return
        logging.info('Creating table for: {0}'.format(item[0]))
        try:
            data = self.parse_all(item)
            self.add_to_queue(data)
            self.put_item_to_db(data, db_path)
        except ValueError:
            logging.warning('Error in: %s', item[0])
        finally:
            # Mark the item processed even on failure so the loop advances.
            self.queue.set(item[0])
def get_table_name_from_index(self, object_name, keyword):
    """Returns the keyword table name from the index table

    ``keyword`` -- Keyword documentation to search from database.

    ``object_name`` -- Library or resource object name.

    Returns a ``KwDetails`` namedtuple; ``table_name`` and ``kw`` are
    None when no matching keyword is found. ``kw_object_name`` holds the
    object name of the last index entry examined.
    """
    return_kw = None
    return_table_name = None
    kw_object_name = None
    KwDetails = collections.namedtuple(
        'KwDetails', ['table_name', 'kw', 'kw_object_name'])
    open_tab = normalise_path(self.open_tab)
    index_name = get_index_name(rf_table_name(open_tab))
    index_data = get_data_from_json(path.join(self.index_dir, index_name))
    # Index entries are positional: [0]=keyword, [2]=object name,
    # [3]=table name, [4]=object alias.
    for keyword_ in index_data[DBJsonSetting.keywords]:
        kw_canditate = keyword_[0]
        kw_object_name_alias = keyword_[4]
        kw_table_name = keyword_[3]
        kw_object_name = keyword_[2]
        if (object_name and object_name == kw_object_name_alias or
                object_name == kw_object_name):
            # Test kw if object names are equal
            if kw_equals_kw_candite(keyword, kw_canditate):
                return_kw = kw_canditate
                return_table_name = kw_table_name
                break
        elif not object_name:
            # No object qualifier: any entry may match the keyword.
            if kw_equals_kw_candite(keyword, kw_canditate):
                return_kw = kw_canditate
                return_table_name = kw_table_name
                break
    return KwDetails(table_name=return_table_name, kw=return_kw,
                     kw_object_name=kw_object_name)
def _format_variable_file(self, setting):
    """Map a variable-file import to ``{normalised path: argument dict}``."""
    full_path = path.join(path.dirname(self.file_path), setting.name)
    return {
        normalise_path(full_path): {
            'variable_file_arguments': setting.args,
        },
    }
def _format_variable_file(self, setting):
    """Build the {path: args} mapping for a variable-file import setting."""
    # Variable file paths are resolved relative to the current file.
    resolved = normalise_path(
        path.join(path.dirname(self.file_path), setting.name))
    arguments = {'variable_file_arguments': setting.args}
    return {resolved: arguments}
def _format_resource(self, setting, file_path):
    """Resolve a resource import to a path; absolute names pass through."""
    if path.isabs(setting.name):
        return setting.name
    resource_path = normalise_path(
        path.join(path.dirname(self.file_path), setting.name))
    if not path.isfile(resource_path):
        # Best-effort warning only; the unresolved path is still returned.
        print('Import failure on file: {0},'.format(file_path),
              'could not locate: {0}'.format(setting.name))
    return resource_path
def _format_resource(self, setting, file_path):
    """Return the path of a resource import, resolving relative names."""
    name = setting.name
    # Absolute paths are used as-is.
    if path.isabs(name):
        return name
    base_dir = path.dirname(self.file_path)
    resolved = normalise_path(path.join(base_dir, name))
    if not path.isfile(resolved):
        print('Import failure on file: {0},'.format(file_path),
              'could not locate: {0}'.format(name))
    return resolved
def is_in_index(self, view_path, index_db):
    """Return True when an index table for ``view_path`` exists in ``index_db``.

    Returns False when the index directory cannot be listed.
    """
    view_path_norm_path = normalise_path(view_path)
    index_table = 'index-{0:s}'.format(rf_table_name(view_path_norm_path))
    try:
        files = listdir(index_db)
    # BUG FIX: the original bare ``except:`` also swallowed
    # KeyboardInterrupt/SystemExit. listdir raises OSError for a missing
    # or unreadable directory, which is the case this guard is for.
    except OSError:
        return False
    return index_table in files
def _format_library(self, setting, file_dir):
    """Build a dict describing a library import: name, alias, args, path."""
    lib_name = setting.name
    if lib_name.endswith('.py'):
        if path.isfile(lib_name):
            # Name is already a usable file path.
            lib_path = normalise_path(lib_name)
            lib_name = path.basename(lib_name)
        else:
            # Resolve the .py file relative to the importing file's dir.
            lib_path = path.abspath(path.join(file_dir, lib_name))
            lib_name = path.basename(lib_path)
    else:
        # Module-style library: no file path recorded.
        lib_path = None
    return {
        DBJsonSetting.library_name: lib_name,
        DBJsonSetting.library_alias: setting.alias,
        DBJsonSetting.library_arguments: setting.args,
        DBJsonSetting.library_path: lib_path,
    }
def parse_variable_file(self, file_path, args=None):
    """Parse a Robot Framework variable file into a dict of its variables.

    ``args`` are passed through to the variable file; on a DataError the
    variable list is left empty.
    """
    data = {
        DBJsonSetting.file_name: path.basename(file_path),
        DBJsonSetting.file_path: normalise_path(file_path),
    }
    self.file_path = file_path
    setter = VariableFileSetter(self.rf_var_storage)
    try:
        variables = setter.set(file_path, args if args else [])
    except DataError:
        variables = []
    # Only the variable names (first element of each entry) are stored.
    data[DBJsonSetting.variables] = sorted(v[0] for v in variables)
    return data
def get_table_name_from_index(self, object_name, keyword):
    """Returns the keyword table name from the index table

    ``keyword`` -- Keyword documentation to search from database.

    ``object_name`` -- Library or resource object name.

    Returns a ``KwDetails`` namedtuple; fields are None when no match is
    found.
    """
    KwDetails = collections.namedtuple(
        'KwDetails', ['table_name', 'kw', 'kw_object_name'])
    found_kw = None
    found_table = None
    kw_object_name = None
    tab = normalise_path(self.open_tab)
    index_file = path.join(
        self.index_dir, get_index_name(rf_table_name(tab)))
    # Index entries are positional: [0]=keyword, [2]=object name,
    # [3]=table name, [4]=object alias.
    for entry in get_data_from_json(index_file)[DBJsonSetting.keywords]:
        candidate = entry[0]
        kw_object_name = entry[2]
        table_name = entry[3]
        alias = entry[4]
        object_ok = (
            not object_name or
            object_name == alias or
            object_name == kw_object_name)
        if object_ok and kw_equals_kw_candite(keyword, candidate):
            found_kw = candidate
            found_table = table_name
            break
    return KwDetails(table_name=found_table, kw=found_kw,
                     kw_object_name=kw_object_name)
def get_table_name_from_index(self, object_name, keyword):
    """Returns the keyword table name from the index table

    ``keyword`` -- Keyword documentation to search from database.

    ``object_name`` -- Library or resource object name.

    Returns None when no matching keyword is found.
    """
    index_name = get_index_name(
        rf_table_name(normalise_path(self.open_tab)))
    index_data = get_data_from_json(
        path.join(self.index_dir, index_name))
    for entry in index_data[DBJsonSetting.keyword]:
        candidate, entry_object, table_name = entry[0], entry[2], entry[3]
        if object_name and object_name != entry_object:
            continue  # object qualifier given but does not match
        if kw_equals_kw_candite(keyword, candidate):
            return table_name
def get_table_name_from_index(self, object_name, keyword):
    """Returns the keyword table name from the index table

    ``keyword`` -- Keyword documentation to search from database.

    ``object_name`` -- Library or resource object name.

    Implicitly returns None when nothing matches.
    """
    tab = normalise_path(self.open_tab)
    index_path = path.join(
        self.index_dir, get_index_name(rf_table_name(tab)))
    for entry in get_data_from_json(index_path)[DBJsonSetting.keyword]:
        candidate = entry[0]
        entry_object = entry[2]
        # Skip entries whose object does not match the given qualifier.
        if object_name and object_name != entry_object:
            continue
        if kw_equals_kw_candite(keyword, candidate):
            return entry[3]
def parse_library(self, library, args=None):
    """Parses RF library to dictionary

    Uses internally libdoc modules to parse the library. Possible
    arguments to the library are provided in the args parameter.

    Raises ValueError for an unknown library file type or when the
    library yields no keywords.
    """
    data = {DBJsonSetting.arguments: list(args) if args else []}
    if path.isfile(library):
        data[DBJsonSetting.file_path] = normalise_path(library)
        if library.endswith('.xml'):
            library_module, keywords = self._parse_xml_doc(library)
            data[DBJsonSetting.keywords] = keywords
            data[DBJsonSetting.library_module] = library_module
        elif library.endswith('.py'):
            file_name = path.basename(library)
            data[DBJsonSetting.file_name] = file_name
            data[DBJsonSetting.library_module] = path.splitext(file_name)[0]
            data[DBJsonSetting.keywords] = self._parse_python_lib(
                library, data[DBJsonSetting.arguments])
        else:
            raise ValueError('Unknown library')
    else:
        # Not a file on disk: treat the name as an importable module.
        data[DBJsonSetting.library_module] = library
        data[DBJsonSetting.keywords] = self._parse_python_lib(
            library, data[DBJsonSetting.arguments])
    if data[DBJsonSetting.keywords] is None:
        raise ValueError('Library did not contain keywords')
    data[DBJsonSetting.table_type] = DBJsonSetting.library
    return data
def parse_library(self, library, args=None):
    """Parses RF library to dictionary

    Uses internally libdoc modules to parse the library. Possible
    arguments to the library are provided in the args parameter.

    Raises ValueError for an unknown library file type or when the
    library yields no keywords.
    """
    data = {DBJsonSetting.arguments: [arg for arg in args] if args else []}
    if not path.isfile(library):
        # Not a file on disk: treat the name as an importable module.
        data[DBJsonSetting.library_module] = library
        data[DBJsonSetting.keywords] = self._parse_python_lib(
            library, data[DBJsonSetting.arguments])
    else:
        data[DBJsonSetting.file_path] = normalise_path(library)
        if library.endswith('.xml'):
            module_name, keywords = self._parse_xml_doc(library)
            data[DBJsonSetting.keywords] = keywords
            data[DBJsonSetting.library_module] = module_name
        elif library.endswith('.py'):
            base = path.basename(library)
            data[DBJsonSetting.file_name] = base
            data[DBJsonSetting.library_module] = path.splitext(base)[0]
            data[DBJsonSetting.keywords] = self._parse_python_lib(
                library, data[DBJsonSetting.arguments])
        else:
            raise ValueError('Unknown library')
    if data[DBJsonSetting.keywords] is None:
        raise ValueError('Library did not contain keywords')
    return data
def get_table_name_from_index(self, object_name, keyword):
    """Returns the keyword table name from the index table

    ``keyword`` -- Keyword documentation to search from database.

    ``object_name`` -- Library or resource object name.

    Returns None when no matching keyword is found.
    """
    open_tab = normalise_path(self.open_tab)
    index_name = get_index_name(rf_table_name(open_tab))
    index_data = get_data_from_json(
        path.join(self.index_dir, index_name))
    # BUG FIX: str.rstrip strips any trailing characters drawn from the
    # given SET, so rstrip('.' + self.rf_extension) mangled object names
    # ending in any of those characters (e.g. trailing 'r'/'o'/'b'/'t'),
    # not just the extension. Strip the extension only when the name
    # actually ends with it.
    # NOTE(review): a name that legitimately ends with the extension
    # string (e.g. 'com.company.object.robot') is still stripped; whether
    # that is ever desired cannot be determined from this code -- confirm.
    suffix = '.' + self.rf_extension
    for keyword_ in index_data[DBJsonSetting.keyword]:
        kw = keyword_[0]
        kw_object_name = keyword_[2]
        if kw_object_name.endswith(suffix):
            kw_object_name = kw_object_name[:-len(suffix)]
        kw_table_name = keyword_[3]
        if object_name and object_name == kw_object_name:
            if kw_equals_kw_candite(keyword, kw):
                return kw_table_name
        elif not object_name:
            if kw_equals_kw_candite(keyword, kw):
                return kw_table_name
def get_function_file(self, kw_class):
    """Return the normalised source file of ``kw_class``, or None.

    None is returned when inspect cannot find a source file or the
    reported file no longer exists on disk.
    """
    source = inspect.getsourcefile(kw_class)
    if not source or not path.exists(source):
        return None
    return normalise_path(source)