def test_string2date(self):
    """ Test the string2date function with different formats. """
    # Simple (date-only) format, explicitly requested.
    parsed = string2date("03-03-1999", complex_format=False)
    self.assertEqual(parsed, datetime.datetime(1999, 3, 3),
                     "Did not get expected datetime from conversion object.")
    # Complex format including fractional seconds.
    parsed = string2date("1999-03-16,18-20-33.1")
    self.assertEqual(parsed, datetime.datetime(1999, 3, 16, 18, 20, 33, 100000),
                     "Did not get expected datetime from conversion object.")
    # Complex format without fractional seconds.
    parsed = string2date("1999-03-16,18-20-33")
    self.assertEqual(parsed, datetime.datetime(1999, 3, 16, 18, 20, 33),
                     "Did not get expected datetime from conversion object.")
    # Caller-supplied custom format string.
    parsed = string2date("1999", date_format="%Y")
    self.assertEqual(parsed, datetime.datetime(1999, 1, 1),
                     "Did not get expected datetime from conversion object.")
def from_dict(self, dictionary):
    """Restore this burst entity's scalar fields from a plain dictionary."""
    src = dictionary
    self.name = src['name']
    self.status = src['status']
    self.error_message = src['error_message']
    # Timestamps are persisted as strings; parse them back into datetimes.
    self.start_time = string2date(src['start_time'])
    self.finish_time = string2date(src['finish_time'])
    # Counters may arrive as strings; normalise to int.
    self.workflows_number = int(src['workflows_number'])
    self.datatypes_number = int(src['datatypes_number'])
    self._simulator_configuration = src['_simulator_configuration']
def load_into(self, burst_config):
    """Populate *burst_config* from the datasets stored in this H5 file."""
    burst_config.name = self.name.load()
    burst_config.status = self.status.load()
    burst_config.error_message = self.error_message.load()
    # Dates are stored as strings inside the H5 file; parse them back.
    burst_config.start_time = string2date(self.start_time.load())
    burst_config.finish_time = string2date(self.finish_time.load())
    burst_config.simulator_gid = self.simulator.load().hex
    # The range datasets are optional: a missing dataset maps to None.
    for range_attr in ('range1', 'range2'):
        try:
            loaded = getattr(self, range_attr).load()
        except MissingDataSetException:
            loaded = None
        setattr(burst_config, range_attr, loaded)
def load(self, gid=None, fname=None):
    # type: (typing.Union[uuid.UUID, str], str) -> ViewModel
    """
    Load a ViewModel object by reading the H5 file with the given GID, from the directory self.base_dir.

    :param gid: GID used to locate the H5 file when fname is not given
    :param fname: file name relative to self.base_dir; takes precedence over gid
    :raises ValueError: when neither gid nor fname is provided
    """
    if fname is None:
        if gid is None:
            raise ValueError("Neither gid nor filename is provided to load!")
        fname = self.find_file_by_gid(gid)
    else:
        fname = os.path.join(self.base_dir, fname)
    view_model_class = H5File.determine_type(fname)
    view_model = view_model_class()
    with ViewModelH5(fname, view_model) as h5_file:
        h5_file.load_into(view_model)
        references = h5_file.gather_references()
        view_model.create_date = string2date(h5_file.create_date.load())
        view_model.generic_attributes = h5_file.load_generic_attributes()
        # Renamed loop variable so it no longer shadows the `gid` parameter.
        for trait_attr, ref_gid in references:
            if not ref_gid:
                continue
            # A reference may be a single gid or a list of gids; load recursively.
            if isinstance(ref_gid, list):
                loaded_ref = [self.load(sub_gid) for sub_gid in ref_gid]
            else:
                loaded_ref = self.load(ref_gid)
            setattr(view_model, trait_attr.field_name, loaded_ref)
    return view_model
def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
    """
    Given the name from the input tree, the dataType required and a number of filters,
    return the available dataType that satisfy the conditions imposed.
    """
    previous_tree = self.context.get_session_tree_for_key(tree_session_key)
    if previous_tree is None:
        base.set_error_message("Adapter Interface not in session for filtering!")
        raise cherrypy.HTTPRedirect("/tvb?error=True")
    current_node = self._get_node(previous_tree, name)
    if current_node is None:
        raise Exception("Could not find node :" + name)
    datatype = current_node[ABCAdapter.KEY_DATATYPE]
    filters = json.loads(filters)
    availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
    for i, filter_ in enumerate(filters[FILTER_FIELDS]):
        # Check for filter input of type 'date' as these need to be converted
        if filter_ in availablefilter and availablefilter[filter_][FILTER_TYPE] == 'date':
            try:
                # BUG FIX: convert the filter VALUE, not the field name.
                # Previously `filter_` (the field name) was passed to string2date,
                # so the date value was never converted.
                temp_date = string2date(filters[FILTER_VALUES][i], False)
                filters[FILTER_VALUES][i] = temp_date
            except ValueError:
                # Re-raise unchanged so the caller reports the malformed date.
                raise
def _deserialize_value(value):
    """
    This method takes value loaded from H5 file and transform it to TVB data.

    :param value: the value that was read from the H5 file
    :returns: a TVB specific deserialized value of the input

    NOTE: this method was a part of TVB 1.0 hdf5storage manager, but since this
    script needs to be independent of current storage manager, we duplicate it here.
    """
    if value is None:
        return value
    # numpy byte strings become plain str; empty ones become None.
    if isinstance(value, numpy.string_):
        value = str(value) if len(value) else None
    if isinstance(value, str):
        # Prefixed strings encode bool / datetime payloads.
        if value.startswith(BOOL_VALUE_PREFIX):
            return string2bool(value[len(BOOL_VALUE_PREFIX):])
        if value.startswith(DATETIME_VALUE_PREFIX):
            return string2date(value[len(DATETIME_VALUE_PREFIX):], date_format=DATE_TIME_FORMAT)
    return value
def load_view_model_from_file(filepath):
    # type: (str) -> ViewModel
    """
    Load a ViewModel object by reading the H5 file specified by filepath.

    :param filepath: absolute path of the H5 file to read
    :returns: the ViewModel with all scalar fields and references populated
    """
    base_dir = os.path.dirname(filepath)
    view_model_class = H5File.determine_type(filepath)
    view_model = view_model_class()
    with ViewModelH5(filepath, view_model) as h5_file:
        h5_file.load_into(view_model)
        references = h5_file.gather_references()
        view_model.create_date = string2date(h5_file.create_date.load())
        view_model.generic_attributes = h5_file.load_generic_attributes()
        for trait_attr, gid in references:
            if not gid:
                continue
            # A reference may be scalar or list-valued; recurse accordingly.
            # (Replaced unused enumerate/append loop with a comprehension.)
            if isinstance(gid, list):
                loaded_ref = [load_view_model(sub_gid, base_dir) for sub_gid in gid]
            else:
                loaded_ref = load_view_model(gid, base_dir)
            setattr(view_model, trait_attr.field_name, loaded_ref)
    return view_model
def get_burst_for_migration(burst_id, burst_match_dict, date_format, selected_db):
    """
    This method is supposed to only be used when migrating from version 4 to version 5.
    It finds a BurstConfig in the old format (when it did not inherit from HasTraitsIndex),
    deletes it and returns its parameters.

    :param burst_id: id (as a string) of the row in the old BURST_CONFIGURATION table
    :param burst_match_dict: maps already-migrated old burst ids to new burst ids
    :param date_format: format used to parse start/finish time strings (sqlite only)
    :param selected_db: db flavour; 'sqlite' triggers string-to-datetime parsing
    :returns: tuple (BurstConfiguration or None, new_burst flag)

    NOTE(review): the docstring says the old entity is deleted, but no delete is
    visible in this excerpt -- confirm against the full migration script.
    """
    session = SA_SESSIONMAKER()
    # NOTE(review): burst_id is concatenated directly into the SQL text. This is
    # SQL-injection-prone in general; acceptable only because this migration
    # script controls the input -- do not reuse this pattern with untrusted data.
    burst_params = session.execute("""SELECT * FROM "BURST_CONFIGURATION" WHERE id = """ + burst_id).fetchone()
    session.close()
    if burst_params is None:
        return None, False
    # Copy the row into a plain dict; note the old columns 'range1'/'range2'
    # are exposed under the new keys 'range_1'/'range_2'.
    burst_params_dict = {'datatypes_number': burst_params['datatypes_number'],
                         'dynamic_ids': burst_params['dynamic_ids'],
                         'range_1': burst_params['range1'],
                         'range_2': burst_params['range2'],
                         'fk_project': burst_params['fk_project'],
                         'name': burst_params['name'],
                         'status': burst_params['status'],
                         'error_message': burst_params['error_message'],
                         'start_time': burst_params['start_time'],
                         'finish_time': burst_params['finish_time'],
                         'fk_simulation': burst_params['fk_simulation'],
                         'fk_operation_group': burst_params['fk_operation_group'],
                         'fk_metric_operation_group': burst_params['fk_metric_operation_group']}
    if selected_db == 'sqlite':
        # sqlite hands dates back as plain strings; parse them into datetimes.
        burst_params_dict['start_time'] = string2date(burst_params_dict['start_time'], date_format=date_format)
        burst_params_dict['finish_time'] = string2date(burst_params_dict['finish_time'], date_format=date_format)
    if burst_id not in burst_match_dict:
        # First time this burst is seen: build a fresh BurstConfiguration from the row.
        burst_config = BurstConfiguration(burst_params_dict['fk_project'])
        burst_config.datatypes_number = burst_params_dict['datatypes_number']
        burst_config.dynamic_ids = burst_params_dict['dynamic_ids']
        burst_config.error_message = burst_params_dict['error_message']
        burst_config.finish_time = burst_params_dict['finish_time']
        burst_config.fk_metric_operation_group = burst_params_dict['fk_metric_operation_group']
        burst_config.fk_operation_group = burst_params_dict['fk_operation_group']
        burst_config.fk_project = burst_params_dict['fk_project']
        burst_config.fk_simulation = burst_params_dict['fk_simulation']
        burst_config.name = burst_params_dict['name']
        burst_config.range1 = burst_params_dict['range_1']
        burst_config.range2 = burst_params_dict['range_2']
        burst_config.start_time = burst_params_dict['start_time']
        burst_config.status = burst_params_dict['status']
        new_burst = True
    else:
        # Burst was migrated earlier in this run: reuse the stored entity.
        burst_config = dao.get_burst_by_id(burst_match_dict[burst_id])
        new_burst = False
    return burst_config, new_burst
def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
    """
    Given the name from the input tree, the dataType required and a number of
    filters, return the available dataType that satisfy the conditions imposed.

    :param name: input-tree node name whose options are being filtered
    :param parent_div: id of the DOM container the rendered row belongs to
    :param tree_session_key: session key under which the adapter tree is stored
    :param filters: JSON-encoded dict with FILTER_FIELDS/OPERATIONS/VALUES lists
    :returns: template specification dict for re-rendering the input row
    """
    previous_tree = self.context.get_session_tree_for_key(tree_session_key)
    if previous_tree is None:
        common.set_error_message("Adapter Interface not in session for filtering!")
        raise cherrypy.HTTPRedirect("/tvb?error=True")
    current_node = self._get_node(previous_tree, name)
    if current_node is None:
        raise Exception("Could not find node :" + name)
    datatype = current_node[ABCAdapter.KEY_DATATYPE]
    filters = json.loads(filters)
    availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
    for i, filter_ in enumerate(filters[FILTER_FIELDS]):
        #Check for filter input of type 'date' as these need to be converted
        if filter_ in availablefilter and availablefilter[filter_][FILTER_TYPE] == 'date':
            try:
                temp_date = string2date(filters[FILTER_VALUES][i], False)
                filters[FILTER_VALUES][i] = temp_date
            except ValueError:
                # Malformed date string: propagate so the UI reports the error.
                raise
    #In order for the filter object not to "stack up" on multiple calls to
    #this method, create a deepCopy to work with
    if ABCAdapter.KEY_CONDITION in current_node:
        new_filter = copy.deepcopy(current_node[ABCAdapter.KEY_CONDITION])
    else:
        new_filter = FilterChain()
    new_filter.fields.extend(filters[FILTER_FIELDS])
    new_filter.operations.extend(filters[FILTER_OPERATIONS])
    new_filter.values.extend(filters[FILTER_VALUES])
    #Get dataTypes that match the filters from DB then populate with values
    values, total_count = InputTreeManager().populate_option_values_for_dtype(
        common.get_current_project().id, datatype, new_filter, self.context.get_current_step())
    #Create a dictionary that matches what the template expects
    parameters = {ABCAdapter.KEY_NAME: name,
                  ABCAdapter.KEY_FILTERABLE: availablefilter,
                  ABCAdapter.KEY_TYPE: ABCAdapter.TYPE_SELECT,
                  ABCAdapter.KEY_OPTIONS: values,
                  ABCAdapter.KEY_DATATYPE: datatype}
    if total_count > MAXIMUM_DATA_TYPES_DISPLAYED:
        # Too many matches to display; warn the user instead of listing all.
        parameters[KEY_WARNING] = WARNING_OVERFLOW
    if ABCAdapter.KEY_REQUIRED in current_node:
        parameters[ABCAdapter.KEY_REQUIRED] = current_node[ABCAdapter.KEY_REQUIRED]
        if len(values) > 0 and string2bool(str(parameters[ABCAdapter.KEY_REQUIRED])):
            # Required field with options: default to the newest (last) entry.
            parameters[ABCAdapter.KEY_DEFAULT] = str(values[-1][ABCAdapter.KEY_VALUE])
    previous_selected = self.context.get_current_default(name)
    if previous_selected in [str(vv['value']) for vv in values]:
        # Keep the user's previous selection when it survived the filtering.
        parameters[ABCAdapter.KEY_DEFAULT] = previous_selected
    template_specification = {"inputRow": parameters, "disabled": False,
                              "parentDivId": parent_div, common.KEY_SESSION_TREE: tree_session_key}
    return self.fill_default_attributes(template_specification)
def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
    """
    Given the name from the input tree, the dataType required and a number of
    filters, return the available dataType that satisfy the conditions imposed.

    :param name: input-tree node name whose options should be filtered
    :param parent_div: DOM container id for the rendered input row
    :param tree_session_key: session key where the adapter tree is cached
    :param filters: JSON string holding FILTER_FIELDS/OPERATIONS/VALUES lists
    :returns: template specification dict used to re-render the input row
    """
    previous_tree = self.context.get_session_tree_for_key(tree_session_key)
    if previous_tree is None:
        common.set_error_message("Adapter Interface not in session for filtering!")
        raise cherrypy.HTTPRedirect("/tvb?error=True")
    current_node = self._get_node(previous_tree, name)
    if current_node is None:
        raise Exception("Could not find node :" + name)
    datatype = current_node[ABCAdapter.KEY_DATATYPE]
    filters = json.loads(filters)
    availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
    for i, filter_ in enumerate(filters[FILTER_FIELDS]):
        # Check for filter input of type 'date' as these need to be converted
        if filter_ in availablefilter and availablefilter[filter_][FILTER_TYPE] == 'date':
            try:
                temp_date = string2date(filters[FILTER_VALUES][i], False)
                filters[FILTER_VALUES][i] = temp_date
            except ValueError:
                # Bad date input: let the caller surface the error.
                raise
    # In order for the filter object not to "stack up" on multiple calls to
    # this method, create a deepCopy to work with
    if ABCAdapter.KEY_CONDITION in current_node:
        new_filter = copy.deepcopy(current_node[ABCAdapter.KEY_CONDITION])
    else:
        new_filter = FilterChain()
    new_filter.fields.extend(filters[FILTER_FIELDS])
    new_filter.operations.extend(filters[FILTER_OPERATIONS])
    new_filter.values.extend(filters[FILTER_VALUES])
    # Get dataTypes that match the filters from DB then populate with values
    values, total_count = InputTreeManager().populate_option_values_for_dtype(
        common.get_current_project().id, datatype, new_filter, self.context.get_current_step())
    # Create a dictionary that matches what the template expects
    parameters = {ABCAdapter.KEY_NAME: name,
                  ABCAdapter.KEY_FILTERABLE: availablefilter,
                  ABCAdapter.KEY_TYPE: ABCAdapter.TYPE_SELECT,
                  ABCAdapter.KEY_OPTIONS: values,
                  ABCAdapter.KEY_DATATYPE: datatype}
    if total_count > MAXIMUM_DATA_TYPES_DISPLAYED:
        # More matches than we are willing to render; show a warning instead.
        parameters[KEY_WARNING] = WARNING_OVERFLOW
    if ABCAdapter.KEY_REQUIRED in current_node:
        parameters[ABCAdapter.KEY_REQUIRED] = current_node[ABCAdapter.KEY_REQUIRED]
        if len(values) > 0 and string2bool(str(parameters[ABCAdapter.KEY_REQUIRED])):
            # Required field with available options: preselect the last one.
            parameters[ABCAdapter.KEY_DEFAULT] = str(values[-1][ABCAdapter.KEY_VALUE])
    previous_selected = self.context.get_current_default(name)
    if previous_selected in [str(vv['value']) for vv in values]:
        # Re-apply the user's earlier choice when it is still available.
        parameters[ABCAdapter.KEY_DEFAULT] = previous_selected
    template_specification = {"inputRow": parameters, "disabled": False,
                              "parentDivId": parent_div, common.KEY_SESSION_TREE: tree_session_key}
    return self.fill_default_attributes(template_specification)
def load_into(self, burst_config):
    # type (BurstConfiguration) -> None
    """Fill *burst_config* with the values persisted in this H5 file."""
    burst_config.gid = self.gid.load().hex
    burst_config.name = self.name.load()
    burst_config.status = self.status.load()
    burst_config.error_message = self.error_message.load()
    burst_config.start_time = string2date(self.start_time.load())
    # finish_time may be empty (burst still running) or the literal 'None' string.
    raw_finish = self.finish_time.load()
    if raw_finish and raw_finish != 'None':
        burst_config.finish_time = string2date(raw_finish)
    burst_config.simulator_gid = self.simulator.load().hex
    # Range datasets are optional; missing datasets map to None.
    for range_attr in ('range1', 'range2'):
        try:
            loaded = getattr(self, range_attr).load()
        except MissingDataSetException:
            loaded = None
        setattr(burst_config, range_attr, loaded)
def test_string2date(self):
    """ Test the string2date function with different formats. """
    # Table of (args, kwargs, expected) cases exercising every supported format.
    cases = [
        (("03-03-1999",), {'complex_format': False}, datetime.datetime(1999, 3, 3)),
        (("1999-03-16,18-20-33.1",), {}, datetime.datetime(1999, 3, 16, 18, 20, 33, 100000)),
        (("1999-03-16,18-20-33",), {}, datetime.datetime(1999, 3, 16, 18, 20, 33)),
        (("1999",), {'date_format': "%Y"}, datetime.datetime(1999, 1, 1)),
    ]
    for args, kwargs, expected in cases:
        converted = string2date(*args, **kwargs)
        self.assertEqual(converted, expected,
                         "Did not get expected datetime from conversion object.")
def load_generic_attributes(self):
    # type: () -> GenericAttributes
    """Read the generic attribute datasets from this file into self.generic_attributes."""
    # These fields are copied one-to-one from same-named datasets on self.
    for field in ('invalid', 'is_nan', 'subject', 'state', 'type',
                  'user_tag_1', 'user_tag_2', 'user_tag_3', 'user_tag_4',
                  'user_tag_5', 'visible'):
        setattr(self.generic_attributes, field, getattr(self, field).load())
    # create_date is stored as a string; a falsy parse result becomes None.
    self.generic_attributes.create_date = string2date(str(self.create_date.load())) or None
    return self.generic_attributes
def _load(self, file, view_model):
    """
    Populate *view_model* from *file* and recursively load referenced view models.

    :param file: an open H5 file wrapper exposing load_into/gather_references
    :param view_model: the target ViewModel instance, mutated in place
    """
    file.load_into(view_model)
    references = file.gather_references()
    view_model.create_date = string2date(file.create_date.load())
    view_model.generic_attributes = file.load_generic_attributes()
    for trait_attr, gid in references:
        if not gid:
            continue
        # A reference may be a single gid or a list of gids; load recursively.
        # (Replaced unused enumerate/append loop with a comprehension.)
        if isinstance(gid, list):
            loaded_ref = [self.load(sub_gid) for sub_gid in gid]
        else:
            loaded_ref = self.load(gid)
        setattr(view_model, trait_attr.field_name, loaded_ref)
def _deserialize_value(self, value):
    """
    This method takes value loaded from H5 file and transform it to TVB data.
    """
    if value is None:
        return value
    # numpy byte strings become plain str; empty ones are treated as None.
    if isinstance(value, numpy.string_):
        value = str(value) if len(value) else None
    if isinstance(value, str):
        # Prefixed strings encode bool / datetime payloads.
        if value.startswith(self.BOOL_VALUE_PREFIX):
            return utils.string2bool(value[len(self.BOOL_VALUE_PREFIX):])
        if value.startswith(self.DATETIME_VALUE_PREFIX):
            return utils.string2date(value[len(self.DATETIME_VALUE_PREFIX):],
                                     date_format=self.DATE_TIME_FORMAT)
    return value
def load_generic_attributes(self):
    # type: () -> GenericAttributes
    """Read generic attributes from this file; optional datasets fall back to None."""
    ga = self.generic_attributes
    # Fields copied one-to-one from same-named datasets on self.
    # (Note: this variant intentionally has no 'type' field.)
    for field in ('invalid', 'is_nan', 'subject', 'state',
                  'user_tag_1', 'user_tag_2', 'user_tag_3', 'user_tag_4',
                  'user_tag_5', 'visible'):
        setattr(ga, field, getattr(self, field).load())
    # create_date is stored as a string; a falsy parse result becomes None.
    ga.create_date = string2date(str(self.create_date.load())) or None
    # operation_tag dataset may be missing in older files.
    try:
        ga.operation_tag = self.operation_tag.load()
    except MissingDataSetException:
        ga.operation_tag = None
    # parent_burst dataset may also be missing; store its hex gid when present.
    try:
        burst = self.parent_burst.load()
        ga.parent_burst = burst.hex if burst is not None else None
    except MissingDataSetException:
        ga.parent_burst = None
    return ga
def getfiltereddatatypes(self, name, parent_div, tree_session_key, filters):
    """
    Given the name from the input tree, the dataType required and a number of filters,
    return the available dataType that satisfy the conditions imposed.
    """
    previous_tree = self.context.get_session_tree_for_key(tree_session_key)
    if previous_tree is None:
        base.set_error_message("Adapter Interface not in session for filtering!")
        raise cherrypy.HTTPRedirect("/tvb?error=True")
    current_node = self._get_node(previous_tree, name)
    if current_node is None:
        raise Exception("Could not find node :" + name)
    datatype = current_node[ABCAdapter.KEY_DATATYPE]
    filters = json.loads(filters)
    availablefilter = json.loads(FilterChain.get_filters_for_type(datatype))
    for i, filter_ in enumerate(filters[FILTER_FIELDS]):
        #Check for filter input of type 'date' as these need to be converted
        if filter_ in availablefilter and availablefilter[filter_][FILTER_TYPE] == 'date':
            try:
                # BUG FIX: convert the filter VALUE, not the field name.
                # The previous code passed `filter_` (the field name) to
                # string2date, so date values were never actually converted.
                temp_date = string2date(filters[FILTER_VALUES][i], False)
                filters[FILTER_VALUES][i] = temp_date
            except ValueError:
                # Re-raise unchanged so the caller reports the malformed date.
                raise
def from_dict(self, dictionary, dao, user_id=None, project_gid=None):
    """
    Add specific attributes from a input dictionary.

    :param dictionary: serialized Operation attributes (values mostly strings)
    :param dao: data-access object used to resolve users/projects/algorithms
    :param user_id: optional launching user id; defaults to the System account
    :param project_gid: optional parent project gid; overrides the dictionary
    :returns: self, populated
    """
    # If user id was specified try to load it, otherwise use System account
    user = dao.get_system_user() if user_id is None else dao.get_user_by_id(user_id)
    self.fk_launched_by = user.id
    # Find parent Project
    prj_to_load = project_gid if project_gid is not None else dictionary['fk_launched_in']
    parent_project = dao.get_project_by_gid(prj_to_load)
    self.fk_launched_in = parent_project.id
    self.project = parent_project
    # Find parent Algorithm
    source_algorithm = json.loads(dictionary['fk_from_algo'])
    algorithm = dao.get_algorithm_by_module(source_algorithm['module'], source_algorithm['classname'])
    if algorithm:
        self.algorithm = algorithm
        self.fk_from_algo = algorithm.id
    else:
        # The algorithm that produced this operation no longer exists most likely due to
        # exported operation from different version. Fallback to tvb importer.
        LOG.warning("Algorithm group %s was not found in DB. Most likely cause is that archive was exported "
                    "from a different TVB version. Using fallback TVB_Importer as source of "
                    "this operation." % (source_algorithm['module'],))
        algorithm = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
        self.fk_from_algo = algorithm.id
        # Preserve the original adapter/parameters in additional_info for traceability.
        dictionary['additional_info'] = ("The original parameters for this operation were: \nAdapter: %s "
                                         "\nParameters %s" % (source_algorithm['module'] + '.'
                                                              + source_algorithm['classname'],
                                                              dictionary['parameters']))
    # Find OperationGroup, if any
    if 'fk_operation_group' in dictionary:
        group_dict = json.loads(dictionary['fk_operation_group'])
        op_group = None
        if group_dict:
            op_group = dao.get_operationgroup_by_gid(group_dict['gid'])
            if not op_group:
                # Group not in DB yet: recreate it from the serialized fields.
                name = group_dict['name']
                ranges = [group_dict['range1'], group_dict['range2'], group_dict['range3']]
                gid = group_dict['gid']
                op_group = OperationGroup(self.fk_launched_in, name, ranges)
                op_group.gid = gid
                op_group = dao.store_entity(op_group)
        # NOTE(review): if group_dict is empty op_group stays None and
        # op_group.id below would raise -- presumably the key is only present
        # with a non-empty payload; confirm against the serializer.
        self.operation_group = op_group
        self.fk_operation_group = op_group.id
    else:
        self.operation_group = None
        self.fk_operation_group = None
    self.parameters = dictionary['parameters']
    self.meta_data = dictionary['meta_data']
    self.create_date = string2date(dictionary['create_date'])
    # start/completion dates are serialized as the literal string "None" when absent.
    if dictionary['start_date'] != "None":
        self.start_date = string2date(dictionary['start_date'])
    if dictionary['completion_date'] != "None":
        self.completion_date = string2date(dictionary['completion_date'])
    self.status = self._parse_status(dictionary['status'])
    self.visible = string2bool(dictionary['visible'])
    self.range_values = dictionary['range_values']
    self.user_group = dictionary['user_group']
    self.additional_info = dictionary['additional_info']
    self.gid = dictionary['gid']
    return self
def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
    """
    Return a Tuple with Project entity and Operations for current Project.
    :param project_id: Current Project Identifier
    :param applied_filters: Filters to apply on Operations
    :param current_page: Number for current page in operations

    NOTE(review): this excerpt is Python 2 code (`unicode`, `except X, e` syntax)
    and appears truncated -- the outer `try` opened inside the row loop has no
    matching `except`/`return` within this chunk.
    """
    selected_project = self.find_project(project_id)
    total_filtered = self.count_filtered_operations(project_id, applied_filters)
    start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
    # end_idx is a COUNT of rows to fetch, not an absolute index.
    if total_filtered >= start_idx + OPERATIONS_PAGE_SIZE:
        end_idx = OPERATIONS_PAGE_SIZE
    else:
        end_idx = total_filtered - start_idx
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    total_ops_nr = self.count_filtered_operations(project_id)
    current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, end_idx)
    started_ops = 0
    if current_ops is None:
        return selected_project, [], 0
    operations = []
    for one_op in current_ops:
        try:
            result = {}
            # Composite id when the row aggregates a range of operations.
            if one_op[0] != one_op[1]:
                result["id"] = str(one_op[0]) + "-" + str(one_op[1])
            else:
                result["id"] = str(one_op[0])
            burst = dao.get_burst_for_operation_id(one_op[0])
            result["burst_name"] = burst.name if burst else '-'
            result["count"] = one_op[2]
            result["gid"] = one_op[14]
            # one_op[3] is the operation-group id, when this row is a group.
            if one_op[3] is not None and one_op[3]:
                try:
                    operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                    result["group"] = operation_group.name
                    result["group"] = result["group"].replace("_", " ")
                    result["operation_group_id"] = operation_group.id
                    datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                    # NOTE(review): datatype_group.id is passed where a datatype id is
                    # expected -- looks suspicious; confirm dao contract.
                    datatype = dao.get_datatype_by_id(datatype_group.id)
                    result["datatype_group_gid"] = datatype.gid
                    result["gid"] = operation_group.gid
                    # Exclude every algorithm category except the visualisers.
                    all_categs = dao.get_algorithm_categories()
                    view_categ = dao.get_visualisers_categories()[0]
                    excludes = [categ.id for categ in all_categs if categ.id != view_categ.id]
                    algo = self.retrieve_launchers("DataTypeGroup", datatype.gid,
                                                   exclude_categories=excludes).values()[0]
                    view_groups = []
                    for algo in algo.values():
                        url = '/flow/' + str(algo['category']) + '/' + str(algo['id'])
                        if algo['part_of_group']:
                            url = '/flow/prepare_group_launch/' + datatype.gid + '/' + \
                                  str(algo['category']) + '/' + str(algo['id'])
                        view_groups.append(dict(name=algo["displayName"], url=url,
                                                param_name=algo['children'][0]['param_name'],
                                                part_of_group=algo['part_of_group']))
                    result["view_groups"] = view_groups
                except Exception, excep:
                    self.logger.error(excep)
                    self.logger.warning("Will ignore group on entity:" + str(one_op))
                    result["datatype_group_gid"] = None
            else:
                result['group'] = None
                result['datatype_group_gid'] = None
            result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
            result["method"] = one_op[5]
            result["user"] = dao.get_user_by_id(one_op[6])
            # Date columns may come back as strings (e.g. sqlite) or datetimes.
            if type(one_op[7]) in (str, unicode):
                result["create"] = string2date(str(one_op[7]))
            else:
                result["create"] = one_op[7]
            if type(one_op[8]) in (str, unicode):
                result["start"] = string2date(str(one_op[8]))
            else:
                result["start"] = one_op[8]
            if type(one_op[9]) in (str, unicode):
                result["complete"] = string2date(str(one_op[9]))
            else:
                result["complete"] = one_op[9]
            if result["complete"] is not None and result["start"] is not None:
                result["duration"] = timedelta2string(result["complete"] - result["start"])
            result["status"] = one_op[10]
            if result["status"] == model.STATUS_STARTED:
                started_ops += 1
            result["additional"] = one_op[11]
            result["visible"] = True if one_op[12] > 0 else False
            result['operation_tag'] = one_op[13]
            result['figures'] = None
            if not result['group']:
                datatype_results = dao.get_results_for_operation(result['id'])
                result['results'] = [dao.get_generic_entity(dt.module + '.' + dt.type, dt.gid, 'gid')[0]
                                     for dt in datatype_results]
                operation_figures = dao.get_figures_for_operation(result['id'])
                # Compute the full path to the figure / image on disk
                for figure in operation_figures:
                    figures_folder = self.structure_helper.get_images_folder(figure.project.name, figure.operation.id)
                    figure_full_path = os.path.join(figures_folder, figure.file_path)
                    # Compute the path available from browser
                    figure.figure_path = utils.path2url_part(figure_full_path)
                result['figures'] = operation_figures
            else:
                result['results'] = None
            operations.append(result)
def test_string2date_invalid(self):
    """ Check that a ValueError is raised in case some invalid date is passed. """
    not_a_date = "somethinginvalid"
    with pytest.raises(ValueError):
        string2date(not_a_date)
def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
    """
    Return a Tuple with Project entity and Operations for current Project.
    :param project_id: Current Project Identifier
    :param applied_filters: Filters to apply on Operations
    :param current_page: Number for current page in operations

    NOTE(review): still Python 2 code (`unicode`, `.values()[0]` indexing).
    Returns (project, total_ops_nr, operations, pages_no).
    """
    selected_project = self.find_project(project_id)
    total_filtered = self.count_filtered_operations(project_id, applied_filters)
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    total_ops_nr = self.count_filtered_operations(project_id)
    start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
    current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
    if current_ops is None:
        return selected_project, 0, [], 0
    operations = []
    view_categ_id = dao.get_visualisers_categories()[0].id
    for one_op in current_ops:
        try:
            result = {}
            # Composite id when this row aggregates a range of operations.
            if one_op[0] != one_op[1]:
                result["id"] = str(one_op[0]) + "-" + str(one_op[1])
            else:
                result["id"] = str(one_op[0])
            burst = dao.get_burst_for_operation_id(one_op[0])
            result["burst_name"] = burst.name if burst else '-'
            result["count"] = one_op[2]
            result["gid"] = one_op[14]
            # one_op[3] is the operation-group id, when this row is a group.
            if one_op[3] is not None and one_op[3]:
                try:
                    operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                    result["group"] = operation_group.name
                    result["group"] = result["group"].replace("_", " ")
                    result["operation_group_id"] = operation_group.id
                    datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                    result["datatype_group_gid"] = datatype_group.gid
                    result["gid"] = operation_group.gid
                    ## Filter only viewers for current DataTypeGroup entity:
                    launcher = self.retrieve_launchers(datatype_group.gid,
                                                       include_categories=[view_categ_id]).values()[0]
                    view_groups = []
                    for launcher in launcher.values():
                        url = '/flow/' + str(launcher['category']) + '/' + str(launcher['id'])
                        if launcher['part_of_group']:
                            url = '/flow/prepare_group_launch/' + datatype_group.gid + '/' + \
                                  str(launcher['category']) + '/' + str(launcher['id'])
                        view_groups.append(dict(name=launcher["displayName"], url=url,
                                                param_name=launcher['children'][0]['param_name'],
                                                part_of_group=launcher['part_of_group']))
                    result["view_groups"] = view_groups
                except Exception:
                    # Group metadata could not be resolved; degrade gracefully.
                    self.logger.exception("We will ignore group on entity:" + str(one_op))
                    result["datatype_group_gid"] = None
            else:
                result['group'] = None
                result['datatype_group_gid'] = None
            result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
            result["method"] = one_op[5]
            result["user"] = dao.get_user_by_id(one_op[6])
            # Date columns may come back as strings (e.g. sqlite) or datetimes.
            if type(one_op[7]) in (str, unicode):
                result["create"] = string2date(str(one_op[7]))
            else:
                result["create"] = one_op[7]
            if type(one_op[8]) in (str, unicode):
                result["start"] = string2date(str(one_op[8]))
            else:
                result["start"] = one_op[8]
            if type(one_op[9]) in (str, unicode):
                result["complete"] = string2date(str(one_op[9]))
            else:
                result["complete"] = one_op[9]
            if result["complete"] is not None and result["start"] is not None:
                result["duration"] = format_timedelta(result["complete"] - result["start"])
            result["status"] = one_op[10]
            result["additional"] = one_op[11]
            result["visible"] = True if one_op[12] > 0 else False
            result['operation_tag'] = one_op[13]
            result['figures'] = None
            if not result['group']:
                datatype_results = dao.get_results_for_operation(result['id'])
                result['results'] = [dao.get_generic_entity(dt.module + '.' + dt.type, dt.gid, 'gid')[0]
                                     for dt in datatype_results]
                operation_figures = dao.get_figures_for_operation(result['id'])
                # Compute the full path to the figure / image on disk
                for figure in operation_figures:
                    figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                    figure_full_path = os.path.join(figures_folder, figure.file_path)
                    # Compute the path available from browser
                    figure.figure_path = utils.path2url_part(figure_full_path)
                result['figures'] = operation_figures
            else:
                result['results'] = None
            operations.append(result)
        except Exception:
            ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
            self.logger.exception("Could not prepare operation for display:" + str(one_op))
    return selected_project, total_ops_nr, operations, pages_no
def _get_create_date_for_sorting(h5_file):
    """Extract the creation date stored in *h5_file* metadata, usable as a sort key."""
    # Metadata is stored as bytes; decode before parsing.
    raw_value = H5File.get_metadata_param(h5_file, 'Create_date')
    decoded = str(raw_value, 'utf-8')
    return string2date(decoded, date_format='datetime:%Y-%m-%d %H:%M:%S.%f')
def from_dict(self, dictionary, dao, user_id=None, project_gid=None):
    """
    Add specific attributes from an input dictionary (deserialize an exported Operation).

    :param dictionary: dict with the serialized operation fields
    :param dao: data-access object used to resolve users, projects, algorithms and groups
    :param user_id: optional launcher user id; the System account is used when None
    :param project_gid: optional parent project gid; dictionary['fk_launched_in'] is used when None
    :returns: self, populated from the dictionary
    """
    # If user id was specified try to load it, otherwise use System account
    user = dao.get_system_user() if user_id is None else dao.get_user_by_id(user_id)
    self.fk_launched_by = user.id

    # Find parent Project
    prj_to_load = project_gid if project_gid is not None else dictionary['fk_launched_in']
    parent_project = dao.get_project_by_gid(prj_to_load)
    self.fk_launched_in = parent_project.id
    self.project = parent_project

    # Find parent Algorithm
    source_algorithm = json.loads(dictionary['fk_from_algo'])
    algorithm = dao.get_algorithm_by_module(source_algorithm['module'], source_algorithm['classname'])
    if algorithm:
        self.algorithm = algorithm
        self.fk_from_algo = algorithm.id
    else:
        # The algorithm that produced this operation no longer exists most likely due to
        # exported operation from different version. Fallback to tvb importer.
        LOG.warning("Algorithm group %s was not found in DB. Most likely cause is that archive was exported "
                    "from a different TVB version. Using fallback TVB_Importer as source of "
                    "this operation." % (source_algorithm['module'],))
        algorithm = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
        self.fk_from_algo = algorithm.id
        # Preserve the original adapter/parameters in additional_info, since the real algorithm is gone
        dictionary['additional_info'] = ("The original parameters for this operation were: \nAdapter: %s "
                                         "\nParameters %s" % (source_algorithm['module'] + '.'
                                                              + source_algorithm['classname'],
                                                              dictionary['parameters']))

    # Find OperationGroup, if any; re-create it locally when the gid is unknown to this DB
    if 'fk_operation_group' in dictionary:
        group_dict = json.loads(dictionary['fk_operation_group'])
        op_group = None
        if group_dict:
            op_group = dao.get_operationgroup_by_gid(group_dict['gid'])
            if not op_group:
                name = group_dict['name']
                ranges = [group_dict['range1'], group_dict['range2'], group_dict['range3']]
                gid = group_dict['gid']
                op_group = OperationGroup(self.fk_launched_in, name, ranges)
                op_group.gid = gid
                op_group = dao.store_entity(op_group)
        self.operation_group = op_group
        # NOTE(review): if group_dict is empty/falsy, op_group stays None and
        # op_group.id below raises AttributeError — confirm exports always carry a group dict here.
        self.fk_operation_group = op_group.id
    else:
        self.operation_group = None
        self.fk_operation_group = None

    self.parameters = dictionary['parameters']
    self.meta_data = dictionary['meta_data']
    self.create_date = string2date(dictionary['create_date'])
    # Optional dates are serialized as the literal string "None" when absent
    if dictionary['start_date'] != "None":
        self.start_date = string2date(dictionary['start_date'])
    if dictionary['completion_date'] != "None":
        self.completion_date = string2date(dictionary['completion_date'])
    self.status = self._parse_status(dictionary['status'])
    self.visible = string2bool(dictionary['visible'])
    self.range_values = dictionary['range_values']
    self.user_group = dictionary['user_group']
    self.additional_info = dictionary['additional_info']
    self.gid = dictionary['gid']
    return self
def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
    """
    Return a Tuple with Project entity and Operations for current Project.

    :param project_id: Current Project Identifier
    :param applied_filters: Filters to apply on Operations
    :param current_page: Number for current page in operations
    :returns: (project entity, total operations count, list of operation display dicts, pages number)
    """
    selected_project = self.find_project(project_id)
    total_filtered = self.count_filtered_operations(project_id, applied_filters)
    # Ceiling division: one extra page when the last page is only partially filled
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    total_ops_nr = self.count_filtered_operations(project_id)
    start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
    current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
    if current_ops is None:
        return selected_project, 0, [], 0
    operations = []
    for one_op in current_ops:
        try:
            result = {}
            # presumably one_op[0]/one_op[1] are the min/max operation ids of a group row
            # — verify against the DAO query before relying on this
            if one_op[0] != one_op[1]:
                result["id"] = str(one_op[0]) + "-" + str(one_op[1])
            else:
                result["id"] = str(one_op[0])
            burst = dao.get_burst_for_operation_id(one_op[0])
            result["burst_name"] = burst.name if burst else '-'
            result["count"] = one_op[2]
            result["gid"] = one_op[13]
            if one_op[3]:
                # Operation belongs to a group: expose group-level details instead
                try:
                    operation_group = dao.get_generic_entity(OperationGroup, one_op[3])[0]
                    result["group"] = operation_group.name.replace("_", " ")
                    result["operation_group_id"] = operation_group.id
                    datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                    result["datatype_group_gid"] = datatype_group.gid
                    result["gid"] = operation_group.gid
                    ## Filter only viewers for current DataTypeGroup entity:
                    result["view_groups"] = FlowService().get_visualizers_for_group(datatype_group.gid)
                except Exception:
                    # Best-effort: group details are optional for display purposes
                    self.logger.exception("We will ignore group on entity:" + str(one_op))
                    result["datatype_group_gid"] = None
            else:
                result['group'] = None
                result['datatype_group_gid'] = None
            result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
            result["user"] = dao.get_user_by_id(one_op[5])
            # Date columns may come back as strings or datetime objects — normalize to datetime
            result["create"] = string2date(str(one_op[6])) if isinstance(one_op[6], str) else one_op[6]
            result["start"] = string2date(str(one_op[7])) if isinstance(one_op[7], str) else one_op[7]
            result["complete"] = string2date(str(one_op[8])) if isinstance(one_op[8], str) else one_op[8]
            if result["complete"] is not None and result["start"] is not None:
                result["duration"] = format_timedelta(result["complete"] - result["start"])
            result["status"] = one_op[9]
            result["additional"] = one_op[10]
            result["visible"] = one_op[11] > 0
            result['operation_tag'] = one_op[12]
            result['figures'] = None
            if not result['group']:
                datatype_results = dao.get_results_for_operation(result['id'])
                result['results'] = []
                for dt in datatype_results:
                    dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                    if dt_loaded:
                        result['results'].append(dt_loaded)
                    else:
                        self.logger.warning("Could not retrieve datatype %s" % str(dt))
                operation_figures = dao.get_figures_for_operation(result['id'])
                # Compute the full path to the figure / image on disk
                for figure in operation_figures:
                    figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                    figure_full_path = os.path.join(figures_folder, figure.file_path)
                    # Compute the path available from browser
                    figure.figure_path = utils.path2url_part(figure_full_path)
                result['figures'] = operation_figures
            else:
                result['results'] = None
            operations.append(result)
        except Exception:
            ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
            self.logger.exception("Could not prepare operation for display:" + str(one_op))
    return selected_project, total_ops_nr, operations, pages_no
def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
    """
    Return a Tuple with Project entity and Operations for current Project.
    :param project_id: Current Project Identifier
    :param applied_filters: Filters to apply on Operations
    :param current_page: Number for current page in operations
    """
    selected_project = self.find_project(project_id)
    total_filtered = self.count_filtered_operations(project_id, applied_filters)
    # Ceiling division: one extra page when the last page is only partially filled
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    total_ops_nr = self.count_filtered_operations(project_id)
    start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
    current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
    if current_ops is None:
        return selected_project, 0, [], 0
    operations = []
    for one_op in current_ops:
        try:
            result = {}
            # presumably one_op[0]/one_op[1] are min/max operation ids of a grouped row
            # — verify against the DAO query
            if one_op[0] != one_op[1]:
                result["id"] = str(one_op[0]) + "-" + str(one_op[1])
            else:
                result["id"] = str(one_op[0])
            burst = dao.get_burst_for_operation_id(one_op[0])
            result["burst_name"] = burst.name if burst else '-'
            result["count"] = one_op[2]
            result["gid"] = one_op[13]
            operation_group_id = one_op[3]
            if operation_group_id is not None and operation_group_id:
                # Operation belongs to a group: expose group-level details instead
                try:
                    operation_group = dao.get_generic_entity(OperationGroup, operation_group_id)[0]
                    result["group"] = operation_group.name
                    result["group"] = result["group"].replace("_", " ")
                    result["operation_group_id"] = operation_group.id
                    datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
                    result["datatype_group_gid"] = datatype_group.gid if datatype_group is not None else None
                    result["gid"] = operation_group.gid
                    # Filter only viewers for current DataTypeGroup entity:
                    if datatype_group is None:
                        view_groups = None
                    else:
                        view_groups = AlgorithmService().get_visualizers_for_group(datatype_group.gid)
                    result["view_groups"] = view_groups
                except Exception:
                    # Best-effort: group details are optional for display, keep the row
                    self.logger.exception("We will ignore group on entity:" + str(one_op))
                    result["datatype_group_gid"] = None
            else:
                result['group'] = None
                result['datatype_group_gid'] = None
            result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
            result["user"] = dao.get_user_by_id(one_op[5])
            # Date columns may arrive as strings or datetime objects — normalize to datetime
            if type(one_op[6]) is str:
                result["create"] = string2date(str(one_op[6]))
            else:
                result["create"] = one_op[6]
            if type(one_op[7]) is str:
                result["start"] = string2date(str(one_op[7]))
            else:
                result["start"] = one_op[7]
            if type(one_op[8]) is str:
                result["complete"] = string2date(str(one_op[8]))
            else:
                result["complete"] = one_op[8]
            # Only completed-and-started operations get a human-readable duration
            if result["complete"] is not None and result["start"] is not None:
                result["duration"] = format_timedelta(result["complete"] - result["start"])
            result["status"] = one_op[9]
            result["additional"] = one_op[10]
            result["visible"] = True if one_op[11] > 0 else False
            result['operation_tag'] = one_op[12]
            if not result['group']:
                result['results'] = dao.get_results_for_operation(result['id'])
            else:
                result['results'] = None
            operations.append(result)
        except Exception:
            # We got an exception when processing one Operation Row. We will continue with the rest of the rows.
            self.logger.exception("Could not prepare operation for display:" + str(one_op))
    return selected_project, total_ops_nr, operations, pages_no