def reload(self, trans, payload=None, **kwd):
    """
    POST /api/display_applications/reload

    Reloads the list of display applications.

    :param ids: list containing ids of display to be reloaded
    :type ids: list
    """
    # Fix: avoid a mutable default argument; treat a missing payload as empty.
    payload = payload or {}
    ids = payload.get('ids')
    # Notify the other server processes; noop_self=True because this process
    # performs its own reload synchronously just below.
    queue_worker.send_control_task(trans.app,
                                   'reload_display_application',
                                   noop_self=True,
                                   kwargs={'display_application_ids': ids})
    reloaded, failed = trans.app.datatypes_registry.reload_display_applications(ids)
    if not reloaded and failed:
        message = 'Unable to reload any of the %i requested display applications ("%s").' % (len(failed), '", "'.join(failed))
    elif failed:
        message = 'Reloaded %i display applications ("%s"), but failed to reload %i display applications ("%s").' % (len(reloaded), '", "'.join(reloaded), len(failed), '", "'.join(failed))
    elif not reloaded:
        message = 'You need to request at least one display application to reload.'
    else:
        message = 'Reloaded %i requested display applications ("%s").' % (len(reloaded), '", "'.join(reloaded))
    return {'message': message, 'reloaded': reloaded, 'failed': failed}
def test_send_control_task_to_many_listeners(queue_worker_factory):
    """A control task sent from one app is executed once by every listener."""
    apps = [queue_worker_factory() for _ in range(5)]
    send_control_task(app=apps[0], task='echo')
    for listener in apps:
        wait_for_var(listener, 'some_var', 'bar')
        assert len(listener.tasks_executed) == 1
def test_send_control_task_noop_self(queue_worker_factory):
    """With noop_self=True the sender replies NO_OP and skips executing the task."""
    worker_app = queue_worker_factory()
    assert worker_app.some_var == 'foo'
    reply = send_control_task(app=worker_app, task='echo', noop_self=True, get_response=True)
    assert reply == 'NO_OP'
    # The sender's state must be untouched and nothing executed locally.
    assert worker_app.some_var == 'foo'
    assert not worker_app.tasks_executed
def reload(self, trans, payload=None, **kwd):
    """
    POST /api/display_applications/reload

    Reloads the list of display applications.

    :param ids: list containing ids of display to be reloaded
    :type ids: list
    """
    # Fix: avoid a mutable default argument; treat a missing payload as empty.
    payload = payload or {}
    ids = payload.get('ids')
    # Notify the other server processes; noop_self=True because this process
    # performs its own reload synchronously just below.
    queue_worker.send_control_task(trans.app,
                                   'reload_display_application',
                                   noop_self=True,
                                   kwargs={'display_application_ids': ids})
    reloaded, failed = trans.app.datatypes_registry.reload_display_applications(ids)
    if not reloaded and failed:
        message = 'Unable to reload any of the %i requested display applications ("%s").' % (len(failed), '", "'.join(failed))
    elif failed:
        message = 'Reloaded %i display applications ("%s"), but failed to reload %i display applications ("%s").' % (len(reloaded), '", "'.join(reloaded), len(failed), '", "'.join(failed))
    elif not reloaded:
        message = 'You need to request at least one display application to reload.'
    else:
        message = 'Reloaded %i requested display applications ("%s").' % (len(reloaded), '", "'.join(reloaded))
    return {'message': message, 'reloaded': reloaded, 'failed': failed}
def reload_toolbox(self, trans, **kwds):
    """
    PUT /api/configuration/toolbox

    Reload the Galaxy toolbox (but not individual tools).
    """
    # Resolve the Galaxy application behind the toolbox, then queue the reload.
    galaxy_app = self.app.toolbox.app
    send_control_task(galaxy_app, 'reload_toolbox')
def process_result(self, out_data):
    """Consume a data manager tool's JSON outputs and add the entries they
    describe to the corresponding tool data tables.

    :param out_data: mapping of output name -> output dataset produced by the
        data manager tool run.
    """
    data_manager_dicts = {}
    data_manager_dict = {}
    # TODO: fix this merging below
    for output_name, output_dataset in out_data.items():
        try:
            # Fix: use a context manager so the dataset file handle is always
            # closed (the original open(...).read() leaked the handle).
            with open(output_dataset.file_name) as fh:
                output_dict = json.load(fh)
        except Exception as e:
            log.warning('Error reading DataManagerTool json for "%s": %s' % (output_name, e))
            continue
        data_manager_dicts[output_name] = output_dict
        for key, value in output_dict.items():
            if key not in data_manager_dict:
                data_manager_dict[key] = {}
            data_manager_dict[key].update(value)
        data_manager_dict.update(output_dict)
    data_tables_dict = data_manager_dict.get('data_tables', {})
    # Process the tables this data manager declares; each declared table is
    # popped so that whatever remains afterwards is "undeclared".
    for data_table_name in self.data_tables.keys():
        data_table_values = data_tables_dict.pop(data_table_name, None)
        if not data_table_values:
            log.warning('No values for data table "%s" were returned by the data manager "%s".' % (data_table_name, self.id))
            continue  # next data table
        data_table = self.data_managers.app.tool_data_tables.get(data_table_name, None)
        if data_table is None:
            log.error('The data manager "%s" returned an unknown data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.' % (self.id, data_table_name, data_table_values, data_table_name, 'tool_data_table_conf.xml'))
            continue  # next table name
        if not isinstance(data_table, SUPPORTED_DATA_TABLE_TYPES):
            log.error('The data manager "%s" returned an unsupported data table "%s" with type "%s" with new entries "%s". These entries will not be created. Please confirm that the data table is of a supported type (%s).' % (self.id, data_table_name, type(data_table), data_table_values, SUPPORTED_DATA_TABLE_TYPES))
            continue  # next table name
        output_ref_values = {}
        if data_table_name in self.output_ref_by_data_table:
            for data_table_column, output_ref in self.output_ref_by_data_table[data_table_name].items():
                output_ref_dataset = out_data.get(output_ref, None)
                assert output_ref_dataset is not None, "Referenced output was not found."
                output_ref_values[data_table_column] = output_ref_dataset
        if not isinstance(data_table_values, list):
            data_table_values = [data_table_values]
        for data_table_row in data_table_values:
            data_table_value = dict(**data_table_row)  # keep original values here
            for name, value in data_table_row.items():  # FIXME: need to loop through here based upon order listed in data_manager config
                if name in output_ref_values:
                    self.process_move(data_table_name, name, output_ref_values[name].extra_files_path, **data_table_value)
                    data_table_value[name] = self.process_value_translation(data_table_name, name, **data_table_value)
            data_table.add_entry(data_table_value, persist=True, entry_source=self)
        # Other processes reload the table; noop_self because this process
        # already holds the updated in-memory table.
        send_control_task(self.data_managers.app, 'reload_tool_data_tables', noop_self=True, kwargs={'table_name': data_table_name})
    if self.undeclared_tables and data_tables_dict:
        # We handle the data move, by just moving all the data out of the extra files path
        # moving a directory and the target already exists, we move the contents instead
        log.debug('Attempting to add entries for undeclared tables: %s.', ', '.join(data_tables_dict.keys()))
        for ref_file in out_data.values():
            if ref_file.extra_files_path_exists():
                util.move_merge(ref_file.extra_files_path, self.data_managers.app.config.galaxy_data_manager_data_path)
        path_column_names = ['path']
        for data_table_name, data_table_values in data_tables_dict.items():
            data_table = self.data_managers.app.tool_data_tables.get(data_table_name, None)
            if not isinstance(data_table_values, list):
                data_table_values = [data_table_values]
            for data_table_row in data_table_values:
                data_table_value = dict(**data_table_row)  # keep original values here
                for name, value in data_table_row.items():
                    if name in path_column_names:
                        data_table_value[name] = os.path.abspath(os.path.join(self.data_managers.app.config.galaxy_data_manager_data_path, value))
                data_table.add_entry(data_table_value, persist=True, entry_source=self)
            send_control_task(self.data_managers.app, 'reload_tool_data_tables', noop_self=True, kwargs={'table_name': data_table_name})
    else:
        for data_table_name, data_table_values in data_tables_dict.items():
            # tool returned extra data table entries, but data table was not declared in data manager
            # do not add these values, but do provide messages
            log.warning('The data manager "%s" returned an undeclared data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.' % (self.id, data_table_name, data_table_values, data_table_name, self.data_managers.filename))
def reload_toolbox(self, trans, **kwds):
    """
    PUT /api/configuration/toolbox

    Reload the Galaxy toolbox (but not individual tools).
    """
    # Queue a toolbox reload on the application owning the toolbox.
    target_app = self.app.toolbox.app
    send_control_task(target_app, 'reload_toolbox')
def process_result(self, out_data):
    """Consume a data manager tool's JSON outputs and add the entries they
    describe to the corresponding tool data tables.

    :param out_data: mapping of output name -> output dataset produced by the
        data manager tool run.
    """
    data_manager_dicts = {}
    data_manager_dict = {}
    # TODO: fix this merging below
    for output_name, output_dataset in out_data.items():
        try:
            # Fix: use a context manager so the dataset file handle is always
            # closed (the original open(...).read() leaked the handle).
            with open(output_dataset.file_name) as fh:
                output_dict = json.load(fh)
        except Exception as e:
            log.warning('Error reading DataManagerTool json for "%s": %s' % (output_name, e))
            continue
        data_manager_dicts[output_name] = output_dict
        for key, value in output_dict.items():
            if key not in data_manager_dict:
                data_manager_dict[key] = {}
            data_manager_dict[key].update(value)
        data_manager_dict.update(output_dict)
    data_tables_dict = data_manager_dict.get('data_tables', {})
    # Process the tables this data manager declares; each declared table is
    # popped so that whatever remains afterwards is "undeclared".
    for data_table_name in self.data_tables.keys():
        data_table_values = data_tables_dict.pop(data_table_name, None)
        if not data_table_values:
            log.warning('No values for data table "%s" were returned by the data manager "%s".' % (data_table_name, self.id))
            continue  # next data table
        data_table = self.data_managers.app.tool_data_tables.get(data_table_name, None)
        if data_table is None:
            log.error('The data manager "%s" returned an unknown data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.' % (self.id, data_table_name, data_table_values, data_table_name, 'tool_data_table_conf.xml'))
            continue  # next table name
        if not isinstance(data_table, SUPPORTED_DATA_TABLE_TYPES):
            log.error('The data manager "%s" returned an unsupported data table "%s" with type "%s" with new entries "%s". These entries will not be created. Please confirm that the data table is of a supported type (%s).' % (self.id, data_table_name, type(data_table), data_table_values, SUPPORTED_DATA_TABLE_TYPES))
            continue  # next table name
        output_ref_values = {}
        if data_table_name in self.output_ref_by_data_table:
            for data_table_column, output_ref in self.output_ref_by_data_table[data_table_name].items():
                output_ref_dataset = out_data.get(output_ref, None)
                assert output_ref_dataset is not None, "Referenced output was not found."
                output_ref_values[data_table_column] = output_ref_dataset
        if not isinstance(data_table_values, list):
            data_table_values = [data_table_values]
        for data_table_row in data_table_values:
            data_table_value = dict(**data_table_row)  # keep original values here
            for name, value in data_table_row.items():  # FIXME: need to loop through here based upon order listed in data_manager config
                if name in output_ref_values:
                    self.process_move(data_table_name, name, output_ref_values[name].extra_files_path, **data_table_value)
                    data_table_value[name] = self.process_value_translation(data_table_name, name, **data_table_value)
            data_table.add_entry(data_table_value, persist=True, entry_source=self)
        # Other processes reload the table; noop_self because this process
        # already holds the updated in-memory table.
        send_control_task(self.data_managers.app, 'reload_tool_data_tables', noop_self=True, kwargs={'table_name': data_table_name})
    if self.undeclared_tables and data_tables_dict:
        # We handle the data move, by just moving all the data out of the extra files path
        # moving a directory and the target already exists, we move the contents instead
        log.debug('Attempting to add entries for undeclared tables: %s.', ', '.join(data_tables_dict.keys()))
        for ref_file in out_data.values():
            if ref_file.extra_files_path_exists():
                util.move_merge(ref_file.extra_files_path, self.data_managers.app.config.galaxy_data_manager_data_path)
        path_column_names = ['path']
        for data_table_name, data_table_values in data_tables_dict.items():
            data_table = self.data_managers.app.tool_data_tables.get(data_table_name, None)
            if not isinstance(data_table_values, list):
                data_table_values = [data_table_values]
            for data_table_row in data_table_values:
                data_table_value = dict(**data_table_row)  # keep original values here
                for name, value in data_table_row.items():
                    if name in path_column_names:
                        data_table_value[name] = os.path.abspath(os.path.join(self.data_managers.app.config.galaxy_data_manager_data_path, value))
                data_table.add_entry(data_table_value, persist=True, entry_source=self)
            send_control_task(self.data_managers.app, 'reload_tool_data_tables', noop_self=True, kwargs={'table_name': data_table_name})
    else:
        for data_table_name, data_table_values in data_tables_dict.items():
            # tool returned extra data table entries, but data table was not declared in data manager
            # do not add these values, but do provide messages
            log.warning('The data manager "%s" returned an undeclared data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.' % (self.id, data_table_name, data_table_values, data_table_name, self.data_managers.filename))
def test_send_control_task_get_result(queue_worker_factory):
    """get_response=True returns the task's reply and still executes it locally."""
    worker_app = queue_worker_factory()
    reply = send_control_task(app=worker_app, task='echo', get_response=True)
    assert reply == 'bar'
    assert worker_app.some_var == 'bar'
    assert len(worker_app.tasks_executed) == 1
def test_send_control_task(queue_worker_factory):
    """A plain control task is eventually executed exactly once by the app."""
    worker_app = queue_worker_factory()
    send_control_task(app=worker_app, task='echo')
    wait_for_var(worker_app, 'some_var', 'bar')
    assert len(worker_app.tasks_executed) == 1