Example #1
(score: 0)
    def process_move( self, data_table_name, column_name, source_base_path, relative_symlinks=False, **kwd ):
        """Apply the configured move rule for a single data table column.

        Looks up a move rule in ``self.move_by_data_table_column`` keyed by
        ``(data_table_name, column_name)``; when one exists, resolves the
        source and target paths (expanding any configured templates) and
        moves the data into place.

        :param data_table_name: name of the data table being populated.
        :param column_name: column of the data table the move rule applies to.
        :param source_base_path: fallback source directory used when the rule
            does not declare its own ``source_base`` template.
        :param relative_symlinks: unused here; relativization is driven by the
            rule's own ``relativize_symlinks`` flag. Kept for interface
            compatibility.
        :param kwd: extra values handed to ``fill_template`` when expanding
            the configured path templates.
        :returns: ``True`` when a move rule existed and was processed,
            ``False`` otherwise.
        """
        if data_table_name in self.move_by_data_table_column and column_name in self.move_by_data_table_column[ data_table_name ]:
            move_dict = self.move_by_data_table_column[ data_table_name ][ column_name ]
            # Resolve the source directory: caller-supplied base or filled template.
            source = move_dict[ 'source_base' ]
            if source is None:
                source = source_base_path
            else:
                source = fill_template( source, GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path, **kwd )
            if move_dict[ 'source_value' ]:
                source = os.path.join( source, fill_template( move_dict[ 'source_value' ], GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path, **kwd ) )
            # Resolve the target directory: Galaxy's data manager data path by default.
            target = move_dict[ 'target_base' ]
            if target is None:
                target = self.data_managers.app.config.galaxy_data_manager_data_path
            else:
                target = fill_template( target, GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path, **kwd )
            if move_dict[ 'target_value' ]:
                target = os.path.join( target, fill_template( move_dict[ 'target_value' ], GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path, **kwd ) )

            if move_dict[ 'type' ] == 'file':
                # Ensure the target's parent directory exists; tolerate a
                # concurrent creation (EEXIST) but surface any other failure.
                dirs = os.path.split( target )[0]
                try:
                    os.makedirs( dirs )
                except OSError as e:
                    if e.errno != errno.EEXIST:
                        # FIX: bare raise preserves the original traceback (was "raise e").
                        raise
            # moving a directory and the target already exists, we move the contents instead
            # FIX: guard against a missing source (e.g. the tool produced no file
            # for this column) instead of crashing inside move_merge.
            if os.path.exists( source ):
                util.move_merge( source, target )

            if move_dict.get( 'relativize_symlinks', False ):
                util.relativize_symlinks( target )

            return True
        return False
Example #2
(score: 0)
    def process_move(self, data_table_name, column_name, source_base_path, relative_symlinks=False, **kwd):
        """Apply the configured move rule for a data table column, if any.

        Resolves the source and target paths for ``(data_table_name,
        column_name)`` — expanding configured templates and stripping the
        result — then moves the data into place.

        :returns: ``True`` when a rule existed and was processed, ``False``
            otherwise.
        """
        table_moves = self.move_by_data_table_column.get(data_table_name, {})
        if column_name not in table_moves:
            return False
        move_dict = table_moves[column_name]
        galaxy_data_path = self.data_managers.app.config.galaxy_data_manager_data_path

        def expand(template):
            # Fill the configured template and trim surrounding whitespace.
            return fill_template(template, GALAXY_DATA_MANAGER_DATA_PATH=galaxy_data_path, **kwd).strip()

        # Source: caller-supplied base path unless the rule declares a template.
        if move_dict['source_base'] is None:
            source = source_base_path
        else:
            source = expand(move_dict['source_base'])
        if move_dict['source_value']:
            source = os.path.join(source, expand(move_dict['source_value']))

        # Target: Galaxy's data manager data path unless the rule declares a template.
        if move_dict['target_base'] is None:
            target = galaxy_data_path
        else:
            target = expand(move_dict['target_base'])
        if move_dict['target_value']:
            target = os.path.join(target, expand(move_dict['target_value']))

        if move_dict['type'] == 'file':
            # Create the target's parent directory, tolerating a pre-existing one.
            dirs = os.path.split(target)[0]
            try:
                os.makedirs(dirs)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise e
        # moving a directory and the target already exists, we move the contents instead
        if os.path.exists(source):
            util.move_merge(source, target)

        if move_dict.get('relativize_symlinks', False):
            util.relativize_symlinks(target)

        return True
Example #3
(score: 0)
    def process_move(self, data_table_name, column_name, source_base_path, relative_symlinks=False, **kwd):
        """Apply the configured move rule for a single data table column.

        Looks up a move rule keyed by ``(data_table_name, column_name)``;
        when one exists, resolves source and target paths (filling any
        configured templates) and moves the data into place.

        :param data_table_name: name of the data table being populated.
        :param column_name: column of the data table the move rule applies to.
        :param source_base_path: fallback source directory used when the rule
            does not declare its own ``source_base`` template.
        :param relative_symlinks: unused here; relativization is driven by the
            rule's own ``relativize_symlinks`` flag. Kept for interface
            compatibility.
        :param kwd: extra values handed to ``fill_template``.
        :returns: ``True`` when a move rule existed and was processed,
            ``False`` otherwise.
        """
        if (
            data_table_name in self.move_by_data_table_column
            and column_name in self.move_by_data_table_column[data_table_name]
        ):
            move_dict = self.move_by_data_table_column[data_table_name][column_name]
            # Resolve the source directory: caller-supplied base or filled template.
            source = move_dict["source_base"]
            if source is None:
                source = source_base_path
            else:
                source = fill_template(
                    source,
                    GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path,
                    **kwd
                )
            if move_dict["source_value"]:
                source = os.path.join(
                    source,
                    fill_template(
                        move_dict["source_value"],
                        GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path,
                        **kwd
                    ),
                )
            # Resolve the target directory: Galaxy's data manager data path by default.
            target = move_dict["target_base"]
            if target is None:
                target = self.data_managers.app.config.galaxy_data_manager_data_path
            else:
                target = fill_template(
                    target,
                    GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path,
                    **kwd
                )
            if move_dict["target_value"]:
                target = os.path.join(
                    target,
                    fill_template(
                        move_dict["target_value"],
                        GALAXY_DATA_MANAGER_DATA_PATH=self.data_managers.app.config.galaxy_data_manager_data_path,
                        **kwd
                    ),
                )

            if move_dict["type"] == "file":
                # Ensure the target's parent directory exists; tolerate EEXIST.
                dirs, filename = os.path.split(target)
                try:
                    os.makedirs(dirs)
                except OSError as e:  # FIX: was Python-2-only "except OSError, e:" syntax
                    if e.errno != errno.EEXIST:
                        raise  # FIX: bare raise preserves the original traceback
            # moving a directory and the target already exists, we move the contents instead
            util.move_merge(source, target)

            if move_dict.get("relativize_symlinks", False):
                util.relativize_symlinks(target)

            return True
        # FIX: the original fell off the end (implicitly returning None);
        # return False explicitly, matching the sibling variants' contract.
        return False
Example #4
(score: 0)
    def process_result(self, out_data):
        """Read the JSON produced by each data manager output and load the new
        entries into the corresponding tool data tables.

        :param out_data: mapping of output name -> output dataset; each
            dataset's ``file_name`` is expected to hold the data manager
            tool's JSON result.
        """
        data_manager_dicts = {}
        data_manager_dict = {}
        # TODO: fix this merging below
        for output_name, output_dataset in out_data.items():
            try:
                # FIX: context manager closes the handle promptly
                # (was a leaked open(...).read()).
                with open(output_dataset.file_name) as fh:
                    output_dict = json.load(fh)
            except Exception as e:
                log.warning('Error reading DataManagerTool json for "%s": %s' % (output_name, e))
                continue
            data_manager_dicts[output_name] = output_dict
            for key, value in output_dict.items():
                if key not in data_manager_dict:
                    data_manager_dict[key] = {}
                data_manager_dict[key].update(value)
            # NOTE(review): this update() overwrites the per-key merge performed
            # just above for every top-level key in output_dict — presumably the
            # bug the TODO refers to; left as-is to preserve behavior.
            data_manager_dict.update(output_dict)

        data_tables_dict = data_manager_dict.get('data_tables', {})
        # Handle every table this data manager declares; entries are popped so
        # anything remaining afterwards is undeclared.
        for data_table_name in self.data_tables.keys():
            data_table_values = data_tables_dict.pop(data_table_name, None)
            if not data_table_values:
                log.warning('No values for data table "%s" were returned by the data manager "%s".' % (data_table_name, self.id))
                continue  # next data table
            data_table = self.data_managers.app.tool_data_tables.get(data_table_name, None)
            if data_table is None:
                log.error('The data manager "%s" returned an unknown data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.' % (self.id, data_table_name, data_table_values, data_table_name, 'tool_data_table_conf.xml'))
                continue  # next table name
            if not isinstance(data_table, SUPPORTED_DATA_TABLE_TYPES):
                log.error('The data manager "%s" returned an unsupported data table "%s" with type "%s" with new entries "%s". These entries will not be created. Please confirm that the data table is of a supported type (%s).' % (self.id, data_table_name, type(data_table), data_table_values, SUPPORTED_DATA_TABLE_TYPES))
                continue  # next table name
            # Collect the datasets referenced by each column's output_ref so
            # process_move()/process_value_translation() can locate the files.
            output_ref_values = {}
            if data_table_name in self.output_ref_by_data_table:
                for data_table_column, output_ref in self.output_ref_by_data_table[data_table_name].items():
                    output_ref_dataset = out_data.get(output_ref, None)
                    assert output_ref_dataset is not None, "Referenced output was not found."
                    output_ref_values[data_table_column] = output_ref_dataset

            if not isinstance(data_table_values, list):
                data_table_values = [data_table_values]
            for data_table_row in data_table_values:
                data_table_value = dict(**data_table_row)  # keep original values here
                for name, value in data_table_row.items():  # FIXME: need to loop through here based upon order listed in data_manager config
                    if name in output_ref_values:
                        self.process_move(data_table_name, name, output_ref_values[name].extra_files_path, **data_table_value)
                        data_table_value[name] = self.process_value_translation(data_table_name, name, **data_table_value)
                data_table.add_entry(data_table_value, persist=True, entry_source=self)
            # Notify other processes that the table contents changed.
            send_control_task(self.data_managers.app,
                              'reload_tool_data_tables',
                              noop_self=True,
                              kwargs={'table_name': data_table_name})
        if self.undeclared_tables and data_tables_dict:
            # We handle the data move, by just moving all the data out of the extra files path
            # moving a directory and the target already exists, we move the contents instead
            log.debug('Attempting to add entries for undeclared tables: %s.', ', '.join(data_tables_dict.keys()))
            for ref_file in out_data.values():
                if ref_file.extra_files_path_exists():
                    util.move_merge(ref_file.extra_files_path, self.data_managers.app.config.galaxy_data_manager_data_path)
            path_column_names = ['path']
            for data_table_name, data_table_values in data_tables_dict.items():
                data_table = self.data_managers.app.tool_data_tables.get(data_table_name, None)
                if not isinstance(data_table_values, list):
                    data_table_values = [data_table_values]
                for data_table_row in data_table_values:
                    data_table_value = dict(**data_table_row)  # keep original values here
                    for name, value in data_table_row.items():
                        if name in path_column_names:
                            # Rewrite relative paths against Galaxy's data manager data path.
                            data_table_value[name] = os.path.abspath(os.path.join(self.data_managers.app.config.galaxy_data_manager_data_path, value))
                    data_table.add_entry(data_table_value, persist=True, entry_source=self)
                send_control_task(self.data_managers.app, 'reload_tool_data_tables',
                                  noop_self=True,
                                  kwargs={'table_name': data_table_name})
        else:
            for data_table_name, data_table_values in data_tables_dict.items():
                # tool returned extra data table entries, but data table was not declared in data manager
                # do not add these values, but do provide messages
                log.warning('The data manager "%s" returned an undeclared data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.' % (self.id, data_table_name, data_table_values, data_table_name, self.data_managers.filename))
Example #5
(score: 0)
     for data_table_row in data_table_values:
         data_table_value = dict( **data_table_row )  # keep original values here
         for name, value in data_table_row.iteritems():  # FIXME: need to loop through here based upon order listed in data_manager config
             if name in output_ref_values:
                 self.process_move( data_table_name, name, output_ref_values[ name ].extra_files_path, **data_table_value )
                 data_table_value[ name ] = self.process_value_translation( data_table_name, name, **data_table_value )
         data_table.add_entry( data_table_value, persist=True, entry_source=self )
         galaxy.queue_worker.send_control_task(self.data_managers.app, 'reload_tool_data_tables',
                                               noop_self=True,
                                               kwargs={'table_name': data_table_name} )
 if self.undeclared_tables and data_tables_dict:
     # We handle the data move, by just moving all the data out of the extra files path
     # moving a directory and the target already exists, we move the contents instead
     log.debug( 'Attempting to add entries for undeclared tables: %s.', ', '.join( data_tables_dict.keys() ) )
     for ref_file in out_data.values():
         util.move_merge( ref_file.extra_files_path, self.data_managers.app.config.galaxy_data_manager_data_path )
     path_column_names = [ 'path' ]
     for data_table_name, data_table_values in data_tables_dict.iteritems():
         data_table = self.data_managers.app.tool_data_tables.get( data_table_name, None )
         if not isinstance( data_table_values, list ):
             data_table_values = [ data_table_values ]
         for data_table_row in data_table_values:
             data_table_value = dict( **data_table_row )  # keep original values here
             for name, value in data_table_row.iteritems():
                 if name in path_column_names:
                     data_table_value[ name ] = os.path.abspath( os.path.join( self.data_managers.app.config.galaxy_data_manager_data_path, value ) )
             data_table.add_entry( data_table_value, persist=True, entry_source=self )
             galaxy.queue_worker.send_control_task(self.data_managers.app, 'reload_tool_data_tables',
                                                   noop_self=True,
                                                   kwargs={'table_name': data_table_name} )
 else:
Example #6
(score: 0)
                     output_ref_values[name].extra_files_path,
                     **data_table_value)
                 data_table_value[
                     name] = self.process_value_translation(
                         data_table_name, name, **data_table_value)
         data_table.add_entry(data_table_value,
                              persist=True,
                              entry_source=self)
 if self.undeclared_tables and data_tables_dict:
     # We handle the data move, by just moving all the data out of the extra files path
     # moving a directory and the target already exists, we move the contents instead
     log.debug('Attempting to add entries for undeclared tables: %s.',
               ', '.join(data_tables_dict.keys()))
     for ref_file in out_data.values():
         util.move_merge(
             ref_file.extra_files_path, self.data_managers.app.config.
             galaxy_data_manager_data_path)
     path_column_names = ['path']
     for data_table_name, data_table_values in data_tables_dict.iteritems(
     ):
         data_table = self.data_managers.app.tool_data_tables.get(
             data_table_name, None)
         if not isinstance(data_table_values, list):
             data_table_values = [data_table_values]
         for data_table_row in data_table_values:
             data_table_value = dict(
                 **data_table_row)  #keep original values here
             for name, value in data_table_row.iteritems():
                 if name in path_column_names:
                     data_table_value[name] = os.path.abspath(
                         os.path.join(
Example #7
(score: 0)
    def process_result(self, out_data):
        """Read the JSON produced by each data manager output and apply it to
        the tool data tables, supporting both added and removed entries.

        Per-table results may be either a plain value/list (entries to add) or
        a dict with ``add``/``remove`` keys.

        :param out_data: mapping of output name -> output dataset; each
            dataset's ``file_name`` is expected to hold the data manager
            tool's JSON result.
        """
        data_manager_dicts = {}
        data_manager_dict = {}
        # TODO: fix this merging below
        for output_name, output_dataset in out_data.items():
            try:
                # FIX: context manager closes the handle promptly
                # (was a leaked open(...).read()).
                with open(output_dataset.file_name) as fh:
                    output_dict = json.load(fh)
            except Exception as e:
                log.warning('Error reading DataManagerTool json for "%s": %s' %
                            (output_name, e))
                continue
            data_manager_dicts[output_name] = output_dict
            for key, value in output_dict.items():
                if key not in data_manager_dict:
                    data_manager_dict[key] = {}
                data_manager_dict[key].update(value)
            # NOTE(review): this update() overwrites the per-key merge performed
            # just above for every top-level key in output_dict — presumably the
            # bug the TODO refers to; left as-is to preserve behavior.
            data_manager_dict.update(output_dict)

        data_tables_dict = data_manager_dict.get('data_tables', {})
        # Handle every table this data manager declares; entries are popped so
        # anything remaining afterwards is undeclared.
        for data_table_name in self.data_tables.keys():
            data_table_values = None
            data_table_remove_values = None
            # Add/Remove option for data tables
            if isinstance(data_tables_dict.get(data_table_name), dict):

                data_table_data = data_tables_dict.get(data_table_name, None)
                # Validate results
                if not data_table_data:
                    log.warning('Data table seems invalid: "%s".' %
                                data_table_name)
                    continue

                data_table_values = data_table_data.pop('add', None)
                data_table_remove_values = data_table_data.pop('remove', None)

                # Remove it as well here
                data_tables_dict.pop(data_table_name, None)
            else:
                data_table_values = data_tables_dict.pop(data_table_name, None)

            if not data_table_values and not data_table_remove_values:
                log.warning(
                    'No values for data table "%s" were returned by the data manager "%s".'
                    % (data_table_name, self.id))
                continue  # next data table
            data_table = self.data_managers.app.tool_data_tables.get(
                data_table_name, None)
            if data_table is None:
                log.error(
                    'The data manager "%s" returned an unknown data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.'
                    % (self.id, data_table_name, data_table_values,
                       data_table_name, 'tool_data_table_conf.xml'))
                continue  # next table name
            if not isinstance(data_table, SUPPORTED_DATA_TABLE_TYPES):
                log.error(
                    'The data manager "%s" returned an unsupported data table "%s" with type "%s" with new entries "%s". These entries will not be created. Please confirm that the data table is of a supported type (%s).'
                    % (self.id, data_table_name, type(data_table),
                       data_table_values, SUPPORTED_DATA_TABLE_TYPES))
                continue  # next table name
            # Collect the datasets referenced by each column's output_ref so
            # process_move()/process_value_translation() can locate the files.
            output_ref_values = {}
            if data_table_name in self.output_ref_by_data_table:
                for data_table_column, output_ref in self.output_ref_by_data_table[
                        data_table_name].items():
                    output_ref_dataset = out_data.get(output_ref, None)
                    assert output_ref_dataset is not None, "Referenced output was not found."
                    output_ref_values[data_table_column] = output_ref_dataset

            # Normalize both add and remove payloads to lists.
            if not isinstance(data_table_values, list):
                data_table_values = [data_table_values
                                     ] if data_table_values else []
            if not isinstance(data_table_remove_values, list):
                data_table_remove_values = [
                    data_table_remove_values
                ] if data_table_remove_values else []
            for data_table_row in data_table_values:
                data_table_value = dict(
                    **data_table_row)  # keep original values here
                for name, value in data_table_row.items(
                ):  # FIXME: need to loop through here based upon order listed in data_manager config
                    if name in output_ref_values:
                        self.process_move(
                            data_table_name, name,
                            output_ref_values[name].extra_files_path,
                            **data_table_value)
                        data_table_value[
                            name] = self.process_value_translation(
                                data_table_name, name, **data_table_value)
                data_table.add_entry(data_table_value,
                                     persist=True,
                                     entry_source=self)
            # Removes data table entries
            for data_table_row in data_table_remove_values:
                data_table_value = dict(
                    **data_table_row)  # keep original values here
                # NOTE(review): relies on dict value ordering matching the
                # table's column order — confirm against remove_entry's contract.
                data_table.remove_entry(list(data_table_value.values()))

            # Notify other processes that the table contents changed.
            self.data_managers.app.queue_worker.send_control_task(
                'reload_tool_data_tables',
                noop_self=True,
                kwargs={'table_name': data_table_name})
        if self.undeclared_tables and data_tables_dict:
            # We handle the data move, by just moving all the data out of the extra files path
            # moving a directory and the target already exists, we move the contents instead
            log.debug('Attempting to add entries for undeclared tables: %s.',
                      ', '.join(data_tables_dict.keys()))
            for ref_file in out_data.values():
                if ref_file.extra_files_path_exists():
                    util.move_merge(
                        ref_file.extra_files_path, self.data_managers.app.
                        config.galaxy_data_manager_data_path)
            path_column_names = ['path']
            for data_table_name, data_table_values in data_tables_dict.items():
                data_table = self.data_managers.app.tool_data_tables.get(
                    data_table_name, None)
                if not isinstance(data_table_values, list):
                    data_table_values = [data_table_values]
                for data_table_row in data_table_values:
                    data_table_value = dict(
                        **data_table_row)  # keep original values here
                    for name, value in data_table_row.items():
                        if name in path_column_names:
                            # Rewrite relative paths against Galaxy's data manager data path.
                            data_table_value[name] = os.path.abspath(
                                os.path.join(
                                    self.data_managers.app.config.
                                    galaxy_data_manager_data_path, value))
                    data_table.add_entry(data_table_value,
                                         persist=True,
                                         entry_source=self)
                self.data_managers.app.queue_worker.send_control_task(
                    'reload_tool_data_tables',
                    noop_self=True,
                    kwargs={'table_name': data_table_name})
        else:
            for data_table_name, data_table_values in data_tables_dict.items():
                # tool returned extra data table entries, but data table was not declared in data manager
                # do not add these values, but do provide messages
                log.warning(
                    'The data manager "%s" returned an undeclared data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.'
                    % (self.id, data_table_name, data_table_values,
                       data_table_name, self.data_managers.filename))