def search(self, query, max_results=False, fields=None, group_by=None):
    """
    Search the current client for results

    @param query       string     The query to parse and hand to each item's search
    @param max_results [bool|int] Passed through to each item's search
    @param fields      list|None  Optional list of fields to restrict the search to
    @param group_by    str|None   Optional grouping clause

    @return MultiResultList containing one named ResultList per item
            that produced at least one result
    """
    self._wait_for_load()
    frames = MultiResultList()
    for item in self:
        # Hoist the repeated lookups: the original computed the basename and
        # the mapped item twice per iteration.
        # NOTE(review): assumes _get_item is a pure lookup with no per-call
        # side effects - confirm against its definition.
        basename = os.path.basename(item.filename)
        mapping = self._get_item(basename)
        frame = item.search(
            self._language_parser.parse(query, mapping.keys()),
            max_results=max_results,
            fields=fields,
            group_by=group_by
        )
        results = ResultList(
            name=basename.split('.')[0].split('/')[-1]
        )
        results.dataframe = (frame, mapping)
        # Only keep result lists that actually contain entries.
        if len(results) > 0:
            frames.append(results)
    return frames
def setup(self, query, max_results=0, fields=None, collate=None, distinct=False, namespace=None, group_by=None):
    """
    Initialise the filter

    @param query string The query to search with
    @param max_results [bool|int] If False will retrieve all matching issues in batches of 50
    @param fields list An optional list of fields to retrieve. If empty, retrieves all fields
    """
    # pylint: disable=arguments-differ
    # The signature intentionally differs from the super() class because
    # __init__ forwards *args and **kwargs into this method.
    # pylint: disable=too-many-arguments
    # Every one of these arguments is supplied from the configuration.
    self._query = Replacements().replace(query)
    self._fields = fields
    self._observers = []
    self._max_results = max_results
    self._group_by = group_by
    # Only resolve a collation method when one was actually requested.
    collation = collate
    if collate is not None:
        collation = Collation.get(collate, namespace=namespace)
    self._results = ResultList(collate=collation, distinct=distinct, namespace=namespace)
def test_result_list_dataframe_returns_pandas_dict_if_called_directly(self):
    """ Accessing ``dataframe`` directly yields a pandas DataFrame. """
    collection = ResultList(name='test results')
    collection.map_to(Issue())
    # Two identical issues are enough to exercise the conversion.
    for _ in range(2):
        entry = Issue()
        entry.description = 'This is a test item'
        collection.append(entry)
    self.assertIsInstance(collection.dataframe, pd.DataFrame)
def __init__(self, threadmanager, config, append=True):
    """
    Initialise the ThreadableCommand object

    @param threadmanager ThreadManager - the manager this thread belongs to
    @param config        namedtuple - unpacked into the parent constructor
                         via ``config._asdict()``
    @param append        bool - when True, register this thread with the
                         thread manager once construction completes
    """
    self._thread_manager = threadmanager
    self._commands = []
    self._observers = []
    self._redirects = []
    self._results = ResultList()
    # pylint: disable=protected-access
    # verification that Configuration is not initialised and initialise it
    # with the thread manager - this is desired behaviour against the singleton
    # to prevent it being loaded multiple times inside threading objects
    if Configuration._instance is None:
        Configuration._instance = self.threadmanager.configuration
    # Configuration must be seeded BEFORE validate_setup/super().__init__,
    # which may rely on the singleton being populated.
    self.validate_setup(config)
    super().__init__(**config._asdict())
    if append:
        self.threadmanager.append(self)
def test_table_renders_filter_results_multi_results_without_combine(self, mock_heading, mock_table):
    """
    A multi-result set with ``combine = False`` should render one table
    and one heading per result list.
    """
    self.tearDown()
    Config = namedtuple('Config', 'rows columns style')
    with patch('pyccata.core.filter.Filter.results', new_callable=PropertyMock) as mock_results:
        results_set_one = ResultList(name='test results')
        result_issue = Issue()
        result_issue.description = 'This is a test item'
        results_set_one.append(result_issue)

        results_set_two = ResultList(name='another set of tests')
        another_result_issue = Issue()
        another_result_issue.description = 'This is a test item'
        results_set_two.append(another_result_issue)

        multi_results = MultiResultList()
        multi_results.combine = False
        multi_results.append(results_set_one)
        multi_results.append(results_set_two)
        mock_results.return_value = multi_results

        # Fix: the local was named ``Filter``, shadowing the patched Filter
        # class. The namedtuple typename string is kept for identical reprs.
        FilterConfig = namedtuple('Filter', 'query max_results namespace')
        rows = FilterConfig(query='project=mssportal', max_results=5, namespace='pyccata.core')
        columns = ['Name', 'Description']
        config = Config(rows=rows, columns=columns, style='Light heading 1')

        table = Table(self._thread_manager, config)
        document = ReportManager()
        table.render(document)
        # Fix: assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(2, mock_table.call_count)
        self.assertEqual(2, mock_heading.call_count)
def test_result_list_from_pandas_dict(self):
    """ A DataFrame assigned to ``dataframe`` can be read back as one. """
    source = {0: {'line': 'hello'}, 1: {'line': 'world'}}
    collection = ResultList()
    collection.dataframe = pd.DataFrame.from_dict(source)
    self.assertIsInstance(collection.dataframe, pd.DataFrame)
def _convert_results(results):
    """
    Converts a ``jira.client.ResultList`` of ``jira.resources.Issues``
    into a ``pyccata.core.resources.ResultList`` of
    ``pyccata.core.resources.Issues``

    Fields missing from ``issue.fields`` are stored as ``None``.

    Fix: every ``getattr(x, f) if hasattr(x, f) else None`` is exactly
    the three-argument ``getattr(x, f, None)``; the repeated copies are
    replaced with data-driven loops. Behaviour is unchanged.
    """
    # Fields copied verbatim from issue.fields onto the Issue item.
    simple_fields = (
        'summary', 'issuetype', 'created', 'updated', 'priority',
        'description', 'status', 'project', 'fixVersions', 'resolution',
        'resolutiondate', 'creator', 'assignee'
    )
    # Jira custom-field ids mapped to their friendly attribute names.
    custom_fields = (
        ('release_text', 'customfield_10600'),
        ('business_representative', 'customfield_10700'),
        ('rollout_instructions', 'customfield_10800'),
        ('rollback_instructions', 'customfield_10801'),
    )

    result_set = ResultList()
    result_set.total = results.total if hasattr(results, 'total') else len(results)
    for issue in results:
        item = Issue()
        item.key = getattr(issue, 'key', None)
        for field in simple_fields:
            setattr(item, field, getattr(issue.fields, field, None))
        item.attachments = Jira._get_attachments(
            getattr(issue.fields, 'attachment', None)
        )
        for attribute, field in custom_fields:
            setattr(item, attribute, getattr(issue.fields, field, None))
        # Pipelines are unwrapped to their plain values. The comprehension
        # variable is renamed: the original reused ``item``.
        if hasattr(issue.fields, 'customfield_10802'):
            item.pipelines = [
                entry.value for entry in getattr(issue.fields, 'customfield_10802')
            ]
        result_set.append(item)
    return result_set
class Filter(Threadable):
    """
    The Filter object provides a base class for all filters
    requested by the application.

    A filter runs a query against the assigned project manager on a
    worker thread, collects the outcome into a ``ResultList`` and,
    once complete, hands a copy of the results to every registered
    observer.
    """
    # pylint: disable=too-many-instance-attributes,too-many-public-methods
    # It is understood that this class requires a number of attributes and
    # accessor methods.

    __implements__ = (ObservableInterface, )

    # Thread scheduling priority for filter threads.
    PRIORITY = 1000

    # Class-level defaults; real values are assigned in ``setup``.
    _query = None
    _fields = None
    _results = None
    _observers = None
    _max_results = None
    _projectmanager = None
    _observing = False
    _group_by = None

    @property
    def projectmanager(self):
        """ get the project manager assigned to this filter """
        return self._projectmanager

    @projectmanager.setter
    @accepts(ProjectManager)
    def projectmanager(self, manager):
        """ assign a project manager to the filter """
        self._projectmanager = manager

    @property
    def query(self):
        """ Get the query defined within this object """
        return self._query

    @property
    def group_by(self):
        """ Get the clause to group results by """
        return self._group_by

    @property
    def fields(self):
        """ Get a list of fields to restrict the search to """
        return self._fields

    @property
    def max_results(self):
        """ Get the max results returned by this filter """
        return self._max_results

    @property
    def results(self):
        """ Get the results of the filter search """
        return self._results.collate

    @property
    def name(self):
        """ Get the result-set name, falling back to the parent thread name """
        return self._results.name if self._results is not None else super().name

    @property
    def hasobservers(self):
        """ returns True if the number of items observing this one is > 0 """
        return len(self._observers) > 0

    @property
    def observing(self):
        """ Is this item being observed? """
        return self._observing

    @observing.setter
    @accepts(bool)
    def observing(self, value):
        """ Tell the item that it is being observed """
        self._observing = value

    @property
    def observers(self):
        """ get the list of observers to this object """
        return self._observers

    @accepts(
        str,
        max_results=(bool, int),
        fields=(None, list),
        collate=(None, tuple, str),
        distinct=bool,
        namespace=(None, str),
        group_by=(None, str)
    )
    def setup(self, query, max_results=0, fields=None, collate=None, distinct=False, namespace=None, group_by=None):
        """
        Initialise the filter

        @param query string The query to search with
        @param max_results [bool|int] If False will retrieve all matching issues in batches of 50
        @param fields list An optional list of fields to retrieve. If empty, retrieves all fields
        """
        # pylint: disable=arguments-differ
        # It is understood that the arguments will always differ
        # from the super() class on this method due to the use of
        # *args and **kwargs in the call from __init__
        # pylint: disable=too-many-arguments
        # This class requires a large number of arguments to come from the config
        self._query = Replacements().replace(query)
        self._fields = fields
        self._observers = []
        self._max_results = max_results
        # Only look up a collation method if one was requested.
        collation = Collation.get(
            collate, namespace=namespace) if collate is not None else collate
        self._results = ResultList(collate=collation, distinct=distinct, namespace=namespace)
        self._group_by = group_by

    @accepts(ObservableInterface)
    def append(self, item):
        """ Append an observer to the list of observables """
        # Mark the item as observed before registering it.
        item.observing = True
        self._observers.append(item)

    @accepts((bool, ResultList))
    def notify(self, results):
        """
        notify observers or store results

        Called with a falsy value, this fans the current results out to
        every registered observer; called with a non-empty ResultList it
        stores the results and marks the thread complete.
        """
        if not results:
            Logger().info(
                'Notifying {0} threads of the results of filter with query \'{1}\''.format(
                    len(self._observers), self._query))
            for observer in self._observers:
                # Each observer receives its own copy of the results.
                observer.notify(self._results.copy())
        else:
            self._results = results
            self._complete = True

    def run(self):
        """
        Triggers the current thread and any child threads (if defined)

        The status of this thread can then be monitored via the
        ``Filter::complete`` property.
        """
        try:
            # A project manager must be assigned before the filter can run.
            assert self.projectmanager is not None
            if not self._complete:
                results = self.projectmanager.search_issues(
                    search_query=self.query,
                    max_results=self.max_results,
                    fields=self.fields,
                    group_by=self.group_by
                )
                if isinstance(results, MultiResultList):
                    # pylint: disable=protected-access
                    # We need access to results collation method
                    # in order to ensure collation is passed into sub-lists
                    results._namespace = self._results._namespace
                    results._field = self._results.field
                    results.collate = self._results._collate
                    results.distinct = self._results._distinct
                    results.group_by = self._group_by
                    results._subquery = self._results._subquery
                    self._results = results
                else:
                    self._results.extend(results)
                # Falsy argument == broadcast results to the observers.
                self.notify(False)
                self._complete = True
        # pylint: disable=broad-except
        # It is necessary to have a broad exception case here
        # as we need to set the failure state of the thread
        # regardless of what type of exception is thrown
        except Exception as exception:
            if isinstance(exception, ThreadNotStartedError):
                # The thread pool is not ready yet - back off and retry.
                sleep(Threadable.THREAD_SLEEP)
                self.run()
            else:
                self.failure = exception
class ThreadableCommand(Threadable):
    """
    This class can be used to process shell commands

    Command strings are split on ``' | '`` into a pipeline of ``Command``
    structures, executed via ``subprocess.Popen`` in ``run``, and the
    final process's standard output is collected into a ``ResultList``
    of ``CommandLineResultItem`` objects.
    """
    # pylint: disable=too-many-instance-attributes,too-many-public-methods
    # It is understood that this class requires a number of attributes and
    # accessor methods.

    MAX_PRIORITY = 10000
    # Fix: PRIORITY was assigned twice (0, then 1000 a few lines later);
    # only the final assignment ever took effect, so the dead first
    # assignment has been removed.
    PRIORITY = 1000

    _redirect_regex = None
    _commands = None
    _redirects = None
    _results = None
    _block = None

    @property
    def results(self):
        """ Get the results of command """
        return self._results

    @property
    def threadmanager(self):
        """ Get the current loaded threadmanager """
        return self._thread_manager

    @accepts(ThreadManager, tuple, append=bool)
    def __init__(self, threadmanager, config, append=True):
        """
        Initialise the ThreadableCommand object

        @param threadmanager ThreadManager
        @param config        namedtuple - unpacked into the parent
                             constructor via ``config._asdict()``
        @param append        bool - when True, register this thread with
                             the thread manager once construction completes
        """
        self._thread_manager = threadmanager
        self._commands = []
        self._observers = []
        self._redirects = []
        self._results = ResultList()
        # pylint: disable=protected-access
        # verification that Configuration is not initialised and initialise it
        # with the thread manager - this is desired behaviour against the singleton
        # to prevent it being loaded multiple times inside threading objects
        if Configuration._instance is None:
            Configuration._instance = self.threadmanager.configuration
        self.validate_setup(config)
        super().__init__(**config._asdict())
        if append:
            self.threadmanager.append(self)

    @accepts(
        name=str,
        command=str,
        input_directory=(None, str),
        output_directory=(None, str),
        wait_for=(None, str, Threadable)
    )
    def setup(self, name='', command='', input_directory=None, output_directory=None, wait_for=None):
        """
        Sets up the thread and builds the command structure.

        @param name             string
        @param command          string
        @param input_directory  string or None
        @param output_directory string or None
        @param wait_for         string, None or ThreadableCommand

        If this method is overridden, the overriding method must call back
        up to parent or implement this functionality directly. Failure to
        do this will prevent the thread from executing.

        The flag `wait_for` will prevent the thread from executing until
        the wait_for thread has completed. This is useful in the event a
        command needs the complete output from another thread before it
        can work, for example, downloading data then parsing it.

        If `wait_for` is None, the thread will execute as soon as there is
        room in the pool. (Default pool size = ThreadManager.POOL_SIZE).
        """
        # pylint: disable=arguments-differ
        # This method provides specific implementations of *args and **kwargs
        # pylint: disable=too-many-arguments
        # This method requires a larger number of arguments than the standard
        self.thread_name = name
        self._input_directory = Replacements().replace(input_directory)
        self._output_directory = Replacements().replace(
            output_directory,
            additional=ThreadableCommand.replacements(output_directory)
        )
        self._build_command(command)
        self._block = wait_for

    @accepts(str)
    def _build_command(self, command_string):
        """
        Breaks down a command string and returns a list of commands
        initialised for subprocess.Popen

        @param command_string string
        """
        # Fix: the redirect regex is loop-invariant - compile it once
        # instead of once per piped command.
        redirect_regex = re.compile(
            r"(?P<command>.*?)( ((?P<redirect>[&\d]?)>+ ?&?(?P<filename>\S+)))( ?< ?(?P<infile>.*))?",
            re.DOTALL
        )
        for command_structure in command_string.split(' | '):
            command = Command()
            matches = [match.groupdict() for match in redirect_regex.finditer(command_structure)]
            if not matches:
                # No redirects - the whole structure is the command line.
                try:
                    structure = shlex.split(command_structure)
                except ValueError:
                    Logger().error('Failed to parse command \'' + command_structure + '\'')
                    raise
                command.command = structure[0]
                command.arguments = structure[1:]
            else:
                try:
                    structure = shlex.split(matches[0]['command'])
                except ValueError:
                    Logger().error('Failed to parse command \'' + matches[0]['command'] + '\'')
                    raise
                command.command = structure[0]
                command.arguments = structure[1:]
                for match in matches:
                    # Numeric descriptors/filenames are stored as ints,
                    # anything else is kept as the raw string.
                    command.redirects.append(
                        Redirect(
                            redirect_input=(
                                int(match['redirect'])
                                if match['redirect'].isdigit() else match['redirect']
                            ),
                            redirect_output=(
                                int(match['filename'])
                                if match['filename'].isdigit() else match['filename']
                            )
                        )
                    )
            self._commands.append(command)

    def run(self):
        """
        Executes the current thread

        Each command in the pipeline is spawned with its stdin connected
        to the previous process's stdout. Once the pipeline is built, the
        final process's stdout is read into the result list and anything
        written to its stderr marks the thread as failed.
        """
        processes = []
        for command in self._commands:
            # Chain the pipeline: stdin of this process is the stdout of
            # the previous one (None for the first command).
            last_pipe = processes[-1].stdout if len(processes) > 0 else None
            processes.append(
                Popen(
                    [command.command] + command.arguments,
                    stdin=last_pipe,
                    stdout=command.stdout,
                    stderr=command.stderr
                )
            )
            command.return_code = processes[-1].poll()

        if processes[-1].stdout is not None and hasattr(processes[-1].stdout, 'readline'):
            for line in iter(processes[-1].stdout.readline, b''):
                item = CommandLineResultItem()
                item.line = line.decode('utf8').strip()
                self._results.append(item)

        stderr = []
        if processes[-1].stderr is not None:
            for line in iter(processes[-1].stderr.readline, b''):
                stderr.append(line.decode('utf8').strip())
        processes[-1].communicate()

        if len(stderr) != 0:
            self.failure = ThreadFailedError(stderr)
        self._complete = True

    @staticmethod
    def replacements(string_to_search):
        """
        Compiles a list of optional string replacements for command
        thread strings

        @return dict mapping '<what>.<command>' to the helper's output,
                or None when the string contains no replacement tokens.
        """
        replacements = {}
        function_regex = re.compile(
            r'.*?[-_.]{1}\{(?P<what>.*?)\.(?P<command>.*?)\}',
            re.DOTALL
        )
        matches = [match.groupdict() for match in function_regex.finditer(string_to_search)]
        if not matches:
            return None
        for match in matches:
            string = '{0}.{1}'.format(match['what'], match['command'])
            what = match['what']
            # Resolve '{WHAT}' style replacements before calling the helper.
            if Replacements().find(what.upper()):
                what = Replacements().replace('{' + what.upper() + '}')
            command = '{0}_helper'.format(match['command'])
            try:
                command = include(command, 'pyccata', 'helpers')
            except InvalidModuleError:
                Logger().error(
                    'Invalid helper method {0} specified for {1}'.format(
                        match['command'], string
                    )
                )
                raise
            replacements[string] = command(what)
        return replacements

    @staticmethod
    def logdir():
        """ Get a log directory for the command output """
        path = os.path.join(Replacements().replace('{BASE_PATH}'), 'log')
        # Create the log directory on first use.
        if not os.path.exists(path):
            os.makedirs(path)
        return path