def _download_attachments(self):
    """
    Downloads all attachments from the project manager.

    @raise InvalidCallbackError if no attachments callback has been
           registered on the project manager.
    """
    # attachments function is a callback to the project manager
    attachments_function = self.projectmanager.server.attachments
    # 'not x' already covers None - the old 'or ... is None' clause was redundant
    if not attachments_function:
        raise InvalidCallbackError('attachments callback function has not been set')
    for item in self._content:
        with open(os.path.join(self._output_path, item.filename), 'wb') as output_file:
            # create the handle before the try so it is always defined,
            # and close it in finally so it cannot leak on unexpected errors
            curl_instance = pycurl.Curl()
            try:
                attachment_url = attachments_function(str(item.attachment_id), item.filename)
                Logger().info('Downloading file \'' + item.filename + '\' from \'' +
                              attachment_url + '\'')
                curl_instance.setopt(curl_instance.URL, attachment_url)
                curl_instance.setopt(
                    pycurl.USERPWD,
                    '{username}:{password}'.format(
                        username=Configuration().jira.username,
                        password=Configuration().jira.password
                    )
                )
                curl_instance.setopt(curl_instance.WRITEDATA, output_file)
                curl_instance.perform()
                if curl_instance.getinfo(pycurl.RESPONSE_CODE) != 200:
                    Logger().error(
                        'Error in downloading attachments. Got response code {0}'.format(
                            curl_instance.getinfo(pycurl.RESPONSE_CODE)
                        )
                    )
            except pycurl.error as exception:
                # best-effort download: log and carry on with the next file
                Logger().error(exception)
            finally:
                curl_instance.close()
def _merge(results, collation):
    """
    Carry out a full outer merge on n dataframes

    @param results   list Result sets, each exposing .name, .dataframe and .columns
    @param collation object Collation config providing join.column, join.method
                     and partitions

    @return the merged dataframe
    """
    data = None
    names = [item.name for item in results]
    # NOTE(review): dd.dataframe is assumed to wrap each frame into a dask
    # dataframe (dask's documented entry point is dd.from_pandas) - confirm
    results = [
        dd.dataframe(item.dataframe, npartitions=collation.partitions)
        for item in results
    ]
    Logger().info('Creating merge table')
    # time.clock() was removed in Python 3.8; perf_counter() is the
    # documented replacement for interval timing
    merge_start = time.perf_counter()
    for index, item in enumerate(results):
        # suffix every non-join column with its dataset name so columns
        # from different result sets cannot collide after the merge
        columns = [
            column if column == collation.join.column else '{0}_{1}'.format(
                column, names[index])
            for column in item.columns
        ]
        item.columns = columns
        if index == 0:
            data = item
        else:
            suffixes = ('_{0}'.format(names[index - 1]),
                        '_{0}'.format(names[index]))
            data = data.merge(item,
                              on=collation.join.column,
                              how=collation.join.method,
                              suffixes=suffixes,
                              copy=False)
    Logger().info('Merge table created in {0} seconds. ({1} rows)'.format(
        time.perf_counter() - merge_start, len(data.index)))
    return data
def client(self):
    """
    Lazy load the python-jira.client.JIRA object

    @return JIRA

    @raise InvalidConnectionError if the JIRA server rejects the connection
    """
    if self._client is None:
        Logger().info('Establishing JIRA Api connection...')
        try:
            self._client = JIRA(
                options=self._options,
                basic_auth=(self.configuration.jira.username,
                            self.configuration.jira.password))
            Logger().info('Connection success')
        except JIRAError as exception:
            # fixed misspelling 'Recieved' -> 'Received' in the log message
            Logger().error(
                'Received HTTP/{code} whilst establishing connection to {server}'
                .format(code=exception.status_code,
                        server=self._options['server']))
            Logger().error('Message was: \'{message}\''.format(
                message=exception.text))
            raise InvalidConnectionError(exception.status_code,
                                         self._options['server'],
                                         exception.text,
                                         exception.headers)
    return self._client
def _load(self):
    """
    Attempts to load the configuration file from JSON

    @raise IOError when no configuration file can be found in any
           candidate location.
    """
    def _as_config(mapping):
        # turn each decoded JSON object into an immutable 'Config' namedtuple
        return namedtuple('Config', mapping.keys())(*mapping.values())

    for location in self._get_locations():
        Logger().debug(
            'Checking for config file in \'{0}\''.format(location))
        path = os.path.join(str(location), self._filename)
        try:
            with open(path) as configuration_file:
                self._configuration = json.load(
                    configuration_file, object_hook=_as_config)
                Logger().debug(
                    'Using configuration from \'{0}\'.'.format(path))
                break
        except IOError:
            # file missing/unreadable here - try the next location
            pass

    if not self._configuration:
        raise IOError(
            'Invalid configuration file or path not provided (got \'{0}\')'
            .format(self._filename))

    self.validate_config(self._required_root_elements)

    if hasattr(self._configuration, 'replacements'):
        self._replacements = Replacements(
            configuration=self._configuration.replacements)

    if not self._dont_parse:
        self._parse_flags()
    self._is_loaded = True
def render(self, report):
    """
    Renders the current object into the document

    @param report ReportManager
    """
    self._report = report
    Logger().info('Writing table {0}'.format(
        self.title if self.title is not None else ''))
    # when the row source is a Filter, unpack its results (everything below,
    # up to the heading, only applies in that case - 'results' is unbound otherwise)
    if isinstance(self._rows, Filter):
        if len(self._rows.results) == 0:
            # nothing matched the filter - emit no table at all
            Logger().info('Empty table. Skipping...')
            return
        results = self._rows.results
        # default the column headings from the result set when none configured
        if self._columns is None or len(self._columns) == 0:
            self._columns = list(results.columns)
        if isinstance(results, MultiResultList):
            # non-combined multi-results are delegated to a per-result writer
            if not results.combine:
                return self._write_multi(results)
            # combined: flatten the nested result sets into one list
            results = [item for result in results for item in result]
        # NOTE(review): dataframe is None appears to mean "issue-based results";
        # rows are then rebuilt from each issue's attributes - confirm
        if hasattr(self._rows.results, 'dataframe') and self._rows.results.dataframe is None:
            fields = self._rows.fields
            self._rows = []
            for issue in results:
                # field names are normalised to attribute names ('Some Field' -> some_field)
                self._rows.append([
                    getattr(issue, field.replace(' ', '_').lower())
                    for field in fields
                ])
    if self.title is not None:
        self._write_heading()
    self._write_table(self._rows)
def sql_handler(destination, filepath):
    """
    File a named SQL script into the SysOps instruction tree.

    @param destination string Root directory of the instruction structure
    @param filepath    string Path to the SQL file to move
    """
    try:
        Logger().info('Adding file \'' + filepath + '\'')
        sql_file = ReleaseInstructions._sql_filename(os.path.basename(filepath))
        output_directory = os.path.join(
            destination,
            sql_file.direction.lower(),
            sql_file.ticket_order,
            sql_file.ticket_id,
            sql_file.server,
            sql_file.database
        )
        create_directory(output_directory)
        prefix = (
            sql_file.operation_order + '_'
            if sql_file.operation_order is not None else ''
        )
        os.rename(
            filepath,
            os.path.join(output_directory, prefix + sql_file.operation + '.sql')
        )
    # pylint: disable=broad-except
    # We are not bothering to handle invalidly named SQL files - these will be
    # ignored from the instruction sent to SysOps.
    # ANY error here and the file gets bounced back to the developer.
    except Exception as exception:
        Logger().error('Failed to handle SQL file \'' + os.path.basename(filepath) + '\'')
        Logger().error('reason: ' + str(exception))
def _parse_structure(self, config):
    """
    Parses the provided configuration and converts it to a document structure

    @param config list A list of items defining the section content

    @return list of constructed document components
    """
    structure = []
    if config is None:
        return structure
    for item in config:
        constructor = getattr(self._factory, item.type)
        arguments = item.content
        heading = item.title if hasattr(item, 'title') else None
        # pylint: disable=broad-except
        # anything can go wrong when adding a new item.
        # we need to record what did go wrong and move on
        try:
            component = constructor(self.threadmanager, arguments)
            if heading is not None:
                component.title = heading
            structure.append(component)
        except Exception as exception:
            Logger().warning(
                'Failed to add item of type \'{0}\''.format(item.type))
            Logger().warning(exception)
            Logger().warning(arguments)
    return structure
def run(self):
    """
    wait for the filter to complete
    """
    # NOTE(review): this polls without sleeping between iterations -
    # confirm the busy-wait is intentional (other threads sleep on
    # Threadable.THREAD_SLEEP)
    while not self._complete:
        if self._content.failed:
            Logger().warning('Failed to execute ' + self._content.query)
            Logger().warning(self._content.failure)
            self._complete = True
            return
        self._complete = self._content.complete

    # flatten every issue's attachments into a single list
    content = [item for issue in self._content.results for item in issue.attachments]

    contents = []
    for collate in self._collate:
        # apply the named collation when Attachments provides it,
        # otherwise pass the content through untouched
        if hasattr(Attachments, collate):
            contents.append(getattr(Attachments, collate)(content))
        else:
            contents.append(content)
    self._content = [item for content in contents for item in content]

    if not self._content:
        Logger().warning('No Attachments to download. Skipping.')
        self._complete = True
        return

    try:
        self._download_attachments()
    except InvalidCallbackError as exception:
        Logger().error(exception)
        self.failure = exception
def save(self, filename):
    """
    Persist the current report to disk via the report manager.

    @param filename string
    """
    Logger().info('Saving document to file')
    self.reportmanager.save(filename)
    Logger().info('Done')
def test_input_with_custom_vids(self, mock_write, mock_input):
    """
    Logger.input should echo the prompt to the view and return the
    line read from the configured stdin.
    """
    mock_input.readline.return_value = "5\n"
    self.tearDown()
    self.logger = Logger(view=TestLogger._ValidView(),
                         stdin=TestLogger._ValidView())
    value = self.logger.input('please enter a number')
    mock_write.write.assert_called_with(Logger.INFO + 'please enter a number > ')
    # assertEquals is a deprecated alias removed in Python 3.12 - use assertEqual
    self.assertEqual(value, '5')
def method(self, collate):
    """
    Set the collation method

    @param collate string|None Name of the collation method to import

    @raise InvalidModuleError when the named method cannot be imported
    """
    try:
        function = self._import(collate)
    except InvalidModuleError as exception:
        # only log when a method was actually requested
        if collate is not None:
            Logger().error('Failed to set collation method')
            Logger().error(exception)
        # bare raise preserves the original traceback (was 'raise exception');
        # re-raising unconditionally also guarantees 'function' is never
        # read while unbound below
        raise
    self._method = function
def run(self):
    """
    Execute the current thread
    """
    # NOTE(review): nothing inside this loop visibly assigns self._complete
    # and there is no sleep between iterations - presumably save() or an
    # observer flips the flag elsewhere; confirm this cannot spin forever.
    while not self._complete:
        if self._content.complete:
            self.save()
        if self._content.failed:
            Logger().warning('Failed to execute \'' + self._content.query + '\'')
            Logger().warning('Reason was:')
            Logger().warning(self._content.failure)
def search_issues(self, search_query='', max_results=0, fields=None, group_by=None):
    """
    Search for issues in JIRA

    @param search_query string A JQL formatted query
    @param max_results [int|bool] If false will load all issues in batches of 50
    @param fields list A list of fields to include in the results

    @return ResultSet

    @raise InvalidQueryError      on a malformed JQL query
    @raise InvalidConnectionError on any other JIRA API failure
    """
    # pylint: disable=unused-argument
    # The group_by argument is required by other clients
    # but will never be used within this class
    try:
        if isinstance(fields, list):
            fields = ','.join(fields)
        if max_results == 0:
            max_results = Jira.MAX_RESULTS
        results = self.client.search_issues(search_query,
                                            maxResults=max_results,
                                            fields=fields)
        Logger().debug('Got \'' + str(len(results)) + '\' results for query ' + search_query)
        return Jira._convert_results(results)
    except JIRAError as exception:
        # HTTP 400 with a JQL complaint means the query itself was bad
        if exception.status_code == 400 and re.match(
                '.*Error in the JQL Query.*', exception.text):
            raise InvalidQueryError(str(exception))
        raise InvalidConnectionError(exception.status_code,
                                     self._options['server'],
                                     exception.text,
                                     exception.headers)
def __init__(self):
    """
    Singleton-style initialiser - returns immediately when already loaded.
    """
    if self._is_loaded:
        return
    super().__init__()

    # thread bookkeeping
    self._managed_threads = 0
    self._complete_threads = 0
    self._failed_threads = []
    self._pool = []
    self._complete = False

    Logger().debug('Loading project manager')
    self._projectmanager = ProjectManager()
    Logger().debug(str(self._projectmanager.threadmanager))
    # hand ourselves to the project manager so its client can schedule work
    self._projectmanager.threadmanager = self

    self._querymanager = QueryManager()
    self._configuration = Configuration()
    self._is_loaded = True
def run(self):
    """
    Simulate a filter thread that always fails with a connection error.
    """
    Logger().info('Starting broken filter thread \'{0}\''.format(self._name))
    # the message text (including its 'Recieved' spelling) is part of the fixture
    self.failure = InvalidConnectionError(
        500,
        'http://jira.local:8080',
        'Recieved HTTP/500 whilst establishing a connection to jira.local.'
    )
def reporting(self, reporting):
    """
    Verify the reporting class exists and set its name if true

    @raise RequiredKeyError if a root element of the same name as
                            manager is not defined.
    @raise InvalidClassError if the required class does not exist
                             under NAMESPACE.module

    Example configuration:
        {
            "reporting": "docx",
            "report": " {
                ...
            }
        }

    Example module structure:
        pyccata.core
        |-- managers
            |-- clients
                |-- docx.py
                    class Docx(ReportingInterface):
                        ___implements__ = (ReportingInterface,)
    """
    Logger().info('Loading reporting class \'{0}\''.format(reporting))
    if not hasattr(self._configuration, 'report'):
        raise RequiredKeyError('\'<root>/report')

    # the error path now matches the 'managers.clients' package actually
    # checked below (it previously read '<ns>.clients.<name>', which is
    # inconsistent with the manager() setter)
    class_path = '{0}.managers.clients.{1}'.format(self.NAMESPACE, reporting)
    if not class_exists(self.NAMESPACE, 'managers.clients', reporting):
        raise InvalidClassError(reporting, class_path)
    self._reporting = reporting
def manager(self, manager):
    """
    Verify the manager class exists and set its name if true

    @raise RequiredKeyError if a root element of the same name as
                            manager is not defined.
    @raise InvalidClassError if the required class does not exist
                             under NAMESPACE.module

    Example configuration:
        {
            "manager": "jira",
            "jira": " {
                ...
            }
        }

    Example module structure:
        pyccata.core
        |-- managers
            |-- clients
                |-- jira.py
                    class Jira(ManagerInterface):
                        ___implements__ = (ManagerInterface,)
    """
    Logger().info('Loading manager class \'{0}\''.format(manager))
    # the configuration must carry a root element named after the manager
    if not hasattr(self._configuration, manager):
        raise RequiredKeyError('\'<root>/{0}'.format(manager))

    module_name = manager.lower()
    class_name = manager.title()
    class_path = '{0}.managers.clients.{1}'.format(self.NAMESPACE, module_name)
    if not class_exists(self.NAMESPACE, 'managers.clients', class_name):
        raise InvalidClassError(manager, class_path)
    self._manager = manager
def replacements(string_to_search):
    """
    Compiles a list of optional string replacements for command thread strings

    @param string_to_search string Command string to scan for {what.command} tokens

    @return dict mapping 'what.command' to the helper's result, or None
            when no tokens are present

    @raise InvalidModuleError when a referenced helper cannot be imported
    """
    pattern = re.compile(
        r'.*?[-_.]{1}\{(?P<what>.*?)\.(?P<command>.*?)\}',
        re.DOTALL
    )
    matches = [found.groupdict() for found in pattern.finditer(string_to_search)]
    if not matches:
        return None

    replacements = {}
    for match in matches:
        key = '{0}.{1}'.format(match['what'], match['command'])
        what = match['what']
        # expand the token through the Replacements registry when known
        if Replacements().find(what.upper()):
            what = Replacements().replace('{' + what.upper() + '}')
        helper_name = '{0}_helper'.format(match['command'])
        try:
            helper = include(helper_name, 'pyccata', 'helpers')
        except InvalidModuleError:
            Logger().error(
                'Invalid helper method {0} specified for {1}'.format(
                    match['command'], key
                )
            )
            raise
        replacements[key] = helper(what)
    return replacements
def _wait_for_load(self):
    """
    Pauses the client until all child threads have completed
    """
    Logger().debug('Waiting for CSV client items to load')
    while True:
        # stop polling as soon as every child item reports completion
        if all(item.complete for item in self):
            break
        sleep(Threadable.THREAD_SLEEP)
    Logger().info('Done loading CSV client')
def add_source(self, source):
    """
    Add a CSV file as a source to search in

    @param source string A path to a file containing the CSV source
    """
    try:
        # look the source definition up once instead of twice
        item = self._get_item(source)
        csvfile = CSVFile(
            os.path.join(self._datapath, source),
            item.DELIMITER,
            item.keys()
        )
        self.append(csvfile)
    except (OSError, ValueError) as exception:
        # best-effort: a bad source is logged and skipped, not fatal
        Logger().error('Failed to load file \'{0}\''.format(source))
        Logger().error(exception)
def combinatorics(results, collation):
    r"""
    Creates combination results between datasets based on the equation:
    ```
    g = 50 \par
    D = [d_1 ... d_n] \par
    l(x): x[start] - 200 \par
    u(x): x[end] + 200 \par
    \ \par
    R = \left [\forall d \in D: X_i = d \times \prod_{i \in [D - d]}D
    \Rightarrow (
        \forall p \in x[ y = (z- \lceil{\left |s \right |\over g}\rceil):
        z = (y+ \lceil{\left |s \right |\over g}\rceil) ]: \forall x \in X
    ): \left [x_s \cap x_{i \in[D-d]} \because (
        l(x_d) \geq l(x_{i \in [D-d]}) \land u(x_d) \leq u(x_{i \in [D-d]})
    ) - (
        l(x_d) \gnsim l(x_{i \in [D-d]}) \land u(x_d) \lnsim u(x_{i \in [D-d]})
    )\right ] \right ]
    ```

    :inputs: {
        "inclusive_query": "",
        "exclusive_query": ""
    }
    """
    # time.clock() was removed in Python 3.8; perf_counter() is the
    # documented replacement for interval timing
    method_start = time.perf_counter()
    parser = LanguageParser()
    extractor = DataExtraction(unique_keys=collation.join.column)

    # record the size of each dataset so the extractor can scale its batches
    sizes = {item.name: len(item) for item in results}
    extractor.set_sizes(sizes)
    extractor.names = [item.name for item in results]

    queries = parser.combinatorics(collation.query.inclusive,
                                   collation.query.exclusive,
                                   extractor.names,
                                   collation.limits)
    extractor.search(queries, results, collation.join.column)
    # the extractor searches asynchronously - poll until it finishes
    while not extractor.complete:
        time.sleep(Threadable.THREAD_SLEEP)
    extractor.set_results(queries, collation.join.column)
    Logger().info('Collation completed in {0} seconds'.format(
        (time.perf_counter() - method_start)))
    results.dataframe = (extractor, None)
    return extractor
def render(self, report):
    """
    Render the picture into the report

    @param report ReportManager

    @raise ThreadFailedError when called before the thread has executed
    """
    heading = self.title if self.title is not None else ''
    Logger().info('Adding picture {0}'.format(heading))
    if not self._complete:
        raise ThreadFailedError(
            'Failed to render document. Has thread been executed?')
    report.add_picture(os.path.join(os.getcwd(), self.filepath),
                       width=self.width)
def run(self):
    """
    Poll the table rows until every embedded Filter has finished.
    """
    while not self._complete:
        # a Filter as the row source: mirror its completion state directly
        if isinstance(self._rows, Filter):
            self._complete = self._rows.complete
            continue
        all_done = True
        for row in self._rows:
            for cell in row:
                if not isinstance(cell, Filter):
                    continue
                if cell.failed:
                    Logger().warning('Failed to execute \'' + cell.query + '\'')
                    Logger().warning('Reason was:')
                    Logger().warning(cell.failure)
                elif not cell.complete:
                    all_done = False
        if all_done:
            self._complete = True
def threadmanager(self, threadmanager):
    """
    If the client is capable of receiving a threadmanager, set it

    @param threadmanager ThreadManager
    """
    Logger().info('Assigning ThreadManager to ProjectManager client')
    client = self.client
    # only clients that expose a threadmanager attribute receive one
    if hasattr(client, 'threadmanager'):
        client.threadmanager = threadmanager
def render(self, document):
    """
    Render the text of the abstract into a series of paragraphs

    @param document ReportManager
    """
    Logger().info('Adding section abstract')
    replacer = Replacements()
    for paragraph in self._content:
        # each paragraph has its replacement tokens expanded before writing
        document.add_paragraph(replacer.replace(paragraph))
def _build_command(self, command_string):
    """
    Breaks down a command string and returns a list of commands
    initialised for subprocess.Popen

    @param command_string string A shell-style command, optionally piped
           with ' | ' and carrying redirections

    @raise ValueError when a command fragment cannot be tokenised
    """
    # compiled once - the pattern is invariant across piped fragments
    # (it was previously recompiled on every loop iteration)
    redirect_regex = re.compile(
        r"(?P<command>.*?)( ((?P<redirect>[&\d]?)>+ ?&?(?P<filename>\S+)))( ?< ?(?P<infile>.*))?",
        re.DOTALL
    )
    for command_structure in command_string.split(' | '):
        command = Command()
        matches = [match.groupdict()
                   for match in redirect_regex.finditer(command_structure)]
        if len(matches) == 0:
            # no redirection syntax - tokenise the fragment as-is
            try:
                structure = shlex.split(command_structure)
            except ValueError:
                Logger().error('Failed to parse command \'' + command_structure + '\'')
                raise
            command.command = structure[0]
            command.arguments = structure[1:]
        else:
            try:
                structure = shlex.split(matches[0]['command'])
            except ValueError:
                Logger().error('Failed to parse command \'' + matches[0]['command'] + '\'')
                # bare raise preserves the traceback ('raise exception' reset it)
                raise
            command.command = structure[0]
            command.arguments = structure[1:]
            for match in matches:
                # numeric descriptors become ints; '&' style stays a string
                command.redirects.append(
                    Redirect(
                        redirect_input=(
                            int(match['redirect'])
                            if match['redirect'].isdigit() else match['redirect']
                        ),
                        redirect_output=(
                            int(match['filename'])
                            if match['filename'].isdigit() else match['filename']
                        )
                    )
                )
        self._commands.append(command)
def render(self, report):
    """
    render the current object
    """
    Logger().info('\033[1mWriting section {0}\033[0m'.format(
        self.title if self.title is not None else ''))
    if len(self._structure) > 0:
        # count the filter-backed tables that came back empty; when every
        # child is such a table the section has nothing worth rendering
        empty_tables = sum(
            1 for item in self._structure
            if isinstance(item, Table)
            and isinstance(item.rows, Filter)
            and len(item.rows.results) == 0
        )
        if empty_tables == len(self._structure):
            # we probably have nothing to render
            Logger().info('Empty section. Skipping...')
            return
    report.add_heading(str(Replacements().replace(self._title)), self.level)
    self._abstract.render(report)
    for part in self._structure:
        part.render(report)
def notify(self, results):
    """
    notify observers or store results

    @param results ResultSet|None Falsy triggers a broadcast of this
           filter's own results; truthy stores the supplied results.
    """
    # NOTE(review): the polarity looks inverted at first glance but appears
    # deliberate - called with no results, this filter fans its OWN results
    # out to its observers; called WITH results (i.e. acting as an observer)
    # it simply adopts them. Confirm against the callers.
    if not results:
        Logger().info(
            'Notifying {0} threads of the results of filter with query \'{1}\''
            .format(len(self._observers), self._query))
        for observer in self._observers:
            # each observer receives its own shallow copy of the results
            observer.notify(self._results.copy())
    else:
        self._results = results
    self._complete = True
def __new__(cls):
    """
    Override for __new__ to check if ThreadManager has already been loaded.

    Returns the singleton instance, creating it on first call.
    """
    # pool scales with the machine: 64 workers per logical CPU,
    # recalculated on every call
    cls.POOL_SIZE = psutil.cpu_count(logical=True) * 64
    if cls._instance is not None:
        return cls._instance
    Logger().info('Loading thread manager')
    cls._is_loaded = False
    cls._instance = super(ThreadManager, cls).__new__(cls)
    return cls._instance
def __init__(self):
    """
    Initialise Jira
    """
    Logger().info('Initialising Jira interface')
    server = self.configuration.jira.server
    port = self.configuration.jira.port
    # append ':port' only when a port has actually been configured
    self._options['server'] = (server + ':' + port) if port != '' else server

    # explicitly turn error log files off
    JIRAError.log_to_tempfile = False