def _get_issues_collections(self, date_start, date_finish):
    """ Return two collections of issues, one for each JIRA. """
    io.info('Getting issues...')
    sk_issues = self._sk_helper.issues_by_worklog_date_range(
        date_start, date_finish)
    pub_issues = self._pub_helper.issues_by_worklog_date_range(
        date_start, date_finish)
    sk_collection = IssuesCollection(sk_issues)
    pub_collection = PubIssuesCollection(pub_issues)
    # Pull in the counterpart issues that are linked from the other JIRA.
    sk_issues = self._sk_helper.issues(pub_collection.sk_keys)
    pub_issues = self._pub_helper.get_issues_by_sk_links(
        sk_collection.links)
    sk_collection.merge(sk_issues)
    pub_collection.merge(pub_issues)
    sk_collection = self._add_sk_worklogs(sk_collection, date_start,
                                          date_finish)
    pub_collection = self._add_pub_worklogs(pub_collection, date_start,
                                            date_finish)
    return sk_collection, pub_collection
def unsync_sk_issues(self, pub_map, sk_issues):
    """
    Return all SK issues whose logged time differs from the matching
    PUB issues, printing a diff line for each comparison.

    :param pub_map: mapping of SK key -> list of PUB issues
    :param sk_issues: mapping of SK key -> SK issue
    :return: dict of SK key -> SK issue with a non-zero time difference
    """
    unsync_sk_issues = {}
    for sk_key, pub_issues in pub_map.items():
        pub_time = sum(pub_issue.spent_time for pub_issue in pub_issues)
        if sk_key is None:
            # PUB issues without an SK counterpart can only be reported.
            io.print_time_diff_line(pub_issues, None, time_diff=pub_time,
                                    status="warning")
            continue
        sk_issue = sk_issues[sk_key]
        time_diff = pub_time - sk_issue.spent_time
        io.print_time_diff_line(pub_issues, sk_issue, time_diff)
        if time_diff != 0:
            unsync_sk_issues[sk_key] = sk_issue
    return unsync_sk_issues
def find_start_stop_fragments(self) -> SeqFragmentCollection:
    start_codons = config.START_CODONS
    stop_codons = config.STOP_CODONS
    start_stop_fragments = []
    nucleotides = self.seq_record.seq
    IO.print('Parsing start-stop codon sequences...')
    for nuc_strand in [nucleotides, nucleotides.reverse_complement()]:
        # Scan all three reading frames of each strand.
        for frame in range(3):
            frame_codons = self.get_codons(nuc_strand, frame)
            fragment = Seq('')
            is_read_started = False
            for codon in frame_codons:
                if codon in start_codons:
                    fragment += codon
                    is_read_started = True
                elif is_read_started and codon not in stop_codons:
                    fragment += codon
                elif is_read_started and codon in stop_codons:
                    # A stop codon closes the current fragment.
                    fragment += codon
                    start_stop_fragments.append(fragment)
                    fragment = Seq('')
                    is_read_started = False
    IO.success('Done')
    return SeqFragmentCollection(
        self.seq_record,
        self.__filter_out_shorter_than(
            start_stop_fragments, config.MIN_START_STOP_FRAGMENT_LENGTH))
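# `get_codons` is not defined in this snippet. A minimal sketch of the
# assumed helper (it would live on SeqUtils; the exact implementation is an
# assumption): split a strand into consecutive triplets starting at the
# frame offset, dropping any trailing partial codon.
def get_codons(strand, frame):
    return [strand[i:i + 3] for i in range(frame, len(strand) - 2, 3)]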
def wrapped(*args, **kwargs):
    # `fn` and `message` come from the enclosing decorator's scope.
    try:
        return fn(*args, **kwargs)
    except Exception as e:
        # Prefer the explicit message; fall back to the exception text.
        if message is None:
            io.error(str(e))
        else:
            io.error(message)
        exit()
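# The `wrapped` closure above belongs to a decorator; a minimal sketch of the
# assumed enclosing factory (the name `catch_and_exit` is hypothetical):
def catch_and_exit(message=None):
    def decorator(fn):
        def wrapped(*args, **kwargs):
            try:
                return fn(*args, **kwargs)
            except Exception as e:
                io.error(message if message is not None else str(e))
                exit()
        return wrapped
    return decorator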
@staticmethod
def __filter_out_shorter_than(fragments: List[Seq],
                              min_length: int) -> List[Seq]:
    filtered = []
    IO.print(
        f'Filtering out sequences shorter than {min_length} symbols...')
    for fragment in fragments:
        # Keep fragments of at least min_length, matching the message above.
        if len(fragment) >= min_length:
            filtered.append(fragment)
    IO.success('Done')
    return filtered
def create_pub_issue(self, sk_issue):
    """
    Migrate an SK issue to the PUB JIRA after interactive confirmation.

    :param sk_issue: the SK issue to migrate
    :return: the created PUB issue, or None if the user declined
    """
    fields = self.convert_fields(sk_issue)
    click.echo("\nPlease confirm migration:")
    io.print_dict(fields, indent=1)
    if not click.confirm('\nMigrate?', default=False):
        return None
    return self._pub_jira.create_issue(fields=fields)
def get_dicodon_freq_table(
        self,
        seq_fragment_collection: SeqFragmentCollection) -> FrequencyTable:
    freq_table = dict.fromkeys(self.__possible_dicodons, 0)
    fragments = seq_fragment_collection.get_fragments()
    IO.print('Calculating dicodon frequencies...')
    for frag in fragments:
        frag_dicodons = SeqUtils.get_dicodons(frag)
        for dicodon in self.__possible_dicodons:
            freq_table[dicodon] += frag_dicodons.count(dicodon)
    IO.success('Done')
    total_items = self.__get_total_dicodons_in_fragments(fragments)
    normalized_table = self.__normalize_freq_table(freq_table, total_items)
    return FrequencyTable(normalized_table,
                          seq_fragment_collection.get_seq_record())
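# `SeqUtils.get_dicodons` is not shown here. A minimal sketch of the assumed
# helper: slide over the fragment one codon at a time and emit consecutive
# codon pairs (whether adjacent dicodons overlap by one codon is an
# assumption).
def get_dicodons(fragment):
    return [str(fragment[i:i + 6]) for i in range(0, len(fragment) - 5, 3)]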
def do(self, date_start):
    """
    Find and sync worklog differences between the two JIRAs.

    :type date_start: dt
    :param date_start: first day of the range to check
    """
    date_start = date_start.replace(hour=0, minute=0, second=0,
                                    microsecond=0)
    date_finish = dt.today().replace(tzinfo=date_start.tzinfo, hour=23,
                                     minute=59, second=59)
    sk, pub = self._get_issues_collections(date_start, date_finish)
    for date in date_range(date_start, date_finish):
        io.echo_date(date)
        # Collect every SK key with worklogs on this date, either
        # directly or through a linked PUB issue.
        sk_keys = sk.filter_by_worklog_date(date).keys
        sk_keys += pub.filter_by_worklog_date(date).sk_keys
        sk_keys = list(set(sk_keys))
        worklogs_diff = []
        for sk_key in sk_keys:
            sk_issue = sk.get(sk_key)
            sk_worklogs = sk_issue.worklogs.filter_by_date(
                date) if sk_issue else WorklogsCollection()
            pub_collection = pub.filter_by_sk_key(
                sk_key).filter_by_worklog_date(date)
            time_diff = pub_collection.total_worklogs_time(
                date) - sk_worklogs.total_time
            if time_diff != 0 and sk_issue:
                worklogs_diff.append((sk_issue, pub_collection, date))
            self._print_line(time_diff, sk_issue, pub_collection)
        if worklogs_diff and self._confirm(worklogs_diff):
            self._sync_time(worklogs_diff)
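# `date_range` is imported from elsewhere in the project; a minimal sketch of
# the assumed behavior (yield one datetime per day, inclusive of the start):
from datetime import timedelta

def date_range(start, finish):
    current = start
    while current <= finish:
        yield current
        current += timedelta(days=1)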
def main():
    try:
        sk_jira = config.JiraFactory.create_sk()
        pub_jira = config.JiraFactory.create_pub()
    except (configparser.NoSectionError, configparser.NoOptionError):
        click.echo('Please edit the config file %s' %
                   click.format_filename(config.AppConfig.get_file_path()))
        return
    started = IO.input_days_ago()
    TimeSynchronizer(pub_jira, sk_jira).do_from(started)
def main():
    try:
        sk_jira = config.JiraFactory.create_sk()
        pub_jira = config.JiraFactory.create_pub()
    except (configparser.NoSectionError, configparser.NoOptionError):
        click.echo('Please edit the config file %s' %
                   click.format_filename(config.AppConfig.get_file_path()))
        return
    started = io.input_days_ago(default=14)
    IssueSync(pub_jira, sk_jira).do_many(started)
def _print_line(self, time_diff, sk_issue=None, pub_collection=None):
    """ Print a line describing the time difference. """
    pub_issue = pub_collection.first() if pub_collection else None
    pub_link = io.highlight_key(issue=pub_issue) if pub_issue else 'Not found'
    sk_link = io.highlight_key(issue=sk_issue) if sk_issue else 'Not found'
    summary = sk_issue.summary if sk_issue else pub_collection.first().summary
    hours = io.highlight_time(time_diff, prefix='[ ', suffix=' ]', ljust=5)
    click.echo('%s -> %s\t%s %s' % (pub_link.rjust(54), sk_link.ljust(41),
                                    hours, io.truncate_summary(summary)))
    if pub_collection:
        # Any additional PUB issues mapped to the same SK key.
        for issue in pub_collection.items[1:]:
            click.echo('%s' % io.highlight_key(issue=issue))
def do(self, sk_key):
    """
    Migrate an issue from the SK JIRA to PUB by its SK key.

    :param sk_key: key of the SK issue to migrate
    :return: the created PUB issue, or None
    """
    try:
        sk_issue = self._sk_jira.issue(sk_key)
    except Exception:
        raise Exception('Can\'t find the issue by key: %s' % sk_key)
    pub_issues = self._pub_jira.search_issues(
        "'External issue ID' ~ '%s'" % sk_issue.permalink())
    if pub_issues:
        click.echo('\nThis task has already been migrated to PUB: ')
        for issue in pub_issues:
            click.echo(io.highlight_key(issue=issue) + '\t' +
                       issue.fields.summary)
        if not click.confirm('\nContinue?', default=False):
            return
    click.echo('Beginning migration of %s' % io.highlight_key(issue=sk_issue))
    pub_issue = self.create_pub_issue(sk_issue)
    if pub_issue is None:
        return
    click.echo('Issue was migrated: %s' % io.highlight_key(issue=pub_issue))
    click.echo('Beginning migration of attachments')
    self.migrate_attachments(sk_issue, pub_issue)
    return pub_issue
def do_from(self, started):
    """
    Find and sync differences between the JIRAs, one day at a time.

    :type started: dt
    :param started: first day to synchronize
    """
    today = dt.today().replace(tzinfo=started.tzinfo)
    for date in date_range(started, today):
        date = date.replace(hour=0, minute=0, second=0, microsecond=0)
        io.print_date_line(date)
        # Build a [00:00:00, 23:59:59] UTC window for the day.
        started = date.astimezone(tz=timezone.utc)
        finished = started + timedelta(days=1) - timedelta(seconds=1)
        pub_issues, sk_issues = self.issues(started, finished)
        unsync_issues = self.unsync_sk_issues(pub_issues, sk_issues)
        if unsync_issues:
            self.sync(unsync_issues, pub_issues)
def _sync_time(self, items):
    """
    :param items: list of (sk_issue, pub_collection, date) tuples
    """
    for issue, pub_collection, date in items:
        # Replace the SK worklogs for this date with the PUB ones.
        self._sk_helper.remove_worklogs(
            issue.data, issue.worklogs.filter_by_date(date))
        if pub_collection is None:
            continue
        for pub_issue in pub_collection:
            for worklog in pub_issue.worklogs.filter_by_date(date):
                self._sk_helper.add_worklog(issue.data, worklog)
        click.echo('Synchronized %s' % io.highlight_key(issue=issue))
def migrate_issues(self, started):
    """
    Migrate issues from SK to PUB that were created since `started`.

    :param started: first day to look for new issues
    """
    today = dt.today().replace(tzinfo=started.tzinfo)
    days = int((today - started).days) + 1
    sk_issues = self._sk_jira.search_issues(
        'createdDate >= startOfDay(-%dd) and (assignee=currentUser() '
        'or worklogAuthor=currentUser())' % days,
        maxResults=100000)
    pub_issues = self._pub_helper.get_issues_by_sk_links(
        [sk_issue.permalink() for sk_issue in sk_issues])
    # Skip SK issues that already have a PUB counterpart.
    existing_sk_links = [PubIssue(issue).sk_url for issue in pub_issues]
    new_issues = [
        issue for issue in sk_issues
        if issue.permalink() not in existing_sk_links
    ]
    if not new_issues:
        click.echo('Nothing to do')
        return
    hidden_keys = config.AppConfig.read_hidden_keys()
    new_issues = [
        issue for issue in new_issues if issue.key not in hidden_keys
    ]
    m_issues, s_issues, h_issues = io.edit_unsync_issues(new_issues)
    h_keys = [h_issue.key for h_issue in h_issues]
    config.AppConfig.write_hidden_keys(h_keys)
    for issue in m_issues:
        self.migrate(issue.key)
def convert_fields(self, sk_issue):
    """
    Convert an SK issue into PUB issue fields.

    :param sk_issue: the SK issue to convert
    :return: dict of fields for the new PUB issue
    """
    click.echo()
    summary = sk_issue.key + ': ' + sk_issue.fields.summary
    summary = click.prompt('Summary', default=summary, type=str)
    estimate = io.input_jira_estimate('Original Estimate')
    labels = ['auto_migration']
    if sk_issue.fields.project.key == 'ULT':
        labels += ['ultra']
    return {
        'project': {
            'id': '10204',  # Sheknows DT project
            'name': 'SheknowsDT'
        },
        'issuetype': self.convert_issue_type(sk_issue.fields.issuetype),
        'summary': summary,
        'priority': self.convert_priority(sk_issue.fields.priority),
        'description': (sk_issue.fields.description
                        if sk_issue.fields.description else summary),
        'timetracking': {
            'originalEstimate': estimate
        },
        'labels': labels,
        config.Issue.EXTERNAL_ID_FIELD: sk_issue.permalink()
    }
def do_many(self, started):
    """
    Migrate all unsynced issues created since `started`.

    :param started: first day to look for new issues
    """
    today = dt.today().replace(tzinfo=started.tzinfo)
    days = int((today - started).days) + 1
    sk_issues = self._sk_jira.search_issues(
        'createdDate >= startOfDay(-%dd) and assignee=currentUser()' % days)
    unsync_issues = []
    for sk_issue in sk_issues:
        pub_issues = self._pub_jira.search_issues(
            "'External issue ID' ~ '%s'" % sk_issue.permalink())
        if pub_issues:
            continue
        unsync_issues.append(sk_issue)
    if not unsync_issues:
        click.echo('Nothing to do')
        return
    hidden_keys = config.AppConfig.read_hidden_keys()
    unsync_issues = [
        issue for issue in unsync_issues if issue.key not in hidden_keys
    ]
    m_issues, s_issues, h_issues = io.edit_unsync_issues(unsync_issues)
    h_keys = [h_issue.key for h_issue in h_issues]
    config.AppConfig.write_hidden_keys(h_keys)
    for issue in m_issues:
        self.do(issue.key)
def jira_credential(url, section):
    credentials = (url, None, None)
    try:
        # Pre-fill the prompt with any previously saved credentials.
        pub_old = AppConfig.read_jira_config(section)
        credentials = (pub_old.url, pub_old.username, pub_old.password)
    except (configparser.NoSectionError, configparser.NoOptionError):
        pass
    while True:
        try:
            config = IO.input_jira_credentials(*credentials)
            JiraFactory.create(config)
        except Exception:
            click.secho('Credentials are not valid', fg='red')
            if click.confirm('Try again?', default=True):
                credentials = (config.url, config.username, config.password)
                continue
            return
        click.secho('Credentials are valid', fg='green')
        AppConfig.write_jira_config(section, config)
        return
def migrate(self, sk_key):
    """ Migrate an issue from SK to PUB. """
    try:
        sk_issue = self._sk_jira.issue(sk_key)
    except Exception:
        io.error('Can\'t find the issue by key: %s' % sk_key)
        return
    pub_issues = self._pub_jira.search_issues(
        "'External issue ID' ~ '%s'" % sk_issue.permalink())
    if pub_issues:
        click.echo('\nThis task has already been migrated to PUB: ')
        for issue in pub_issues:
            click.echo(io.highlight_key(issue=issue) + '\t' +
                       io.truncate_summary(issue.fields.summary))
        if not click.confirm('\nContinue?', default=False):
            return
    click.echo('Beginning migration of %s' % io.highlight_key(issue=sk_issue))
    pub_issue = self.create_pub_issue(sk_issue)
    if pub_issue is None:
        click.echo('An error has occurred')
        return
    click.echo('Issue was migrated: %s' % io.highlight_key(issue=pub_issue))
    self.migrate_attachments(sk_issue, pub_issue)
    return pub_issue
    paths.append(f'{project_root()}/dna-data/{filename}')
    return paths


if __name__ == '__main__':
    start_time = time()
    freq_calculator = CodonFreqCalculator()
    codon_freq_tables = []
    dicodon_freq_tables = []
    for file in get_file_paths():
        seq_fragment_collection = SeqUtils(file).find_start_stop_fragments()
        codon_freq_tables.append(
            freq_calculator.get_codon_freq_table(seq_fragment_collection))
        dicodon_freq_tables.append(
            freq_calculator.get_dicodon_freq_table(seq_fragment_collection))
    print('')
    IO.print('Generating Phylip matrix for codon frequencies...')
    codon_matrix = PhylipMatrix(codon_freq_tables)
    codon_matrix.print()
    IO.print('Generating Phylip matrix for dicodon frequencies...')
    dicodon_matrix = PhylipMatrix(dicodon_freq_tables)
    dicodon_matrix.print()
    end_time = time()
    print(f'\nAll done, took {round(end_time - start_time, 2)}s')
def insert_iv(self, data):
    iv_sh = data['df_iv']
    iv_sh_digit = data['df_iv_digit']
    iv_info = data['iv_info_dict']
    insert_data = []
    for col in iv_sh.columns:
        if col in ('YYYYMM', 'DATE', 'DV'):
            continue
        for j in range(len(iv_sh[col])):
            row_date = datetime.datetime.strptime(
                iv_sh['YYYYMM'][j] + '01', '%Y%m%d').date()
            if row_date == self.params['t1']:
                # Only the t1 row carries the ADF test result and
                # the transformation parameters.
                adf_gb = '1' if iv_info[col]['adf_test'] else '0'
                elem = (str(self.params['id_nm']),
                        str(self.params['seq']),
                        str(iv_sh['YYYYMM'][j]),
                        str(self.params['dv']),
                        str(col),
                        float(iv_sh[col][j]),
                        adf_gb,
                        iv_info[col]['dir'],
                        iv_info[col]['nts'],
                        iv_info[col]['thres'],
                        iv_info[col]['a'],
                        iv_info[col]['b'],
                        iv_info[col]['c'],
                        iv_info[col]['d'],
                        int(iv_sh_digit[col][j]))
            else:
                elem = (str(self.params['id_nm']),
                        str(self.params['seq']),
                        str(iv_sh['YYYYMM'][j]),
                        str(self.params['dv']),
                        str(col),
                        float(iv_sh[col][j]),
                        None, None, None, None,
                        None, None, None, None,
                        int(iv_sh_digit[col][j]))
            insert_data.append(elem)
    conn = self.db.getConn()
    cur = conn.cursor()
    io = IO()
    file = io.print_df(
        'data_%s_%s_%s' % (self.params['id_nm'], self.params['seq'],
                           self.params['dv']),
        insert_data)
    try:
        warnings.filterwarnings('always', category=MySQLdb.Warning)
        with warnings.catch_warnings(record=True) as w:
            cur.execute(self.CONST.QR_DELETE_IND_VAR_INFO,
                        (self.params['id_nm'], self.params['seq'],
                         self.params['dv']))
            cur.execute(self.CONST.QR_INSERT_IND_VAR_INFO, file)  # todo change to file
            if len(w) > 0:
                self.logger.warning(w[-1].message)
        conn.commit()
    except Exception:
        conn.rollback()
        raise
    finally:
        io.remove_file(file)
        conn.close()
def __init__(self, filename: str):
    self.seq_record = SeqIO.read(filename, config.SEQ_FILE_FORMAT)
    IO.success('Loaded file ' + filename)