def from_api_to_db(data_list, url):
    func = lambda datum: dict(
        nome_proposicao=datum['nomeProposicao'],
        id_proposicao=datum['idProposicao'],
        id_proposicao_principal=datum['idProposicaoPrincipal'],
        nome_proposicao_origem=datum['nomeProposicaoOrigem'],
        tipo_proposicao=datum['tipoProposicao'],
        tema=datum['tema'],
        ementa=datum['Ementa'],
        explicacao_ementa=datum['ExplicacaoEmenta'],
        autor=datum['Autor'],
        ide_cadastro=datum['ideCadastro'],
        uf_autor=datum['ufAutor'],
        partido_autor=datum['partidoAutor'],
        data_apresentacao=to_date(datum['DataApresentacao'], '%d/%m/%Y'),
        regime_tramitacao=datum['RegimeTramitacao'],
        tipo_proposicao_sigla=datum['@tipo'].strip(),
        numero_proposicao=datum['@numero'],
        ano_proposicao=datum['@ano'],
        ultimo_despacho_data=to_date(datum['UltimoDespacho']['@Data'], '%d/%m/%Y'),
        ultimo_despacho=tryit(datum['UltimoDespacho'], key='#text'),
        apreciacao=datum['Apreciacao'],
        indexacao=datum['Indexacao'],
        situacao=datum['Situacao'],
        link_inteiro_teor=datum['LinkInteiroTeor'],
        data_captura=datetime.datetime.now(),
        url_captura=url)
    return map(func, data_list)
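# `to_date` and `tryit` are used throughout these mappers but defined
# elsewhere. A minimal sketch of what they appear to assume (signatures and
# behavior are assumptions, not the project's actual helpers):
import datetime


def to_date(value, fmt='%d/%m/%Y'):
    """Parse `value` with `fmt`; return None for missing or malformed input."""
    if not value:
        return None
    try:
        return datetime.datetime.strptime(value, fmt)
    except ValueError:
        return None


def tryit(mapping, key):
    """Forgiving lookup: mapping[key] if present, else None."""
    try:
        return mapping[key]
    except (KeyError, TypeError):
        return None


# Hypothetical usage: in Python 3 the mapper returns a lazy iterator, so
# materialize it before a bulk insert.
# rows = list(from_api_to_db(payload['proposicoes'], url))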
def extract_element_data(self, root):
    """
    Extract the information for an element on the website.
    """
    element_data = {}
    self.logger.debug('Extracting personal information')
    element_data['date_of_birth'] = to_date(
        xpath_value(root, '//h5[@class="perfil-details"]', self.logger))
    element_data['abstract'] = xpath_value(
        root, '//p[@class="perfil-details"]', self.logger)
    element_data['last_update'] = to_date(
        xpath_value(root, '//span[@class="actualizado"]', self.logger))
    rows = root.xpath('.//*[@id="collapse1"]/div/form/table/tr')
    for row in rows:
        label = row.xpath('.//td[@class="w2p_fl"]/label')
        if not label:
            continue
        key = label[0].get('for', None)
        # Skip labels without a usable 'for' attribute.
        if not key or '_' not in key:
            continue
        idx = key.index('_') + 1
        key = cc_to_us(key[idx:])
        value = xpath_value(row, './/td[@class="w2p_fw"]', self.logger)
        element_data[key] = value
    element_data['profile'] = xpath_html(root, '//div[@id="perfil"]', self.logger)
    return element_data
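# `xpath_value` is external to this class. A plausible minimal sketch of the
# helper (the name and logger argument come from the calls above; the
# implementation itself is an assumption):
def xpath_value(root, xpath, logger):
    """Return the stripped text of the first node matching `xpath`, or None."""
    nodes = root.xpath(xpath)
    if not nodes:
        logger.debug('No match for %s', xpath)
        return None
    return nodes[0].text_content().strip()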
def generate_data(instance):
    data_header = []
    for folder in instance.folders:
        folder_item = None
        folder_date_added = preset.no_date
        folder_date_modified = preset.no_date
        folder_guid = preset.empty
        folder_id = preset.empty
        folder_last_visited = preset.no_date
        folder_name = preset.empty
        folder_sync_transaction_version = preset.empty
        folder_type = preset.empty
        folder_url = preset.empty
        for item in folder:
            if item == preset.children:
                folder_item = read_content(folder[item])
            elif item == preset.meta_info:
                for element in folder[item]:
                    if element == preset.last_visited:
                        folder_last_visited = folder[item][element]
            elif item == preset.date_added:
                folder_date_added = folder[item]
            elif item == preset.date_modified:
                folder_date_modified = folder[item]
            elif item == preset.guid:
                folder_guid = folder[item]
            elif item == preset.item_id:
                folder_id = folder[item]
            elif item == preset.item_name:
                folder_name = folder[item]
            elif item == preset.sync_transaction_version:
                folder_sync_transaction_version = folder[item]
            elif item == preset.item_type:
                folder_type = folder[item]
            elif item == preset.url:
                folder_url = folder[item]
            else:
                tools.debug(preset.message["warning"] + str(item))
        folder_data = (folder_guid,
                       utils.to_number(folder_id),
                       utils.to_number(folder_sync_transaction_version),
                       folder_type,
                       utils.to_date(folder_date_added),
                       utils.to_date(folder_date_modified),
                       utils.to_date(folder_last_visited),
                       folder_name,
                       folder_url)
        # A folder without a children entry contributes no rows.
        for item in folder_item or ():
            data_header.append(folder_data + item + preset.trail)
    return data_header
def read_content(folder_items):
    url_list = []
    for folder_item in folder_items:
        url_date_added = preset.empty
        url_date_modified = preset.empty
        url_guid = preset.empty
        url_item_id = preset.empty
        url_last_visited = preset.empty
        url_name = preset.empty
        url_sync_transaction_version = preset.empty
        url_item_type = preset.empty
        url_address = preset.empty
        url_icon = preset.empty
        for item in folder_item:
            if item == preset.children:
                # Recurse into nested folders and keep their URLs too.
                url_list.extend(read_content(folder_item[item]))
            elif item == preset.meta_info:
                for element in folder_item[item]:
                    if element == preset.last_visited:
                        url_last_visited = folder_item[item][element]
            elif item == preset.date_added:
                url_date_added = folder_item[item]
            elif item == preset.date_modified:
                url_date_modified = folder_item[item]
            elif item == preset.guid:
                url_guid = folder_item[item]
            elif item == preset.icon:
                url_icon = folder_item[item]
            elif item == preset.item_id:
                url_item_id = folder_item[item]
            elif item == preset.item_name:
                url_name = folder_item[item]
            elif item == preset.sync_transaction_version:
                url_sync_transaction_version = folder_item[item]
            elif item == preset.item_type:
                url_item_type = folder_item[item]
            elif item == preset.url:
                url_address = folder_item[item]
            else:
                tools.debug(preset.message["warning"] + str(item))
        url_data = (url_guid,
                    utils.to_number(url_item_id),
                    utils.to_number(url_sync_transaction_version),
                    url_item_type,
                    utils.to_date(url_date_added),
                    utils.to_date(url_date_modified),
                    utils.to_date(url_last_visited),
                    url_name,
                    htmlSupport.clean_url(url_address),
                    url_address,
                    url_icon)
        parsed_url = htmlSupport.parse_url(url_address)
        url_list.append(url_data + parsed_url)
    return url_list
def fetch_forums(self):
    query = ("SELECT f.forum_id, f.title, f.tag, f.body, f.created_at, "
             "u.user_id, u.full_name, f.forum_image "
             "FROM forum AS f "
             "INNER JOIN user AS u ON f.user_id = u.user_id "
             "ORDER BY f.created_at DESC")
    try:
        with connection.cursor() as cursor:
            cursor.execute(query)
            rows = cursor.fetchall()
            # fetchall() returns an empty sequence, not None, when no rows match.
            if not rows:
                return None
            forums = []
            for row in rows:
                forum = Forum()
                forum.forum_id = row[0]
                forum.title = row[1]
                forum.tag = row[2]
                forum.body = row[3]
                forum.created_at = row[4]
                forum.created_date = to_date(row[4])
                user = User()
                user.user_id = row[5]
                user.full_name = row[6]
                forum.forum_image = row[7]
                forum.user = user
                forums.append(forum)
            return forums
    except Exception:
        traceback.print_exc()
        return None
def opt_remainder(front_months, weekday, symbol, curr, qdate, num):
    df = front_static(front_months, weekday, symbol, curr, qdate)
    if df.empty:
        print(
            f"{time.strftime('%H:%M:%S')} > Nothing to return from static, is spot data populated?"
        )
        return df
    # Drop strikes
    df = pd.DataFrame(filter_static(df, num), columns=["Strike", "Expiry"]).dropna()
    df["Expiry"] = utils.to_date(df["Expiry"].to_numpy())
    df["Type"] = "C"
    dfx = df.copy()
    dfx["Type"] = "P"
    df = pd.concat([df, dfx], axis=0).sort_values(by=["Strike", "Expiry", "Type"])
    df = df.set_index(["Expiry", "Type", "Strike"])
    del dfx
    # Note: qdate is interpolated directly into the SQL; only safe for trusted input.
    q = (f"select distinct Expiry, Type, Strike from dbo.[{symbol}] "
         f"where Date = convert(datetime, '{qdate}', 103)")
    try:
        opt = get("Quotes", q).set_index(["Expiry", "Type", "Strike"])
        opt["Exists"] = 1.0
    except Exception:
        # No existing quotes for this date: everything remains to be quoted.
        return df.reset_index()
    df = pd.concat([df, opt], axis=1)
    df = df[df["Exists"].isnull()]
    df = df.reset_index().drop(columns=["Exists"])
    return df
def get(self, date, direction):
    assert direction in (1, -1)
    if not self._loaded:
        raise RuntimeError('Data not loaded yet')
    if date is None:
        # requested first available date
        date = self._firstDate.strftime('%Y-%m-%d')
        return date, self.__cache[date]
    elif date in self.__cache:
        return date, self.__cache[date]
    dateObj = to_date(date)
    if self.dateWasLoaded(dateObj):
        # requested date in loaded range
        return self._get_nearest(date, dateObj, direction)
    elif direction != self._dateStep:
        # requested the opposite direction, so there is no point loading more data
        return False, False
    else:
        # data for this date not loaded yet
        while True:
            data, targets = self.load()
            if targets is False:
                # search ended
                return False, False
            elif date in self.__cache:
                return date, self.__cache[date]
            elif self.dateWasLoaded(dateObj):
                # requested date in loaded range
                return self._get_nearest(date, dateObj, direction)
def import_from_dict(self, importDict):
    for key in importDict:
        if key == 'p_start_time':
            self.__dict__[key] = to_date(importDict[key])
        elif key == 'p_period':
            self.__dict__[key] = timedelta(importDict[key])
        elif key.startswith('p_'):
            self.__dict__[key] = importDict[key]
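# Hypothetical round trip for the importer above: 'p_start_time' is parsed
# into a date, 'p_period' becomes a timedelta (days), other 'p_'-prefixed
# keys are copied verbatim, and anything else is ignored (the dict values
# here are invented for the example).
# obj.import_from_dict({'p_start_time': '2020-01-01',
#                       'p_period': 30,
#                       'p_target_amount': 1000,
#                       'other_key': 'ignored'})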
def format_transactions(self, transaction_items):
    if isinstance(transaction_items, list):
        for t in transaction_items:
            self.format_transactions(t)
    else:
        status = to_int(transaction_items.get('Status'))
        data = {
            'braspag_transaction_id': transaction_items.get('BraspagTransactionId'),
            'acquirer_transaction_id': transaction_items.get('AcquirerTransactionId'),
            'authorization_code': transaction_items.get('AuthorizationCode'),
            'amount': to_int(transaction_items.get('Amount')),
            'status': status,
            'status_message': self.STATUS[status],
            'proof_of_sale': transaction_items.get('ProofOfSale'),
        }
        # dict.has_key() was removed in Python 3; use the `in` operator.
        if 'MaskedCreditCardNumber' in transaction_items:
            data['masked_credit_card_number'] = transaction_items.get('MaskedCreditCardNumber')
        if 'ReturnCode' in transaction_items:
            data['return_code'] = transaction_items.get('ReturnCode')
        if 'ReturnMessage' in transaction_items:
            data['return_message'] = transaction_items.get('ReturnMessage')
        if 'PaymentMethod' in transaction_items:
            data['payment_method'] = to_int(transaction_items.get('PaymentMethod'))
        if 'CreditCardToken' in transaction_items:
            data['card_token'] = transaction_items.get('CreditCardToken')
        if 'PaymentMethodName' in transaction_items:
            data['payment_method_name'] = transaction_items.get('PaymentMethodName')
        if 'TransactionType' in transaction_items:
            data['transaction_type'] = to_int(transaction_items.get('TransactionType'))
        if 'ReceivedDate' in transaction_items:
            data['received_date'] = to_date(transaction_items.get('ReceivedDate'))
        if 'CapturedDate' in transaction_items:
            data['captured_date'] = to_date(transaction_items.get('CapturedDate'))
        if 'OrderId' in transaction_items:
            data['order_id'] = transaction_items.get('OrderId')
        self.transactions.append(data)
def load(self):
    if self._ended:
        return False, False
    data, targets, nextDates = self._load()
    if not targets:
        self._ended = True
        return False, False
    self.__cache.update(data)
    if self._firstDate is None:
        self._firstDate = to_date(data[0][0])
    self._lastDate = to_date(data[-1][0])
    self._loaded = True
    if not nextDates:
        self._ended = True
    else:
        self._dateStart = nextDates[0]
    return data, targets
def from_api_to_db_votacao_orientacao(data_list, url, data_proposicao,
                                      data_votacao, id_proposicao,
                                      numero_captura):
    func = lambda datum: dict(
        id_proposicao=id_proposicao,
        tipo_proposicao_sigla=data_proposicao['Sigla'],
        numero_proposicao=data_proposicao['Numero'],
        ano_proposicao=data_proposicao['Ano'],
        resumo_votacao=data_votacao['@Resumo'],
        data_votacao=to_date(data_votacao['@Data'], '%d/%m/%Y'),
        hora_votacao=to_date(data_votacao['@Hora'], '%H:%M'),
        objeto_votacao=data_votacao['@ObjVotacao'],
        cod_sessao=data_votacao['@codSessao'],
        sigla_partido=datum['@Sigla'],
        orientacao_partido=datum['@orientacao'].strip(),
        data_captura=datetime.datetime.now(),
        url_captura=url,
        numero_captura=numero_captura)
    return map(func, data_list)
def from_api_to_db(data_list, url, numero_captura):
    func = lambda datum: dict(
        id_proposicao=datum['codProposicao'],
        nome_proposicao=datum['nomeProposicao'],
        data_votacao=to_date(datum['dataVotacao'], '%d/%m/%Y'),
        data_captura=datetime.datetime.now(),
        url_captura=url,
        numero_captura=numero_captura)
    return map(func, data_list)
def dateWasLoaded(self, date):
    if not self._loaded:
        raise RuntimeError('Data not loaded yet')
    if date is None:
        return True
    date = to_date(date)
    if self._dateStep > 0:
        return self._firstDate <= date <= self._lastDate
    return self._lastDate <= date <= self._firstDate
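# The two direction-dependent range checks above are the same containment
# test; an equivalent, direction-agnostic sketch:
# lo, hi = sorted((self._firstDate, self._lastDate))
# return lo <= date <= hi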
def from_api_to_db_votacao_deputado(data_list, url, data_proposicao,
                                    data_votacao, id_proposicao):
    func = lambda datum: dict(
        id_proposicao=id_proposicao,
        tipo_proposicao_sigla=data_proposicao['Sigla'],
        numero_proposicao=data_proposicao['Numero'],
        ano_proposicao=data_proposicao['Ano'],
        resumo_votacao=data_votacao['@Resumo'],
        data_votacao=to_date(data_votacao['@Data'], '%d/%m/%Y'),
        hora_votacao=to_date(data_votacao['@Hora'], '%H:%M'),
        objeto_votacao=data_votacao['@ObjVotacao'],
        cod_sessao=data_votacao['@codSessao'],
        nome=datum['@Nome'],
        ide_cadastro=datum['@ideCadastro'],
        sigla_partido=datum['@Partido'],
        uf=datum['@UF'],
        voto=datum['@Voto'],
        data_captura=datetime.datetime.now(),
        url_captura=url)
    return map(func, data_list)
def _convert_commit(self, c):
    return {
        'sha': c.hex,
        'subject': utils.truncate(utils.commit_subject(c.message), 60),
        'message': c.message,
        'commit_time': utils.prettydate(utils.to_date(c.commit_time)),
        'author': {
            'name': c.author.name,
            'email': c.author.email
        },
        'committer': {
            'name': c.committer.name,
            'email': c.committer.email
        }
    }
def api_activity(reponame):
    hit = '{0}-activity'.format(reponame)
    entries = cache.get(hit)
    if entries is None:
        repo = Repository(reponame)
        commits = list(repo.log(repo.head.target.hex))
        data = [utils.to_date(c.commit_time).strftime("%Y/%m/%d") for c in commits]
        data_sorted = sorted(Counter(data).items())
        entries = [{'date': k, 'commits': v} for k, v in data_sorted]
        cache.set(hit, entries, app.config['CACHE_TIME'])
    return jsonify(activity=entries)
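# In these pygit2-based views, commit_time is epoch seconds, so the
# utils.to_date used here presumably wraps the timestamp constructor;
# a minimal sketch (an assumption, not the project's helper):
# def to_date(ts):
#     return datetime.datetime.fromtimestamp(ts)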
def consumingProjectParse(data):
    retseq = []
    statdict = data['p_statdict']
    restOfGoal = data['p_target_amount'] - data['p_accumt_amount']
    averageOfRec = data['p_accumt_amount'] / data['p_recs_amount']
    pastDates = (date.today() - to_date(data['p_start_time'])).days
    remainDates = data['p_period'] - pastDates
    # Collect the basic information of the project.
    templist = [
        PROJECT_TARGET.format(data['p_target_amount']),
        PROJECT_START_DATE.format(data['p_start_time']),
        REST_OF_GOAL.format(restOfGoal),
        AVERAGE_OF_REC.format(averageOfRec),
        PAST_DATES.format(pastDates),
        REMAIN_DATES.format(remainDates)
    ]
    retseq.extend(templist)
    # Work out the milestones of the project target.
    _milestone_check(data['p_accumt_amount'], data['p_target_amount'], statdict)
    # Judge the state of the project to decide what to report.
    if data['p_accomplish_date']:
        retseq.append(HAS_ACCOMPLISH.format(data['p_accomplish_date']))
    elif statdict['hasThreeQuarters']:
        retseq.append(HAS_THREEQUARTERS.format(statdict['DateOfThreeQuarters']))
    elif statdict['hasHalf']:
        retseq.append(HAS_HALF.format(statdict['DateOfHalf']))
    elif statdict['hasQuarter']:
        retseq.append(HAS_QUARTER.format(statdict['DateOfQuarter']))
    # If project recursion is enabled.
    if data['p_recursion']:
        retseq.append(IS_RECURSION.format(data['p_period']))
    return retseq
def get_reviews_df(filepath):
    with open(filepath) as f:
        content = json.load(f)
    data_corpus = []
    for item in content:
        data = []
        data.append(item['time'])
        data.append(item['content'].replace('\r', ' '))
        data_corpus.append(data)
    df = pd.DataFrame(data_corpus, columns=["time", "content"])
    df.drop_duplicates(['time'], inplace=True)
    df['date'] = df['time'].map(to_date)
    df.sort_values(by="date", inplace=True)
    df.drop(['time'], axis=1, inplace=True)
    return df
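# Hypothetical usage of get_reviews_df: the input is a JSON array of objects
# with 'time' and 'content' fields (field names come from the code above;
# the file name is invented for the example).
# df = get_reviews_df('reviews.json')
# print(df.head())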
def from_api_to_db(data_list, url, id_proposicao):
    func = lambda datum: dict(
        id_proposicao=id_proposicao,
        data_hora=to_date(datum['dataHora'], '%Y-%m-%dT%H:%M'),
        sequencia=datum['sequencia'],
        sigla_orgao=datum['siglaOrgao'],
        uri_orgao=datum['uriOrgao'],
        regime=datum['regime'],
        descricao_tramitacao=datum['descricaoTramitacao'],
        id_tipo_tramitacao=datum['idTipoTramitacao'],
        # Assuming the API field is spelled 'descricaoSituacao'.
        descricao_situacao=datum['descricaoSituacao'],
        id_situacao=datum['idSituacao'],
        despacho=datum['despacho'],
        url=datum['url'],
        data_captura=datetime.datetime.now(),
        url_captura=url)
    return map(func, data_list)
def _get_nearest_date(self, direction, date):
    dateStep = direction * self._loader.direction if direction else self._loader.direction
    if date is not None and direction:
        date = (to_date(date) + timedelta(days=dateStep)).strftime('%Y-%m-%d')
    if date in self._data:
        dialog = 0 if dateStep < 0 else len(self._data[date]) - 1
        msg = None if direction > 0 or not self.isOpened(date, dialog) \
            else self._data[date][dialog].messageCount - 1
        return date, dialog, msg
    date, data = self._loader.get(date, dateStep)
    if data is False:
        return None, 0, None
    elif date not in self._data:
        print(f'DIALOG_CACHE_UPDATE {date}')
        self._data[date] = tuple(
            WidgetPlaceholderEx(DialogHeader(None, o)) for o in data)
    dialog = 0 if dateStep < 0 else len(self._data[date]) - 1
    msg = None if direction > 0 or not self.isOpened(date, dialog) \
        else self._data[date][dialog].messageCount - 1
    return date, dialog, msg
def get_reviews_partitions(filepath, w2v_model, thres_count=150,
                           thres_simi=0.2, merge_num=2, topk=5):
    '''
    Partition a song's reviews by semantics.

    params:
        filepath: path to the review file
        w2v_model: word-vector model
        thres_count: minimum number of reviews merged into one segment
        thres_simi: minimum similarity for merging segments
        merge_num: number of merge passes
        topk: number of feature_words inspected when merging
    return:
        d_reviews_partitions[dict]: d[(start_date, end_date)] = feature_words
    '''
    stops_sup = open("../resources/rubbish_words.txt").read().splitlines()
    with open(filepath) as f:
        df = pd.read_json(f)
    track_id = filepath[:-5]
    df["date"] = df["time"].map(to_date)  # convert timestamps to dates
    df.sort_values(by="date", inplace=True)
    # Merge the reviews that share the same date.
    p_reviews = list(dict(df.groupby("date")["content"].sum()).items())
    # Count the reviews under each date.
    reviews_count = list(df.groupby("date")["content"].count().values)
    # Collect the valid dates.
    dates = list(df.groupby("date")["content"].count().index)
    tmp_count, flag = 0, 0
    tmp_start_flag, tmp_end_flag = 0, 0
    # Merge dates with few reviews; a segment needs at least thres_count reviews.
    reviews_partitions = []
    while flag < len(p_reviews) - 1:
        tmp_count += reviews_count[flag]
        if tmp_count < thres_count and reviews_count[flag + 1] < thres_count:
            flag += 2
        else:
            if tmp_count < thres_count and reviews_count[flag + 1] >= thres_count:
                flag += 2
            else:
                flag += 1
            tmp_end_flag = flag - 1
            reviews_partitions.append((tmp_start_flag, tmp_end_flag))
            tmp_start_flag = flag
            tmp_count = 0

    def merge_simis(reviews_partitions, max_text_length=5000):
        '''
        Score the similarity of adjacent segments via their feature_words
        (simi_scores) and merge similar segments.

        param: reviews_partitions[list]: [(start_date_flag, end_date_flag), ...]
        param: thres_simi: threshold used to merge segments
        return: new_reviews_partitions
        '''
        if len(reviews_partitions) == 1:
            return reviews_partitions
        d_reviews_partitions = {}
        for i in range(len(reviews_partitions)):
            text = ""
            start, end = reviews_partitions[i]
            for j in range(start, end + 1):
                text += p_reviews[j][1]
            feature_words = tags_extractor(text[:max_text_length], topk=topk,
                                           stops_sup=stops_sup)
            d_reviews_partitions[reviews_partitions[i]] = feature_words
        items = list(d_reviews_partitions.items())
        simi_scores = [0]
        for i in range(1, len(d_reviews_partitions)):
            simi_scores.append(
                words_simi_score(items[i][1], items[i - 1][1], w2v_model))
        # If simi_score < thres_simi (unlike the previous segment), start a
        # new segment; otherwise merge the two into one.
        new_reviews_partitions = []
        i, j = 0, 1
        while j < len(items):
            while j < len(items) and simi_scores[j] >= thres_simi:
                j += 1
            new_reviews_partitions.append((items[i][0][0], items[j - 1][0][1]))
            i = j
            j += 1
        return new_reviews_partitions

    for i in range(merge_num):
        reviews_partitions = merge_simis(reviews_partitions)
    # Use (start_date, end_date) as key and feature_words as value.
    d_reviews_partitions = {}
    for i in range(len(reviews_partitions)):
        start, end = reviews_partitions[i]
        text = ""
        ptr = 0
        while p_reviews[ptr][0] < dates[start]:
            ptr += 1
        while p_reviews[ptr][0] <= dates[end]:
            text += p_reviews[ptr][1]
            ptr += 1
        d_reviews_partitions[(dates[start], dates[end])] = tags_extractor(
            text, topk=topk, stops_sup=stops_sup)
    # for k in d_reviews_partitions:
    #     print(k, d_reviews_partitions[k])
    return d_reviews_partitions
def format_transactions(self, transaction_items):
    if isinstance(transaction_items, list):
        for t in transaction_items:
            self.format_transactions(t)
    else:
        status = to_int(transaction_items.get('Status'))
        data = {
            'braspag_transaction_id': transaction_items.get('BraspagTransactionId'),
            'acquirer_transaction_id': transaction_items.get('AcquirerTransactionId'),
            'authorization_code': transaction_items.get('AuthorizationCode'),
            'amount': to_int(transaction_items.get('Amount')),
            'status': status,
            'status_message': self.STATUS[status],
            'proof_of_sale': transaction_items.get('ProofOfSale'),
        }
        # dict.has_key() was removed in Python 3; use the `in` operator.
        if 'MaskedCreditCardNumber' in transaction_items:
            data['masked_credit_card_number'] = transaction_items.get('MaskedCreditCardNumber')
        if 'ReturnCode' in transaction_items:
            data['return_code'] = transaction_items.get('ReturnCode')
        if 'ReturnMessage' in transaction_items:
            data['return_message'] = transaction_items.get('ReturnMessage')
        if 'ErrorCode' in transaction_items:
            data['error_code'] = transaction_items.get('ErrorCode')
        if 'ErrorMessage' in transaction_items:
            data['error_message'] = transaction_items.get('ErrorMessage')
        if 'PaymentMethod' in transaction_items:
            data['payment_method'] = to_int(transaction_items.get('PaymentMethod'))
        if 'CreditCardToken' in transaction_items:
            data['card_token'] = transaction_items.get('CreditCardToken')
        if 'PaymentMethodName' in transaction_items:
            data['payment_method_name'] = transaction_items.get('PaymentMethodName')
        if 'TransactionType' in transaction_items:
            data['transaction_type'] = to_int(transaction_items.get('TransactionType'))
        if 'ReceivedDate' in transaction_items:
            data['received_date'] = to_date(transaction_items.get('ReceivedDate'))
        if 'CapturedDate' in transaction_items:
            data['captured_date'] = to_date(transaction_items.get('CapturedDate'))
        if 'VoidedDate' in transaction_items:
            data['voided_date'] = to_date(transaction_items.get('VoidedDate'))
        if 'OrderId' in transaction_items:
            data['order_id'] = transaction_items.get('OrderId')
        if 'Currency' in transaction_items:
            data['currency'] = transaction_items.get('Currency')
        if 'Country' in transaction_items:
            data['country'] = transaction_items.get('Country')
        if 'NumberOfPayments' in transaction_items:
            data['number_of_payments'] = to_int(transaction_items.get('NumberOfPayments'))
        self.transactions.append(data)
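# The optional-field ladder above can be collapsed into a key-mapping table;
# a sketch of the idea (the field list is copied from the code above, while
# OPTIONAL_FIELDS and copy_optional_fields are hypothetical names):
OPTIONAL_FIELDS = {
    'MaskedCreditCardNumber': ('masked_credit_card_number', None),
    'ReturnCode': ('return_code', None),
    'ReturnMessage': ('return_message', None),
    'ErrorCode': ('error_code', None),
    'ErrorMessage': ('error_message', None),
    'PaymentMethod': ('payment_method', to_int),
    'CreditCardToken': ('card_token', None),
    'PaymentMethodName': ('payment_method_name', None),
    'TransactionType': ('transaction_type', to_int),
    'ReceivedDate': ('received_date', to_date),
    'CapturedDate': ('captured_date', to_date),
    'VoidedDate': ('voided_date', to_date),
    'OrderId': ('order_id', None),
    'Currency': ('currency', None),
    'Country': ('country', None),
    'NumberOfPayments': ('number_of_payments', to_int),
}


def copy_optional_fields(source, data, fields=OPTIONAL_FIELDS):
    """Copy the keys present in `source` into `data`, applying converters."""
    for src_key, (dst_key, convert) in fields.items():
        if src_key in source:
            value = source.get(src_key)
            data[dst_key] = convert(value) if convert else value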
def start_scheduler(cfg, seconds):
    # lastDownTime: `None` means nothing happened last check
    lastDownTime = {}
    pushover = {}
    while True:
        # Stage 1: create new user-defined chains
        create_user_chains(cfg)
        # Stage 2: start ip check task
        ipStatus = fetch_ip_status(cfg)
        # Stage 3: check iptables rules
        for tag in cfg:
            preUserChainName = cfg[tag]['prerouting']['chain']
            postUserChainName = cfg[tag]['postrouting']['chain']
            preUserChain = NATChain(preUserChainName)
            postUserChain = NATChain(postUserChainName)
            primaryIp = cfg[tag]['dstip']['primary']
            secondaryIp = cfg[tag]['dstip']['secondary']
            dstip = preUserChain.fetch_dst_ip()
            timeNow = int(time.time())
            # 0: ok; 1: secondary fail; 2: primary fail; 3: both fail
            lastDownTime.setdefault(tag, {'time': None, 'status': 0})
            pushover.setdefault(tag, {'message': None, 'priority': 0})
            if dstip is None:
                # loading cfg for the first time
                if ipStatus[tag] == 0:
                    dstip = primaryIp
                    pushover[tag]['message'] = (
                        u'Primary IP up: {ip1}\nSecondary IP up: {ip2}\n'
                        u'Business impact: none\n'.format(ip1=primaryIp, ip2=secondaryIp))
                elif ipStatus[tag] == 1:
                    dstip = primaryIp
                    lastDownTime[tag] = {'time': timeNow, 'status': 1}
                    pushover[tag]['message'] = (
                        u'Primary IP up: {ip1}\nSecondary IP down: {ip2}\n'
                        u'Business impact: medium\n'.format(ip1=primaryIp, ip2=secondaryIp))
                elif ipStatus[tag] == 2:
                    dstip = secondaryIp
                    lastDownTime[tag] = {'time': timeNow, 'status': 2}
                    pushover[tag]['message'] = (
                        u'Primary IP down: {ip1}\nSecondary IP up: {ip2}\n'
                        u'Business impact: medium\n'.format(ip1=primaryIp, ip2=secondaryIp))
                elif ipStatus[tag] == 3:
                    lastDownTime[tag] = {'time': timeNow, 'status': 3}
                    pushover[tag]['message'] = (
                        u'Primary IP down: {ip1}\nSecondary IP down: {ip2}\n'
                        u'Business impact: high\n'.format(ip1=primaryIp, ip2=secondaryIp))
                    pushover[tag]['priority'] = 1
                # apply iptables rules in preUserChain and postUserChain
                # unless both IPs failed
                if dstip is not None:
                    for rule in cfg[tag]['prerouting']['rules']:
                        to_destination = ':'.join((dstip, rule['to_ports']))
                        preUserChain.append_pre_rule(rule['proto'], rule['dport'],
                                                     rule['target'], to_destination)
                    for rule in cfg[tag]['postrouting']['rules']:
                        postUserChain.append_post_rule(rule['proto'], dstip,
                                                       rule['target'])
                    if ipStatus[tag] < 3:
                        _dstip = preUserChain.fetch_dst_ip()
                        pushover[tag]['message'] += u'Active IP: {ip}\n'.format(ip=_dstip)
            else:
                if ipStatus[tag] == 0:
                    if lastDownTime[tag]['status'] != 0:
                        # send pushover at once when recovering from a both-failed state
                        if lastDownTime[tag]['status'] == 3:
                            pushover[tag]['message'] = (
                                u'Event: primary/secondary IPs recovered\n'
                                u'Business impact: none\n')
                            lastDownTime[tag] = {'time': None, 'status': 0}
                        else:
                            # otherwise send pushover only when there was no
                            # downtime in the last 10 minutes
                            if timeNow - lastDownTime[tag]['time'] >= 600:
                                if lastDownTime[tag]['status'] == 1:
                                    pushover[tag]['message'] = (
                                        u'Event: secondary IP recovered\n'
                                        u'Business impact: none\n')
                                elif lastDownTime[tag]['status'] == 2:
                                    pushover[tag]['message'] = (
                                        u'Event: primary IP recovered\n'
                                        u'Business impact: none\n')
                                lastDownTime[tag] = {'time': None, 'status': 0}
                elif ipStatus[tag] == 1:
                    if dstip == primaryIp:
                        # dstip equals primaryIp: nothing to do but send pushover
                        message = (u'Primary IP up: {ip1}\nSecondary IP down: {ip2}\n'
                                   u'Business impact: medium\nActive IP: {ip3}\n'.format(
                                       ip1=primaryIp, ip2=secondaryIp, ip3=dstip))
                        if lastDownTime[tag]['status'] != 1:
                            lastDownTime[tag] = {'time': timeNow, 'status': 1}
                            pushover[tag]['message'] = message
                        else:
                            # send pushover every 5 minutes
                            if timeNow - lastDownTime[tag]['time'] >= 300:
                                lastDownTime[tag]['time'] = timeNow
                                pushover[tag]['message'] = message
                    else:
                        lastDownTime[tag] = {'time': timeNow, 'status': 1}
                        message = (u'Primary IP up: {ip1}\nSecondary IP down: {ip2}\n'
                                   u'Business impact: medium\n'.format(
                                       ip1=primaryIp, ip2=secondaryIp))
                        dstip = primaryIp
                        # apply iptables rules in preUserChain and postUserChain
                        preUserChain.flush()
                        for rule in cfg[tag]['prerouting']['rules']:
                            to_destination = ':'.join((dstip, rule['to_ports']))
                            preUserChain.append_pre_rule(rule['proto'], rule['dport'],
                                                         rule['target'], to_destination)
                        postUserChain.flush()
                        for rule in cfg[tag]['postrouting']['rules']:
                            postUserChain.append_post_rule(rule['proto'], dstip,
                                                           rule['target'])
                        # fetch new dstip after applying the rules
                        _dstip = preUserChain.fetch_dst_ip()
                        pushover[tag]['message'] = message + u'Active IP: {ip}\n'.format(ip=_dstip)
                elif ipStatus[tag] == 2:
                    if dstip == secondaryIp:
                        # dstip equals secondaryIp: nothing to do but send pushover
                        message = (u'Primary IP down: {ip1}\nSecondary IP up: {ip2}\n'
                                   u'Business impact: medium\nActive IP: {ip3}\n'.format(
                                       ip1=primaryIp, ip2=secondaryIp, ip3=dstip))
                        if lastDownTime[tag]['status'] != 2:
                            lastDownTime[tag] = {'time': timeNow, 'status': 2}
                            pushover[tag]['message'] = message
                        else:
                            # send pushover every 5 minutes
                            if timeNow - lastDownTime[tag]['time'] >= 300:
                                lastDownTime[tag]['time'] = timeNow
                                pushover[tag]['message'] = message
                    else:
                        lastDownTime[tag] = {'time': timeNow, 'status': 2}
                        message = (u'Primary IP down: {ip1}\nSecondary IP up: {ip2}\n'
                                   u'Business impact: medium\n'.format(
                                       ip1=primaryIp, ip2=secondaryIp))
                        dstip = secondaryIp
                        # apply iptables rules in preUserChain and postUserChain
                        preUserChain.flush()
                        for rule in cfg[tag]['prerouting']['rules']:
                            to_destination = ':'.join((dstip, rule['to_ports']))
                            preUserChain.append_pre_rule(rule['proto'], rule['dport'],
                                                         rule['target'], to_destination)
                        postUserChain.flush()
                        for rule in cfg[tag]['postrouting']['rules']:
                            postUserChain.append_post_rule(rule['proto'], dstip,
                                                           rule['target'])
                        # fetch new dstip after applying the rules
                        _dstip = preUserChain.fetch_dst_ip()
                        pushover[tag]['message'] = message + u'Active IP: {ip}\n'.format(ip=_dstip)
                elif ipStatus[tag] == 3:
                    # send pushover every minute while both IPs are down
                    message = (u'Primary IP down: {ip1}\nSecondary IP down: {ip2}\n'
                               u'Business impact: high\n'.format(
                                   ip1=primaryIp, ip2=secondaryIp))
                    if lastDownTime[tag]['status'] != 3:
                        lastDownTime[tag] = {'time': timeNow, 'status': 3}
                        pushover[tag]['message'] = message
                        pushover[tag]['priority'] = 1
                    else:
                        if timeNow - lastDownTime[tag]['time'] >= 60:
                            lastDownTime[tag]['time'] = timeNow
                            pushover[tag]['message'] = message
                            pushover[tag]['priority'] = 1
        # Stage 4: send pushover
        for tag in pushover:
            if pushover[tag]['message'] is not None:
                timestamp = lastDownTime[tag]['time'] \
                    if lastDownTime[tag]['time'] is not None \
                    else timeNow
                message = u'Project: {tag}\nTime: {timestamp}\nECS instance: {hostname}\n'.format(
                    tag=tag, timestamp=to_date(timestamp),
                    hostname=socket.gethostname())
                message += pushover[tag]['message']
                ret = send_pushover(to_str(message), pushover[tag]['priority'])
                if ret == 200:
                    pushover[tag] = {'message': None, 'priority': 0}
        time.sleep(seconds)
def refresh(self):
    me = frozenset([
        '*****@*****.**', '*****@*****.**', '*****@*****.**',
        '*****@*****.**'
    ])
    # self.data = self._threads2flat(self.data)
    # unread
    unread = any(True for o in self.data if '#unread' in o['labels'])
    if unread:
        self._w_subject._original_map[0] = 'style3bold'
        self._w_subject._original_map[1] = 'style3bold-focus'
        self._w_members._original_map[0] = 'style3bold'
        self._w_members._original_map[1] = 'style3bold-focus'
        self._w_timestamp._original_map[0] = 'style4bold'
        self._w_timestamp._original_map[1] = 'style4bold-focus'
        self._w_lastmsg._original_map[0] = 'style4bold'
        self._w_lastmsg._original_map[1] = 'style4bold-focus'
    else:
        self._w_subject._original_map[0] = 'style3'
        self._w_subject._original_map[1] = 'style3-focus'
        self._w_members._original_map[0] = 'style3'
        self._w_members._original_map[1] = 'style3-focus'
        self._w_timestamp._original_map[0] = 'style4'
        self._w_timestamp._original_map[1] = 'style4-focus'
        self._w_lastmsg._original_map[0] = 'style4'
        self._w_lastmsg._original_map[1] = 'style4-focus'
    # indicator
    val = [('unread', ' ')] if unread else ' '
    self._w_indicator.set_text(val)
    # timestamp
    val = to_date(self.data[-1]['timestamp'])
    if val == datetime_today().date():
        val = val.strftime('%H:%M:%S')
    else:
        val = val.strftime('%d %b, %a')
    self._w_timestamp.set_text(val)
    # statusbar
    l = len(self.data)
    val = ('★ ' if any(True for o in self.data if '#favorite' in o['labels']) else '☆ ',
           '»' if any(True for o in self.data if me.intersection(o['to'])) else ' ',
           ' ',
           ' ',
           '99' if l >= 99 else (f'{l} ' if l < 10 else f'{l}'))
    self._w_statusbar.set_text(f'[{"".join(val)}]')
    # members
    val = set()
    for o in self.data:
        addrs = set([o['from']] + o['to'])
        if o['cc']:
            addrs.update(o['cc'])
        if o['bcc']:
            addrs.update(o['bcc'])
        if addrs.intersection(me):
            val.add('Me')
        addrs -= me
        for s in addrs:
            val.add(s.split('@', 2)[0] or s)
    val = sorted(val)
    self._w_members.set_text(f'{", ".join(val)}')
    # subject
    re_clearReply = re.compile(r'^((?:(?:re)|(?:Re)|(?:RE)):\s*)+')
    for o in self.data:
        val = re_clearReply.sub('', o['subject'])
        if val:
            break
    self._w_subject.set_text(val)
    # last message
    val = self.data[-1]['bodyPlain'] or self.data[-1]['bodyHtml']
    val = val.replace('\r', '').replace('\n', ' ')[:80]
    self._w_lastmsg.set_text(val)
def component():
    status_code = 400
    response = None
    try:
        # extract request parameters
        if request.content_type == 'application/json':
            bug_data = request.get_json(silent=True)
        else:
            bug_data = request.form.to_dict()
            if not bug_data:
                bug_data = request.args.to_dict()
        # bug info is empty
        if not bug_data:
            response = json_error_msg(
                'Invalid request, specify bug_id or start/end dates')
        # bug_id
        elif config.API_BUG_ID in bug_data:
            # check whether we got one bug_id or a list of bug_id
            bug_id = bug_data[config.API_BUG_ID].split(',')
            bug_id = utils.to_int_list(bug_id)
            # invalid bug ID(s)
            if bug_id is None:
                response = json_error_msg(
                    'Invalid request, bug_id must be integer value')
            # valid bug ID(s)
            else:
                bug_id = set(bug_id)
                bugs_id_count = len(bug_id)
                # one bug_id
                if bugs_id_count == 1:
                    bug_id = list(bug_id)[0]
                    bugs_data = storage.loadPredictionsForListFromTable([bug_id])
                    if bugs_data is None or len(bugs_data) == 0:
                        status_code = 404
                        response = json_error_msg(
                            'There is no bug with ID: {}'.format(bug_id), bug_id)
                    else:
                        status_code = 200
                        response = bugs_data_as_json(bugs_data)
                # bug_id list
                else:
                    bugs_data = storage.loadPredictionsForListFromTable(list(bug_id))
                    if bugs_data is None or len(bugs_data) == 0:
                        status_code = 404
                        response = json_error_msg(
                            'There are no bugs with requested IDs', list(bug_id))
                    else:
                        status_code = 200
                        bugs_count = len(bugs_data)
                        not_found_bug_id = None
                        if bugs_count < bugs_id_count:
                            not_found_bug_id = list(bug_id - set([
                                item[config.STORAGE_COLUMN_ID] for item in bugs_data
                            ]))
                        response = bugs_data_as_json(bugs_data, not_found_bug_id)
        # bug start/end dates
        elif (config.API_START_DATE in bug_data) and (config.API_END_DATE in bug_data):
            start_date = utils.to_date(bug_data[config.API_START_DATE],
                                       config.DATE_FORMAT)
            end_date = utils.to_date(bug_data[config.API_END_DATE],
                                     config.DATE_FORMAT)
            # start_date and/or end_date are invalid
            if (start_date is None) or (end_date is None):
                response = json_error_msg(
                    'Invalid request, start/end dates should be {}'.format(
                        config.API_DATE_FORMAT_MESSAGE))
            # start_date > end_date
            elif start_date > end_date:
                response = json_error_msg(
                    'Invalid request, start date should be less or equal end date')
            # start_date and end_date are valid
            else:
                bugs_data = storage.loadPredictionsForDateFromTable(start_date, end_date)
                if bugs_data is None or len(bugs_data) == 0:
                    status_code = 404
                    response = json_error_msg(
                        'There are no bugs between dates [{0}; {1}]'.format(
                            start_date, end_date))
                else:
                    status_code = 200
                    response = bugs_data_as_json(bugs_data)
        # no bug_id, no start/end dates
        else:
            response = json_error_msg(
                'Invalid request, specify bug_id or start/end dates')
    # server error
    except Exception as ex:
        status_code = 500
        response = json_error_msg('Internal server error: {}'.format(ex))
        logger.error(str(ex))
    return log_response(json.dumps(response), status=status_code)