class Jimmybot(commands.Bot):
    """Discord bot that persists every prefixed message to a database and
    rewrites unknown commands into the ``i`` command before dispatch."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.db = Database()

    async def close(self):
        """Close the message database before tearing down the connection."""
        self.db.close()
        logger.info('closing bot')
        await super().close()

    async def on_message(self, message):
        """Route a prefixed message through command processing and queue it
        for storage; messages without the prefix are discarded."""
        # If no prefix is found discard the message
        if not message.content.startswith(settings.PREFIX):
            return

        # If the message has a prefix but no known command we
        # convert the message to an !i command before passing it
        view = StringView(message.content)
        view.skip_string(settings.PREFIX)
        invoker = view.get_word()
        if not any(invoker == command.name for command in self.commands):
            # BUG FIX: the original sliced at a hard-coded index 1, which is
            # only correct for single-character prefixes. Insert "i " right
            # after the actual prefix instead.
            plen = len(settings.PREFIX)
            message.content = message.content[:plen] + "i " + message.content[plen:]

        # Allow for the command to be processed by discord.py
        await self.process_commands(message)

        # Put the message in the database queue to be stored
        self.db.add_message_to_queue(message)
def __init__(self):
    """Load settings from config.ini, connect to the database, then accept
    client connections forever, one handler thread per connection."""
    cfg = self.__getconfigs(config_ast.ConfigAST('config.ini'))
    self.configs = cfg
    db_cfg = cfg['DATABASE']
    self.SQL = Database(db_cfg['server'], db_cfg['database'],
                        db_cfg['uid'], db_cfg['pwd'])
    # Per-request state, filled in later by the message handlers.
    self.image = None
    self.data = None
    self.name = None
    self.output = None
    self.information = None
    self.model = CompletedModel.get_instance()
    host = cfg['SOCKET']['host']
    port = int(cfg['SOCKET']['port'])
    print(host, port)
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.bind((host, port))
    server.listen(12)
    print("Waiting request...")
    while True:
        conn, addr = server.accept()
        print('Connected : {}'.format(addr))
        Thread(target=self.receiver_msg, args=(conn, addr)).start()
def gen_sites():
    """Yield (INT tile name, CLE site name) for every CLEL_L/CLEL_R tile,
    skipping the rows nearest the top and bottom of the grid."""
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    _, _, y_min, y_max = grid.dims()
    for tile_name in sorted(grid.tiles()):
        loc = grid.loc_of_tilename(tile_name)
        gridinfo = grid.gridinfo_at_loc(loc)
        if gridinfo.tile_type not in ('CLEL_L', 'CLEL_R'):
            continue
        # Don't fuzz the top and bottom of the grid, the interconnect
        # there behaves differently.
        if not (y_min + 1 < loc.grid_y < y_max - 1):
            continue
        site_name = min(gridinfo.sites)
        # The neighbouring INT tile sits right of an "L" CLE, left of an "R".
        dx = 1 if gridinfo.tile_type[-1] == 'L' else -1
        int_tile_name = grid.tilename_at_loc((loc.grid_x + dx, loc.grid_y))
        if int_tile_name.startswith('INT_'):
            yield int_tile_name, site_name
def fasm_to_frames(
        db_root,
        part,
        filename_in,
        sparse,
        bits_file,
        frames_file,
):
    """Assemble a FASM file into configuration frames and dump them.

    Always writes a .frm file to ``frames_file``; additionally writes a
    bits file when ``bits_file`` is truthy.
    """
    database = Database(db_root, part)
    assembler = fasm_assembler.FasmAssembler(database)

    # Collect every feature the assembler reports via its callback.
    observed_features = set()
    assembler.set_feature_callback(observed_features.add)

    # Features the part always requires, prepended to the input FASM.
    required = database.get_required_fasm_features(part)
    extras = list(fasm.parse_fasm_string('\n'.join(required)))

    assembler.parse_fasm_filename(filename_in, extra_features=extras)

    frames = assembler.get_frames(sparse=sparse)
    print('Have {} frames'.format(len(frames)))

    if bits_file:
        output_bits(bits_file, frames)
    dump_frm(frames_file, frames)
def load(self, config):
    """Initialise the AdminPlug databases and logger, then run the base
    class loader."""
    self.shibes = {}
    self.logger = getLogger("AdminPlug")
    self.db = Database("AdminPlug")
    self.code_db = CodeDatabase()
    super().load(config)
    self.logger.info("Finished loading AdminPlug")
def __init__(self):
    """Set up mail-spider resources: two MySQL handles, an MNS queue client,
    a Redis work queue, a logger and a DingDing alert robot."""
    # Mailbox/spider database handle.
    # SECURITY NOTE(review): credentials are hard-coded in this DSN — they
    # should be loaded from config/env like the second handle below.
    self.db = Database(
        'mysql+mysqldb://spider_admin:'
        '[email protected]:'
        '3306/spider?charset=utf8',
        encoding='utf-8')
    # Second database handle, built from the module-level MYSQL_* settings.
    self.db1 = Database(
        'mysql+mysqldb://%s:%s@%s:%s/%s?charset=utf8' %
        (MYSQL_USER, MYSQL_PASSWD, MYSQL_HOST, MYSQL_PORT, MYSQL_DB),
        encoding='utf-8')
    # MNS message-queue account and queue client.
    self.mns_account = Account(MNS_ENDPOINT, MNS_ACCID, MNS_ACCKEY, MNS_TOKEN)
    self.mns_client = self.mns_account.get_queue(MNS_QUEUE)
    self.logger = Logger.timed_rt_logger()
    # Redis queue of mail jobs (presumably keyed by date — name suggests so).
    self.queue = RedisQueue('t_mail_by_date', host=REDIS_HOST, port=REDIS_PORT)
    # SECURITY NOTE(review): the DingDing access token is hard-coded; move
    # it to configuration or a secrets store.
    self.robot = DingDingRobot(
        access_token="eb749abfe9080a69da6524b77f589b8f6ddbc"
        "c182c7a41bf095b095336edb0a1")
def gen_sites(tile_type):
    """Map tile name -> sorted list of allowed HP/HD IOB sites found in
    tiles of ``tile_type``."""
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    allowed_sites = read_allowed_sites()
    iob_types = ('HPIOB_S', 'HPIOB_M', 'HDIOB_S', 'HDIOB_M')

    sites_by_tile = {}
    for tile_name in sorted(grid.tiles()):
        gridinfo = grid.gridinfo_at_loc(grid.loc_of_tilename(tile_name))
        if gridinfo.tile_type != tile_type:
            continue
        for site_name, site_type in gridinfo.sites.items():
            if site_type in iob_types and site_name in allowed_sites:
                sites_by_tile.setdefault(tile_name, []).append(site_name)

    # Order each tile's sites by their IOB_XnYn coordinates.
    xy_key = util.create_xy_fun('IOB_')
    for sites in sites_by_tile.values():
        sites.sort(key=xy_key)
    return sites_by_tile
def queryDB(self, query_stat, mode):
    """Run ``query_stat`` and return field ``mode`` of the first row, or
    None when the query matched nothing."""
    rows = list(Database().query(query_stat))
    return rows[0][0][mode] if rows else None
def load(self, config):
    """Initialise the ShopPlug database, lottery store and logger, then run
    the base class loader."""
    self.shibes = {}
    self.logger = getLogger("ShopPlug")
    self.db = Database("ShopPlug")
    self.lottery = LotteryDatabase()
    super().load(config)
    self.logger.info("Finished loading ShopPlug")
def gen_lut_sites():
    """Yield the lexicographically first site of every CLE tile."""
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    cle_types = ('CLEL_L', 'CLEL_R', 'CLEM')
    for tile_name in sorted(grid.tiles()):
        info = grid.gridinfo_at_loc(grid.loc_of_tilename(tile_name))
        if info.tile_type in cle_types:
            yield min(info.sites)
def gen_bufgs():
    """Yield BUFGCE site names, excluding HDIO and DIV variants."""
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    for tile_name in sorted(grid.tiles()):
        info = grid.gridinfo_at_loc(grid.loc_of_tilename(tile_name))
        for site in info.sites:
            if "BUFGCE" in site and "HDIO" not in site and "DIV" not in site:
                yield site
def gen_sites():
    """Yield every SLICEL/SLICEM site name in the grid."""
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    for tile_name in sorted(grid.tiles()):
        info = grid.gridinfo_at_loc(grid.loc_of_tilename(tile_name))
        for site, site_type in info.sites.items():
            if site_type in ('SLICEM', 'SLICEL'):
                yield site
def gen_sites():
    """Yield (tile name, site name) for every PS8 site in PSS_ALTO tiles."""
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    for tile_name in sorted(grid.tiles()):
        info = grid.gridinfo_at_loc(grid.loc_of_tilename(tile_name))
        if info.tile_type != 'PSS_ALTO':
            continue
        for site_name, site_type in info.sites.items():
            if site_type == 'PS8':
                yield tile_name, site_name
async def check_ci_status(event, gh, repo, *args, **kwargs):
    """Handle a GitHub commit-status webhook for Paddle: derive an EXCODE
    for finished xly CI runs and insert a row into paddle_ci_status,
    de-duplicating identical statuses reported within 30 seconds."""
    if repo in ['PaddlePaddle/Paddle']:
        status_dict = {}
        state = event.data['state']
        commitId = event.data['sha']
        context = event.data['context']  # CI pipeline name
        branch = event.data['repository']['default_branch']
        status_dict['branch'] = branch
        status_dict['commitId'] = commitId
        status_dict['ciName'] = context
        # Only terminal states are recorded; pending etc. are ignored.
        if state in ['success', 'failure']:
            commit_message = event.data['commit']['commit']['message']
            target_url = event.data['target_url']
            # NOTE(review): only xly.bce.baidu.com runs are handled here;
            # statuses with other target URLs fall through unrecorded.
            if target_url.startswith('https://xly.bce.baidu.com'):
                ifCancel = ifCancelXly(target_url)
                if ifCancel == True:
                    # Cancelled run: log and skip the insert entirely.
                    logger.info("cancel xly: %s" % target_url)
                else:
                    document_fix = ifDocumentFix(commit_message)
                    # Doc-only commits get a fixed EXCODE of 0, except on
                    # PR-CI-CPU-Py2, which is always fully indexed.
                    if document_fix == True and context != "PR-CI-CPU-Py2":
                        EXCODE = 0
                    else:
                        index_dict = generateCiIndex(repo, commitId, target_url)
                        logger.info("target_url: %s" % target_url)
                        logger.info("index_dict: %s" % index_dict)
                        EXCODE = index_dict['EXCODE']
                    ifInsert = True
                    status_dict['status'] = state
                    status_dict['documentfix'] = '%s' % document_fix
                    status_dict['EXCODE'] = EXCODE
                    insertTime = int(time.time())
                    # NOTE(review): query built by string interpolation from
                    # webhook data — parameterize if the backend supports it.
                    query_stat = "SELECT * FROM paddle_ci_status WHERE ciName='%s' and commitId='%s' and status='%s' order by time desc" % (
                        status_dict['ciName'], status_dict['commitId'],
                        status_dict['status'])
                    queryTime = ifAlreadyExist(query_stat)
                    # Suppress duplicates arriving within 30 s of the last
                    # identical (ciName, commitId, status) row.
                    if queryTime != '':
                        ifInsert = False if insertTime - queryTime < 30 else True
                        logger.error("%s already insert!" % status_dict)
                    if ifInsert == True:
                        # Attach per-stage timing info before inserting.
                        time_dict = generateCiTime(target_url)
                        for key in time_dict:
                            status_dict[key] = time_dict[key]
                        logger.info("status_dict: %s" % status_dict)
                        db = Database()
                        result = db.insert('paddle_ci_status', status_dict)
                        if result == True:
                            logger.info(
                                '%s %s insert paddle_ci_status success!' %
                                (context, commitId))
                        else:
                            logger.error(
                                '%s %s insert paddle_ci_status failed!' %
                                (context, commitId))
def ifAlreadyExist(query_stat):
    """Return the epoch time of the newest row matching ``query_stat``
    shifted by +8 hours, or '' when there is no match."""
    rows = list(Database().query(query_stat))
    if not rows:
        return ''
    # '2020-01-02T03:04:05.000...' -> '2020-01-02 03:04:05'
    raw = rows[0][0]['time'].split('.')[0].replace('T', ' ')
    parsed = time.strptime(raw, '%Y-%m-%d %H:%M:%S')
    # Shift the stored timestamp by +8 hours, then convert back to epoch.
    shifted = datetime.datetime.fromtimestamp(
        time.mktime(parsed)) + datetime.timedelta(hours=8)
    rendered = shifted.strftime("%Y-%m-%d %H:%M:%S")
    return int(time.mktime(time.strptime(rendered, "%Y-%m-%d %H:%M:%S")))
def get_commit_createTime(PR, sha):
    """Return the stored creation time for (PR, commit), or 0 when the
    commit predates the tracking table."""
    # NOTE(review): query built by string interpolation — acceptable for
    # trusted inputs, but parameterization would be safer.
    query_stat = "SELECT createTime FROM commit_create_time WHERE PR=%s and commitId='%s'" % (
        PR, sha)
    rows = list(Database().query(query_stat))
    if rows:
        return rows[0][0]['createTime']
    logger.error("The commit created before 2020-07-03 17:10: %s, %s" %
                 (PR, sha))
    return 0
def gen_sites():
    """Return (tile name, site name) pairs for every RAMBFIFO18 site."""
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    pairs = []
    for tile_name in sorted(grid.tiles()):
        info = grid.gridinfo_at_loc(grid.loc_of_tilename(tile_name))
        pairs.extend((tile_name, site_name)
                     for site_name, site_type in info.sites.items()
                     if site_type == 'RAMBFIFO18')
    return pairs
def main():
    """Annotate unknown bits in a FASM file with the tiles whose CLB_IO_CLK
    configuration bits cover the bit's base address and word offset."""
    parser = argparse.ArgumentParser()
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument('input')
    args = parser.parse_args()

    grid = Database(args.db_root, args.part).grid()

    # Group (tile, bits) by the base address of their CLB_IO_CLK bits.
    tiles_by_base = {}
    for tile in grid.tiles():
        gridinfo = grid.gridinfo_at_tilename(tile)
        if BlockType.CLB_IO_CLK in gridinfo.bits:
            info = gridinfo.bits[BlockType.CLB_IO_CLK]
            tiles_by_base.setdefault(info.base_address, []).append((tile, info))

    for line in fasm.parse_fasm_filename(args.input):
        annotations = {a.name: a.value for a in line.annotations}
        # Only lines flagged as unknown bits are reported.
        if 'unknown_bit' not in annotations:
            continue

        base_address = int(annotations['unknown_segment'], 0)
        frame_part, bit_part = annotations['unknown_segbit'].split('_')
        frame_offset = int(frame_part)
        bit = int(bit_part)
        offset = bit // 16

        if base_address not in tiles_by_base:
            print('# No tile for base address')
        else:
            for tile, bits in tiles_by_base[base_address]:
                if bits.offset <= offset < bits.offset + bits.words:
                    print('# {} : {:02d}_{:02d}'.format(
                        tile, frame_offset,
                        bit - bitstream.WORD_SIZE_BITS * bits.offset))

        for l in fasm.fasm_line_to_string(line):
            print(l)
def run(db_root,
        part,
        filename_in,
        f_out,
        sparse=False,
        roi=None,
        debug=False,
        dump_bits=False):
    """Assemble FASM into configuration frames, optionally restricted to an
    ROI, and write either a frames file or raw bits to ``f_out``."""
    database = Database(db_root, part)
    assembler = fasm_assembler.FasmAssembler(database)

    # Collect every feature the assembler reports via its callback.
    observed_features = set()
    assembler.set_feature_callback(observed_features.add)

    extra_features = []
    if roi:
        with open(roi) as f:
            roi_j = json.load(f)
        assembler.mark_roi_frames(
            Roi(db=database,
                x1=roi_j['info']['GRID_X_MIN'],
                x2=roi_j['info']['GRID_X_MAX'],
                y1=roi_j['info']['GRID_Y_MIN'],
                y2=roi_j['info']['GRID_Y_MAX']))
        if 'required_features' in roi_j:
            extra_features = list(
                fasm.parse_fasm_string('\n'.join(roi_j['required_features'])))

    # Features the part always requires.
    extra_features += list(
        fasm.parse_fasm_string(
            '\n'.join(database.get_required_fasm_features(part))))

    assembler.parse_fasm_filename(filename_in, extra_features=extra_features)
    frames = assembler.get_frames(sparse=sparse)

    if debug:
        dump_frames_sparse(frames)

    if dump_bits:
        output_bits(f_out, frames)
    else:
        dump_frm(f_out, frames)
def __init__(self):
    """Configure the application: environment config, constants, DB pool,
    Jinja2 templating, and the base handler/static setup."""
    self._set_env_config()
    self.APP_ROOT = os.path.dirname(__file__)
    self.TOP_WORDS_COUNT = 100
    self.ADMIN_WORDS_PER_PAGE = 100
    self.encryption = Encryption(self)
    self.http_client = AsyncHTTPClient()

    logger.debug('Create DB connection pool')
    self.db = Database(
        host=self.MYSQL_HOST,
        port=self.MYSQL_PORT,
        user=self.MYSQL_USER,
        password=self.MYSQL_PASSWORD,
        database=self.MYSQL_DATABASE)

    # Jinja2 templates live under ./templates; autoescape is off.
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(
            os.path.join(self.APP_ROOT, 'templates')),
        autoescape=False)

    super().__init__(
        handlers=self._get_handlers(),
        debug=self.DEBUG,
        template_loader=Jinja2Loader(env),
        static_path=os.path.join(self.APP_ROOT, 'static'),
        static_url_prefix='/static/')
def gen_sites():
    """Map tile name -> list of BUFCE_LEAF sites in that tile."""
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    leaves = {}
    for tile_name in sorted(grid.tiles()):
        info = grid.gridinfo_at_loc(grid.loc_of_tilename(tile_name))
        for site_name, site_type in info.sites.items():
            if site_type == 'BUFCE_LEAF':
                leaves.setdefault(tile_name, []).append(site_name)
    return leaves
def gen_sites():
    """ Generates all possible SLICE sites """
    slice_re = re.compile(r"SLICE_X([0-9]+)Y([0-9]+)")
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    for tile_name in sorted(grid.tiles()):
        info = grid.gridinfo_at_loc(grid.loc_of_tilename(tile_name))
        if info.tile_type not in ('CLEL_L', 'CLEL_R', 'CLEM', 'CLEM_R'):
            continue
        # Take the lexicographically first site of the tile and parse its
        # SLICE_XnYn coordinates.
        site_name = min(info.sites)
        m = slice_re.match(site_name)
        if m is None:
            continue
        yield Site(tile_name, site_name, (int(m.group(1)), int(m.group(2))))
def gen_sites():
    """Map CLE tile type -> set of SLICE sites found in tiles of that type."""
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    sites_by_tile_type = {}
    for tile_name in sorted(grid.tiles()):
        info = grid.gridinfo_at_loc(grid.loc_of_tilename(tile_name))
        for site, site_type in info.sites.items():
            if site_type in ('SLICEM', 'SLICEL'):
                sites_by_tile_type.setdefault(info.tile_type, set()).add(site)
    return sites_by_tile_type
def gen_bufgs():
    """Map tile -> [(site, site_type)] for every non-HDIO BUFGCE/BUFGCTRL
    site in the grid."""
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    bufgces_by_tile = {}
    for tile in sorted(grid.tiles()):
        info = grid.gridinfo_at_loc(grid.loc_of_tilename(tile))
        for site, site_type in info.sites.items():
            if ("BUFGCE" in site or "BUFGCTRL" in site) and "HDIO" not in site:
                bufgces_by_tile.setdefault(tile, []).append((site, site_type))
    return bufgces_by_tile
def gen_sites():
    """Return (bufgce_sites, slice_sites) collected across the whole grid."""
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    bufgces, slices = [], []
    for tile_name in sorted(grid.tiles()):
        info = grid.gridinfo_at_loc(grid.loc_of_tilename(tile_name))
        for site in info.sites:
            if "BUFGCE" in site and "HDIO" not in site:
                bufgces.append(site)
            if "SLICEM" in site or "SLICEL" in site:
                slices.append(site)
    return bufgces, slices
def testRerun(startTime, endTime):
    """Compute the test-failure rerun ratio for each monitored CI between
    ``startTime`` and ``endTime``.

    A "rerun" is any failure run beyond the first for a given commit.
    Returns a dict mapping '<ci>_testfailed_rerunRatio' to a '%.2f' percent
    string, plus an aggregated 'all_testfailed_rerunRatio'.
    """
    startTime_stamp = strTimeTotimeStamp(startTime)
    endTime_stamp = strTimeTotimeStamp(endTime)
    rerun_index = {}
    for ci in [
            'PR-CI-Coverage', 'PR-CI-Py3', 'PR-CI-Mac', 'PR-CI-Mac-Python3',
            'PR-CI-Windows'
    ]:
        rerunCount = {}
        count = 0
        ALL_commitCount_query_stat = "SELECT COUNT(commitId) from paddle_ci_status where ciName =~ /^%s/ and commit_createTime > %s and commit_createTime < %s and time > '2020-07-13 14:20:00'" % (
            ci, startTime_stamp, endTime_stamp)
        ALL_commitCount = queryDB(ALL_commitCount_query_stat, 'count')
        # The Mac/Windows prefixes also match their sibling pipelines, so
        # those two need an explicit exclusion clause.
        if ci == 'PR-CI-Mac':
            query_stat = "SELECT commitId from paddle_ci_status where ciName =~ /^%s/ and ciName !~ /^PR-CI-Mac-Python3/ and status='failure' and EXCODE=8 and commit_createTime > %s and commit_createTime < %s " % (
                ci, startTime_stamp, endTime_stamp)
        elif ci == 'PR-CI-Windows':
            query_stat = "SELECT commitId from paddle_ci_status where ciName =~ /^%s/ and ciName !~ /^PR-CI-Windows-OPENBLAS/ and status='failure' and EXCODE=8 and commit_createTime > %s and commit_createTime < %s " % (
                ci, startTime_stamp, endTime_stamp)
        else:
            query_stat = "SELECT commitId from paddle_ci_status where ciName =~ /^%s/ and status='failure' and EXCODE=8 and commit_createTime > %s and commit_createTime < %s and time > '2020-07-13 14:20:00'" % (
                ci, startTime_stamp, endTime_stamp)
        db = Database()
        result = list(db.query(query_stat))
        # BUG FIX: result can be empty; the original indexed result[0]
        # unconditionally and raised IndexError when no rows matched.
        if result:
            for row in result[0]:
                commitId = row['commitId']
                rerunCount[commitId] = rerunCount.get(commitId, 0) + 1
        # Every failure beyond the first for a commit counts as a rerun.
        for commitId in rerunCount:
            if rerunCount[commitId] > 1:
                count += (rerunCount[commitId] - 1)
        # BUG FIX: guard the division — queryDB returns None (and COUNT can
        # be 0) when no commits fell inside the window.
        if ALL_commitCount:
            rerun_index['%s_testfailed_rerunRatio' % ci] = '%.2f' % (
                count / ALL_commitCount * 100)
        else:
            rerun_index['%s_testfailed_rerunRatio' % ci] = '0.00'
    # NOTE(review): the aggregate only sums Coverage and Py3, mirroring the
    # original behaviour — confirm whether the other CIs should be included.
    rerun_index['all_testfailed_rerunRatio'] = '%.2f' % (
        float(rerun_index['PR-CI-Coverage_testfailed_rerunRatio']) +
        float(rerun_index['PR-CI-Py3_testfailed_rerunRatio']))
    return rerun_index
async def get_commitCreateTime(event, gh, repo, *args, **kwargs):
    """Record the creation/update time of a PR's head commit.

    Uses the PR ``created_at`` for 'opened' events and ``updated_at`` for
    'synchronize' events; other actions are ignored.
    """
    action = event.data['action']
    # BUG FIX: for any action other than opened/synchronize, CreateTime was
    # never bound and the original raised NameError — skip those events.
    if action == "opened":
        CreateTime = event.data["pull_request"]["created_at"]
    elif action == "synchronize":
        CreateTime = event.data["pull_request"]["updated_at"]
    else:
        return

    pr_num = event.data['number']
    sha = event.data["pull_request"]["head"]["sha"]
    createTime = javaTimeTotimeStamp(CreateTime)
    create_dict = {
        'repo': repo,
        'PR': pr_num,
        'commitId': sha,
        'createTime': createTime,
    }

    db = Database()
    result = db.insert('commit_create_time', create_dict)
    if result == True:
        logger.info('%s %s insert commit_create_time success: %s!' %
                    (pr_num, sha, createTime))
    else:
        logger.error('%s %s insert commit_create_time failed: %s!' %
                     (pr_num, sha, createTime))
def gen_sites():
    """Map tile name -> BITSLICE_RX_TX sites in that tile, each list sorted
    by the site's XnYn coordinates."""
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    xy_fun = util.create_xy_fun('BITSLICE_RX_TX_')
    sites_by_tile = {}
    for tile_name in sorted(grid.tiles()):
        info = grid.gridinfo_at_loc(grid.loc_of_tilename(tile_name))
        for site_name, site_type in info.sites.items():
            if site_type == 'BITSLICE_RX_TX':
                sites_by_tile.setdefault(tile_name, []).append(site_name)
    for sites in sites_by_tile.values():
        sites.sort(key=xy_fun)
    return sites_by_tile
async def check_ci_status(event, gh, repo, *args, **kwargs):
    """Handle a GitHub commit-status webhook for Paddle: index successful
    non-doc-fix runs and record terminal statuses in paddle_ci_status,
    de-duplicating identical reports within 30 seconds."""
    if repo in ['PaddlePaddle/Paddle']:
        status_dict = {}
        state = event.data['state']
        commitId = event.data['sha']
        context = event.data['context']  # CI pipeline name
        status_dict['commitId'] = commitId
        status_dict['ciName'] = context
        shortId = commitId[0:7]  # abbreviated commit id for console output
        if state == 'success':
            commit_message = event.data['commit']['commit']['message']
            document_fix = ifDocumentFix(commit_message)
            # Only index genuine (non documentation-only) successes.
            if document_fix == False:
                target_url = event.data['target_url']
                generateCiIndex(repo, commitId, target_url)
        else:
            print("commitID: %s" % shortId)
            print("state : %s" % state)
        # Terminal states get stored; non-terminal ones are ignored.
        if state in ['success', 'failure']:
            ifInsert = True
            status_dict['status'] = state
            insertTime = int(time.time())
            # NOTE(review): query built by string interpolation from webhook
            # data — parameterize if the backend supports it.
            query_stat = "SELECT * FROM paddle_ci_status WHERE ciName='%s' and commitId='%s' and status='%s' order by time desc" % (
                status_dict['ciName'], status_dict['commitId'],
                status_dict['status'])
            queryTime = ifAlreadyExist(query_stat)
            # Suppress duplicates arriving within 30 s of the last identical
            # (ciName, commitId, status) row.
            if queryTime != '':
                ifInsert = False if insertTime - queryTime < 30 else True
            if ifInsert == True:
                db = Database()
                result = db.insert('paddle_ci_status', status_dict)
                if result == True:
                    logger.info('%s %s insert paddle_ci_status success!' %
                                (context, commitId))
                else:
                    logger.error('%s %s insert paddle_ci_status failed!' %
                                 (context, commitId))
def __init__(self, worker=None, site=None):
    """Initialise the resume spider for ``site``: Kafka producer, Redis
    queues/sets/hashes keyed by the site name, and a MySQL database."""
    super(ResumeSpider, self).__init__(worker=worker)
    self.site = site
    # Kafka producer for publishing scraped results to KAFKA_TOPIC.
    self.kafka_client = KafkaClient(KAFKA_HOSTS)
    self.kafka_producer = self.kafka_client.topics[
        KAFKA_TOPIC].get_sync_producer()
    # Redis work queues (per-site user and search queues).
    self.q_user = RedisQueue(self.site + '_USER',
                             host=REDIS_HOST,
                             port=REDIS_PORT)
    self.q_search = RedisQueue(self.site + '_SEARCH',
                               host=REDIS_HOST,
                               port=REDIS_PORT)
    # Redis set — presumably used for de-duplication (name suggests a
    # filter); confirm against the crawl loop.
    self.s_filter = RedisSet(self.site + '_FILTER',
                             host=REDIS_HOST,
                             port=REDIS_PORT)
    # Redis hash holding this site's session cookies.
    self.h_cookies = RedisHash(self.site + '_COOKIES',
                               host=REDIS_HOST,
                               port=REDIS_PORT)
    # MySQL connection; tables are created on demand.
    self.db = Database(
        'mysql+mysqldb://%s:%s@%s:%s/%s?charset=utf8' %
        (MYSQL_USER, MYSQL_PASSWD, MYSQL_HOST, MYSQL_PORT, MYSQL_DB),
        encoding='utf-8')
    self.db.init_table()
    # SECURITY NOTE(review): eval() on data read from Redis will execute
    # arbitrary code if the store is compromised — ast.literal_eval would
    # be safer. Left unchanged in this doc-only pass.
    if self.h_cookies.hget('cookies'):
        self.cookies = eval(self.h_cookies.hget('cookies'))
    else:
        self.cookies = None