def selectDFByType(cls, contractType, beginDate=None, endDate=None):
    """Fetch data by contract type, e.g. TF01.CFE, TF02.CFE, TF00.CFE.

    Rows are returned in ascending date order by default; beginDate and
    endDate are both inclusive.
    """
    try:
        sqlQuery = "select " + cls.columnString + " from " + cls.tableName
        # Build a single WHERE clause; the date filter is ANDed onto the
        # type filter (the original concatenated two WHERE keywords, which
        # is invalid SQL).
        if beginDate is not None and endDate is None:
            condition = " where type = %(type)s and trade_date >= %(beginDate)s"
            params = {"type": contractType, "beginDate": beginDate}
        elif endDate is not None and beginDate is None:
            condition = " where type = %(type)s and trade_date <= %(endDate)s"
            params = {"type": contractType, "endDate": endDate}
        elif beginDate is not None and endDate is not None:
            condition = (" where type = %(type)s"
                         " and trade_date BETWEEN %(beginDate)s AND %(endDate)s")
            params = {
                "type": contractType,
                "beginDate": beginDate,
                "endDate": endDate
            }
        else:
            condition = " where type = %(type)s"
            params = {"type": contractType}
        condition += " order by trade_date asc"
        df = pd.read_sql(sqlQuery + condition,
                         engine,
                         params=params,
                         index_col=['trade_date'])
        return df
    except BaseException as e:
        logger.info('selectDFByType failed! {}'.format(e))
        return None

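# Hypothetical usage sketch (not part of the original source): fetch one
# contract type over an inclusive date range. `FutureData` stands in for
# whichever model class exposes selectDFByType as a classmethod.
def _example_select_by_type():
    df = FutureData.selectDFByType("TF00.CFE",
                                   beginDate="2019-01-01",
                                   endDate="2019-06-30")
    if df is not None:
        print(df.head())  # rows indexed by trade_date, ascending
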
def compile(self):
    filepaths = {}
    start = time.time()
    self.get_rule_path(filepaths, self.rule_path)
    rules = yara.compile(filepaths=filepaths)
    logger.info(f"Compiling yara rules took {time.time() - start:.4f}s")
    return rules

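# Hypothetical sketch (not part of the original source): yara.compile takes a
# {namespace: path} mapping, which get_rule_path is assumed to build above;
# the compiled ruleset can then be matched against a file. Uses only the
# documented yara-python API.
import yara

def _example_compile_and_match(rule_path, target_path):
    rules = yara.compile(filepaths={"example": rule_path})
    for match in rules.match(target_path):
        print(match.rule, match.meta)
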
def _lookup_emails(db):
    logger.info("Transform usernames to email addresses...")
    non_email_users = 0
    transformed_users = 0
    removed_users = []
    ldap = LdapWrapper.LdapWrapper()
    with db.get_session() as session:
        for user in session.query(User).filter(User.user.notlike('%@%')):
            non_email_users += 1
            email = ldap.get_email(user.user)
            if email != "N/A":
                email = email.decode('unicode-escape').lower()
                existing_user = session.query(User).filter(
                    User.user == email).first()
                if existing_user:
                    # If the user already exists, move all entries to that
                    # user instead.
                    for defect in user.defect_mods:
                        defect.user = existing_user
                    for cm in user.change_metrics:
                        cm.user = existing_user
                    removed_users.append(user.id)
                else:
                    transformed_users += 1
                    user.user = email
        session.commit()
        session.expire_all()
        for id_ in removed_users:
            session.delete(session.query(User).filter(User.id == id_).one())
        logger.info("Found %d non email-users, transformed %d, removed %d",
                    non_email_users, transformed_users, len(removed_users))

def scan(self, filepath):
    rule_set = set()

    def callback(data):
        if not self.db.is_rule_exist(data['rule']):
            _rule = YaraRule(data['rule'])
            meta = data['meta']
            # Rule metadata keys are inconsistently capitalised across rule
            # files, so check both variants of each key.
            if 'author' in meta:
                _rule.author = meta['author']
            if 'Author' in meta:
                _rule.author = meta['Author']
            if 'description' in meta:
                _rule.description = meta['description']
            if 'Description' in meta:
                _rule.description = meta['Description']
            if 'date' in meta:
                # Pad partial dates (e.g. "2020-01") to a full YYYY-MM-DD.
                _rule.date = (meta['date'] + "-01")[:10]
            if 'Date' in meta:
                _rule.date = (meta['Date'] + "-01")[:10]
            if 'reference' in meta:
                _rule.reference = meta['reference']
            if 'Reference' in meta:
                _rule.reference = meta['Reference']
            self.db.save_item(_rule)
        rule_set.add(data['rule'])
        return yara.CALLBACK_CONTINUE

    self.rules.match(filepath,
                     callback=callback,
                     which_callbacks=yara.CALLBACK_MATCHES)
    rules = [YaraEXEResult(rule=rule, md5=os.path.basename(filepath))
             for rule in rule_set]
    logger.info(f"{filepath} matched rules: {rule_set}")
    self.db.save_items(rules)

def update(self, key, **kwargs):
    try:
        tmp = pickle.loads(self.session[key])
        for col, value in kwargs.items():
            setattr(tmp, col, value)
        self.session[key] = pickle.dumps(tmp)
    except Exception as e:
        logger.info("update error: %s", e)

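# Hypothetical sketch (not part of the original source): the session maps
# keys to pickled objects, so update() round-trips bytes -> object -> bytes.
# A plain dict stands in for the session store here.
import pickle

class _Profile(object):
    def __init__(self, name):
        self.name = name

def _example_pickled_update():
    store = {'profile': pickle.dumps(_Profile('anonymous'))}
    obj = pickle.loads(store['profile'])  # bytes -> object
    setattr(obj, 'name', 'alice')         # same mechanism update() uses
    store['profile'] = pickle.dumps(obj)  # object -> bytes
    assert pickle.loads(store['profile']).name == 'alice'
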
def InitFileKeysArray(self, fileKeys):
    if len(fileKeys) > 0:
        fileKeysArray = fileKeys.split('#')
        for key in fileKeysArray:
            keyStr = self.GetRemovedSpaceStr(key)
            if keyStr != '':
                logger.info("File-name keyword to capture: %s", keyStr)
                self.fileNameKeys.append(keyStr)

def InitKeyArray(self, keys):
    # Always include the file path column.
    self.keyWords.append('文件路径')
    keyArray = keys.split('#')
    for key in keyArray:
        keyStr = self.GetRemovedSpaceStr(key)
        if keyStr != '':
            logger.info("Keyword to capture: %s", keyStr)
            self.keyWords.append(keyStr)

def _remove_unwanted_files(db, files):
    logger.info("Cleaning out unwanted files...")
    removed_files = 0
    with db.get_session() as session:
        for file_ in FancyBar().iter(session.query(File)):
            if file_.file not in files:
                removed_files += 1
                session.delete(file_)
        session.commit()
        logger.info("Removed %d files out of %d from db", removed_files,
                    session.query(File).count())

def insertDF(cls, df):
    try:
        df.to_sql(cls.tableName, engine, if_exists='append', index=True)
        return True
    except BaseException as e:
        logger.info('insert data failed! {}'.format(e))
        raise

def BuildDataFrame(self, outputFolderPath):
    # for col, colValues in self.colValuesDict.items():
    #     for values in colValues:
    #         print("Column:", col, "Values:", values)
    #     print("************************")
    logger.info("Found Excel file count: %d", len(self.excelDict))
    logger.info("BuildDataFrame %d", len(self.colValuesDict))
    if len(self.colValuesDict) > 0:
        df = pd.DataFrame(self.colValuesDict)
        outputPath = os.path.join(outputFolderPath, "output.xlsx")
        df.to_excel(outputPath, index=False, columns=self.keyWords)

def download_emails_to_database():
    # Download emails
    logger.info("Downloading emails to database.")
    for message_id in list_message_ids():
        try:
            print("Trying to download message %s" % message_id)
            download_email(message_id)
            print(t.green("Successfully downloaded message %s" % message_id))
        except Exception as e:
            print(t.red("Error downloading message: %s: %s" % (message_id, e)))
            # raise
        print("")

def goToOutput(self):
    if self.checkInputValid():
        logger.info("Start capturing data ...")
        dataHandler = dh.DataHandler()
        dataHandler.InitFileKeysArray(self.fileKeysStr)
        dataHandler.ReadFileFolder(self.resPath)
        dataHandler.SearchContentFromFiles(self.keysStr)
        dataHandler.BuildDataFrame(self.outputPath)
        logger.info("Finished generating data ...")
        msgBox = QtWidgets.QMessageBox(self)
        msgBox.about(self, "Info", "File generated successfully, enjoy it!")

def start(self):
    for code in self.codeList:
        # Reset the retry counter per code; the original carried it over,
        # so one code failing six times silently skipped all later codes.
        count = 0
        result = False
        while count <= 5:
            count += 1
            result = self.loadByCode(code, self.freq)
            if result:
                break
        if not result:
            logger.info(u"Import of {code} failed after {count} attempts".format(
                count=count, code=code))

def selectMaxDate(cls, target, column="code"):
    """Return the latest trade_date as a datetime."""
    exp = "%" + target + "%"
    try:
        s = text('SELECT max(trade_date) FROM ' + cls.tableName +
                 ' where ' + column + ' like :exp')
        s = s.bindparams(exp=exp)
        res = engine.execute(s)
        resData = res.fetchall()
        if resData is not None and len(resData) > 0:
            return resData[0][0]
        else:
            return None
    except BaseException as e:
        logger.info('selectMaxDate failed! {}'.format(e))
        raise

def _lookup_defect_modifications(pool, walker, vcs, db, files, last_update):
    logger.info("Looking up defect modifications...")
    filemap = zip([vcs] * len(files), [last_update] * len(files), files)
    with db.get_session() as session:
        iter_ = FancyBar(max=len(filemap)).iter(
            pool.imap_unordered(worker(_get_defects), filemap))
        for file_, defects in iter_:
            for defect, entries in defects.iteritems():
                for entry in entries:
                    for function in entry['functions']:
                        db.insert_defect_modification(
                            session,
                            file_=walker.translate_env(file_),
                            version=entry['version'],
                            function=function,
                            defect_id=defect,
                            user=_translate_username(entry),
                            date_=entry['datetime'])
        session.commit()

def get(self, *args, **kwargs):
    try:
        img, auth_code = create_validata_code()
        img_mem = io.BytesIO()
        img.save(img_mem, 'JPEG', quality=95)
        # return self.render("login.html", img=img_mem.getvalue())
        self.set_header('Content-Type', 'image/jpeg')
        self.write(img_mem.getvalue())
        # ============================================
        # Before redis-backed sessions, the captcha had to be stored in a
        # cookie and sent back by the client. With redis we only need to
        # put auth_code into the session. Ironically, what used to be one
        # line now takes two.
        # self.set_secure_cookie('auth_code', auth_code)
        self.session.save('auth', auth_code)
        # ============================================
        img_mem.close()
    except Exception:
        logger.info(traceback.format_exc())

def selectMaxDateByType(cls, simpleContractType):
    """Return the latest trade_date as a datetime."""
    exp = "%" + simpleContractType + "0%"
    try:
        s = text('SELECT max(trade_date) FROM ' + cls.tableName +
                 ' where type like :exp')
        s = s.bindparams(exp=exp)
        with engine.connect() as conn:
            res = conn.execute(s)
            resData = res.fetchall()
        if resData is not None and len(resData) > 0 and resData[0] is not None:
            return resData[0][0]
        else:
            return None
    except BaseException as e:
        logger.info('selectMaxDateByType failed! {}'.format(e))
        return None

def selectDFByCodeAndDateRange(cls, code=None, beginDate=None, endDate=None,
                               columns=None):
    """Fetch a DataFrame filtered by date range, column names and product
    code. beginDate and endDate are both inclusive.
    """
    try:
        conditionList = []
        params = {}
        if columns is None:
            sqlQuery = "select " + cls.columnString + " from " + cls.tableName
        else:
            columnString = " trade_date," + ",".join(columns)
            sqlQuery = "select " + columnString + " from " + cls.tableName
        if code is not None:
            conditionList.append(" code like %(code)s")
            params["code"] = "%" + code + "%"
        if beginDate is not None and endDate is None:
            conditionList.append(" trade_date >= %(beginDate)s")
            params["beginDate"] = beginDate
        elif endDate is not None and beginDate is None:
            conditionList.append(" trade_date <= %(endDate)s")
            params["endDate"] = endDate
        elif beginDate is not None and endDate is not None:
            conditionList.append(
                " trade_date BETWEEN %(beginDate)s AND %(endDate)s")
            params["beginDate"] = beginDate
            params["endDate"] = endDate
        condition = ""
        if conditionList:
            condition = " where " + " and ".join(conditionList)
        condition += " order by trade_date asc"
        df = pd.read_sql(sqlQuery + condition,
                         engine,
                         params=params,
                         index_col=['trade_date'])
        return df
    except BaseException as e:
        logger.info('selectDFByCodeAndDateRange failed! {}'.format(e))
        return None

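# Hypothetical sketch (not part of the original source): a typical incremental
# load asks selectMaxDate for the newest stored trade_date, then pulls only
# later rows with selectDFByCodeAndDateRange. `FutureData` is a stand-in for
# the model class carrying these classmethods.
def _example_incremental_fetch():
    last = FutureData.selectMaxDate("TF00")
    if last is not None:
        df = FutureData.selectDFByCodeAndDateRange(code="TF00", beginDate=last)
        if df is not None:
            print(df.tail())
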
def reset_database():
    # Delete the 'emails.db' sqlite database
    if os.path.exists('emails.db'):
        os.remove('emails.db')
        logger.info("Deleted database 'emails.db'")
    # Re-create the 'emails.db' sqlite database
    db = SqliteDatabase('emails.db')
    logger.info("Created database 'emails.db'")
    Email.create_table()
    SenderMetadata.create_table()
    Comment.create_table()
    Form.create_table()
    Category.create_table()
    Category(name="Meetings").save()
    Category(name="Opinions").save()
    Category(name="Question").save()
    Category(name="Action").save()
    Category(name="Thank You").save()

def _scrape_meta_defects(db, custom_bts_scraper):
    start = datetime.now()
    logger.info("Scraping Bug tracker...")
    defects = CustomBugTracker.call_custom_bug_tracker(custom_bts_scraper)
    with db.get_session() as session:
        DefectMeta.__table__.drop(db.engine, checkfirst=True)
        DefectMeta.__table__.create(db.engine, checkfirst=False)
        session.commit()
        for defect in FancyBar("Inserting:").iter(defects):
            if defect["product"] is not None:
                defect_meta = db.get_or_create(session, DefectMeta,
                                               id=defect["id"])
                defect_meta.submitted_on = defect["submitted_on"]
                defect_meta.severity = defect["severity"]
                defect_meta.product = defect["product"]
                defect_meta.answer_code = defect["answer_code"]
                defect_meta.fault_code = defect["fault_code"]
        session.commit()
    logger.info("Finished scraping BTS in %s" % (datetime.now() - start))

def keyboardCommands(self):
    # get current key
    currentKey = self.keyboard.getKey()
    if DEBUG and (currentKey == ord('s') or currentKey == ord('S')):
        logger.debug("Current status: " + str(self.status))
    # press P to find a parking lot
    if currentKey == ord('p') or currentKey == ord('P'):
        logger.info("Looking for a parking lot")
        self.setStatus(Status.SEARCHING_PARK)
    # press M for manual control
    elif currentKey == ord('m') or currentKey == ord('M'):
        logger.info("Manual")
        self.setStatus(Status.MANUAL)
    # press A for autonomous control
    elif currentKey == ord('a') or currentKey == ord('A'):
        logger.info("Auto")
        self.setStatus(Status.FORWARD)
    # return current key to allow other controls
    return currentKey

def download_all_to_database():
    # # Delete the 'emails.db' sqlite database
    # if os.path.exists('emails.db'):
    #     os.remove('emails.db')
    #     logger.info("Deleted database 'emails.db'")
    # # Re-create the 'emails.db' sqlite database
    # db = SqliteDatabase('emails.db')
    # logger.info("Created database 'emails.db'")
    # Email.create_table()
    # SenderMetadata.create_table()

    # Download emails
    logger.info("Downloading emails to database.")
    for message_id in list_message_ids():
        try:
            download_email(message_id)
        except Exception as e:
            print(t.red("Error downloading message: %s" % message_id))
            # print(t.red(str(e)))
            raise
        print("")

def updatePath(self):
    if self.isEnabled():
        logger.info("Computing new path..")
        p = self.positioning.getPosition()
        o = self.positioning.getOrientation()
        nearest = Map.getNearestWalkablePosition(p, o)
        if nearest is not None:
            p = nearest
        x = p.getX()
        y = p.getY()
        # Mark the cell the vehicle is facing as an obstacle.
        if o == Orientation.NORD:
            Map.setNewObstacle(Position(x - 1, y))
        if o == Orientation.EAST:
            Map.setNewObstacle(Position(x, y + 1))
        if o == Orientation.SOUTH:
            Map.setNewObstacle(Position(x + 1, y))
        if o == Orientation.WEST:
            Map.setNewObstacle(Position(x, y - 1))
        if DEBUG:
            Map.printMap()
        self.currentPath = self.pathPlanner.getFastestRoute()
        self.actualTurn = 0

def _lookup_maintainers(pool, db, maintainerobj):
    logger.info("Updating maintainers...")
    with db.get_session() as session:
        file_tuples = db.get_file_ids_and_abspaths(session)
        logger.debug("Remove subsystem mappings..")
        for file_ in session.query(File):
            file_.subsystem = None
        session.commit()
        Subsystem.__table__.drop(db.engine, checkfirst=True)
        Subsystem.__table__.create(db.engine, checkfirst=False)
        logger.debug("Populating maintainers table...")
        for maintainer_info in maintainerobj.get_maintainer_list():
            db.insert_subsystem_entry(
                session,
                subsystem=maintainer_info["subsystem"],
                status=maintainer_info["status"],
                maintainers=[maint for maint, _ in maintainer_info["maintainer"]])
        session.commit()
        logger.debug("Looking up maintainer for all files")
        filemap = zip([maintainerobj] * len(file_tuples), file_tuples)
        count_some_maintainer = 0
        iter_ = FancyBar(max=len(filemap)).iter(
            pool.imap_unordered(worker(_get_maintainers), filemap))
        for file_tuple, maintainer_dicts in iter_:
            if maintainer_dicts is not None:
                count_some_maintainer += 1
                file_id = file_tuple[0]
                for maintainer_dict in maintainer_dicts:
                    db.update_file_entry(
                        session,
                        file_id=file_id,
                        subsystem_name=maintainer_dict["subsystem"])
        session.commit()
        logger.info("Amount of files with a maintainer: %d",
                    count_some_maintainer)
        logger.info("Amount of files without a maintainer: %d",
                    len(file_tuples) - count_some_maintainer)

def updateCommands(self):
    # get current key
    currentKey = self.keyboard.getKey()
    # press P to find a parking lot
    if self.keyboard.isKeyPressed(currentKey, 'p'):
        logger.info("Looking for a parking lot")
        self.motionStatus = PARKING
    # press M for manual control
    elif self.keyboard.isKeyPressed(currentKey, 'm'):
        logger.info("Manual")
        self.motionStatus = MANUAL
    # press A for autonomous control
    elif self.keyboard.isKeyPressed(currentKey, 'a'):
        logger.info("Auto")
        self.motionStatus = PATH_FOLLOWING
    # return current key to allow other controls
    return currentKey

def download_all_to_database():
    # Delete the 'emails.db' sqlite database
    if os.path.exists('emails.db'):
        os.remove('emails.db')
        logger.info("Deleted database 'emails.db'")
    # Re-create the 'emails.db' sqlite database
    db = SqliteDatabase('emails.db')
    logger.info("Created database 'emails.db'")
    Email.create_table()
    SenderMetadata.create_table()
    # Download emails
    logger.info("Downloading emails to database.")
    for message_id in list_message_ids():
        try:
            download_email(message_id)
        except Exception as e:
            print(t.red("Error downloading message: %s" % message_id))
            print(t.red(str(e)))
            raise
        print("")

def run(self):
    logger.info("Running..")
    leftIsEmpty = False          # flag used to detect left parking lot
    rightIsEmpty = False         # flag used to detect right parking lot
    leftStartingPosition = 0.0   # length of the left parking lot
    rightStartingPosition = 0.0  # length of the right parking lot
    sideOfParkingLot = 0         # side of the parking lot found: -1 left, 1 right, 0 not found yet
    actualTurn = 0
    while self.driver.step() != -1:
        ## here goes code that should be executed each step ##
        # Poll the keyboard once per step (assumed: keyboardCommands(),
        # defined above, returns the current key for the MANUAL branch).
        currentKey = self.keyboardCommands()

        # get actual compass value
        # compassValues = self.compass.getValues()
        # logger.debug("COMPASS: " + str(compassValues))

        # INIT STATUS (guard reconstructed; this span was garbled in the source)
        if self.status == Status.INIT:
            # logger.debug("INIT status")
            # set wheel angle
            self.setAngle(0.0)
            # set starting speed
            self.setSpeed(0.5)
            # set new status
            self.setStatus(Status.TURN)
            # skip to next cycle to ensure everything is working fine
            continue

        # FOLLOW LINE STATUS
        if self.status == Status.FOLLOW_LINE:
            # set cruise speed
            self.setSpeed(0.5)
            # compute new angle
            newAngle = self.lineFollower.getNewSteeringAngle()
            # logger.debug("new steering angle: " + str(newAngle))
            # set new steering angle
            if newAngle != UNKNOWN:
                self.setAngle(newAngle)
            else:
                self.setStatus(Status.TURN)

        # TURN STATUS
        if self.status == Status.TURN:
            if actualTurn < len(self.turns):
                turn = self.turns[actualTurn]
                self.setAngle(0.5 * turn * MAX_ANGLE)
            else:
                self.setStatus(Status.STOP)
            # compute new angle
            newAngle = self.lineFollower.getNewSteeringAngle()
            # if line is found
            if newAngle != UNKNOWN:
                self.setStatus(Status.FOLLOW_LINE)
                actualTurn += 1
                continue

        # FORWARD STATUS
        if self.status == Status.FORWARD:
            # logger.debug("FORWARD status")
            # set cruise speed
            self.setSpeed(0.2)
            # avoiding obstacles
            ds = self.distanceSensors
            # get distance sensors values
            frontLeftSensor = ds.frontLeft.getValue()
            frontRightSensor = ds.frontRight.getValue()
            sideLeftSensor = ds.sideLeft.getValue()
            sideRightSensor = ds.sideRight.getValue()
            # set values of thresholds
            tolerance = 10
            sideThreshold = 950
            # check if front left obstacle, turn right
            if frontLeftSensor > frontRightSensor + tolerance:
                self.setAngle(RIGHT * frontLeftSensor / 500.0 * MAX_ANGLE)
                logger.debug("Steering angle: " + str(self.angle))
            # check if front right obstacle, turn left
            elif frontRightSensor > frontLeftSensor + tolerance:
                self.setAngle(LEFT * frontRightSensor / 500.0 * MAX_ANGLE)
                logger.debug("Steering angle: " + str(self.angle))
            # check if side left obstacle, turn slightly right
            elif sideLeftSensor > sideThreshold:
                self.setAngle(RIGHT * sideLeftSensor / 4000.0 * MAX_ANGLE)
            # check if side right obstacle, turn slightly left
            elif sideRightSensor > sideThreshold:
                self.setAngle(LEFT * sideRightSensor / 4000.0 * MAX_ANGLE)
            # if no obstacle go straight
            else:
                self.setAngle(self.angle / 1.5)

        # SEARCHING_PARK STATUS
        if self.status == Status.SEARCHING_PARK:
            # set slow speed
            self.setSpeed(0.2)
            # set straight wheels
            self.setAngle(0.0)
            # get distance and position sensors
            ds = self.distanceSensors
            ps = self.positionSensors
            # log info for debug
            logger.debug("Left Distance Sensor: " + str(ds.sideLeft.getValue()))
            logger.debug("Left Position Sensor: " + str(ps.frontLeft.getValue()) + " rad")
            logger.debug("Left Wheel Length: " + str(self.leftWheelDistance) + " m")
            logger.debug("Starting position: " + str(leftStartingPosition) + " m")
            logger.debug("Parking Lot Length: " +
                         str(self.leftWheelDistance - leftStartingPosition) + " m")
            sideThreshold = 650
            leftSensorValue = ds.sideLeft.getValue()
            rightSensorValue = ds.sideRight.getValue()

            # checking parking lot on the LEFT side
            if leftSensorValue < sideThreshold and not leftIsEmpty:
                leftIsEmpty = True
                leftStartingPosition = self.leftWheelDistance  # 100
            elif leftSensorValue > sideThreshold and leftIsEmpty:
                leftIsEmpty = False
            elif leftIsEmpty and self.leftWheelDistance - leftStartingPosition > LENGTH + LENGTH / 3:
                leftStartingPosition = self.leftWheelDistance  # 200 - 100
                sideOfParkingLot = LEFT
                self.setStatus(Status.FORWARD2)

            # checking parking lot on the RIGHT side
            if rightSensorValue < sideThreshold and not rightIsEmpty:
                rightIsEmpty = True
                rightStartingPosition = self.rightWheelDistance
            elif rightSensorValue > sideThreshold and rightIsEmpty:
                rightIsEmpty = False
            elif rightIsEmpty and self.rightWheelDistance - rightStartingPosition > LENGTH + LENGTH / 3:
                rightStartingPosition = self.rightWheelDistance
                sideOfParkingLot = RIGHT
                self.setStatus(Status.FORWARD2)

        # this ensures the parking manoeuvre starts after going forward,
        # not as soon as the parking lot is detected
        if self.status == Status.FORWARD2:
            distance = 0.19
            if sideOfParkingLot == LEFT:
                if self.leftWheelDistance - leftStartingPosition > distance:
                    self.status = Status.PARKING
            elif sideOfParkingLot == RIGHT:
                if self.rightWheelDistance - rightStartingPosition > distance:
                    self.status = Status.PARKING
            else:
                logger.warning("Parking lot not found! I don't know if right or left.")
                self.setStatus(Status.SEARCHING_PARK)

        # starting the parking manoeuvre
        if self.status == Status.PARKING:
            if sideOfParkingLot != LEFT and sideOfParkingLot != RIGHT:
                logger.error("side of parking lot unknown.")
                exit(1)
            # stop the vehicle, turn the wheels and go back
            self.setSpeed(0.0)
            self.setAngle(sideOfParkingLot * MAX_ANGLE)
            self.setSpeed(-0.1)
            # when should it turn the other way
            backThreshold = 400
            ds = self.distanceSensors
            rear = ds.back.getValue()
            logger.debug("Back sensor: " + str(rear))
            if rear > backThreshold:
                self.status = Status.PARKING2

        if self.status == Status.PARKING2:
            self.setAngle(-1 * sideOfParkingLot * MAX_ANGLE)
            threshold = 945
            rear = self.distanceSensors.back.getValue()
            if rear > threshold:
                self.setStatus(Status.CENTER)

        if self.status == Status.CENTER:
            self.setAngle(0.0)
            self.setSpeed(0.2)
            rear = self.distanceSensors.back.getValue()
            front = self.distanceSensors.frontCenter.getValue()
            if rear - front < 20:
                self.setStatus(Status.STOP)

        if self.status == Status.STOP:
            self.setSpeed(0.0)
            self.setAngle(0.0)
            # if obstacle is cleared go forward
            if not self.avoidObstacle() and self.prevStatus == Status.PARKING2:
                self.setStatus(Status.FORWARD)

        if self.status == Status.MANUAL:
            # get current state
            speed = self.speed
            angle = self.angle
            # logger.debug("Current Key: " + str(currentKey))
            # keyboard controls
            # accelerate
            if currentKey == self.keyboard.UP:
                if speed < 0:
                    speed += 0.02
                else:
                    speed += 0.008
            # brake
            elif currentKey == self.keyboard.DOWN:
                if speed > 0:
                    speed -= 0.02
                else:
                    speed -= 0.008
            # turn left
            elif currentKey == self.keyboard.LEFT:
                angle -= 0.01
            # turn right
            elif currentKey == self.keyboard.RIGHT:
                angle += 0.01
            # handbrake
            elif currentKey == ord(' '):
                speed /= 4
            # update state
            self.setSpeed(speed)
            self.setAngle(angle)

def create(cls, **query):
    inst = cls(**query)
    inst.save(force_insert=True)
    inst._prepare_instance()
    logger.info("Created Email %s", inst.message_id)
    return inst

def _main():
    args, config = _parse_args()
    """
    #####################################################
    Config
    #####################################################
    """
    config_engine_url = config.get('Analyze', 'engine_url', '')
    config_mirror_engine_url = config.get('Analyze', 'mirror_engine_url', None)
    config_custom_bts_scraper = config.get('Analyze', 'custom_bts_scraper', None)
    config_threads = config.getint('Analyze', 'threads')
    config_lookup_email = config.getboolean('Analyze', 'lookup_email')
    config_loglevel = config.getloglevel('Analyze', 'log_level')
    config_logpath = config.get('Analyze', 'log_path')
    config_logformat = config.get('Analyze', 'log_format')
    config_transformerdict = config.getdict('Analyze', 'code_transformer', ())
    config_mirror = args.mirror
    config_update_repo = args.update_repo
    config_scrape_bts = args.scrape_bts
    if args.files:
        config_debugging_single_file = True
    else:
        config_debugging_single_file = False
    logger.setup_logger(format_=config_logformat,
                        level=config_loglevel,
                        filename=config_logpath)
    logger.banner("Running " + os.path.basename(__file__))
    logger.info("Options:")
    logger.info(" Database: %s",
                config_engine_url[:config_engine_url.find(':')])
    logger.info(" threads: %s", config_threads)
    logger.info(" lookup_email: %s", config_lookup_email)
    if config_mirror and config_mirror_engine_url is not None:
        # Do not print the engine_url since it may contain user/pass
        logger.info(" DB mirroring: Yes")
    logger.info(" scrape bug tracking system: %s", config_scrape_bts)
    logger.info(" Update repo prior to analysis: %s", config_update_repo)
    os.nice(10)
    """
    #####################################################
    Setup instances
    #####################################################
    """
    vcs = VcsWrapper.VcsFactory(config=config, shared_dict_ctor=Manager().dict)
    maintainerobj = Maintainers.Maintainers(config.get('General', 'maintainers'))
    walker = SrcRootWalker.SrcRootWalker(
        config.getlist('Analyze', 'include_patterns'),
        config.getlist('Analyze', 'exclude_patterns', ()))
    db = MetricsDb.MetricsDb(config_engine_url)
    pool = Pool(processes=config_threads)
    last_metric_update = None
    """
    #####################################################
    Update repo
    #####################################################
    """
    if config_update_repo:
        logger.info('Updating repo prior to analysis...')
        vcs.update_repo()
        logger.info('Updating repo done')
    start = datetime.now()
    """
    #####################################################
    Scrape Bug Tracking System for defects and add to metrics.db
    #####################################################
    """
    if config_scrape_bts and config_custom_bts_scraper is not None:
        _scrape_meta_defects(db, config_custom_bts_scraper)
    """
    #####################################################
    Start analysis
    #####################################################
    """
    if config_lookup_email:
        _lookup_emails(db)
    else:
        logger.info("Looking up files to analyze...")
        if config_debugging_single_file:
            all_files = args.files
        else:
            last_metric_update = db.get_eav_value('last_metric_update')
            if last_metric_update is not None:
                # Subtract an extra day to make sure that we don't miss
                # anything during the overlap of the analysis.
                last_metric_update = last_metric_update - timedelta(days=1)
            all_files = walker.find_all_files_in_srcroots()
        logger.info("Found %s files!", len(all_files))
        if len(all_files) != 0:
            _lookup_defect_modifications(pool, walker, vcs, db, all_files,
                                         last_metric_update)
            _lookup_metrics(pool, walker, vcs, db, all_files,
                            last_metric_update, config_transformerdict)
            if not config_debugging_single_file:
                logger.info("Doing db cleanup...")
                _remove_unwanted_files(
                    db,
                    {walker.translate_env(file_): None for file_ in all_files})
        _lookup_maintainers(pool, db, maintainerobj)
        logger.info("Finished analysis phase in %s" % (datetime.now() - start))
        if not config_debugging_single_file:
            db.set_eav_value('last_metric_update', start)
    """
    #####################################################
    Mirroring
    #####################################################
    """
    if config_mirror and config_mirror_engine_url is not None:
        logger.info(
            "Mirroring %s -> %s",
            config_engine_url[:config_engine_url.find(':')],
            config_mirror_engine_url[:config_mirror_engine_url.find(':')])
        start = datetime.now()
        MetricsDb.MetricsDb(config_mirror_engine_url).copy_from_db(db)
        logger.info("Finished mirroring phase in %s" % (datetime.now() - start))

def _lookup_metrics(pool, walker, vcs, db, files, last_update,
                    config_transformerdict):
    logger.info("Looking up metrics...")
    file_map = zip([vcs] * len(files), [last_update] * len(files), files)
    version_map = []
    logger.info(" Looking up all VCS versions...")
    iter_ = FancyBar(max=len(file_map)).iter(
        pool.imap_unordered(worker(_get_lsv), file_map))
    for file_, contributions in iter_:
        for contrib_index, contrib in enumerate(contributions):
            prev_version = (contributions[contrib_index - 1]['version']
                            if contrib_index > 0 else None)
            filext = os.path.splitext(file_)[1]
            version_map.append((file_, contrib, prev_version, vcs,
                                config_transformerdict.get(filext, None)))
    logger.info(" Looking up all metrics...")
    with db.get_session() as session:
        iter_ = FancyBar(max=len(version_map)).iter(
            pool.imap_unordered(worker(_get_metrics), version_map))
        for file_, contrib in iter_:
            # Results are coming in; insert them into the database here
            # rather than inside the process pool, since sqlite is not
            # threadsafe.
            version = contrib['version']
            date_ = contrib['datetime']
            user = _translate_username(contrib)
            translated_file = walker.translate_env(file_)
            complexity = contrib['complexity']
            changerates = contrib['changerates']
            for function, (added, changed, deleted,
                           nloc) in changerates.iteritems():
                if function not in complexity:
                    if file_.endswith((".c", ".cc", ".cpp", ".h", ".hpp")):
                        logger.debug("%s Could not find complexity function %s",
                                     file_, function)
                        logger.debug("%s Available functions:",
                                     os.path.basename(file_))
                        for func in complexity.iterkeys():
                            logger.debug("%s", func)
                    cyclomatic_complexity = None
                    tokens = None
                    parameter_count = None
                else:
                    cyclomatic_complexity, tokens, parameter_count = \
                        complexity[function]
                db.insert_change_metric(
                    session,
                    file_=translated_file,
                    version=version,
                    function=function,
                    date_=date_,
                    user=user,
                    added=added,
                    changed=changed,
                    deleted=deleted,
                    nloc=nloc,
                    cyclomatic_complexity=cyclomatic_complexity,
                    token_count=tokens,
                    parameter_count=parameter_count)
        session.commit()

def updateSpeedAndAngle(self):
    isLineLost = self.lineFollower.isLineLost()
    currentPath = self.currentPath
    logger.debug("Current Status: " + str(self.status) + " prev Status: " +
                 str(self.prevStatus))
    lineFollowerAngle = self.lineFollower.getNewSteeringAngle()
    if currentPath != UNKNOWN and self.actualTurn == 0:
        if self.currentPath[self.actualTurn] == U_TURN:
            self.setStatus(U_TURN)
            self.actualTurn += 1
    elif self.status == FOLLOW_LINE:
        self.steeringAngle = lineFollowerAngle
        if self.isGoalReach():
            self.setStatus(STOP)
        if isLineLost and currentPath == UNKNOWN:
            self.speed = 0.0
        elif isLineLost and currentPath != UNKNOWN and Map.findNearestIntersection(
                self.positioning.getPosition(), 1) != -1:
            # if self.prevStatus != SEARCH_LINE:
            self.setStatus(TURN)
        elif isLineLost and Map.findNearestIntersection(
                self.positioning.getPosition()) == -1:
            self.setStatus(SEARCH_LINE)
    elif self.status == TURN:
        if currentPath != UNKNOWN and self.actualTurn < len(currentPath):
            turn = currentPath[self.actualTurn]
            self.steeringAngle = 0.57 * turn
        else:
            self.currentPath = UNKNOWN
        if not isLineLost:
            self.actualTurn += 1
            self.setStatus(FOLLOW_LINE)
    elif self.status == SEARCH_LINE:
        self.steeringAngle = self.lineFollower.getSteeringAngleLineSearching()
        if not isLineLost:
            logger.debug("Line was lost and I found it!")
            self.setStatus(FOLLOW_LINE)
        threshold = 500
        angle = 0.5
        logger.debug("FRONT LEFT: ")
        if self.distanceSensors.frontLeft.getValue() > threshold:
            self.lineFollower.resetLastLineKnownZone(angle)
        elif self.distanceSensors.frontRight.getValue() > threshold:
            self.lineFollower.resetLastLineKnownZone(-angle)
    elif self.status == U_TURN:
        logger.debug("Actual orientation: " +
                     str(self.positioning.getOrientation()) +
                     " goal orientation: " + str(self.uTurnGoalOrientation))
        self.sensors = self.distanceSensors
        logger.debug("U_TURN: Status: " + str(self.uTurnStatus))
        if self.uTurnStatus == UNKNOWN:
            # check if we can do a U turn
            self.uTurnStatus = 1
            self.steeringAngle = 1
            self.uTurnGoalOrientation = Orientation(
                (self.positioning.inaccurateOrientation + 2) % 4)
        if self.uTurnStatus == 1:
            if (self.sensors.frontDistance(950)
                    or self.positioning.getOrientation() ==
                    ((self.uTurnGoalOrientation + 1) % 4)
                    or self.positioning.getOrientation() ==
                    ((self.uTurnGoalOrientation - 1) % 4)):
                logger.debug("U_TURN: First step complete, going back (status 1)")
                self.speed = -0.2
                self.steeringAngle = -1 * self.steeringAngle
                self.uTurnStatus = 3
                self.uTurnStartingMeter = self.positioning.getActualDistance()
            else:
                self.speed = 0.2
                self.steeringAngle = 1
        elif self.uTurnStatus == 2:
            if self.sensors.frontDistance(950):
                logger.debug("U_TURN: First step complete, going back (status 2)")
                self.speed = -0.2
                self.steeringAngle = -1 * self.steeringAngle
                self.uTurnStatus += 1
                self.uTurnStartingMeter = self.positioning.getActualDistance()
            if self.positioning.getOrientation() == self.uTurnGoalOrientation:
                self.uTurnStatus += 2
        elif self.uTurnStatus == 3:
            if self.sensors.backDistance(950):
                logger.debug("U_TURN: Obstacle found behind, going forward")
                self.speed = 0.2
                self.steeringAngle = -1 * self.steeringAngle
                self.uTurnStatus -= 1
            if self.positioning.getOrientation() == self.uTurnGoalOrientation:
                self.uTurnStatus += 1
        else:
            logger.debug("U_TURN: Maneuver complete")
            logger.debug("U_TURN: Distance: " +
                         str(self.positioning.getActualDistance() -
                             self.uTurnStartingMeter))
            distanzaPercorsa = (self.positioning.getActualDistance() -
                                self.uTurnStartingMeter)
            if distanzaPercorsa >= 0:
                self.steeringAngle = -0.5 * self.steeringAngle
            elif abs(distanzaPercorsa) > 0.07:
                self.steeringAngle = -0.1 * self.steeringAngle
            else:
                self.steeringAngle = -0.2 * self.steeringAngle
            # if Map.getValue(self.positioning.getPosition()) == Map.C:
            #     self.steeringAngle *= -5
            logger.debug("U_TURN: Distance: " + str(distanzaPercorsa) +
                         ", steering angle: " + str(self.steeringAngle))
            self.speed = 0.5
            self.uTurnStatus = UNKNOWN
            self.uTurnGoalOrientation = UNKNOWN
            self.uTurnStartingMeter = UNKNOWN
            self.lineFollower.resetLastLineKnownZone(self.steeringAngle)
            self.setStatus(SEARCH_LINE)
    elif self.status == STOP:
        self.speed = 0.0
        logger.info("Destination Reached")
        # logger.debug("Steering angle: " + str(self.steeringAngle) +
        #              " STATUS: " + str(self.status))
    elif self.isGoalReach() and isLineLost and currentPath == UNKNOWN:
        self.speed = 0.0
    elif not isLineLost:
        self.steeringAngle = self.lineFollower.getNewSteeringAngle()
        # self.actualTurn += 1
    elif isLineLost and currentPath != UNKNOWN:
        if self.actualTurn < len(currentPath):
            turn = currentPath[self.actualTurn]
            self.steeringAngle = 0.5 * turn
            # what if U_TURN? Return it to motion to make a u-turn?
        else:
            currentPath = UNKNOWN
    elif isLineLost and currentPath == UNKNOWN:
        # self.speed = 0.0
        pass

def _lookup_metrics(pool, walker, vcs, db, files, last_update,
                    config_transformerdict):
    logger.info("Looking up metrics...")
    file_map = zip([vcs] * len(files), [last_update] * len(files), files)
    version_map = []
    logger.info(" Looking up all VCS versions...")
    iter_ = FancyBar(max=len(file_map)).iter(
        pool.imap_unordered(worker(_get_lsv), file_map))
    for file_, contributions in iter_:
        for contrib_index, contrib in enumerate(contributions):
            prev_version = (contributions[contrib_index - 1]["version"]
                            if contrib_index > 0 else None)
            filext = os.path.splitext(file_)[1]
            version_map.append((file_, contrib, prev_version, vcs,
                                config_transformerdict.get(filext, None)))
    logger.info(" Looking up all metrics...")
    with db.get_session() as session:
        iter_ = FancyBar(max=len(version_map)).iter(
            pool.imap_unordered(worker(_get_metrics), version_map))
        for file_, contrib in iter_:
            # Results are coming in; insert them into the database here
            # rather than inside the process pool, since sqlite is not
            # threadsafe.
            version = contrib["version"]
            date_ = contrib["datetime"]
            user = _translate_username(contrib)
            translated_file = walker.translate_env(file_)
            complexity = contrib["complexity"]
            changerates = contrib["changerates"]
            for function, (added, changed, deleted,
                           nloc) in changerates.iteritems():
                if function not in complexity:
                    if file_.endswith((".c", ".cc", ".cpp", ".h", ".hpp")):
                        logger.debug("%s Could not find complexity function %s",
                                     file_, function)
                        logger.debug("%s Available functions:",
                                     os.path.basename(file_))
                        for func in complexity.iterkeys():
                            logger.debug("%s", func)
                    # The original left max_nd/fin/fout unbound on this
                    # branch, which raised NameError below; default them too.
                    cyclomatic_complexity = None
                    tokens = None
                    parameter_count = None
                    max_nd = None
                    fin = None
                    fout = None
                else:
                    (cyclomatic_complexity, tokens, parameter_count,
                     max_nd, fin, fout) = complexity[function]
                db.insert_change_metric(
                    session,
                    file_=translated_file,
                    version=version,
                    function=function,
                    date_=date_,
                    user=user,
                    added=added,
                    changed=changed,
                    deleted=deleted,
                    nloc=nloc,
                    cyclomatic_complexity=cyclomatic_complexity,
                    token_count=tokens,
                    parameter_count=parameter_count,
                    max_nesting_depth=max_nd,
                    fan_in=fin,
                    fan_out=fout)
        session.commit()
