def List(self):
    """List system roles with pagination.

    Expects POST fields: token, data (JSON, optional 'name' filter),
    page and limit. Returns a JSON payload with the matching rows and
    the total count.
    """
    # Verify the admin token for this route.
    token = self.Post('token')
    msg = AdminToken.Verify(token, request.path)
    if msg != '':
        return self.GetJSON({'code': 4001, 'msg': msg})
    # Required parameters.
    data = self.Post('data')
    page = self.Post('page')
    limit = self.Post('limit')
    if not data or not page or not limit:
        return self.GetJSON({'code': 4000, 'msg': '参数错误!'})
    param = Util.JsonDecode(data)
    name = Util.Trim(param['name']) if 'name' in param.keys() else ''
    # Count matching rows.
    m = SysRoleM()
    m.Columns('count(*) AS num')
    m.Where('name LIKE %s', '%' + name + '%')
    total = m.FindFirst()
    # Page query; the FROM_UNIXTIME format strings are bound through the
    # Where() parameter list (project ORM convention).
    m.Columns('id', 'name', 'FROM_UNIXTIME(ctime, %s) as ctime',
              'FROM_UNIXTIME(utime, %s) as utime', 'perm')
    m.Where('name LIKE %s', '%Y-%m-%d %H:%i:%s', '%Y-%m-%d %H:%i:%s',
            '%' + name + '%')
    m.Page(int(page), int(limit))
    rows = m.Find()  # renamed from `list` to avoid shadowing the builtin
    # Response.
    return self.GetJSON({
        'code': 0, 'msg': '成功',
        'list': rows, 'total': total['num']
    })
class TencentExtractor(object):
    """Extracts app metadata rows from Tencent app-store search JSON."""

    def __init__(self):
        self.util = Util()
        self.writed_content = list()

    def extractor_json(self, url, html):
        """Parse the JSON search response, write one CSV row per app to
        file, and return the next-page marker.

        Row fields: app name, package name, company, type, orgame,
        apkCode, appId, media, keyword.

        :param url: dict with at least 'media' and 'keyword' keys.
        :param html: raw JSON response body.
        :return: obj['pageNumberStack'] on success, else None.
        """
        try:
            orgame = '-'
            apkCode = '-'
            appId = '-'
            media = url['media']
            html_json = json.loads(html, strict=False)
            obj = html_json['obj']
            if obj is not None:
                apps = obj['appDetails']
                for app in apps:
                    app_name = app.get('appName', '-')
                    apk_name = app.get('pkgName', '-')
                    com_name = app.get('authorName', '-')
                    apk_type = app.get('categoryName', '-')
                    content = ','.join([app_name, apk_name, com_name,
                                        apk_type, orgame, apkCode, appId,
                                        media, url['keyword']])
                    self.util.write_data(content)
                pageNumberStack = obj['pageNumberStack']
                return pageNumberStack
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # are not swallowed.
            traceback.print_exc()
            print('extractor_json error')
def _send_notice(self, requester_entity_id, dataset_id, expiry_date):
    """Email the requester that access to a dataset's key expires soon.

    :param requester_entity_id: Vault entity id used to look up the email.
    :param dataset_id: frdr.vault.dataset_uuid of the encrypted dataset.
    :param expiry_date: date after which the key is no longer accessible.
    """
    requester_email = self._vault_client.read_entity_by_id(
        requester_entity_id)
    subject = "Vault - Access to sensitive data's key expires soon"
    # Fixed user-facing typo: "metatdata" -> "metadata".
    body_html = """\
<html>
<body>
<p>
Attention:
</p>
<p>
Please note that your access to the key of an encrypted dataset with \
the metadata frdr.vault.dataset_uuid {dataset_id} will expire on {expiry_date}. \
Please ensure you have downloaded the dataset from FRDR and decrypted it prior \
to {expiry_date}. After {expiry_date} you will no longer be able to \
access the key.
</p>
<p>
If you encounter any problems or have other questions, please contact us at \
[email protected].
Best of luck with the data!
</p>
<br/><br/>
FRDR Support
[email protected]
</body>
</html>
""".format(dataset_id=dataset_id, expiry_date=expiry_date)
    Util.send_email(requester_email, subject, body_html)
def enable_tasks(self, config, kapacitor_started, host_name, dev_mode):
    """Enable the kapacitor tasks described in the etcd config.

    :param config: dict with a 'task' list; each entry needs the
        'tick_script' and 'task_name' keys.
    :param kapacitor_started: whether the kapacitor daemon is running.
    :param dev_mode: when False, TLS certs are deleted after start-up.
    :return: (error_msg, FAILURE) on bad config; otherwise blocks forever.
    """
    for task in config['task']:
        if 'tick_script' in task:
            tick_script = task['tick_script']
        else:
            error_msg = ("tick_script key is missing in config "
                         "Please provide the tick script to run "
                         "EXITING!!!!")
            return error_msg, FAILURE
        if 'task_name' in task:
            task_name = task['task_name']
        else:
            error_msg = ("task_name key is missing in config "
                         "Please provide the task name "
                         "EXITING!!!")
            return error_msg, FAILURE
        if kapacitor_started:
            self.logger.info("Enabling {0}".format(tick_script))
            self.enable_classifier_task(host_name, tick_script, task_name)
    # NOTE(review): assumed the cert cleanup and keep-alive loop run once,
    # after all tasks are enabled — confirm against the original layout.
    if not dev_mode:
        try:
            file_list = [KAPACITOR_CERT, KAPACITOR_KEY]
            Util.delete_certs(file_list)
        except (OSError, IOError):
            # Fixed missing space in the implicit string concatenation
            # (previously logged "removingkapacitor certs").
            self.logger.error("Exception Occured while removing "
                              "kapacitor certs")
    # Keep the process alive so the enabled tasks keep running.
    while True:
        time.sleep(10)
def Base64(params=None):
    """Save a base64-encoded payload as a file.

    :param params: optional dict overriding defaults: path (upload dir),
        base64 (file content), filename, ext (extension).
    :return: the saved file name, or '' on failure.
    """
    # `params=None` replaces the shared-mutable-default pitfall; callers
    # passing a dict (or nothing) see identical behavior.
    if params is None:
        params = {}
    param = Util.ArrayMerge(
        {
            'path': 'upload/',   # upload directory
            'base64': '',        # file content
            'filename': '',      # file name
            'ext': 'png',        # extension
        }, params)
    # Payload; a "data:...;base64," style prefix carries the real type.
    base64 = param['base64']
    ct = Util.Explode(',', param['base64'])
    if len(ct) > 1:
        param['ext'] = Base64.GetExt(ct[0])
        base64 = ct[1]
    # Ensure the target directory exists.
    FileEo.Root = Env.root_dir
    if not FileEo.Mkdir(param['path']):
        print('[Upload] Mkdir:', '创建目录失败!')
        return ''
    # Build the file name and write the decoded content.
    filename = Upload.GetFileName(
    ) + '.' + param['ext'] if not param['filename'] else param['filename']
    if not FileEo.Writer(param['path'] + filename, Base64.Decode(base64)):
        print('[Upload] Writer:', '保存文件失败!')
        return ''
    return filename
def corpus_to_feature_and_label_mat(self, corpus_path, result_path):
    """Convert a labelled corpus into libsvm label/feature lines,
    caching the result at result_path and returning the parsed matrices.
    """
    # Reuse the cached matrix when it already exists.
    if Util.is_file(result_path):
        Util.log_tool.log.debug("loading data")
        return Util.get_libsvm_data(result_path)
    data = codecs.open(corpus_path, 'rb', FilePathConfig.file_encodeing,
                       'ignore')
    sparse_mat = codecs.open(result_path, 'wb',
                             FilePathConfig.file_encodeing, 'ignore')
    count = 0
    for line in data:
        count += 1
        # Progress logging every 10k documents.
        if count % 10000 == 0:
            Util.log_tool.log.debug("add" + str(count))
        document = Document(line)
        label_id = self.category_dic[document.label]
        content_words = document.get_filtered_content_words_feature()
        doc_len = len(content_words)
        words = self.lexicon.convert_document(content_words)
        terms = self.test_vector_builder.build(words, True, doc_len)
        sparse_mat.write(str(label_id))
        # Sort id:weight pairs by term id, ascending (libsvm format
        # requires ordered indices). NOTE: `cmp=` is Python 2 only.
        terms.sort(cmp=lambda x, y: cmp(x.term_id, y.term_id))
        for term in terms:
            sparse_mat.write(" " + str(term.term_id) + ":" + str(term.weight))
        sparse_mat.write("\n")
    data.close()
    sparse_mat.close()
    return Util.get_libsvm_data(result_path)
def __init__(self):
    """Set up the Mi app-store spider: util helper, extractor, search
    keywords and the phone/pad search URL templates."""
    self.util = Util()
    super(MiSpider, self).__init__()
    self.extractor = MiExtractor()
    self.keywords = self.util.get_keywords()
    template = 'http://app.mi.com/searchAll?keywords={}&typeall=%s&page={}'
    self.phone_url = template % 'phone'
    self.pad_url = template % 'pad'
def __permArr(self, perm: str):
    """Parse a space-separated list of "name:level" pairs into a dict
    mapping each name to its integer level."""
    result = {}
    if not perm:
        return result
    for item in Util.Explode(' ', perm):
        parts = Util.Explode(':', item)
        result[parts[0]] = int(parts[1])
    return result
def __init__(self, tokenfile):
    """Remember the token file and initialize the frdr-crypto GUI logger."""
    self._tokenfile = tokenfile
    log_path = os.path.join(dirs.user_data_dir, "frdr-crypto_log.txt")
    Util.get_logger("frdr-crypto", log_level="info", filepath=log_path)
    self._logger = logging.getLogger("frdr-crypto.gui")
def save_report(self):
    """Pickle this report object under a timestamped, model-labelled path."""
    stamp = datetime.now().strftime("-%Y-%m-%d-%H-%M")
    label = '{0}-{1}'.format(stamp, ClassifierConfig.cur_single_model)
    Util.save_object_into_pkl(
        self, str(FilePathConfig.result_report_path) % label)
def __init__(self):
    """Wire up the wrapper (helpers, store, tuning knobs) and start
    feeding URLs onto the sending queue."""
    super(DCWrapper, self).__init__()
    self.util = Util()
    self.store = Store()
    self.sleep_time = 5            # idle sleep (seconds) when no task
    self.sending_queue_max = 8000  # upper bound for sending_queue
    self.send_url_to_sended_queue()
def __init__(self, min_instances, max_instances, warmuptime, prefix, healthEndpoint):
    """Auto-scaler state: instance bounds, metrics and worker threads.

    :param min_instances: lower bound (also the initial desired count).
    :param max_instances: upper bound on instances.
    :param warmuptime: warm-up period for new instances.
    :param prefix: name prefix for created resources.
    :param healthEndpoint: endpoint polled for instance health.
    """
    self.prefix = prefix
    self.min_instances = min_instances
    self.max_instances = max_instances
    self.desired_instances = min_instances  # start at the lower bound
    self.warmuptime = warmuptime
    self.list_instances = []
    self.stopped_instances = []
    # Random suffix, presumably to keep resource names unique — TODO confirm.
    self.random = str(random.randint(1, 9999))
    self.security_group, self.security_group_id = self.create_security_group(
    )
    self._is_scaling = False
    self._lock = threading.Lock()
    # Background threads are created here but started elsewhere
    # (presumably in __init() — confirm).
    self.instance_management_thread = threading.Thread(
        target=self.instance_management)
    self.watch_usage_thread = threading.Thread(target=self.watch_usage)
    self.healthEndpoint = healthEndpoint
    # CPU utilization metrics (thresholds 60/30; units per CpuUtilizeMetric).
    self.CpuUtilizeMaxMetric = CpuUtilizeMetric(60, 5, "ScaleOut")
    self.CpuUtilizeMinMetric = CpuUtilizeMetric(30, 10, "ScaleIn")
    self.config = Util().get_config()
    self.AcceptableScaleOutBacklogMetric = AcceptableBacklogMetric(
        40, 40, self.config.get('dev', 'REQUESTS_SQS'), "ScaleOut")
    self.AcceptableScaleInBacklogMetric = AcceptableBacklogMetric(
        40, 40, self.config.get('dev', 'REQUESTS_SQS'), "ScaleIn")
    # Only the backlog metrics are registered as scaling drivers here.
    self.ScaleInMetric = [self.AcceptableScaleInBacklogMetric]
    self.ScaleOutMetric = [self.AcceptableScaleOutBacklogMetric]
    self.__init()
def initConfig(self):
    """Interactively collect integration settings and store them on
    self.config, aborting when the user declines the final review."""
    config = self.config
    idType = self.__getInput(
        "What do you want to integrate (ORGANIZATION/PROJECT)",
        ["ORGANIZATION", "PROJECT"])
    config.setIdType(idType)
    resource_id = self.__getInput("Enter your " + idType + " Id ", None)
    config.setId(resource_id)
    util = Util(config)
    if idType == "ORGANIZATION":
        self.printProjectList(util.getProjectList(), "\nProjects")
        serviceAccountProjectId = self.__getInput(
            "Enter the projectId where you want to create the Service Account",
            None)
        config.setServiceAccountProjectId(serviceAccountProjectId)
    else:
        config.setServiceAccountProjectId(resource_id)
    self.printAPI("\nAPIs to be enabled")
    self.printProjectList(util.getProjectList(), "\nProjects")
    enableApi = self.getYesNoPrompt(
        "Do You want to enable APIs in the projects(yes/no)")
    config.setEnableApi(enableApi)
    self.printRole("\nRoles Required")
    modifyIamPolicy = self.getYesNoPrompt(
        "Do you want to modify " + config.getIdType() + " IAM Policy(yes/no)")
    config.setSetIAMPolicy(modifyIamPolicy)
    self.review(util)
    if not self.getYesNoPrompt("\nDo You Want to continue(yes/no)"):
        exit(1)
def run(self):
    """Run the app manager, report API/IAM results, and write the
    credentials file when a key was produced."""
    self.util = Util(self.config)
    manager = AppManager(self.config, self.util)
    (api_success_list, api_error_list, service_account,
     key, setIamPolicyStatus) = manager.run()
    if api_success_list and len(api_success_list) != 0:
        self.printProjectList(
            api_success_list,
            "Successfully Enabled APIs in following projects")
    if api_error_list and len(api_error_list) != 0:
        self.printProjectErrorList(
            api_error_list, "Error Enabling APIs in following projects")
    if self.config.getSetIAMPolicy():
        self.printIamPolicyStatus(setIamPolicyStatus, "IAM Policy Set Status")
    self.printInterationData(service_account, key)
    if key:
        try:
            path = os.getcwd() + "/credentials.txt"
            self.writeToFile(service_account, key, path)
            logging.info("Copy Of Credentials written to file: " + path)
        except:
            logging.exception("Could not write data to file ")
def create_app():
    """Build the Flask app, start the SQS worker thread and register the
    health/averageResponseTime/stop endpoints."""
    app = Flask(__name__)
    with app.app_context():
        config = Util().get_config()
        request_sqs = config.get('dev', 'REQUESTS_SQS')
        response_sqs = config.get('dev', 'RESPONSE_SQS')
        sqsworker = SQSWorker(60, request_sqs, response_sqs, 360, 1)
        worker_thread = threading.Thread(target=sqsworker.listener)
        sqsworker.recurrentDeleteMessageJob.start()
        worker_thread.start()

        @app.route('/health', methods=['GET'])
        def get_health():
            return make_response(jsonify({'Response': 'Done'}), 200)

        @app.route('/averageResponseTime', methods=['GET'])
        def get_averageResponseTime():
            return make_response(jsonify({'Response': 'Done'}), 200)

        @app.route('/stop', methods=['GET'])
        def stop_listner():
            # Flag checked by the worker's listen loop.
            sqsworker.stopListner = True
            return make_response(jsonify({'Response': 'Done'}), 200)
    return app
def load(self, meterReadingFrequency: Frequency, lastNRows: int, lastNDays: int) -> Entity:
    """Fetch meter readings via pygazpar and build a Home Assistant
    sensor Entity holding one record per time period.

    :param meterReadingFrequency: reading granularity (HOURLY..MONTHLY).
    :param lastNRows: how many rows the pygazpar client should fetch.
    :param lastNDays: how many days of history to fetch.
    :return: the populated Entity, or None when no history was returned.
    """
    client = Client(self.__pygazparOptions.username,
                    self.__pygazparOptions.password,
                    self.__pygazparOptions.webdriver,
                    self.__pygazparOptions.wait_time,
                    self.__pygazparOptions.tmpdir, lastNRows,
                    self.__pygazparOptions.headlessMode,
                    meterReadingFrequency, lastNDays,
                    self.__pygazparOptions.testMode)
    client.update()
    history = client.data()
    if len(history) > 0:
        # One Home Assistant entity id per reading frequency.
        entityIdByFrequency = {
            Frequency.HOURLY: "sensor.gazpar_hourly_energy",
            Frequency.DAILY: "sensor.gazpar_daily_energy",
            Frequency.WEEKLY: "sensor.gazpar_weekly_energy",
            Frequency.MONTHLY: "sensor.gazpar_monthly_energy"
        }
        entity = Entity("sensor", entityIdByFrequency[meterReadingFrequency])
        for i in range(len(history)):
            timePeriod = TimePeriod.parse(
                history[i][PropertyName.TIME_PERIOD.value],
                meterReadingFrequency)
            recordTime = timePeriod.endTime
            # State/attributes are computed over the cumulative prefix
            # history[0:i+1], not the single row.
            state = str(Util.toState(history[0:i + 1]))
            attributes = json.dumps(Util.toAttributes(
                self.__pygazparOptions.username, meterReadingFrequency,
                history[0:i + 1]))
            entity.addRecord(self.__context_id, recordTime, state, attributes)
        return entity
    else:
        return None
def load_model(self): if not Util.is_file(self.model_path): Util.log_tool.log.error("model not exist") Util.quit() else: Util.log_tool.log.debug("loading model") print self.model_path self.model = joblib.load(self.model_path)
def corpus_to_feature_mat_from_file(self, corpus_path):
    """Build the raw feature matrix from a corpus file and persist it in
    svmlight format with all-zero labels."""
    corpus = codecs.open(corpus_path, 'rb', FilePathConfig.file_encodeing,
                         'ignore')
    sparse_mat = self.data_to_feature(corpus)
    zero_labels = np.zeros(sparse_mat.shape[0])
    Util.save_svmlight_file(sparse_mat, zero_labels,
                            FilePathConfig.raw_feature_path)
    corpus.close()
    return sparse_mat
def __init__(self):
    """Cache cookies, headers and proxies from the shared Util helper
    and echo the headers/cookies for inspection."""
    helper = Util()
    self.cookies = helper.get_cookies()
    self.headers = helper.get_headers()
    self.proxies = helper.get_proxies()
    print(self.headers)
    print(self.cookies)
def __init__(self):
    """Reset the result file and create the scheduler described by the
    configuration (process list plus deadline)."""
    Util.reset_file()
    self.selected_algorithm = Scheduling(config['Processes'],
                                         config['Scheduling']['Deadline'])
def execute_process(self, idx):
    """Run process `idx` for one tick and log the interval.

    :param idx: index of the process to execute; -1 means idle (no-op).
    """
    if idx == -1:
        return
    proc = self.processes[idx]
    proc.working = True
    proc.working_time += 1
    Util.write('{0},{1},{2}\n'.format(proc.name, self.time, (self.time + 1)))
def classify_documents_top_k_from_file(self, raw_documents_file_path, k):
    """Classify every document in the file, reusing the cached raw
    feature matrix when present, and return the top-k results."""
    if not Util.is_file(FilePathConfig.raw_feature_path):
        feature_mat = self.corpus_to_feature_mat_from_file(
            raw_documents_file_path)
    else:
        Util.log_tool.log.debug("load raw mat")
        feature_mat, label_vec = Util.get_libsvm_data(
            FilePathConfig.raw_feature_path)
    return self.classify_documents_top_k(feature_mat, k)
def testStaticFile(self):
    """Serving a temp PNG via handleStaticFiles yields status 200, the
    PNG content type, and the body split into two chunks."""
    global global_headers
    png = Util.getTempFile("test.png", "pyunit")
    Util.writeFile(png, "SimpleTest")
    docRoot = os.path.dirname(os.path.dirname(png))
    relPath = png[len(docRoot):]
    rc = self._app.handleStaticFiles(relPath, docRoot, dummyStartResponse)
    self.assertEqual(("Content-Type", "image/png"), global_headers)
    self.assertEqual(200, global_status)
    self.assertEqual("Simpl", rc[0])
    self.assertEqual("eTest", rc[1])
def validate(self):
    """Check the configured id and the service-account project id.

    :return: True when the project id is one of the visible projects.
    :raises Exception: on an invalid org id or unknown project id.
    """
    self.util = Util(self.config)
    config = self.config
    if config.getIdType() == "ORGANIZATION" and not config.getId().isdigit():
        raise Exception("Invalid org id")
    target = config.getServiceAccountProjectId()
    for project in self.util.getProjectList():
        if project['projectId'] == target:
            return True
    raise Exception("Project Id is not valid: " + target)
def main3():
    """Classify the raw news corpus with every configured model and
    pickle each model's results next to the corpus."""
    main_classifier = MainClassifier()
    for model_name in ClassifierConfig.train_data_claasifiers:
        main_classifier.abstract_classifier.model = None
        ClassifierConfig.cur_single_model = model_name
        main_classifier.set_model()
        Util.log_tool.log.debug(ClassifierConfig.cur_single_model)
        results = main_classifier.classify_documents_from_file(
            FilePathConfig.raw_news_path)
        Util.save_object_into_pkl(
            results,
            FilePathConfig.file_root_path
            + ClassifierConfig.cur_single_model + "-raw_results.pkl")
def PostJson(url: str, data: dict, header: dict = None):
    """POST `data` as JSON and decode the JSON response.

    :param url: target URL.
    :param data: payload dict; an empty dict is sent as '{}'.
    :param header: optional headers merged over the JSON content type.
    :return: decoded response, or None when the response body is empty.
    """
    # `header=None` replaces a mutable default argument; callers passing
    # a dict (or nothing) see identical behavior.
    if header is None:
        header = {}
    param = Util.ArrayMerge(
        {
            'Content-Type': 'application/json; charset=utf-8',  # JSON body
        }, header)
    # Renamed local (was `json`) so it no longer shadows the json module.
    payload = Util.JsonEncode(data) if len(data) > 0 else '{}'
    res = requests.post(url, data=payload, headers=param).text
    return Util.JsonDecode(res) if len(res) > 0 else None
def Edit(self):
    """Update the current user's profile from the posted JSON payload
    and echo the normalized info back."""
    # Verify the admin token for this route.
    token = self.Post('token')
    msg = AdminToken.Verify(token, request.path)
    if msg != '':
        return self.GetJSON({'code': 4001, 'msg': msg})
    tData = AdminToken.Token(token)
    # Required JSON payload.
    data = self.Post('data')
    if not data:
        return self.GetJSON({'code': 4000, 'msg': '参数错误!'})
    param = Util.JsonDecode(data)
    # Build the update record; birthday is stored as a unix timestamp.
    # NOTE(review): a missing key in `param` raises KeyError here.
    model = UserInfoM()
    info = {
        'nickname': Util.Trim(param['nickname']),
        'name': Util.Trim(param['name']),
        'gender': Util.Trim(param['gender']),
        'birthday': Util.Strtotime(Util.Trim(param['birthday']), '%Y-%m-%d'),
        'position': Util.Trim(param['position']),
    }
    model.Set(info)
    model.Where('uid=%s', tData['uid'])
    model.Update()
    # Echo back with a display-formatted birthday.
    info['uname'] = tData['uname']
    info['img'] = param['img']
    info['birthday'] = Util.Date('%Y-%m-%d', info['birthday'])
    return self.GetJSON({'code': 0, 'msg': '成功', 'uinfo': info})
def testGetBodyOfStatic(self):
    """getBodyOfStatic extracts the content of the main-page div from a
    static HTML file."""
    session = self.getSession()
    fn = Util.getTempFile('test_html.htm', 'sidu-help')
    Util.writeFile(
        fn, '''
<html>
<body>
<div id="main-page">
<p>Text</p>
</div>
</body>
</html>
''')
    self.assertEquals("<p>Text</p>\n", session.getBodyOfStatic(fn))
def VerifyUserSig(userId: int, userSig: str):
    """Validate a TRTC user signature.

    :param userId: expected user identifier.
    :param userSig: url-safe base64, compressed JSON signature.
    :return: remaining lifetime in seconds; 0 when invalid or expired.
    """
    # Decode, decompress, then parse the signature payload.
    raw = Base64.UrlDecode(userSig)
    un_sig = Base64.UnCompress(raw)
    payload = Util.JsonDecode(bytes.decode(un_sig))
    # The app id and identifier must match our configuration/caller.
    cfg = Tencent.TRTC()
    if str(cfg['SDKAppID']) != payload['TLS.sdkappid']:
        return 0
    if str(userId) != payload['TLS.identifier']:
        return 0
    # Expiry: issue time plus lifetime must still be in the future.
    now_time = Util.Time()
    out_time = int(payload['TLS.time']) + int(payload['TLS.expire'])
    return out_time - now_time if now_time <= out_time else 0
def Perm(token: str):
    """Return the {name: level} permission map cached in Redis for the
    user encoded in `token`; empty dict when the token is invalid."""
    tData = Safety.Decode(token)
    if not tData:
        return {}
    # Fetch the serialized permission string for this uid.
    redis = Redis()
    permStr = redis.Get(Env.api_token_prefix + '_perm_' + tData['uid'])
    redis.Close()
    # Parse space-separated "name:level" pairs.
    permAll = {}
    for item in ([] if not permStr else Util.Explode(' ', permStr)):
        parts = Util.Explode(':', item)
        permAll[parts[0]] = int(parts[1])
    return permAll
def __init__(self):
    """ Initializer """
    self.util = Util()
    self.config = self.util.config
    self.use_web = self.config[USAGE][USE_WEB]
    if self.use_web:
        # Silence console output and lazily import the web server only
        # when the web UI is enabled.
        f = open(os.devnull, "w")
        sys.stdout = sys.stderr = f
        from web.server.webserver import WebServer
        self.web_server = WebServer(self.util, self)
    about = AboutScreen(self.util)
    about.add_listener(self.go_home)
    self.screens = {"about": about}
    self.start_audio()
    self.screensaver_dispatcher = ScreensaverDispatcher(self.util)
    if self.use_web:
        self.web_server.add_screensaver_web_listener(self.screensaver_dispatcher)
    self.event_dispatcher = EventDispatcher(self.screensaver_dispatcher, self.util)
    self.current_screen = None
    # Start on the stations screen.
    self.go_stations()
def collect(self):
    """Fetch and store daily history data for this stock, maintaining
    forward-adjusted close prices and 5/10/20-day moving averages."""
    begin_date = self.__get_begin_date()
    end_date = Util.get_today()
    _logger.info('collect stock(%s) history data, begin date: %r, end date: %r.'
                 % (self.__stock_code, begin_date, end_date))
    market = ts.Market()
    if begin_date == end_date:
        return
    elif not begin_date or len(begin_date) == 0:
        result = market.MktEqud(ticker=self.__stock_code, field=self.FIELDS)
    else:
        result = market.MktEqud(ticker=self.__stock_code,
                                beginDate=begin_date, endDate=end_date,
                                field=self.FIELDS)
    if result is None:
        _logger.warn('could get stock(%r) history data from tushare.'
                     % self.__stock_code)
        return
    if begin_date:
        self.__get_history_close_price(begin_date)
    for row in range(len(result)):
        record = result.iloc[row].to_dict()
        if record['isOpen'] == 1:
            # Forward-adjusted price from the cumulative adjust factor.
            fq_factor = record['accumAdjFactor']
            record['fqPrice'] = record['closePrice'] * fq_factor
            self.__hist_close_price.append(record['fqPrice'])
            record['ma5'] = self.__get_ma5_price()
            record['ma10'] = self.__get_ma10_price()
            record['ma20'] = self.__get_ma20_price()
        self.__collection.insert_and_update('date', record['tradeDate'],
                                            **record)
def get_general_meta_data(self):
    """Collect container-level metadata: creation/modification times,
    total duration and overall bitrate."""
    general = dict()
    if self.mvhd is None:
        general['creation_time'] = UTC_NONE_TIME
        general['modify_time'] = UTC_NONE_TIME
    else:
        general['creation_time'] = self.mvhd.creation_time_fmt
        general['modify_time'] = self.mvhd.modification_time_fmt
    general['duration'] = Util.time_format(self.duration())
    general['bitrate'] = str(self.bitrate())
    return general
def collect_history_data(self, stock_id):
    """Download and parse one stock's history page and return the
    2015-dated entries sorted with data_cmp (Python 2 iteritems)."""
    history_datas = {'code': stock_id}
    url = self.__get_history_url(stock_id)
    content = Util.http_get_content(url=url)
    self.__parse_data(content, history_datas)
    data = [(k, v) for (k, v) in history_datas.iteritems()
            if k.startswith('2015')]
    data.sort(data_cmp)
    return data
def get_soun_meta_data(self):
    """Collect audio-track metadata; empty dict when no sound track."""
    sound = dict()
    trk = self.get_track(SounTrackType)
    if trk is None:
        return sound
    sound['ID'] = str(trk.track_id())
    sound['duration'] = Util.time_format(self.track_duration(SounTrackType))
    sound['bitrate'] = str(self.track_bitrate(SounTrackType))
    sound['fps'] = str(self.fps())
    sound['create_time'] = trk.create_time()
    sound['modify_time'] = trk.modify_time()
    return sound
def get_vide_meta_data(self):
    """Collect video-track metadata; empty dict when no video track."""
    video = dict()
    trk = self.get_track(VideTrackType)
    if trk is None:
        return video
    video['ID'] = str(trk.track_id())
    video['duration'] = Util.time_format(self.track_duration(VideTrackType))
    video['bitrate'] = str(self.track_bitrate(VideTrackType))
    video['width'] = str(trk.width())
    video['height'] = str(trk.height())
    video['fps'] = str(self.fps())
    video['create_time'] = trk.create_time()
    video['modify_time'] = trk.modify_time()
    return video
def require_level_info(self, cmd_params):
    """Look up the experience required for a level.

    :param cmd_params: command dict with a 'level' parameter.
    :return: reply tuples: (error flag, experience).
    """
    level = int(cmd_params['level']['value'])
    level_info = Util.get_game_level_info()
    keys = level_info.keys()
    error = False
    # Default the experience so the reply is well-formed even when the
    # 'levels' table is absent (previously `experience` was unbound and
    # building reply_info raised UnboundLocalError; `exp` was unused).
    experience = conf.INFINITY_EXP
    if 'max_level' in keys and level > level_info['max_level']:
        error = True
    if 'levels' in keys:
        experience = level_info['levels'][level - 1]
    reply_info = [('error', error, TYPE.BOOLEAN),
                  ('experience', experience, TYPE.INT)]
    return reply_info
def getDTObject(self):
    """Lazily import and instantiate the data-type handler for this
    column type, caching it on the instance.

    :return: the cached data-type object.
    :raises Exception: when the handler module cannot be imported.
    """
    if not hasattr(self, 'dtObj'):
        dtObjName = Util.getDTObjName(self.colType)
        try:
            # Import the handler module and pull the class out of it.
            module = __import__(self.__DTPath + self.colType,
                                fromlist=[dtObjName])
            dtObj = getattr(module, dtObjName)
        except (ImportError, AttributeError):
            # Narrowed from a bare except so unrelated errors propagate.
            raise Exception("module {0} not implemented yet".format(dtObjName))
        try:
            self.dtObj = dtObj(self.details)
        except Exception:
            # Narrowed from a bare except. As before, self.dtObj stays
            # unset on failure, so the return below raises AttributeError.
            print("Exception in DataType initialization")
    return self.dtObj
def require_user_info(self, cmd_params): username = cmd_params['username']['value'] password = cmd_params['password']['value'] user_info = Util.get_game_user_info(username, password) error = False if user_info is None: error = True print 'require_user_info:', user_info level = 1 if error else user_info['level'] health = 0 if error else user_info['health'] experience = 0 if error else user_info['experience'] reply_info = [('error', error, TYPE.BOOLEAN)] reply_info.append(('level', level, TYPE.INT)) reply_info.append(('health', health, TYPE.INT)) reply_info.append(('experience', experience, TYPE.INT)) return reply_info
def collect_history_data(self):
    """Incrementally pull daily history from tushare (starting at the
    last stored date) and upsert every attribute/date/value triple."""
    last_record = self.__collection.find_one('date')
    begin_date = last_record['date'] if last_record else None
    _logger.info('collect stock(%s) history data, begin date: %r.'
                 % (self.__stock_code, begin_date))
    end_date = Util.get_today()
    _logger.info('collect stock(%s) history data, end date: %r.'
                 % (self.__stock_code, end_date))
    if begin_date == end_date:
        return
    elif not begin_date or len(begin_date) == 0:
        result = ts.get_hist_data(self.__stock_code)
    else:
        result = ts.get_hist_data(code=self.__stock_code,
                                  start=begin_date, end=end_date)
    if result is None:
        _logger.warn('could get stock(%r) history data from tushare.'
                     % self.__stock_code)
        return
    # result.to_dict() is {attr: {date: value}} (Python 2 iteritems).
    for attr, data in result.to_dict().iteritems():
        for date, value in data.iteritems():
            self.__collection.insert_and_update(date, attr, value)
def __str__(self):
    """Render the mvhd box fields for debugging. The 'l' length
    modifiers in the %-formats are accepted and ignored by Python."""
    logstr = "\t%s\n\tcreation_time = %s(%08ld)\n\tmodification_time = %s(%08ld)" % \
        (FullBox.__str__(self), self.creation_time_fmt, self.creation_time,
         self.modification_time_fmt, self.modification_time)
    logstr += "\n\ttimescale = %08ld(0x%016lx)\n\tduration = %s(0x%08ld)\n\trate = %s" % \
        (self.timescale, self.timescale,
         Util.time_format(self.movie_duration()), self.movie_duration(),
         self.rate_fmt)
    logstr += "\n\tvolume = %s\n\treserved = %08ld(0x%016lx)\n\treserved1 = [ " % \
        (self.volume, self.reserved, self.reserved)
    for r in self.reserved1:
        logstr += "%08ld(0x%016lx), " % (r, r)
    logstr += "]\n\tmatrix = ["
    for i in range(len(self.matrix)):
        # Break the matrix values into rows of three.
        if (0 == i) or (0 == i % 3):
            logstr += "\n\t\t"
        logstr += "%016ld(0x%016lx) " % (self.matrix[i], self.matrix[i])
    logstr += "\n\t]\n\tpre_defined = ["
    for j in range(len(self.pre_defined)):
        if (0 == j) or (0 == j % 3):
            logstr += "\n\t\t"
        logstr += "%08ld(0x%016lx) " % (self.pre_defined[j], self.pre_defined[j])
    logstr += "\n\t]\n\tnext_track_ID = %08ld(0x%016lx)\n" % (
        self.next_track_ID, self.next_track_ID)
    return logstr
        # NOTE(review): tail of a dispatch method whose `def` lies
        # outside this chunk — left untouched.
        msg = self.spawn_enemy(cmd_params)
        return msg

    def handle(self):
        # Read one JSON command from the socket, dispatch it, and send
        # back the packed reply (Python 2 print statements).
        print 'handle......'
        data = self.request.recv(4096)
        print 'DATA RECIVED:', data
        cmd = json.loads(data)
        print 'CMD RECIVED:', cmd
        reply_info = self.cmd_dispatch(cmd)
        msg = self.pack_message(reply_info)
        print 'REPLY MESSAGE:', msg
        self.request.sendall(msg)


class GameServer():
    # Thin wrapper around a blocking SocketServer.TCPServer.

    def __init__(self, host, port):
        self.server = SocketServer.TCPServer((host, port), GameServerHandler)

    def run(self):
        # Serve until killed.
        self.server.serve_forever()


if __name__ == '__main__':
    # dbconn = DBConnection(conf.DB_USER)
    # dbconn.clear()
    Util.init_game_level()
    server = GameServer(conf.SERVER_IP, conf.SERVER_PORT)
    server.run()
# Read the data from the import csv from util.util import Util from util.aggregate import Aggregate from util.plot import Plot from learning.echo_state_network import EchoStateNetwork from learning.regression import Regression from dateutil import parser granularity = 60*6 u = Util() print 'reading the file....', dataset = u.read_file('../data/LKW2015_patient_1.csv', ',') #dataset = u.read_file('../data/test.csv', ',') print 'done.' ag = Aggregate() print 'aggregating the data' print 'aggregating time...', result = ag.aggregate_dataset(dataset, granularity) print 'done.' print 'imputing nan and normalizing set...', result_nan = ag.impute_nan(result) result_norm = ag.normalize_data(result_nan) print 'done.' print 'constructing training and test set...', #input_attributes, input_training, input_test, output_attributes, output_training, output_test = ag.identify_rnn_dataset(result_norm, 'AS14.01', 0.5, ['mood'], ['circumplex.arousal', 'circumplex.valence']) input_attributes, input_training, input_test, output_attributes, output_training, output_test = ag.identify_regression_dataset(result_norm, 'AS14.01', 0.5, ['mood'], ['circumplex.arousal', 'circumplex.valence']) print ' done.'
def decode(self, file_strm):
    """Decode an mvhd (movie header) box from `file_strm`.

    Reads version-dependent 32/64-bit timestamps and duration, then
    rate, volume, reserved words, the transform matrix, pre_defined
    words and next_track_ID, finally skipping any unparsed bytes.
    """
    if file_strm is None:
        print "file_strm is None"
        return file_strm
    file_strm = FullBox.decode(self, file_strm)
    if self.version == 1:
        # Version 1: 64-bit times and duration.
        self.creation_time = file_strm.read_uint64()
        self.creation_time -= UTC_MP4_INTERVAL  # MP4 epoch -> Unix epoch
        self.offset += UInt64ByteLen
        if self.creation_time > 0:
            self.creation_time_fmt = Util.datetime_format(
                self.creation_time)
        self.modification_time = file_strm.read_uint64()
        self.modification_time -= UTC_MP4_INTERVAL
        self.offset += UInt64ByteLen
        if self.modification_time > 0:
            self.modification_time_fmt = Util.datetime_format(
                self.modification_time)
        self.timescale = file_strm.read_uint32()
        self.offset += UInt32ByteLen
        self.duration = file_strm.read_uint64()
        self.offset += UInt64ByteLen
    else:
        # Version 0: 32-bit times and duration.
        self.creation_time = file_strm.read_uint32()
        self.creation_time -= UTC_MP4_INTERVAL
        self.offset += UInt32ByteLen
        if self.creation_time > 0:
            self.creation_time_fmt = Util.datetime_format(
                self.creation_time)
        self.modification_time = file_strm.read_uint32()
        self.modification_time -= UTC_MP4_INTERVAL
        self.offset += UInt32ByteLen
        if self.modification_time > 0:
            self.modification_time_fmt = Util.datetime_format(
                self.modification_time)
        self.timescale = file_strm.read_uint32()
        self.offset += UInt32ByteLen
        self.duration = file_strm.read_uint32()
        self.offset += UInt32ByteLen
    self.rate = file_strm.read_uint32()
    self.offset += UInt32ByteLen
    # 16.16 fixed-point playback rate.
    self.rate_fmt = "%d.%d" % (self.rate >> 16, self.rate & 0x0000FFFF)
    self.volume = file_strm.read_uint16()
    self.offset += UInt16ByteLen
    # 8.8 fixed-point volume.
    self.volume_fmt = "%d.%d" % (self.volume >> 8, self.volume & 0x00FF)
    self.reserved = file_strm.read_uint16()
    self.offset += UInt16ByteLen
    for idx in range(len(self.reserved1)):
        reserved1_ = file_strm.read_uint32()
        self.offset += UInt32ByteLen
        self.reserved1[idx] = reserved1_
    for idx in range(len(self.matrix)):
        matrix_ = file_strm.read_uint32()
        self.offset += UInt32ByteLen
        self.matrix[idx] = matrix_
    for idx in range(len(self.pre_defined)):
        pre_defined_ = file_strm.read_uint32()
        self.offset += UInt32ByteLen
        self.pre_defined[idx] = pre_defined_
    self.next_track_ID = file_strm.read_uint32()
    self.offset += UInt32ByteLen
    # Skip any bytes of the box that were not explicitly parsed.
    tmp_size = self.offset - self.box_offset
    if tmp_size != self.Size():
        file_strm.seek(self.Size() - tmp_size, os.SEEK_CUR)
    return file_strm
def decode(self, file_strm):
    """Decode an mdhd (media header) box from `file_strm`.

    Reads version-dependent 32/64-bit timestamps and duration, then the
    packed ISO-639-2/T language code and pre_defined, finally skipping
    any unparsed bytes. Note the mixed ReadUInt64/read_uint32 casing is
    kept as-is (presumably two stream APIs — confirm).
    """
    if file_strm is None:
        print "file_strm is None"
        return file_strm
    file_strm = FullBox.decode(self, file_strm)
    if self.version == 1:
        # Version 1: 64-bit times and duration.
        self.creation_time = file_strm.ReadUInt64()
        self.creation_time -= UTC_MP4_INTERVAL  # MP4 epoch -> Unix epoch
        self.offset += UInt64ByteLen
        if self.creation_time > 0:
            self.creation_time_fmt = Util.datetime_format(
                self.creation_time)
        self.modification_time = file_strm.ReadUInt64()
        self.modification_time -= UTC_MP4_INTERVAL
        self.offset += UInt64ByteLen
        if self.modification_time > 0:
            self.modification_time_fmt = Util.datetime_format(
                self.modification_time)
        self.timescale = file_strm.read_uint32()
        self.offset += UInt32ByteLen
        self.duration = file_strm.ReadUInt64()
        self.offset += UInt64ByteLen
    else:
        # Version 0: 32-bit times and duration.
        self.creation_time = file_strm.read_uint32()
        self.creation_time -= UTC_MP4_INTERVAL
        self.offset += UInt32ByteLen
        if self.creation_time > 0:
            self.creation_time_fmt = Util.datetime_format(
                self.creation_time)
        self.modification_time = file_strm.read_uint32()
        self.modification_time -= UTC_MP4_INTERVAL
        self.offset += UInt32ByteLen
        if self.modification_time > 0:
            self.modification_time_fmt = Util.datetime_format(
                self.modification_time)
        self.timescale = file_strm.read_uint32()
        self.offset += UInt32ByteLen
        self.duration = file_strm.read_uint32()
        self.offset += UInt32ByteLen
    """
    ISO Language Codes
    Because the language codes specified by ISO 639-2/T are three
    characters long, they must be packed to fit into a 16-bit field.
    The packing algorithm must map each of the three characters, which
    are always lowercase, into a 5-bit integer and then concatenate
    these integers into the least significant 15 bits of a 16-bit
    integer, leaving the 16-bit integer's most significant bit set to
    zero. One algorithm for performing this packing is to treat each
    ISO character as a 16-bit integer. Subtract 0x60 from the first
    character and multiply by 2^10 (0x400), subtract 0x60 from the
    second character and multiply by 2^5 (0x20), subtract 0x60 from the
    third character, and add the three 16-bit values. Example: 'jpn' is
    0x6A, 0x70, 0x6E; subtracting 0x60 yields 0xA, 0x10, 0xE; shifting
    and adding gives the packed value 0x2A0E.
    """
    self.language_code = file_strm.read_uint16()
    self.offset += UInt16ByteLen
    # Top bit is the pad; the three 5-bit groups are the characters.
    self.pad = self.language_code >> 15 & 0x01
    for i in range(len(self.language)):
        lang_ = ((self.language_code >> ((2 - i) * 5)) & 0x1F) + 0x60
        self.language[i] = chr(lang_)
    self.pre_defined = file_strm.read_uint16()
    self.offset += UInt16ByteLen
    # Skip any bytes of the box that were not explicitly parsed.
    tmp_size = self.offset - self.box_offset
    if tmp_size != self.Size():
        file_strm.seek(self.Size() - tmp_size, os.SEEK_CUR)
    return file_strm
class Peppy(object):
    """ Main class.

    Wires together configuration, the audio server/client, UI screens,
    screensaver and event dispatchers, and (optionally) the web UI, then
    starts on the Stations screen.
    """
    # Re-entrant lock guarding reads/writes of the current screen
    lock = threading.RLock()

    def __init__(self):
        """ Initializer.

        Loads configuration, optionally starts the web server (redirecting
        stdout/stderr to the null device while the web UI is in use),
        creates the About screen, starts audio, sets up the screensaver and
        event dispatchers, and finally navigates to the Stations screen.
        """
        self.util = Util()
        self.config = self.util.config
        self.use_web = self.config[USAGE][USE_WEB]
        if self.use_web:
            # Silence console output for the lifetime of the process
            f = open(os.devnull, "w")
            sys.stdout = sys.stderr = f
            # Imported lazily so non-web setups don't need the web package
            from web.server.webserver import WebServer
            self.web_server = WebServer(self.util, self)
        about = AboutScreen(self.util)
        about.add_listener(self.go_home)
        self.screens = {"about": about}
        self.start_audio()
        self.screensaver_dispatcher = ScreensaverDispatcher(self.util)
        if self.use_web:
            self.web_server.add_screensaver_web_listener(self.screensaver_dispatcher)
        self.event_dispatcher = EventDispatcher(self.screensaver_dispatcher, self.util)
        self.current_screen = None
        self.go_stations()

    def start_audio(self):
        """ Starts audio server and client.

        Launches the configured audio server through a Proxy (when a server
        folder and command are configured), then imports the player client
        module named in the config and connects it to the server process.
        """
        folder = self.config[AUDIO][SERVER_FOLDER]
        cmd = self.config[AUDIO][SERVER_COMMAND]
        client_name = self.config[AUDIO][CLIENT_NAME]
        linux = self.config[LINUX_PLATFORM]
        if folder != None and cmd != None:
            proxy = Proxy(linux, folder, cmd, self.config[CURRENT][VOLUME])
            self.audio_server = proxy.start()
        # NOTE(review): if folder or cmd is None, self.audio_server is never
        # assigned here and set_proxy below would raise AttributeError —
        # confirm the config always provides both values.
        p = "player.client." + client_name
        m = importlib.import_module(p)
        # Client class name is the title-cased module name, e.g. "mpc" -> "Mpc"
        n = client_name.title()
        self.player = getattr(m, n)()
        self.player.set_platform(linux)
        self.player.set_proxy(self.audio_server)
        self.player.start_client()

    def set_current_screen_visible(self, flag):
        """ Set current screen visibility flag

        :param flag: visibility flag
        """
        with self.lock:
            cs = self.current_screen
            if cs and self.screens and self.screens[cs]:
                self.screens[cs].set_visible(flag)

    def set_mode(self, state):
        """ Set current mode (e.g. Radio, Language etc)

        Dispatches to the go_* handler matching the selected mode button.

        :param state: button state
        """
        if state.name == "radio": self.go_stations(state)
        elif state.name == "music": self.go_hard_drive(state)
        elif state.name == "language": self.go_language(state)
        elif state.name == "stream": self.go_stream(state)
        elif state.name == "screensaver": self.go_savers(state)
        elif state.name == "about": self.go_about(state)

    def go_home(self, state):
        """ Go to the Home Screen

        Reuses a cached screen when available, otherwise builds it.

        :param state: button state
        """
        self.set_current_screen_visible(False)
        try:
            if self.screens and self.screens["home"]:
                self.set_current_screen("home")
                return
        except KeyError:
            pass
        home_screen = HomeScreen(self.util, self.set_mode)
        self.screens["home"] = home_screen
        self.set_current_screen("home")
        if self.use_web:
            self.web_server.add_home_screen_web_listeners(home_screen)

    def go_language(self, state):
        """ Go to the Language Screen

        Reuses a cached screen when available, otherwise builds it.

        :param state: button state
        """
        self.set_current_screen_visible(False)
        try:
            if self.screens["language"]:
                self.set_current_screen("language")
                return
        except KeyError:
            pass
        language_screen = LanguageScreen(self.util, self.change_language)
        self.screens["language"] = language_screen
        self.set_current_screen("language")
        if self.use_web:
            self.web_server.add_language_screen_web_listeners(language_screen)

    def change_language(self, state):
        """ Change current language and go to the Home Screen

        When the language actually changed: clears cached labels, detaches
        the stations title listener, reloads labels, and drops every cached
        screen except About so screens rebuild with the new labels.

        :param state: button state
        """
        if state.name != self.config[CURRENT][LANGUAGE]:
            self.config[LABELS].clear()
            try:
                stations = self.screens["stations"]
                if stations:
                    self.player.remove_player_listener(stations.screen_title.set_text)
            except KeyError:
                pass
            self.config[CURRENT][LANGUAGE] = state.name
            self.config[LABELS] = self.util.get_labels()
            # Keep only the About screen; all others are rebuilt lazily
            self.screens = {k: v for k, v in self.screens.items() if k == "about"}
            self.current_screen = None
        self.go_home(state)

    def go_hard_drive(self, state):
        """ Go to the Hard Drive Screen

        Not implemented yet.

        :param state: button state
        """
        pass

    def go_stream(self, state):
        """ Go to the Stream Screen

        Not implemented yet.

        :param state: button state
        """
        pass

    def go_savers(self, state):
        """ Go to the Screensavers Screen

        Reuses a cached screen when available, otherwise builds it and
        wires the saver/delay menus to the screensaver dispatcher.

        :param state: button state
        """
        self.set_current_screen_visible(False)
        try:
            if self.screens["saver"]:
                self.set_current_screen("saver")
                return
        except KeyError:
            pass
        saver_screen = SaverScreen(self.util, self.go_home)
        saver_screen.saver_menu.add_listener(self.screensaver_dispatcher.change_saver_type)
        saver_screen.delay_menu.add_listener(self.screensaver_dispatcher.change_saver_delay)
        self.screens["saver"] = saver_screen
        self.set_current_screen("saver")
        if self.use_web:
            self.web_server.add_saver_screen_web_listeners(saver_screen)

    def go_about(self, state):
        """ Go to the About Screen

        The About screen is always created in __init__, so it is only shown.

        :param state: button state
        """
        self.set_current_screen_visible(False)
        self.set_current_screen("about")
        if self.use_web:
            self.web_server.add_about_screen_web_listeners(self.screens["about"])

    def go_stations(self, state=None):
        """ Go to the Stations Screen

        Reuses a cached screen when available; otherwise builds it, wires
        all button listeners, restores volume and the current station, and
        connects the screensaver dispatcher and player title listener.

        :param state: button state
        """
        self.set_current_screen_visible(False)
        try:
            if self.screens["stations"]:
                self.set_current_screen("stations")
                return
        except KeyError:
            pass
        listeners = {}
        listeners["go home"] = self.go_home
        listeners["go genres"] = self.go_genres
        listeners["shutdown"] = self.shutdown
        listeners["go config"] = self.go_savers
        # NOTE(review): this "play" entry is overwritten by the later
        # assignment below — one of the two is dead; confirm which handler
        # StationScreen actually needs for the "play" key.
        listeners["play"] = self.play_pause
        listeners["play-pause"] = self.play_pause
        listeners["set volume"] = self.player.set_volume
        listeners["set config volume"] = self.set_config_volume
        listeners["set screensaver volume"] = self.screensaver_dispatcher.change_volume
        listeners["mute"] = self.player.mute
        listeners["play"] = self.player.play
        station_screen = StationScreen(listeners, self.util)
        self.screens["stations"] = station_screen
        v = self.player.get_volume()
        if not v:
            v = "0"
        station_screen.volume.set_position(int(v))
        station_screen.volume.update_position()
        self.set_current_screen("stations")
        current_station = self.config[CURRENT][STATION]
        station_screen.station_menu.set_station(current_station)
        self.screensaver_dispatcher.change_image(station_screen.station_menu.station_button.state)
        station_screen.station_menu.add_listener(self.screensaver_dispatcher.change_image)
        self.player.add_player_listener(station_screen.screen_title.set_text)
        if self.use_web:
            self.web_server.add_station_screen_web_listeners(station_screen)

    def set_config_volume(self, volume):
        """ Listener for volume change events

        Persists the new volume into the in-memory config as a string.

        :param volume: new volume value
        """
        self.config[CURRENT][VOLUME] = str(int(volume))

    def go_genres(self, state):
        """ Go to the Genre Screen

        Reuses a cached screen when available, otherwise builds it.

        :param state: button state
        """
        self.set_current_screen_visible(False)
        try:
            if self.screens["genres"]:
                self.set_current_screen("genres")
                return
        except KeyError:
            pass
        genre_screen = GenreScreen(self.util, self.go_stations)
        self.screens["genres"] = genre_screen
        self.set_current_screen("genres")
        if self.use_web:
            self.web_server.add_genre_screen_web_listeners(genre_screen)

    def play_pause(self, state=None):
        """ Handle Play/Pause

        :param state: button state
        """
        self.player.play_pause()

    def set_current_screen(self, name):
        """ Set current screen defined by its name

        Makes the named screen visible and current, redraws it, and points
        the event dispatcher at it.  Guarded by the class lock.

        :param name: screen name
        """
        with self.lock:
            self.current_screen = name
            cs = self.screens[self.current_screen]
            cs.set_visible(True)
            cs.set_current()
            cs.clean_draw_update()
            self.event_dispatcher.set_current_screen(cs)

    def shutdown(self, event):
        """ System shutdown handler

        Saves config, stops the player and web server, shuts down the
        stations title, quits pygame, then powers off (Linux) or kills the
        audio server process tree (Windows) and exits the process.

        :param event: the event
        """
        self.util.config_class.save_config()
        self.player.shutdown()
        if self.use_web:
            try:
                self.web_server.shutdown()
            except:
                pass
        # NOTE(review): raises KeyError if the stations screen was never
        # created — confirm shutdown is only reachable from that screen.
        stations = self.screens["stations"]
        stations.screen_title.shutdown()
        pygame.quit()
        if self.config[LINUX_PLATFORM]:
            subprocess.call("sudo poweroff", shell=True)
        else:
            Popen("taskkill /F /T /PID {pid}".format(pid=self.audio_server.pid))
        os._exit(0)
def start(self): Util.set_token() self.__mq_server = MqServer() self.__mq_server.set_callback(self.__process) self.__mq_server.start()
def __str__(self): meta_data = self.root.get_meta_data() logstr = "file = %s\nMetaData:\n%s\n%s" % \ (self.filename, Util.dump_dict(meta_data), self.root) return logstr