def scheduler_executor(feature, method, context):
    """Entry point executed by every APScheduler job.

    APScheduler serializes a job's target and context into its job store, and
    a bound instance method would drag its whole (usually non-serializable)
    instance graph along.  This plain module-level function IS serializable,
    so jobs only store a feature key plus a method name and this redirector
    resolves and invokes the real target at run time.

    :type feature: str|unicode
    :param feature: the instance key for hackathon_factory.

    :type method: str|unicode
    :param method: the name of method related to instance

    :type context: Context, see definition in hackathon/__init__.py
    :param context: the expected execution context of target method
    """
    log.debug("prepare to execute '%s.%s' with context: %s" % (feature, method, context))
    instance = RequiredFeature(feature)
    target = getattr(instance, method)
    # A bound method expecting only 'self' reports one arg; such targets
    # take no execution context.
    if len(inspect.getargspec(target).args) < 2:
        target()
    else:
        target(context)
def create_or_update_register(self, **args):
    """Create a new register or update the existing one for this hackathon.

    Looks up a Register by email within the current hackathon
    (g.hackathon_id) and inserts a new row or updates the matched one.

    :param args: register fields; expects 'name', 'email', 'description'
                 and, on update, 'enabled' (0: disabled, 1: enabled)
    :returns: None on success, or ({"error": ...}, 500) on failure
    """
    try:
        register = db_adapter.find_first_object(
            Register,
            Register.email == args['email'],
            Register.hackathon_id == g.hackathon_id)
        if register is None:
            # create a register
            log.debug("create a new register")
            db_adapter.add_object_kwargs(
                Register,
                register_name=args['name'],
                email=args['email'],
                create_time=datetime.utcnow(),
                description=args['description'],
                enabled=1,  # 0: disabled 1:enabled
                jstrom_api='',
                jstrom_mgmt_portal='',
                hackathon_id=g.hackathon_id)
        else:
            # update an already existing register
            log.debug("update a new register")
            db_adapter.update_object(
                register,
                register_name=args['name'],
                email=args['email'],
                create_time=datetime.utcnow(),
                description=args['description'],
                enabled=args['enabled'],  # 0: disabled 1:enabled
                jstrom_api='',  # fixed: was misspelled 'strom_api', a kwarg the model has no column for
                jstrom_mgmt_portal='',
                hackathon_id=g.hackathon_id)
    except Exception:
        log.error("create or update register faild")
        return {"error": "INTERNAL SERVER ERROR"}, 500
def __init__(self, app):
    """Initialize APScheduler

    :type app: Flask
    :param app: the Flask app
    """
    self.app = app
    self.__apscheduler = None

    # Skip instantiation in the werkzeug reloader parent process while the
    # Flask app runs in DEBUG mode, otherwise APScheduler is created twice.
    if app.debug and os.environ.get("WERKZEUG_RUN_MAIN") != "true":
        return

    self.__apscheduler = BackgroundScheduler(timezone=utc)

    # pick the configured job store backend (defaults to in-memory)
    job_store_type = safe_get_config("scheduler.job_store", "memory")
    if job_store_type == "mysql":
        log.debug("add aps_cheduler job store based on mysql")
        self.__apscheduler.add_jobstore('sqlalchemy',
                                        alias=self.jobstore,
                                        url=get_config("scheduler.job_store_url"))
    elif job_store_type == "mongodb":
        log.debug("add aps_cheduler job store based on mongodb")
        self.__apscheduler.add_jobstore('mongodb',
                                        alias=self.jobstore,
                                        database=safe_get_config("scheduler.database", "apscheduler"),
                                        collection=safe_get_config("scheduler.collection", "jobs"),
                                        host=safe_get_config("scheduler.host", "localhost"),
                                        port=safe_get_config("scheduler.port", 27017))

    # notify scheduler_listener on executed / error / added job events
    self.__apscheduler.add_listener(scheduler_listener,
                                    EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_ADDED)
    log.info("APScheduler loaded")
    self.__apscheduler.start()
def __init_schedule_jobs():
    """Init scheduled jobs in fact"""
    log.debug("init scheduled jobs......")
    util = RequiredFeature("util")
    sche = RequiredFeature("scheduler")
    # background jobs are not scheduled on a local/dev deployment
    if util.is_local():
        return

    hackathon_manager = RequiredFeature("hackathon_manager")

    # schedule job to check recycle operation
    sche.add_interval(feature="expr_manager",
                      method="scheduler_recycle_expr",
                      id="scheduler_recycle_expr",
                      next_run_time=util.get_now() + timedelta(seconds=10),
                      minutes=10)

    # schedule job to pre-allocate environment
    hackathon_manager.schedule_pre_allocate_expr_job()

    # schedule job to pre-create a docker host server VM
    # host_server_manager.schedule_pre_allocate_host_server_job()

    # init the overtime-sessions detection to update users' online status
    sche.add_interval(feature="user_manager",
                      method="check_user_online_status",
                      id="check_user_online_status",
                      minutes=10)
def __init__(self, app):
    """Initialize APScheduler

    :type app: Flask
    :param app: the Flask app
    """
    self.app = app
    self.__apscheduler = None

    # NOT instantiate while in flask DEBUG mode or in the main thread
    # It's to avoid APScheduler being instantiated twice
    in_reloader_child = os.environ.get('WERKZEUG_RUN_MAIN') == 'true'
    if not app.debug or in_reloader_child:
        scheduler = BackgroundScheduler(timezone=utc)
        self.__apscheduler = scheduler

        # add MySQL job store
        if safe_get_config("scheduler.job_store", "memory") == "mysql":
            log.debug("add aps_cheduler job store based on mysql")
            scheduler.add_jobstore('sqlalchemy',
                                   alias=self.jobstore,
                                   url=get_config("scheduler.job_store_url"))

        # add event listener
        scheduler.add_listener(scheduler_listener,
                               EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
        log.info("APScheduler loaded")
        scheduler.start()
def __init__(self, app):
    """Initialize APScheduler

    :type app: Flask
    :param app: the Flask app
    """
    self.app = app
    self.__apscheduler = None

    # Avoid a second instantiation in the werkzeug reloader parent process
    # when the Flask app runs in DEBUG mode.
    if app.debug and os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
        return

    self.__apscheduler = BackgroundScheduler(timezone=utc)

    # add MySQL job store
    if safe_get_config("scheduler.job_store", "memory") == "mysql":
        log.debug("add aps_cheduler job store based on mysql")
        self.__apscheduler.add_jobstore('sqlalchemy',
                                        alias=self.jobstore,
                                        url=get_config("scheduler.job_store_url"))

    # add event listener
    self.__apscheduler.add_listener(scheduler_listener,
                                    EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
    log.info("APScheduler loaded")
    self.__apscheduler.start()
def create_or_update_register(self, **args):
    """Create a new register or update the existing one for this hackathon.

    Finds a Register by email within the current hackathon (g.hackathon_id);
    inserts a new row when none exists, otherwise updates the matched row.

    :param args: register fields; expects 'name', 'email', 'description'
                 and, on update, 'enabled' (0: disabled, 1: enabled)
    :returns: None on success, or ({"error": ...}, 500) on failure
    """
    try:
        register = db_adapter.find_first_object(
            Register,
            Register.email == args['email'],
            Register.hackathon_id == g.hackathon_id)
        if register is None:
            # create a register
            log.debug("create a new register")
            db_adapter.add_object_kwargs(
                Register,
                register_name=args['name'],
                email=args['email'],
                create_time=datetime.utcnow(),
                description=args['description'],
                enabled=1,  # 0: disabled 1:enabled
                jstrom_api='',
                jstrom_mgmt_portal='',
                hackathon_id=g.hackathon_id)
        else:
            # update an already existing register
            log.debug("update a new register")
            db_adapter.update_object(
                register,
                register_name=args['name'],
                email=args['email'],
                create_time=datetime.utcnow(),
                description=args['description'],
                enabled=args['enabled'],  # 0: disabled 1:enabled
                jstrom_api='',  # fixed: was misspelled 'strom_api' (create branch uses 'jstrom_api')
                jstrom_mgmt_portal='',
                hackathon_id=g.hackathon_id)
    except Exception:
        log.error("create or update register faild")
        return {"error": "INTERNAL SERVER ERROR"}, 500
def delete(self, name, docker_host):
    """Force-remove a container by name on the given docker host.

    :param name: name of the container to remove
    :param docker_host: the docker host server holding the container
    :raises: re-raises any error from the remote docker API call
    """
    try:
        containers_url = self.get_vm_url(docker_host) + "/containers/%s?force=1" % name
        req = requests.delete(containers_url)
        log.debug(req.content)
    except Exception:  # narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt propagate
        # fixed copy-paste: the message said 'stop' for a delete operation
        log.error("container %s fail to delete" % name)
        raise
def start(self, docker_host, container_id, start_config=None):
    """Start a created container via the docker remote API.

    :param docker_host: the docker host server holding the container
    :param container_id: id (or name) of the container to start
    :param start_config: optional dict of start options posted as JSON;
                         defaults to an empty config
    :raises: re-raises any error from the remote docker API call
    """
    # avoid the shared mutable default-argument pitfall ('start_config={}')
    if start_config is None:
        start_config = {}
    try:
        url = self.get_vm_url(docker_host) + "/containers/%s/start" % container_id
        req = requests.post(url, data=json.dumps(start_config), headers=default_http_headers)
        log.debug(req.content)
    except Exception:  # narrowed from a bare 'except:'
        log.error("container %s fail to start" % container_id)
        raise
def __response_with_code(code, message, friendly_message=""):
    """Build the standard error payload returned to API clients.

    :param code: numeric error code
    :param message: technical error message
    :param friendly_message: optional human-readable message for end users
    :returns: dict shaped as {"error": {...}}
    """
    log.debug("response with code: %d and message: %s" % (code, message))
    error = {
        "code": code,
        "message": message,
        "friendly_message": friendly_message,
    }
    return {"error": error}
def containers_info(self, docker_host):
    """List the containers on the given docker host.

    :param docker_host: the docker host server to query
    :returns: converted JSON list from the docker '/containers/json' API
    :raises: re-raises any error from the remote docker API call
    """
    try:
        containers_url = self.get_vm_url(docker_host) + "/containers/json"
        req = requests.get(containers_url)
        log.debug(req.content)
        return convert(json.loads(req.content))
    except Exception:  # narrowed from a bare 'except:'
        log.error("cannot get containers' info")
        raise
def ping(self, docker_host):
    """Check whether the docker daemon on the host is reachable.

    :param docker_host: the docker host server to check
    :returns: True when the '/_ping' endpoint answers 200/'OK', else False
    """
    try:
        ping_url = self.get_vm_url(docker_host) + '/_ping'
        resp = requests.get(ping_url)
        log.debug(resp.content)
        ok = resp.status_code == 200 and resp.content == 'OK'
        return ok
    except Exception as e:
        # an unreachable daemon is reported, not raised
        log.error(e)
        return False
def stop(self, name, docker_host):
    """Stop a running container by name, if it exists on the host.

    :param name: name of the container to stop
    :param docker_host: the docker host server holding the container
    :raises: re-raises any error from the remote docker API call
    """
    # nothing to do when the container is already gone
    if self.get_container(name, docker_host) is None:
        return
    try:
        containers_url = self.get_vm_url(docker_host) + "/containers/%s/stop" % name
        req = requests.post(containers_url)
        log.debug(req.content)
    except Exception:  # narrowed from a bare 'except:'
        log.error("container %s fail to stop" % name)
        raise
def call(mdl_cls_func, cls_args, func_args):
    # todo refactoring the call method to use standard hackathon_scheduler
    """Dynamically import a module, instantiate a class and invoke a method.

    :param mdl_cls_func: (module_name, class_name, function_name) triple
    :param cls_args: positional args for the class constructor
    :param func_args: positional args for the method call
    """
    mdl_name, cls_name, func_name = mdl_cls_func
    log.debug('call: mdl_name [%s], cls_name [%s], func_name [%s]' % (mdl_name, cls_name, func_name))
    module = importlib.import_module(mdl_name)
    instance = getattr(module, cls_name)(*cls_args)
    getattr(instance, func_name)(*func_args)
def get_available_host_port(self, docker_host, private_port):
    """Pick a free public port on the host to map the given private port.

    :param docker_host: the docker host server to allocate the port on
    :param private_port: the container-side port to expose
    :returns: an available public port number on the host
    """
    log.debug("try to assign docker port %d on server %r" % (private_port, docker_host))
    containers = self.containers_info(docker_host)
    host_ports = flatten([c['Ports'] for c in containers])
    # -1 marks a binding without a public port so it can never collide
    host_public_ports = [p["PublicPort"] if "PublicPort" in p else -1 for p in host_ports]
    return self.__get_available_host_port(host_public_ports, private_port)
def create(self, docker_host, container_config, container_name):
    """Create (but not start) a container via the docker remote API.

    :param docker_host: the docker host server to create the container on
    :param container_config: dict posted as the container creation config
    :param container_name: name to assign to the new container
    :returns: the parsed JSON response describing the created container
    :raises AssertionError: when the API returns a null container
    :raises: re-raises any error from the remote docker API call
    """
    containers_url = self.get_vm_url(docker_host) + "/containers/create?name=%s" % container_name
    try:
        resp = requests.post(containers_url,
                             data=json.dumps(container_config),
                             headers=default_http_headers)
        log.debug(resp.content)
        container = json.loads(resp.content)
    except Exception as err:
        log.error(err)
        raise
    if container is not None:
        return container
    raise AssertionError("container is none")
def check_admin_hackathon_authority(self):
    """Validate that the request's hackathon id header grants admin access.

    :returns: True when the header holds a numeric hackathon id the current
              admin may manage, False otherwise
    """
    if HTTP_HEADER.HACKATHON_ID not in request.headers:
        # fixed typo in the log message ('HEARDER')
        log.debug("HEADER lost hackathon_id")
        return False
    try:
        # read the same header key that was tested above; was a hard-coded
        # 'hackathon_id' literal that could diverge from HTTP_HEADER
        g.hackathon_id = long(request.headers[HTTP_HEADER.HACKATHON_ID])
        return self.validate_admin_hackathon_request(g.hackathon_id)
    except Exception:
        log.debug("hackathon_id is not a num")
        return False
def __get_available_host_port(self, port_bindings, port):
    """Find a free host port, starting at the given port + 10000.

    :param port_bindings: collection of public ports already in use
    :param port: the private (container-side) port being mapped
    :returns: the first unused port number >= port + 10000 and < 65535
    :raises Exception: when no port below 65535 is available
    """
    host_port = port + 10000
    # The range check used to run only inside the collision loop, so a
    # starting port already >= 65535 (or one that never collided) could be
    # returned unvalidated; check every candidate instead.
    while host_port < 65535:
        if host_port not in port_bindings:
            log.debug("host_port is %d " % host_port)
            return host_port
        host_port += 1
    log.error("port used up on this host server")
    raise Exception("no port available")
def scheduler_listener(event):
    """Custom listener for apscheduler

    Will write the details to log file in case apscheduler job succeeds or error occurs

    :param event: the event executed and related to the apscheduler job
    """
    # removed leftover debug print() statements; the log entries carry the details
    if event.code == EVENT_JOB_ERROR:
        log.warn("The schedule job crashed because of %s" % repr(event.exception))
    else:
        log.debug("The schedule job %s executed and return value is '%s'"
                  % (event.job_id, event.retval))
def remove_job(self, job_id):
    """Remove job from APScheduler job store

    :type job_id: str | unicode
    :param job_id: the id of job
    """
    # no-op when the scheduler was never instantiated (e.g. reloader parent)
    if not self.__apscheduler:
        return
    try:
        self.__apscheduler.remove_job(job_id, self.jobstore)
    except JobLookupError:
        log.debug("remove job failed because job %s not found" % job_id)
    except Exception as e:
        log.error(e)
def login(self, args):
    """Log a user in via QQ OAuth.

    Resolves the openid/client_id from the access token, fetches the QQ
    profile and signs the user in through user_manager.

    :param args: dict containing the QQ 'access_token'
    :returns: the user detail dict including a login token
    """
    access_token = args['access_token']
    # get openID.
    openid_resp = get_remote(get_config("login.qq.openid_url") + access_token)
    log.debug("get access_token from qq:" + access_token)
    # the endpoint wraps JSON in a 'callback( ... );' envelope — strip it
    info = json.loads(openid_resp[10:-4])

    openid = info['openid']
    # fixed swapped log labels: this value is the openid ...
    log.debug("get openid from qq:" + openid)
    client_id = info['client_id']
    # ... and this one is the client_id
    log.debug("get client_id from qq:" + client_id)

    # get user info
    url = get_config("login.qq.user_info_url") % (access_token, client_id, openid)
    user_info_resp = get_remote(url)
    log.debug("get user info from qq:" + user_info_resp)
    user_info = convert(json.loads(user_info_resp))

    email_info = [
        {'name': user_info["nickname"],
         'email': None,
         'id': openid,  # fixed: was the builtin function 'id', not the user's id
         'verified': 1,
         'primary': 1,
         'nickname': user_info["nickname"],
         'avatar_url': user_info["figureurl"]}]

    user_with_token = user_manager.db_login(openid,
                                            name=user_info["nickname"],
                                            nickname=user_info["nickname"],
                                            access_token=access_token,
                                            email_info=email_info,
                                            avatar_url=user_info["figureurl"])

    # login flask
    user = user_with_token["user"]
    log.info("QQ user login successfully:" + repr(user))

    detail = user_manager.get_user_detail_info(user)
    detail["token"] = user_with_token["token"].token
    return detail
def login(self, args):
    """Log a user in via GitHub OAuth.

    Fetches the GitHub profile and email list with the provided access
    token, then signs the user in through user_manager.

    :param args: dict with 'access_token' and optional 'hackathon_name'
    :returns: the user detail dict including a login token
    """
    access_token = args.get('access_token')

    # get user info — see the GitHub '/user' API for the response shape
    # (login, id, name, avatar_url, ...)
    user_info_resp = get_remote(get_config('login.github.user_info_url') + access_token)
    log.debug("get user info from github:" + user_info_resp + '\n')
    user_info = json.loads(user_info_resp)

    name = user_info["login"]
    nickname = user_info["name"] if "name" in user_info else name
    openid = str(user_info["id"])
    avatar = user_info["avatar_url"]

    # get the user's emails; the list holds every address GitHub knows,
    # one of which is flagged as the primary email
    email_info_resp = get_remote(get_config('login.github.emails_info_url') + access_token)
    log.debug("get email from github:" + email_info_resp + '\n')
    email_info = json.loads(email_info_resp)

    user_with_token = user_manager.db_login(openid,
                                            name=name,
                                            nickname=nickname,
                                            access_token=access_token,
                                            email_info=email_info,
                                            avatar_url=avatar)

    # login flask
    user = user_with_token["user"]
    log.info("github user login successfully:" + repr(user))

    hackathon_name = args.get('hackathon_name')
    detail = user_manager.get_user_detail_info(user, hackathon_name=hackathon_name)
    detail["token"] = user_with_token["token"].token
    return detail
def init_app():
    """Initialize the application.

    Works including :
    - setting up hackathon factory,
    - register restful API routes
    - initialize scheduled jobs
    """
    init_components()

    from .views import init_routes
    init_routes()

    init_schedule_jobs()

    health_check_guacamole = RequiredFeature("health_check_guacamole")
    util = RequiredFeature("util")
    # only report guacamole health on a local/dev deployment
    if util.is_local():
        log.debug("guacamole status: %s" % health_check_guacamole.report_health())
def login(self, args):
    """Log a user in via Weibo OAuth.

    Fetches the Weibo profile (and, best-effort, the account email) with
    the provided access token, then signs the user in via user_manager.

    :param args: dict with 'access_token', 'uid' and optional 'hackathon_name'
    :returns: the user detail dict including a login token
    """
    access_token = args.get('access_token')
    uid = args.get('uid')

    # get user info, e.g.
    # https://api.weibo.com/2/users/show.json?access_token=...&uid=...
    # (response includes id, name, screen_name, avatar_hd, ...)
    user_info_resp = get_remote(get_config('login.weibo.user_info_url') + access_token + "&uid=" + uid)
    user_info = json.loads(user_info_resp)
    log.debug("get user base info from Weibo:" + user_info_resp)

    openid = user_info['id']
    name = user_info['name']
    nickname = user_info['screen_name']
    avatar_url = user_info['avatar_hd']

    # get user email; the email scope may be unavailable, so a failure only
    # degrades to an empty email_info instead of aborting the login
    email_info = []
    try:
        email_info_resp = get_remote(get_config('login.weibo.email_info_url') + access_token)
        # fixed copy-paste: the message said 'github' inside the weibo login
        log.debug("get email from weibo:" + email_info_resp)
        email_info_resp_json = json.loads(email_info_resp)
        email = email_info_resp_json['email']
        email_info = [
            {'name': name, 'email': email, 'id': openid, 'verified': 1, 'primary': 1,
             'nickname': nickname, 'avatar_url': avatar_url}]
    except Exception as e:
        log.debug("fail to get user email from weibo")
        log.error(e)

    user_with_token = user_manager.db_login(openid,
                                            name=name,
                                            nickname=nickname,
                                            access_token=access_token,
                                            email_info=email_info,
                                            avatar_url=avatar_url)

    user = user_with_token["user"]
    log.info("weibo user login successfully:" + repr(user))

    hackathon_name = args.get('hackathon_name')
    detail = user_manager.get_user_detail_info(user, hackathon_name=hackathon_name)
    detail["token"] = user_with_token["token"].token
    log.debug("weibo user login successfully: %r" % detail)
    return detail
def login(self, args):
    """Log a user in via GitCafe OAuth.

    Fetches the GitCafe profile with the bearer token and signs the user
    in through user_manager.

    :param args: dict with 'access_token' and optional 'hackathon_name'
    :returns: the user detail dict including a login token
    """
    token = args.get('access_token')
    value = "Bearer " + token

    # fetch the user profile with the bearer token
    opener = urllib2.build_opener(urllib2.HTTPHandler)
    profile_request = urllib2.Request(get_config("login.gitcafe.user_info_url"))
    profile_request.add_header("Authorization", value)
    user_info = opener.open(profile_request).read()
    log.debug(user_info)
    info = json.loads(user_info)

    name = info['username']
    email = info['email']
    user_id = info['id']  # renamed local: previously shadowed the builtin 'id'
    nickname = info['fullname']
    if nickname is None:
        nickname = name

    # force https on the avatar URL
    avatar_url = info['avatar_url']
    if not avatar_url.startswith('https'):
        avatar_url = "https" + avatar_url[4:]

    email_info = [
        {'name': name, 'email': email, 'id': user_id, 'verified': 1, 'primary': 1,
         'nickname': nickname, 'avatar_url': avatar_url}]

    user_with_token = user_manager.db_login(user_id,
                                            name=name,
                                            nickname=nickname,
                                            access_token=token,
                                            email_info=email_info,
                                            avatar_url=avatar_url)

    user = user_with_token["user"]
    log.info("gitcafe user login successfully:" + repr(user))

    hackathon_name = args.get('hackathon_name')
    detail = user_manager.get_user_detail_info(user, hackathon_name=hackathon_name)
    detail["token"] = user_with_token["token"].token
    log.debug("gitcafe user login successfully: %r" % detail)
    return detail
def __init__(self, app):
    """Initialize APScheduler

    :type app: Flask
    :param app: the Flask app
    """
    self.app = app
    self.__apscheduler = None

    # NOT instantiate while in flask DEBUG mode or in the main thread
    # It's to avoid APScheduler being instantiated twice
    in_reloader_child = os.environ.get("WERKZEUG_RUN_MAIN") == "true"
    if not app.debug or in_reloader_child:
        scheduler = BackgroundScheduler(timezone=utc)
        self.__apscheduler = scheduler

        # configure the persistent job store backend, if any
        job_store_type = safe_get_config("scheduler.job_store", "memory")
        if job_store_type == "mysql":
            log.debug("add aps_cheduler job store based on mysql")
            scheduler.add_jobstore('sqlalchemy',
                                   alias=self.jobstore,
                                   url=get_config("scheduler.job_store_url"))
        elif job_store_type == "mongodb":
            log.debug("add aps_cheduler job store based on mongodb")
            scheduler.add_jobstore('mongodb',
                                   alias=self.jobstore,
                                   database=safe_get_config("scheduler.database", "apscheduler"),
                                   collection=safe_get_config("scheduler.collection", "jobs"),
                                   host=safe_get_config("scheduler.host", "localhost"),
                                   port=safe_get_config("scheduler.port", 27017))

        # add event listener
        scheduler.add_listener(scheduler_listener,
                               EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_ADDED)
        log.info("APScheduler loaded")
        scheduler.start()
# schema for api "/" and "/health"
schemas["CurrentTimeResource"] = {
    "get": {
        "output": {
            "title": "response to get current time of hackathon server",
            "description": "get current time of hackathon server so that client can show right count down time",
            "type": "object",
            "properties": {
                "currenttime": {
                    "type": "number",
                    "description": "time in miliseconds"
                }
            }
        }
    }
}

# merge every JSON schema file under ./schema into the registry; a
# malformed file is logged and skipped rather than aborting startup
schema_dir = join(dirname(realpath(__file__)), "schema")
for entry in listdir(schema_dir):
    js_file = join(schema_dir, entry)
    if not isfile(js_file):
        continue
    try:
        with open(js_file) as fs:
            schemas.update(json.load(fs))
    except Exception as e:
        log.debug("[server api_schema] %s" % str(e))
def send_voice_verify(self, receiver, content):
    """Stub: the voice-verify service is turned off, so nothing is sent.

    :param receiver: intended recipient (ignored)
    :param content: message content (ignored)
    :returns: False, always — no call is placed
    """
    log.debug("voice verify is disabled.")
    return False
def send_sms(self, receiver, template_id, content):
    """Stub: the SMS service is turned off, so nothing is sent.

    :param receiver: intended recipient (ignored)
    :param template_id: SMS template identifier (ignored)
    :param content: message content (ignored)
    :returns: False, always — no SMS is sent
    """
    log.debug("SMS service is disabled.")
    return False