def post(self, request):
    """Handle a join-group request by messaging the group's admins.

    Looks up the administrators (role 2 or 3) of the requested group and
    sends each a system message describing the join request.  If the group
    has no admins, site staff are notified instead.
    """
    group_id = request.POST.get("group_id", None)
    if not group_id:
        return JsonResponse({"status": "fail", "reason": "bad input"})

    student_id = request.POST.get("student_id", 0)
    is_teacher = parse_bool(request.POST.get("is_teacher", False))
    message = request.POST.get("message", "")

    # Prefer the group's own administrators as recipients.
    admin_users = UserGroup.objects.filter(
        Q(role=2) | Q(role=3), group_id=group_id).all()
    if admin_users:
        recipients = [admin.user for admin in admin_users]
    else:
        # No admin user found: fall back to notifying site staff.
        recipients = UserProfile.objects.filter(is_staff=True)

    # The payload is identical for every recipient, so build it once.
    payload = json.dumps({
        "group_id": group_id,
        "user_id": request.user.id,
        "is_teacher": is_teacher,
        "student_id": student_id,
        "extra_message": message
    })
    for recipient in recipients:
        note = UserMessage()
        note.to_user = recipient.id
        note.from_user = None  # system message
        note.title = u"用户{0}请求加入班级".format(request.user)
        note.message_type = UserMessage.MSG_TYPE_JOIN_GROUP
        note.message = payload
        note.save()
    return JsonResponse({"status": "success"})
def star(cid, starred='t'):
    "star or unstar a crowd"
    # FIXME: this should verify that it was a POST
    crowd = get_or_404(Crowd, cid)
    wanted = parse_bool(starred)
    # Only touch the database when the flag actually changes.
    if crowd.star != wanted:
        crowd.star = wanted
        crowd.save()
    return crowd.simple()
def star(cid, starred='t'):
    """Set or clear a crowd's star flag and return its simple form."""
    # FIXME: this should verify that it was a POST
    crowd = get_or_404(Crowd, cid)
    flag = parse_bool(starred)
    if flag == crowd.star:
        # Nothing to change; skip the save.
        return crowd.simple()
    crowd.star = flag
    crowd.save()
    return crowd.simple()
def crowd(q="", limit='100', sort=None, simple='t', **kwargs):
    """Returns a list of crowds.

    Here are some useful parameters:
        simple      if true, remove details about merges and joins from the crowd
        q           text to search for (not implemented)
        sort        how to sort the results (not implemented)
        limit       maximum number of crowds to return
        {min,max}_start  when the crowd was formed
        {min,max}_end    when the crowd ended
        {min,max}_size   the number of users involved in the crowd
        {min,max}_clco   the clustering coefficient of the crowd (from 0 to 1.0)

    Here are some example calls:
        /api/1/search/crowd
            returns 100 crowds
        /api/1/search/crowd?min_start=1294185600&limit=none
            returns all the crowds that started after Jan 5, 2011
        /api/1/search/crowd?max_size=5&min_end=1295136000&q=love&limit=none
            returns all the crowds where a tweet contains the word love,
            there are at least five twitter users in the crowd, and the
            crowd ends after Jan 16, 2011.
        /api/1/search/crowd?simple=false&limit=10
            returns 10 crowds in the full format
    """
    # Combine the range filters for every supported numeric/date field.
    query = (range_from_params(Crowd, 'clco', float, kwargs)
             & range_from_params(Crowd, 'start', parse_date, kwargs)
             & range_from_params(Crowd, 'end', parse_date, kwargs)
             & range_from_params(Crowd, 'size', int, kwargs))
    if q:
        matching_ids = intake.search.CrowdSearcher().getCrowds(q)
        query = query & Crowd._id.is_in(matching_ids)
    # "none" (any case) means unlimited results.
    max_count = None if limit.lower() == "none" else int(limit)
    #sl = [(Crowd.star,DESCENDING), (Crowd._id,ASCENDING)]
    found = Crowd.find(query, limit=max_count)  #sort_list=sl)
    shape = Crowd.simple if parse_bool(simple) else Crowd.to_d
    return [shape(c) for c in found]
def crowd(q="", limit='100', sort=None, simple='t', **kwargs):
    """Returns a list of crowds.

    Here are some useful parameters:
        simple      if true, remove details about merges and joins from the crowd
        q           text to search for (not implemented)
        sort        how to sort the results (not implemented)
        limit       maximum number of crowds to return
        {min,max}_start  when the crowd was formed
        {min,max}_end    when the crowd ended
        {min,max}_size   the number of users involved in the crowd
        {min,max}_clco   the clustering coefficient of the crowd (from 0 to 1.0)

    Here are some example calls:
        /api/1/search/crowd
            returns 100 crowds
        /api/1/search/crowd?min_start=1294185600&limit=none
            returns all the crowds that started after Jan 5, 2011
        /api/1/search/crowd?max_size=5&min_end=1295136000&q=love&limit=none
            returns all the crowds where a tweet contains the word love,
            there are at least five twitter users in the crowd, and the
            crowd ends after Jan 16, 2011.
        /api/1/search/crowd?simple=false&limit=10
            returns 10 crowds in the full format
    """
    # Build the filter one range at a time.
    query = range_from_params(Crowd, 'clco', float, kwargs)
    query = query & range_from_params(Crowd, 'start', parse_date, kwargs)
    query = query & range_from_params(Crowd, 'end', parse_date, kwargs)
    query = query & range_from_params(Crowd, 'size', int, kwargs)
    if q:
        cids = intake.search.CrowdSearcher().getCrowds(q)
        query = query & Crowd._id.is_in(cids)
    # "none" (any case) disables the result cap.
    if limit.lower() == "none":
        limit = None
    else:
        limit = int(limit)
    #sl = [(Crowd.star,DESCENDING), (Crowd._id,ASCENDING)]
    crowds = Crowd.find(query, limit=limit)  #sort_list=sl)
    if parse_bool(simple):
        transform = Crowd.simple
    else:
        transform = Crowd.to_d
    return [transform(c) for c in crowds]
def upload_batch(result_dir=None, sort=True, upload_code=None):
    """Upload every '*__summary.json' result found under ``result_dir``.

    Args:
        result_dir: directory to scan; defaults to ``dconf.RESULT_DIR``.
        sort: if truthy (parsed via ``parse_bool``), upload in sorted
            filename order so runs are deterministic.
        upload_code: forwarded to ``upload_result`` for each sample.
    """
    result_dir = result_dir or dconf.RESULT_DIR
    sort = parse_bool(sort)
    results = glob.glob(os.path.join(result_dir, '*__summary.json'))
    if sort:
        results = sorted(results)
    count = len(results)
    LOG.info('Uploading %d samples from %s...', count, result_dir)
    for i, result in enumerate(results):
        # Fix: compute the basename once instead of twice per iteration.
        # The prefix is everything up to and including the '__' separator.
        basename = os.path.basename(result)
        prefix = basename[:basename.find('_') + 2]
        upload_result(result_dir=result_dir, prefix=prefix,
                      upload_code=upload_code)
        LOG.info('Uploaded result %d/%d: %s__*.json', i + 1, count, prefix)
def download_debug_info(pprint=False):
    """Download the server's debug-info dump and save it locally.

    Args:
        pprint: if truthy (parsed via ``parse_bool``), ask the server for a
            pretty-printed dump via the ``pp`` query parameter.

    Returns:
        The filename the dump was written to (taken from the
        Content-Disposition header).

    Raises:
        Exception: if the HTTP request fails or the downloaded content
            length does not match the server-reported Content-Length.
    """
    pprint = parse_bool(pprint)
    url = '{}/dump/{}'.format(dconf.WEBSITE_URL, dconf.UPLOAD_CODE)
    params = {'pp': int(True)} if pprint else {}
    rsp = requests.get(url, params=params)
    if rsp.status_code != 200:
        raise Exception('Error downloading debug info.')
    # Filename comes after '=' in Content-Disposition (e.g. attachment;
    # filename=debug.tar.gz).
    filename = rsp.headers.get('Content-Disposition').split('=')[-1]
    file_len, exp_len = len(rsp.content), int(rsp.headers.get('Content-Length'))
    # Fix: was an `assert`, which is silently stripped under `python -O`;
    # raise explicitly so truncated downloads always fail loudly.
    if file_len != exp_len:
        raise Exception(
            'File {}: content length != expected length: {} != {}'.format(
                filename, file_len, exp_len))
    with open(filename, 'wb') as f:
        f.write(rsp.content)
    LOG.info('Downloaded debug info to %s', filename)
    return filename
def _modify_website_object(obj_name, action, verbose=False, **kwargs):
    """POST a create/edit/delete request for a website object.

    Args:
        obj_name: one of 'project', 'session', or 'user'.
        action: operation to perform; valid values depend on ``obj_name``
            ('create'/'edit' for project and session, 'create'/'delete'
            for user).
        verbose: if truthy (parsed via ``parse_bool``), log the decoded
            JSON response.
        **kwargs: form fields; dict/list/tuple values are JSON-encoded.

    Returns:
        Tuple of (response, json_content, decoded_flag).

    Raises:
        ValueError: for an invalid object or action.
        Exception: if the server responds with a non-200 status.
    """
    verbose = parse_bool(verbose)
    # project and session share the same action set.
    if obj_name in ('project', 'session'):
        valid_actions = ('create', 'edit')
    elif obj_name == 'user':
        valid_actions = ('create', 'delete')
    else:
        # Fix: the message previously omitted 'user', which IS accepted above.
        raise ValueError(
            'Invalid object: {}. Valid objects: project, session, user'.format(
                obj_name))
    if action not in valid_actions:
        raise ValueError('Invalid action: {}. Valid actions: {}'.format(
            action, ', '.join(valid_actions)))
    data = {}
    for k, v in kwargs.items():
        # Containers must be JSON-encoded to survive form submission.
        if isinstance(v, (dict, list, tuple)):
            v = json.dumps(v)
        data[k] = v
    url_path = '/{}/{}/'.format(action, obj_name)
    response = requests.post(dconf.WEBSITE_URL + url_path, data=data)
    content = response.content.decode('utf-8')
    if response.status_code != 200:
        raise Exception("Failed to {} {}.\nStatus: {}\nMessage: {}\n".format(
            action, obj_name, response.status_code, content))
    json_content, decoded = _http_content_to_json(content)
    if verbose:
        if decoded:
            LOG.info('\n%s_%s = %s', action.upper(), obj_name.upper(),
                     json.dumps(json_content, indent=4))
        else:
            LOG.warning("Content could not be decoded.\n\n%s\n", content)
    return response, json_content, decoded
def post(self, request):
    """Create a Quiz from the submitted form and jump to its edit page.

    On an invalid form, re-renders the creation page with the word book
    list (note: form errors are not passed back to the template).
    """
    form = CreateQuizForm(request.POST)
    if not form.is_valid():
        # invalid form: show the creation page again
        return render(request, 'quiz_create.html', {
            "page": "testings",
            "wordbooks": WordBook.objects.all()
        })

    # save the quiz object
    submitted = form.data
    quiz = Quiz()
    quiz.author = request.user
    quiz.max_word_time = submitted["max_word_time"]
    quiz.max_total_time = submitted["max_total_time"]
    quiz.password = ""
    quiz.description = submitted["description"]
    quiz.book_id = submitted["book_id"]
    quiz.is_public = parse_bool(submitted.get("is_public", False))
    quiz.save()
    return redirect(
        reverse('testings.edit_quiz', kwargs={"quiz_id": quiz.id}))
def post(self, request):
    """Partially update a quiz from a JSON request body.

    Expects a JSON object with a required ``quiz_id`` and optional
    ``description``, ``max_total_time``, ``max_word_time`` and
    ``is_public`` fields.  Only truthy optional fields are applied;
    ``is_public`` is always written (defaulting to False).

    Returns a JSON status payload; any failure (bad JSON, missing key,
    unknown quiz id) yields ``{"status": "fail"}``.
    """
    try:
        data = json.loads(request.body.decode("utf-8"))
        quiz_id = data["quiz_id"]
        description = data.get("description", None)
        max_total_time = data.get("max_total_time", None)
        max_word_time = data.get("max_word_time", None)
        is_public = parse_bool(data.get("is_public", False))
        # update the quiz name
        quiz = Quiz.objects.filter(id=quiz_id).get()
        # NOTE: falsy values (empty string, 0) are intentionally skipped
        # by these truthiness checks, matching the original behavior.
        if description:
            quiz.description = description
        if max_total_time:
            quiz.max_total_time = max_total_time
        if max_word_time:
            quiz.max_word_time = max_word_time
        quiz.is_public = is_public
        quiz.save()
        return JsonResponse({"status": "ok"})
    except Exception:
        # Fix: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; narrow it to Exception.
        return JsonResponse({"status": "fail"})
def __substitute_graph_args(self, attrs):
    '''
    Resolve graph-argument placeholders inside a cube's attrs dict.

    Attr format:
    * value (3.14, "foo", true or ['1', 'b'], etc) -- passed through unchanged
    * variable ("$foo"). In this case value from graph_args will be substituted.
    * special dict. Dict must contain key 'cuber' with value true.
      The dict may specify variable (key "var") and default value
      (key "default"), if variable is not set in graph_args.

    Returns a new dict; ``attrs`` itself is not modified.

    Raises ValueError when a referenced variable is missing from
    self.graph_args and no default is provided.
    '''
    attrs_ = {}
    # NOTE: iteritems/basestring => this module targets Python 2.
    for key, value in attrs.iteritems():
        if isinstance(value, basestring) and value.startswith('$'):
            # "$foo" form: substitute the value of graph arg "foo".
            graph_args_key = value[1:]
            logger.debug('Substitute param: {}'.format(graph_args_key))
            if graph_args_key not in self.graph_args:
                raise ValueError(
                    'Key {} is not specified in graph args: {}'.format(
                        graph_args_key, self.graph_args))
            attrs_[key] = self.graph_args[graph_args_key]
        elif isinstance(value, dict) and utils.parse_bool(
                value.get('cuber', 'false')):
            # Special-dict form: {'cuber': true, 'var': '$foo',
            # 'default': ...}.  The 'var' entry must use the '$' prefix.
            assert value['var'].startswith('$')
            graph_args_key = value['var'][1:]
            logger.debug('Substitute param: {}'.format(graph_args_key))
            if graph_args_key not in self.graph_args:
                # Fall back to the dict's default, if one was given.
                if 'default' in value:
                    attrs_[key] = value['default']
                else:
                    raise ValueError(
                        '''Key {} is not specified in graph args and default value is not set: attr: {}, graph_args: {}'''.format(graph_args_key, value, self.graph_args))
            else:
                attrs_[key] = self.graph_args[graph_args_key]
        else:
            # Plain literal value: copy as-is.
            attrs_[key] = value
    return attrs_
def __run_graph(self, graph_, disable_inmemory_cache, disable_file_cache,
                cleanup, perfomance_logging):
    '''
    Recursively build and execute one cube of the computation graph.

    Validates the cube description, resolves its dependencies (running
    each dep through this same method), instantiates the cube class and
    calls its ``get()``.  Supports "frozen" results: when enabled, a
    cube's result may be loaded from / saved to a pickle on disk.

    TODO: improve exceptions for incorrect graph
    '''
    logger.debug('Graph to do: {}'.format(graph_))
    graph_ = self.__fold_graph(graph_)
    # Human-readable label for error messages; stable id for frozen files.
    graph_descriptor = graph_['name'] if 'name' in graph_ else str(graph_)
    graph_id = graph_[
        'name'] if 'name' in graph_ else utils.universal_hash(graph_)

    # --- validate the cube description -------------------------------
    for key in {'module', 'class'}:
        if key not in graph_:
            raise GraphErrorSpecifiedSubgraph(
                'Cube description must have {} parameter.'.format(key),
                subgraph=graph_descriptor)
    for key in graph_.keys():
        graph_possible_params = {
            'attrs', 'deps', 'class', 'module', 'comment', 'name',
            'frozen', 'disable_inmemory_cache', 'disable_file_cache'
        }
        if key not in graph_possible_params:
            raise GraphErrorSpecifiedSubgraph('Cube description has param {} that is not allowed. Check for typos. Possible values: {}' \
                .format(key, graph_possible_params), subgraph = graph_descriptor)

    def get_frozen_path():
        # Frozen results live at <frozens_dir>/<frozens_id>/<graph_id>.pkl
        frozen_path = os.path.join(self.frozens_dir, self.frozens_id,
                                   '{}.pkl'.format(graph_id))
        frozen_path_dir = os.path.join(self.frozens_dir, self.frozens_id)
        logger.info('Frozen path: {}'.format(frozen_path))
        return frozen_path, frozen_path_dir

    # Frozen requested but the pickle is absent: only tolerated when
    # use_frozen_only_if_exists is on.
    if utils.parse_bool(graph_.get('frozen', 'false')) and self.use_frozens and \
            not os.path.isfile(get_frozen_path()[0]):
        if not self.use_frozen_only_if_exists:
            raise GraphErrorSpecifiedSubgraph(
                'Frozen {} does not exists, but frozens are enabled and flag "use_frozens_only_if_exists" is not enabled.'
                .format(get_frozen_path()[0]), subgraph=graph_descriptor)

    # Frozen result available: short-circuit and return it.
    if utils.parse_bool(graph_.get('frozen', 'false')) and self.use_frozens and \
            os.path.isfile(get_frozen_path()[0]):
        logger.info('Loading from frozen')
        with open(get_frozen_path()[0], 'rb') as f:
            return pickle.load(f)

    # Deep-copy so placeholder substitution never mutates the graph spec.
    attrs = copy.deepcopy(graph_.get('attrs', {}))
    attrs = self.__substitute_graph_args(attrs)

    # --- resolve dependencies ----------------------------------------
    for i, dep_ in enumerate(graph_.get('deps', {})):
        # A bare (non-dict) dep is shorthand for {'graph': dep}.
        dep = dep_ if isinstance(dep_, dict) else {'graph': dep_}
        dep_descriptor = dep['name'] if isinstance(
            dep,
            dict) and 'name' in dep else '{}-th dep (zero-based)'.format(i)
        for key in {'graph'}:
            if key not in dep:
                raise GraphErrorSpecifiedDep(
                    'Dep description must have {} parameter.'.format(key),
                    subgraph=graph_descriptor,
                    dep=dep_descriptor)
        for key in dep.keys():
            dep_possible_params = {
                'fields', 'graph', 'prefix', 'comment', 'name', 'enable_if'
            }
            if key not in dep_possible_params:
                raise GraphErrorSpecifiedDep('Dep description has param {} that is not allowed. Check for typos. Possible values: {}' \
                    .format(key, dep_possible_params), subgraph = graph_descriptor, dep = dep_descriptor)
        # Conditionally skip deps whose 'enable_if' expression is false.
        if 'enable_if' in dep:
            if not self.eval_expression(dep['enable_if']):
                logger.info(
                    'Skip dependecy "{}" of "{}" because if clause is false'
                    .format(dep_descriptor, graph_descriptor))
                continue
        # Recurse: run the dependency subgraph with the same cache flags.
        res = self.__run_graph(
            dep['graph'],
            disable_inmemory_cache=disable_inmemory_cache,
            disable_file_cache=disable_file_cache,
            cleanup=cleanup,
            perfomance_logging=perfomance_logging,
        )
        if not isinstance(res, dict):
            raise GraphErrorSpecifiedDep('You may not use non-dict-result cube as a dependency. Result data ({}): {}.' \
                .format(type(res), res), subgraph = graph_descriptor, dep = dep_descriptor)
        if 'fields' not in dep:
            # No field mapping: merge every result key (with optional
            # prefix) into this cube's attrs.
            for key in res:
                attr_key = dep.get('prefix', '') + key
                if attr_key in attrs:
                    raise GraphErrorSpecifiedDep('Argument "{}" for is not unique.' \
                        .format(attr_key), subgraph = graph_descriptor, dep = dep_descriptor)
                attrs[attr_key] = res[key]
        else:
            # Explicit field mapping: {new_key: old_key} where old_key may
            # be '$' (same name), a string, or a dict with 'source_field'
            # and optional 'pack_to_dict'.
            for new_key, old_key_ in dep['fields'].iteritems():
                attr_key = dep.get('prefix', '') + new_key
                pack_to_dict = None
                if isinstance(old_key_, basestring):
                    old_key = old_key_ if old_key_ != '$' else new_key
                elif isinstance(old_key_, dict):
                    old_key = old_key_['source_field'] if old_key_[
                        'source_field'] != '$' else new_key
                    pack_to_dict = old_key_.get('pack_to_dict', None)
                if pack_to_dict is None:
                    # Plain mapping: copy res[old_key] into attrs[attr_key].
                    if attr_key in attrs:
                        raise GraphErrorSpecifiedDep('Argument "{}" for is not unique.' \
                            .format(attr_key), subgraph = graph_descriptor, dep = dep_descriptor)
                    if old_key not in res:
                        raise GraphErrorSpecifiedDep(
                            'Field "{}" is not got from dependency. Got: {}'
                            .format(old_key, ', '.join(res.keys())),
                            subgraph=graph_descriptor,
                            dep=dep_descriptor)
                    attrs[attr_key] = res[old_key]
                else:
                    # pack_to_dict: group the value inside a nested dict
                    # attr instead of a top-level one.
                    if pack_to_dict not in attrs:
                        attrs[pack_to_dict] = {}
                    if attr_key in attrs[pack_to_dict]:
                        raise GraphErrorSpecifiedDep('Argument "{}" for is not unique for packing dict "{}".' \
                            .format(attr_key, pack_to_dict), subgraph = graph_descriptor, dep = dep_descriptor)
                    if old_key not in res:
                        raise GraphErrorSpecifiedDep(
                            'Field "{}" is not got from dependency. Got: {}'
                            .format(old_key, ', '.join(res.keys())),
                            subgraph=graph_descriptor,
                            dep=dep_descriptor)
                    attrs[pack_to_dict][attr_key] = res[old_key]

    # --- instantiate and run the cube --------------------------------
    module = importlib.import_module(graph_['module'])
    logger.debug('Attrs keys: {}'.format(attrs.keys()))
    try:
        cube_init = getattr(module, graph_['class'])(**attrs)
    except Exception as e:
        logging.error('Faild to init cube:\nCube: {cube}\nGraph part: {graph_part}\nAttrs: {attrs}\nError: {error}\nTraceback: {tb}' \
            .format(
                cube = graph_['module'],
                graph_part = str(graph_),
                attrs = utils.dict_to_string(attrs, brackets = True),
                error = str(e),
                tb = traceback.format_exc(),
            )
        )
        raise
    try:
        # Per-cube cache flags in the spec can also disable caching.
        res = cube_init.get(
            disable_inmemory_cache=disable_inmemory_cache or utils.parse_bool(
                graph_.get('disable_inmemory_cache', 'false')),
            disable_file_cache=disable_file_cache
            or utils.parse_bool(graph_.get('disable_file_cache', 'false')),
            cleanup=cleanup,
            perfomance_logging=perfomance_logging,
        )
    except Exception as e:
        logging.error('Faild to cube.get():\nCube: {cube}\nGraph part: {graph_part}\nAttrs: {attrs}\nError: {error}\nTraceback: {tb}' \
            .format(
                cube = graph_['module'],
                graph_part = str(graph_),
                attrs = utils.dict_to_string(attrs, brackets = True),
                error = str(e),
                tb = traceback.format_exc(),
            )
        )
        raise

    # --- persist a frozen copy if requested --------------------------
    if utils.parse_bool(graph_.get('frozen', 'false')) and self.create_frozens:
        frozen_path, frozen_path_dir = get_frozen_path()
        if not os.path.isdir(frozen_path_dir):
            os.makedirs(frozen_path_dir)
        with open(frozen_path, 'wb') as f:
            pickle.dump(res, f)
        logger.info('Frozen point created')
    return res