def index():
    """Home page: verified reports plus pre-aggregated statistics for charts."""
    import gluon.contrib.simplejson as json
    reports_table = db.reports
    # Feature / education breakdowns, serialized for client-side JS widgets.
    reports_features = db(db.reports_features).select().as_list()
    reports_features_js = json.dumps(reports_features)
    reports_educations = db(db.reports_educations).select().as_list()
    reports_educations_js = json.dumps(reports_educations)
    verified = db(reports_table.is_verified > 0).select()
    # The timeline widget expects reports in chronological order.
    ordered = sorted(verified.as_list(),
                     key=lambda report: report['incident_date'])
    # Single cached statistics row; may be absent on a fresh database.
    stats = db(db.reports_statistics).select().first()
    return dict(
        timeline=stats.timeline if stats else None,
        report_features=stats.report_features if stats else None,
        male_victims_count=stats.male_victims_count if stats else 0,
        reports_count=stats.total if stats else 0,
        report_education_stats=stats.report_educations if stats else None,
        reports_educations_js=reports_educations_js,
        reports_features_js=reports_features_js,
        reports_js=json.dumps(ordered),
        reports=verified,
    )
def import_file(file_content, file_name = ''): file_content = file_content.strip() action = '' graph = {} if file_content.startswith('{') and file_content.endswith('}'):#basic check json_string = file_content try: graph = simplejson.loads(file_content) except simplejson.JSONDecodeError: action = 'loaded %s but could not parse json'%file_name graph = dict(nodes = [], edges = []) else: action = 'loaded %s'%file_name elif ('http://sbgn.org/libsbgn/' in file_content): graph = sbgnml2jsbgn(file_content) json_string = simplejson.dumps(graph) action = 'loaded %s'%file_name elif 'http://www.sbml.org/sbml/' in file_content: graph = sbml2jsbgn(file_content) json_string = simplejson.dumps(graph) action = 'loaded %s'%file_name if action and graph and json_string: return action, graph, json_string else: print 'action: ',action print 'graph: ',graph print 'sbgnml? :',('http://sbgn.org/libsbgn/pd' in file_content) or ('http://sbgn.org/libsbgn/0.2' in file_content)
def process(channel, message):
    # Handle a "word found" message from a client: validate the word against
    # the board, decrement its remaining count, and notify the opponent.
    sri = message['row_index']
    sci = message['col_index']
    # Board maps (row, col) to the word starting there.
    sw = channel.board[(sri, sci)]
    if channel.to_find[sw] == 0:
        # Word already exhausted (or never expected) — ignore the message.
        channel.logger.error(
            'Received unassigned word "{}".'.format(sw))
        channel.logger.debug('Expecting solutions: {}'.format(
            channel.to_find))
        return
    channel.to_find[sw] -= 1
    # Python 2 dict iteration; > 0 means the game is still in progress.
    if sum(channel.to_find.itervalues()) > 0:
        channel.opponent.push(
            dumps(
                dict(purpose='update', opponent_score=message['score'])))
        channel.logger.info('Reached score {}.'.format(
            message['score']))
    else:
        # This player finished; wait for the opponent to finish too.
        channel.state = ClientChannel.WaitState
        channel.opponent.push(
            dumps(
                dict(purpose='opponent_finished', opponent_score=message['score'])))
        channel.logger.info(
            'Reached the finish line with score {}.'.format(
                message['score']))
        if channel.opponent.state is ClientChannel.WaitState:
            # Both players done: tear down both connections.
            channel.close_when_done()
            channel.opponent.close_when_done()
            del channel.opponent
            channel.logger.info('Cached up with opponent, game over.')
def encode(obj):
    """Serialize *obj* for embedding in generated client-side script text."""
    # jQuery expressions render as their literal text, untouched.
    if isinstance(obj, jQuery):
        return str(obj)
    # Raw script strings pass straight through.
    if isinstance(obj, str) and is_script(obj):
        return obj
    # Helpers exposing .xml() serialize from their markup; double quotes are
    # swapped to single so the result nests inside a JS double-quoted string.
    if hasattr(obj, "xml"):
        return dumps(obj.xml().replace('"', "'"))
    # Everything else is JSON-dumped with quotes normalized to double.
    return dumps(obj).replace("'", '"')
def process(channel, message):
    # Process a solved-word notification for a two-player word game round.
    sri = message['row_index']
    sci = message['col_index']
    # Look up which word starts at the reported board position.
    sw = channel.board[(sri, sci)]
    if channel.to_find[sw] == 0:
        # No outstanding occurrences of this word — reject and bail out.
        channel.logger.error(
            'Received unassigned word "{}".'.format(sw))
        channel.logger.debug(
            'Expecting solutions: {}'.format(channel.to_find))
        return
    channel.to_find[sw] -= 1
    # itervalues(): Python 2 idiom; positive sum == words still outstanding.
    if sum(channel.to_find.itervalues()) > 0:
        channel.opponent.push(dumps(dict(
            purpose='update', opponent_score=message['score'])))
        channel.logger.info('Reached score {}.'.
                            format(message['score']))
    else:
        # All words found: switch to waiting for the opponent.
        channel.state = ClientChannel.WaitState
        channel.opponent.push(dumps(dict(
            purpose='opponent_finished', opponent_score=message['score'])))
        channel.logger.info('Reached the finish line with score {}.'.
                            format(message['score']))
        if channel.opponent.state is ClientChannel.WaitState:
            # Opponent already finished: close both channels, game over.
            channel.close_when_done()
            channel.opponent.close_when_done()
            del channel.opponent
            channel.logger.info('Cached up with opponent, game over.')
def queue_task(self, function, pargs=None, pvars=None, **kwargs):
    """
    Queue tasks. This takes care of handling the validation of all
    values.
    :param function: the function (anything callable with a __name__)
    :param pargs: "raw" args to be passed to the function. Automatically
        jsonified.
    :param pvars: "raw" kwargs to be passed to the function. Automatically
        jsonified
    :param kwargs: all the scheduler_task columns. args and vars here should be
        in json format already, they will override pargs and pvars

    returns a dict just as a normal validate_and_insert, plus a uuid key holding
    the uuid of the queued task. If validation is not passed, both id and uuid
    will be None, and you'll get an "error" dict holding the errors found.
    """
    # FIX: the original used mutable defaults (pargs=[], pvars={}), shared
    # across calls; None sentinels are backward compatible and safe.
    if pargs is None:
        pargs = []
    if pvars is None:
        pvars = {}
    if hasattr(function, '__name__'):
        function = function.__name__
    # Explicit json-encoded kwargs win over pargs/pvars (the and/or fallback
    # is kept for backward compatibility: falsy values also fall back).
    targs = 'args' in kwargs and kwargs.pop('args') or dumps(pargs)
    tvars = 'vars' in kwargs and kwargs.pop('vars') or dumps(pvars)
    tuuid = 'uuid' in kwargs and kwargs.pop('uuid') or web2py_uuid()
    tname = 'task_name' in kwargs and kwargs.pop('task_name') or function
    rtn = self.db.scheduler_task.validate_and_insert(
        function_name=function,
        task_name=tname,
        args=targs,
        vars=tvars,
        uuid=tuuid,
        **kwargs)
    if not rtn.errors:
        rtn.uuid = tuuid
    else:
        rtn.uuid = None
    return rtn
def encode(obj):
    """Turn *obj* into a string safe to splice into generated JS."""
    if isinstance(obj, JQuery):
        # JQuery wrappers stringify to their expression text.
        return str(obj)
    if isinstance(obj, str):
        # A string that is already a script fragment is returned verbatim.
        if is_script(obj):
            return obj
    if hasattr(obj, "xml"):
        # XML-capable helpers: dump the markup with quotes flipped so it can
        # live inside a double-quoted JS string.
        inner = obj.xml().replace('"', "'")
        return json.dumps(inner)
    encoded = json.dumps(obj)
    return encoded.replace("'", '"')
def updateBeacon():
    # Rename a Gimbal beacon via a PUT to the manager API and mirror the new
    # name into the local beacons table. Admin-only JSON endpoint.
    uemail=request.vars.uemail
    password=request.vars.password
    b_f_id=request.vars.b_f_id
    b_name=request.vars.b_name
    valid_usr=authenticate(uemail,password)
    if valid_usr:
        usr_role=getuserrole(uemail)
        if(usr_role=="admin"):
            #insert into gimbal server and get place-id , place_lat, place_long
            import urllib, urllib2, base64,httplib
            data={"name":b_name}
            data_json = json.dumps(data)
            # NOTE(review): API token is hard-coded; it should live in config.
            headers = {
                'Authorization': 'Token token=f50ccc1aab3314434ae32dada87312cb',
                'Content-Type': 'application/json'
            }
            opener = urllib2.build_opener(urllib2.HTTPHandler)
            url="https://manager.gimbal.com/api/beacons/"+b_f_id
            req = urllib2.Request( url,data_json, headers)
            # urllib2 has no native PUT; override the method resolver.
            req.get_method =lambda: 'PUT'
            res=opener.open(req)
            if(res) :
                code=200
                rows =db (db.beacons.b_factory_id==b_f_id).select()
                if rows:
                    for row in rows:
                        row.update_record(beacon_name=b_name)
                    message="success"
                    # Early return: skips the response header/status setup below.
                    return json.dumps(dict(message=message))
                # NOTE(review): when no local rows match, `message` is never
                # assigned and the final json.dumps below raises NameError.
            else :
                code=412
                message="error"
        else:
            code=401
            message="user not authorized"
    else:
        code=412
        message="User not Authenticated"
    response.headers['Content-Type']='application/json'
    response.status=int(code)
    return json.dumps(dict(message=message))
    return None  # unreachable
def check_season_worker(series_id, seasonnumber, mode):
    # Run the renamer's check for one season, merge the result into the
    # JSON season_status blob stored in seasons_settings, then rename files.
    renamer = w2p_tvseries_ren_loader()
    res = renamer.check(series_id, seasonnumber, mode)
    prev_status = db((db.seasons_settings.series_id == series_id) & (
        db.seasons_settings.seasonnumber == seasonnumber)).select(
        db.seasons_settings.season_status).first()
    # Fall back to an empty JSON object when no previous status row exists.
    prev_status = prev_status and prev_status.season_status or sj.dumps({})
    prev_status = sj.loads(prev_status)
    if mode == 'subs':
        # Subtitle checks report under their own key to avoid clobbering
        # the episode 'missing' entry.
        res['missingsubs'] = res['missing']
        del res['missing']
    for k, v in res.iteritems():  # Python 2 dict iteration
        prev_status[k] = v
    rtn = sj.dumps(prev_status)
    db((db.seasons_settings.series_id == series_id) &
       (db.seasons_settings.seasonnumber == seasonnumber)).update(
        season_status=rtn)
    renamer.rename(series_id, seasonnumber, mode)
    db.commit()
    return rtn
def POST(tulip_url, **vars):
    """Receive a file upload for the leak behind *tulip_url*.

    Only whistleblower tulips may upload. Returns a JSON payload describing
    the stored file, or {"success": "false"} on failure.
    """
    try:
        tulip = Tulip(url=tulip_url)
    # FIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; Exception keeps the best-effort behavior safely.
    except Exception:
        return json.dumps({"success": "false"})
    if not tulip.is_wb():
        return json.dumps({"success": "false"})
    upload = FileUpload.post(tulip.leak.id)
    upload = json.loads(upload)
    filedata = Storage()
    # Store the number of bytes of the uploaded file
    filedata.bytes = upload[0]['size']
    # Store the file size in human readable format
    filedata.size = mutils.human_size(filedata.bytes)
    filedata.fileid = upload[0]['id']
    # Store filename and extension
    filedata.filename = upload[0]['name']
    filedata.ext = mutils.file_type(upload[0]['name'].split(".")[-1])
    # Track the file in the uploader's session for later submission.
    session.add_files.append(filedata)
    return json.dumps(upload)
def periodo():
    # AJAX endpoint: delete an activity period, or insert/update one from
    # the posted inicio/termino timestamps. Returns a JSON status.
    from gluon.contrib import simplejson
    if request.vars.remove:
        a = db(db.atividade_periodo.id==request.vars.id).delete()
        print a,request.vars.id  # Python 2 debug print (count deleted, id)
        return simplejson.dumps({'success':True})
    from datetime import datetime
    # Require an activity context; otherwise bounce back to the index.
    atividade = atividade_record or redirect(URL('index'))
    # Timestamps are parsed with the localized datetime format string.
    i = datetime.strptime(request.vars.inicio,"%s" %T("%Y-%m-%d %H:%M"))
    t = datetime.strptime(request.vars.termino,"%s" %T("%Y-%m-%d %H:%M"))
    if i > t:
        # Swap so that the start always precedes the end.
        aux = i
        i = t
        t = aux
    p = db.atividade_periodo.update_or_insert(db.atividade_periodo.id==request.vars.id,
                                              atividade_id=atividade.id,
                                              inicio=i,
                                              termino=t)
    return simplejson.dumps(dict(id=p,success=True))
def parse_items_ingrs():
    '''Do various tasks with ingrs and items'''
    # job=parse_ingrs: parse ingredients out of each item's name and attach
    # them to the item, starting from the given id.
    if request.vars.job == 'parse_ingrs':
        from_ingr = int(request.vars.from_ingr)
        _tmp_items = db(db.t_item.id > from_ingr).select()
        for item in _tmp_items:
            ingrs = get_ingrs_for_item(item.id)
            # now we have list of ingrs
            # lets parse item name
            try:
                parsed_ingrs_id_list = parse_ingrs_id(item.f_name)
                if len(parsed_ingrs_id_list) > 0:
                    # merge freshly parsed ids with the existing ones
                    parsed_ingrs_id_list += [x['id'] for x in ingrs]
                    add_ingrs_item(item.id, item.f_name,
                                   set(parsed_ingrs_id_list))
            except Exception as e:
                return simplejson.dumps({'status': 'ERROR!', 'msg': str(e.message) + 'for item:' + str(item.id)})
        msg = 'We updated ' + str(len(_tmp_items)) + ' items'
        return simplejson.dumps({'status': 'OK', 'msg': msg})
    # job=ingrs_normal: recompute the normal form for curated ingredients.
    elif request.vars.job == 'ingrs_normal':
        _ingrs = db(db.t_ingredient.id > 0).select()
        try:
            for ingr in _ingrs:
                normal_form = normalize_ingr(ingr.f_name)
                # FIX: the original combined the two DAL queries with Python's
                # `and`, which evaluates to the right-hand Query only — the id
                # filter was silently dropped. DAL queries combine with `&`.
                db((db.t_ingredient.id == ingr.id) &
                   (db.t_ingredient.f_curate == 'T')).update(
                    f_normal_form=normal_form, f_curate='F')
            db.commit()
        except Exception as e:
            msg = 'We FAILED at ' + str(ingr)
            return simplejson.dumps({'status': 'ERROR', 'msg': msg})
        msg = 'We updated ' + str(len(_ingrs)) + ' ingredients'
        return simplejson.dumps({'status': 'OK', 'msg': msg})
def add_references(self, element, rmap, show_ids=False):
    """
    Adds <reference> elements to a <resource>

    @param element: the <resource> element
    @param rmap: the reference map for the corresponding record
    @param show_ids: insert the record ID as attribute in references
    """
    for i in xrange(0, len(rmap)):  # Python 2 xrange
        r = rmap[i]
        reference = etree.SubElement(element, self.TAG.reference)
        reference.set(self.ATTRIBUTE.field, r.field)
        reference.set(self.ATTRIBUTE.resource, r.table)
        if show_ids:
            if r.multiple:
                # multiple referenced ids serialize as a JSON list
                ids = json.dumps(r.id)
            else:
                ids = "%s" % r.id[0]
            reference.set(self.ATTRIBUTE.id, ids)
        if r.uid:
            if r.multiple:
                uids = json.dumps(r.uid)
            else:
                uids = "%s" % r.uid[0]
            # Python 2: decode to unicode so lxml accepts non-ASCII UIDs
            reference.set(self.UID, str(uids).decode("utf-8"))
            reference.text = r.text
        else:
            # no UID available: fall back to the raw value attribute
            reference.set(self.ATTRIBUTE.value, r.value)
        # TODO: add in-line resource
        r.element = reference
def on_accept(form):
    # Form-accept callback: when the sampling time changes, rescale every
    # stored time series (dropT/cutT/nodiffT/eventT), event times and
    # subinterval boundaries by the ratio new/old sampling time.
    from gluon.contrib import simplejson
    global sampling_time_old
    global execonsuccess
    #update time of cutT, nodiffT, dropT and evenT when sampling time is changed + subintervals definition
    if float(form.vars.sampling_time)>0:
        sfactor = float(form.vars.sampling_time) / sampling_time_old
    else:
        sfactor = 1 #do nothing
    if sfactor != 1:
        # Each field is a JSON blob; None means "never set" and maps to [].
        dropT = [[x * sfactor for x in y] for y in simplejson.loads(db.flise_file[form.vars.id].dropT)] if (db.flise_file[form.vars.id].dropT != None) else []
        db.flise_file[form.vars.id].update_record(dropT=simplejson.dumps(dropT))
        cutT = [x * sfactor for x in simplejson.loads(db.flise_file[form.vars.id].cutT)] if (db.flise_file[form.vars.id].cutT != None) else []
        db.flise_file[form.vars.id].update_record(cutT=simplejson.dumps(cutT))
        nodiffT = [[x * sfactor for x in y] for y in simplejson.loads(db.flise_file[form.vars.id].nodiffT)] if (db.flise_file[form.vars.id].nodiffT != None) else []
        db.flise_file[form.vars.id].update_record(nodiffT=simplejson.dumps(nodiffT))
        eventT = [x * sfactor for x in simplejson.loads(db.flise_file[form.vars.id].eventT)] if (db.flise_file[form.vars.id].eventT != None) else []
        db.flise_file[form.vars.id].update_record(eventT=simplejson.dumps(eventT))
        # Rescale individual event timestamps.
        for record in db(db.event.flise_file_id == form.vars.id).select():
            time = record.time * sfactor
            record.update_record(time=time)
        # Subinterval bounds are stored as a "start:end" string.
        for record in db(db.subintervals.flise_file_id == form.vars.id).select():
            extract_time = record.extract_time
            str_time = extract_time.split(':')
            intStart = float(str_time[0]) * sfactor
            intEnd = float(str_time[1]) * sfactor
            extract_time = '%g:%g' % (intStart, intEnd)
            record.update_record(extract_time=extract_time)
        # Client-side JS to run after a successful rescale.
        execonsuccess = 'updateGraph("%s");' % form.vars.name
    else:
        execonsuccess = 'web2py_ajax_page("GET","%s","","my_records"); $(".current_record").html("%s"); cur_id=%s; ' % (URL(r=request, f='files'), form.vars.name, form.vars.id)
def queue_task(self, function, pargs=None, pvars=None, **kwargs):
    """
    Queue tasks. This takes care of handling the validation of all
    values.
    :param function: the function (anything callable with a __name__)
    :param pargs: "raw" args to be passed to the function. Automatically
        jsonified.
    :param pvars: "raw" kwargs to be passed to the function. Automatically
        jsonified
    :param kwargs: all the scheduler_task columns. args and vars here should be
        in json format already, they will override pargs and pvars

    returns a dict just as a normal validate_and_insert, plus a uuid key holding
    the uuid of the queued task. If validation is not passed, both id and uuid
    will be None, and you'll get an "error" dict holding the errors found.
    """
    # FIX: mutable defaults (pargs=[], pvars={}) were shared across calls;
    # None sentinels are a backward-compatible replacement.
    if pargs is None:
        pargs = []
    if pvars is None:
        pvars = {}
    if hasattr(function, "__name__"):
        function = function.__name__
    # Explicit json-encoded kwargs win over pargs/pvars (and/or fallback
    # kept for backward compatibility: falsy values also fall back).
    targs = "args" in kwargs and kwargs.pop("args") or dumps(pargs)
    tvars = "vars" in kwargs and kwargs.pop("vars") or dumps(pvars)
    tuuid = "uuid" in kwargs and kwargs.pop("uuid") or web2py_uuid()
    tname = "task_name" in kwargs and kwargs.pop("task_name") or function
    immediate = "immediate" in kwargs and kwargs.pop("immediate") or None
    rtn = self.db.scheduler_task.validate_and_insert(
        function_name=function,
        task_name=tname,
        args=targs,
        vars=tvars,
        uuid=tuuid,
        **kwargs
    )
    if not rtn.errors:
        rtn.uuid = tuuid
        if immediate:
            # Nudge the ticker worker so the task gets picked up right away.
            self.db(self.db.scheduler_worker.is_ticker == True).update(status=PICK)
    else:
        rtn.uuid = None
    return rtn
def tree2json(cls, tree, pretty_print=False):
    """
    Converts an element tree into JSON

    @param tree: the element tree
    @param pretty_print: provide pretty formatted output
    """
    # Accept either a full ElementTree or a bare root element.
    root = tree.getroot() if isinstance(tree, etree._ElementTree) else tree
    # "native" marks trees produced by this framework (root tag matches).
    native = root.tag == cls.TAG.root
    root_dict = cls.__element2json(root, native=native)
    if not pretty_print:
        return json.dumps(root_dict)
    pretty = json.dumps(root_dict, indent=4)
    # Strip trailing whitespace that indent= leaves on some lines.
    return "\n".join([line.rstrip() for line in pretty.splitlines()])
def add_to_targetgroup(self, target_id, group_id=None, group_name=None):
    """
    Adds the target with id target_id to the targetgroup with id
    group_id. Returns True if the operation was successful.
    """
    # A group name may be given instead of an id.
    if group_name:
        group_id = self.get_group_id(group_name)
    database = self._db
    target_row = database(database.target.id == target_id).select().first()
    group_row = database(database.targetgroup.id == group_id).select().first()
    result = False
    if target_row is not None and group_row is not None:
        # The group's targets column holds a JSON-encoded list of ids.
        stored = group_row.targets
        if stored:
            id_list = json.loads(stored)
            id_list.append(target_id)
        else:
            # First member: start a fresh list.
            id_list = [target_id]
        result = database(database.targetgroup.id == group_id
                          ).update(targets=json.dumps(id_list))
    database.commit()
    return result
def check_season_worker(series_id, seasonnumber, mode):
    # Check one season with the renamer, fold the results into the stored
    # season_status JSON, persist it, then trigger the actual renaming.
    renamer = w2p_tvseries_ren_loader()
    res = renamer.check(series_id, seasonnumber, mode)
    prev_status = (
        db((db.seasons_settings.series_id == series_id)
           & (db.seasons_settings.seasonnumber == seasonnumber))
        .select(db.seasons_settings.season_status)
        .first()
    )
    # No prior row -> start from an empty JSON object.
    prev_status = prev_status and prev_status.season_status or sj.dumps({})
    prev_status = sj.loads(prev_status)
    if mode == "subs":
        # Subtitle runs report under a dedicated key.
        res["missingsubs"] = res["missing"]
        del res["missing"]
    for k, v in res.iteritems():  # Python 2 dict iteration
        prev_status[k] = v
    rtn = sj.dumps(prev_status)
    db((db.seasons_settings.series_id == series_id)
       & (db.seasons_settings.seasonnumber == seasonnumber)).update(
        season_status=rtn
    )
    renamer.rename(series_id, seasonnumber, mode)
    db.commit()
    return rtn
def import_file(file_content, file_name=''): file_content = file_content.strip() action = '' graph = {} if file_content.startswith('{') and file_content.endswith( '}'): #basic check json_string = file_content try: graph = simplejson.loads(file_content) except simplejson.JSONDecodeError: action = 'loaded %s but could not parse json' % file_name graph = dict(nodes=[], edges=[]) else: action = 'loaded %s' % file_name elif ('http://sbgn.org/libsbgn/' in file_content): graph = sbgnml2jsbgn(file_content) json_string = simplejson.dumps(graph) action = 'loaded %s' % file_name elif 'http://www.sbml.org/sbml/' in file_content: graph = sbml2jsbgn(file_content) json_string = simplejson.dumps(graph) action = 'loaded %s' % file_name if action and graph and json_string: return action, graph, json_string else: print 'action: ', action print 'graph: ', graph print 'sbgnml? :', ('http://sbgn.org/libsbgn/pd' in file_content) or ('http://sbgn.org/libsbgn/0.2' in file_content)
def queue_task(self, function, pargs=[], pvars={}, **kwargs):
    """
    FIXME: immediate should put item in queue. The hard part is that
    currently there are no hooks happening at post-commit time

    Queue tasks. This takes care of handling the validation of all
    parameters

    Args:
        function: the function (anything callable with a __name__)
        pargs: "raw" args to be passed to the function. Automatically
            jsonified.
        pvars: "raw" kwargs to be passed to the function. Automatically
            jsonified
        kwargs: all the parameters available (basically, every
            `scheduler_task` column). If args and vars are here, they should
            be jsonified already, and they will override pargs and pvars

    Returns:
        a dict just as a normal validate_and_insert(), plus a uuid key
        holding the uuid of the queued task. If validation is not passed
        ( i.e. some parameters are invalid) both id and uuid will be None,
        and you'll get an "error" dict holding the errors found.
    """
    # NOTE(review): mutable defaults (pargs=[], pvars={}) are shared across
    # calls — safe only while callers never mutate them.
    if hasattr(function, '__name__'):
        function = function.__name__
    # Explicit json-encoded kwargs win over pargs/pvars; falsy values fall
    # back to the dumps() default as well (and/or idiom).
    targs = 'args' in kwargs and kwargs.pop('args') or dumps(pargs)
    tvars = 'vars' in kwargs and kwargs.pop('vars') or dumps(pvars)
    tuuid = 'uuid' in kwargs and kwargs.pop('uuid') or web2py_uuid()
    tname = 'task_name' in kwargs and kwargs.pop('task_name') or function
    immediate = 'immediate' in kwargs and kwargs.pop('immediate') or None
    rtn = self.db.scheduler_task.validate_and_insert(
        function_name=function,
        task_name=tname,
        args=targs,
        vars=tvars,
        uuid=tuuid,
        **kwargs)
    if not rtn.errors:
        rtn.uuid = tuuid
        if immediate:
            # Flip the ticker worker's redis status to PICK so the task is
            # scheduled right away; retried under a WATCH on contention.
            r_server = self.r_server
            ticker = self.get_workers(only_ticker=True)
            if ticker.keys():  # Python 2: keys() is a list
                ticker = ticker.keys()[0]
                with r_server.pipeline() as pipe:
                    while True:
                        try:
                            pipe.watch('SET_WORKER_STATUS')
                            worker_key = self._nkey('worker_status:%s' % ticker)
                            pipe.hset(worker_key, 'status', 'PICK')
                            pipe.execute()
                            break
                        except RWatchError:
                            # Key changed under us: back off and retry.
                            time.sleep(0.1)
                            continue
    else:
        rtn.uuid = None
    return rtn
def __setitem__(self, name, content):
    """Persist *content* (JSON-encoded) under key *name*, insert-or-update."""
    from gluon.contrib import simplejson as json
    payload = json.dumps(content)
    storage = self.settings.table_storage
    # Update in place when the key exists, otherwise create the row.
    if self.db(storage.name == name).count():
        self.db(storage.name == name).update(content=payload)
    else:
        storage.insert(name=name, content=payload)
def download():
    # Facebook video: log in with a cookie-aware opener, fetch the page and
    # slice the raw video_src URL out of the inline scripts; return JSON.
    if not common.check_verify(request, session, db):
        return redirect('/')
    c = {}
    c['url'] = request.vars.url or ''
    c['movies'] = []
    c['screen'] = []
    c['sound'] = []
    c['container'] = []
    c['encoding'] = []
    c['rate'] = []
    if c['url']:
        try:
            cj = cookielib.CookieJar()
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            opener.addheaders = [('User-Agent', request.env.http_user_agent)]
            urllib2.install_opener(opener)
            # NOTE(review): credentials are scrubbed placeholders in this copy.
            data = urllib.urlencode({
                'email': '*****@*****.**',
                'pass': '******'
            })
            link = 'http://www.facebook.com/login.php'
            # First GET establishes cookies, second POST performs the login.
            f = opener.open(link)
            f = opener.open(link, data)
        except:
            return redirect('/')
        f = opener.open(c['url'])
        content = f.read()
        #f.close()  # NOTE: closing here broke the logged-in session
        try:
            data = lxml.etree.parse(cStringIO.StringIO(content),
                                    lxml.etree.HTMLParser(encoding='utf-8'))
            scripts = data.xpath(
                "//script[contains(text(),'video_src')]",
                namespaces={"regxp": "http://exslt.org/regular-expressions"})
        except:
            return sj.dumps(['error'])
        title = data.xpath("//h3[@class='video_title datawrap']/text()")
        c['title'] = ''.join(title)
        output = ''
        for x in scripts:
            output += lxml.etree.tostring(x)
        output = urllib2.unquote(output)
        # Slice the URL between 'video_src' and the closing parenthesis.
        movie_url = output[output.find('video_src') +
                           13:output.find(')',
                                          output.find('video_src') + 9) - 1]
        #return movie_url
        c['movies'].append(movie_url)
        c['screen'].append('375x500')
        c['sound'].append('stereo')
        c['container'].append('FLV')
        c['encoding'].append('MPEG-4 AVC(H.264)')
        c['rate'].append('44100 Hz')
        c['paster'] = 'true'
    return sj.dumps(c)
def download():
    # Xuite vlog: extract the media id from the page's video_src link, then
    # query the player XML whose fields are base64-encoded; return JSON.
    if not common.check_verify(request, session, db):
        return redirect('/')
    c = {}
    c['url'] = request.vars.url or ''
    if c['url']:
        headers = {'User-Agent': request.env.http_user_agent}
        #get video src xml
        try:
            http = httplib2.Http()
            resp, content = http.request(c['url'], headers=headers)
            data = lxml.etree.parse(cStringIO.StringIO(content),
                                    lxml.etree.HTMLParser(encoding='utf-8'))
            media = data.xpath(
                "//link[@rel='video_src']/@href",
                namespaces={"regxp": "http://exslt.org/regular-expressions"})
            media = ''.join(media)
            # Keep only the id component of the href.
            media = media.rsplit("/", 1)[-1]
            media = media.split("&")[0]
            url = urllib2.quote(c['url'])
            xml_url = 'http://vlog.xuite.net/flash/player?media=%s==&refer=%s' % (
                media, url)
            resp, content = http.request(xml_url, headers=headers)
            print xml_url  # Python 2 debug print
        except:
            return redirect('/')
        try:
            # Property ids are base64 field names: c3Jj='src',
            # c2l6ZQ=='size', dGl0bGU='title'.
            flv_src = urllib2.unquote(
                base64.decodestring(
                    re.findall('<property id="c3Jj">(.*)]]></property>',
                               content)[0][9:]))
            flv_size = base64.decodestring(
                re.findall('<property id="c2l6ZQ==">(.*)]]></property>',
                           content)[0][9:])
            c['title'] = urllib2.unquote(
                base64.decodestring(
                    re.findall('<property id="dGl0bGU=">(.*)]]></property>',
                               content)[0][9:]))
            #hq_src = urllib2.unquote(base64.decodestring(re.findall('<property id="aHFfc3Jj">(.*)]]></property>',content)[0][9:]))
            #print hq_src == ''
            c['movies'] = []
            c['screen'] = []
            c['sound'] = []
            c['container'] = []
            c['encoding'] = []
            c['rate'] = []
            c['movies'].append(flv_src)
            c['screen'].append(flv_size)
            c['sound'].append('stereo')
            c['container'].append('FLV')
            c['encoding'].append('MPEG-4 AVC(H.264)')
            c['rate'].append('44100 Hz')
        except:
            return sj.dumps(['error'])
    return sj.dumps(c)
def download():
    # Youku video: build a signed REST call to resolve the first FLV
    # segment URL, then scrape the page for the title; return JSON.
    if not common.check_verify(request, session, db):
        return redirect('/')
    c = {}
    c['url'] = request.vars.url or ''
    if c['url']:
        # auth = md5("<timestamp> XOA== <api secret>")
        auth = ''.join([(str(int(time.time()))),
                        ' XOA== MWZlNWE4Y2Q4OWQ0NjEyMWJjZTJmMWNiYTVhNzQwZGM='])
        auth = hashlib.md5(auth).hexdigest()
        # Video id: last path component minus extension and 'id_' prefix.
        videoid = c['url'].rsplit("/")[-1].split(".")[0][3:]
        #return videoid
        direct_url = ''.join([
            ('http://api.youku.com/api_rest?method=video.getvideofile&'),
            'pid=XOA==&ctime=%s&auth=%s&videoid=%s' % (int(time.time()),
                                                       auth, videoid)
        ])
        headers = {'User-Agent': request.env.http_user_agent}
        http = httplib2.Http()
        try:
            resp, content = http.request(direct_url, headers=headers)
        except:
            return redirect('/')
        try:
            data = lxml.etree.parse(cStringIO.StringIO(content),
                                    lxml.etree.HTMLParser(encoding='utf-8'))
            # First segment of the FLV stream.
            movie_url = data.xpath(
                "//stream[contains(@type,'flv')]/seg[1]/@url",
                namespaces={"regxp": "http://exslt.org/regular-expressions"})
            movie_url = ''.join(movie_url)
            #print movie_url
        except:
            return sj.dumps(['error'])
        try:
            resp, content = http.request(c['url'], headers=headers)
        except:
            return redirect('/')
        data = lxml.etree.parse(cStringIO.StringIO(content),
                                lxml.etree.HTMLParser(encoding='utf-8'))
        title = data.xpath("//title/text()")
        c['title'] = ''.join(title)
        c['movies'] = []
        c['screen'] = []
        c['sound'] = []
        c['container'] = []
        c['encoding'] = []
        c['rate'] = []
        c['movies'].append(movie_url)
        c['screen'].append('592x254')
        c['sound'].append('stereo')
        c['container'].append('FLV')
        c['encoding'].append('MPEG-4 AAC audio')
        c['rate'].append('44100 Hz')
    return sj.dumps(c)
def client_settings_helper():
    """Return JSON (user, password, url) connection defaults for a
    supported torrent client named in the first URL argument."""
    defaults = dict(
        transmission=('admin', 'password',
                      'http://127.0.0.1:9091/transmission/rpc'),
        deluge=('', 'deluge', 'http://127.0.0.1:8112/json'),
        utorrent=('admin', 'password', 'http://127.0.0.1:8080/gui/'),
    )
    client = request.args(0)
    # Reject missing or unknown client names with a JSON error payload.
    if not client or client not in ('transmission', 'deluge', 'utorrent'):
        return sj.dumps(dict(error="No supported client"))
    return sj.dumps(defaults[client])
def __setitem__(self, name, content):
    """Store *content* as JSON in the storage table, keyed by *name*."""
    from gluon.contrib import simplejson as json
    encoded = json.dumps(content)
    matching = self.db(self.settings.table_storage.name == name)
    if matching.count():
        # Key exists: overwrite its content.
        matching.update(content=encoded)
    else:
        # New key: create the row.
        self.settings.table_storage.insert(name=name, content=encoded)
def download():
    # Sina iask video: pull the vid(s) from the page (normal id plus an
    # optional high-quality id after '|'), then query v_play.php for the
    # <url> entries of each; return JSON.
    if not common.check_verify(request,session,db):
        return redirect('/')
    c = {}
    c['url'] = request.vars.url or ''
    if c['url']:
        headers = {'User-Agent':request.env.http_user_agent}
        http = httplib2.Http()
        try:
            resp, content = http.request(c['url'], headers = headers)
        except:
            return redirect('/')
        try:
            # Two known markup variants for the vid assignment.
            videoid = ''.join(re.findall("vid='(.*)'",content))
            if not videoid:
                videoid = ''.join(re.findall("vid :'(.*)'",content))
            if not videoid:
                return sj.dumps(['error'])
            flv_id = videoid.split("|")[0]
            if len(videoid.split("|")) > 1:
                hp_id = videoid.split("|")[1]
            resp, content = http.request('http://v.iask.com/v_play.php?vid=%s' % (flv_id), headers = headers)
            movie = re.findall('<url>.*</url>', content)
            c['title'] = re.findall('<vname>.*</vname>', content)
            # Strip the surrounding tag/CDATA noise from the title match.
            c['title'] = c['title'][0][16:-11]
            c['movies'] = []
            c['screen'] = []
            c['sound'] = []
            c['container'] = []
            c['encoding'] = []
            c['rate'] = []
            for x in movie:
                # x[14:-9] strips the <url><![CDATA[ ... ]]></url> wrapper.
                c['movies'].append(x[14:-9])
                c['screen'].append('640x480')
                c['sound'].append('stereo')
                c['container'].append('FLV')
                c['encoding'].append('H.264/AVC Video')
                c['rate'].append('22050 Hz')
            if len(videoid.split("|")) > 1:
                # Append the high-quality variant's segments as well.
                resp, content = http.request('http://v.iask.com/v_play.php?vid=%s' % (hp_id), headers = headers)
                movie = re.findall('<url>.*</url>', content)
                for x in movie:
                    c['movies'].append(x[14:-9])
                    c['screen'].append('640x480')
                    c['sound'].append('stereo')
                    c['container'].append('FLV')
                    c['encoding'].append('H.264/AVC Video')
                    c['rate'].append('44100 Hz')
            return sj.dumps(c)
        except:
            return sj.dumps(['error'])
def activateBeacon():
    # Register a beacon with the Gimbal manager API and mark it Active in
    # the local beacons table. Admin-only JSON endpoint.
    uemail=request.vars.uemail
    password=request.vars.password
    b_name=request.vars.b_name
    b_f_id=request.vars.b_f_id
    valid_usr=authenticate(uemail,password)
    if valid_usr:
        usr_role=getuserrole(uemail)
        if(usr_role=="admin"):
            import urllib, urllib2, base64
            data={"name":b_name,"factory_id":b_f_id}
            data_json = json.dumps(data)
            # NOTE(review): API token is hard-coded; should live in config.
            headers = {
                'Authorization': 'Token token=f50ccc1aab3314434ae32dada87312cb',
                'Content-Type': 'application/json'
            }
            req = urllib2.Request( 'https://manager.gimbal.com/api/beacons',data_json, headers)
            res=json.load(urllib2.urlopen(req))
            if(res) :
                code=200
                rows =db (db.beacons.b_factory_id==b_f_id).select()
                if rows:
                    for row in rows:
                        row.update_record(beacon_status="Active")
                    message="success"
                    # Early return: bypasses the header/status code below.
                    return json.dumps(dict(message=message))
                # NOTE(review): if no local rows match, `message` stays
                # unbound and the final json.dumps raises NameError.
            else :
                code=412
                message="error"
        else:
            code=401
            message="user not authorized"
    else:
        code=412
        message="User not Authenticated"
    response.headers['Content-Type']='application/json'
    response.status=int(code)
    return json.dumps(dict(message=message))
    return None  # unreachable
def async(self, task, args=[], vars={}, timeout=300):
    """
    Wrapper to call an asynchronous task.
    - run from the main request

    @param task: The function which should be run
                 - async if a worker is alive
    @param args: The list of unnamed args to send to the function
    @param vars: The list of named vars to send to the function
    @param timeout: The length of time available for the task to complete
                    - default 300s (5 mins)
    """
    # Check that task is defined
    tasks = current.response.s3.tasks
    if not tasks:
        return False
    if task not in tasks:
        return False
    # Check that worker is alive
    if not self._is_alive():
        # Run the task synchronously
        _args = []
        for arg in args:
            if isinstance(arg, (int, long)):  # Python 2 long
                _args.append(str(arg))
            elif isinstance(arg, str):
                _args.append("'%s'" % str(arg))
            else:
                raise HTTP(501, "Unhandled arg type")
        args = " ,".join(_args)
        _vars = ""
        for var in vars:
            _vars += ", %s=%s" % (str(var), str(vars[var]))
        # NOTE(review): building source text and exec'ing it is fragile —
        # quotes in string args break it; args/vars must come from trusted
        # code only, never user input.
        statement = "tasks['%s'](%s%s)" % (task, args, _vars)
        exec(statement)
        return None
    auth = current.auth
    if auth.is_logged_in():
        # Add the current user to the vars
        # NOTE(review): this mutates the caller's dict (and the shared {}
        # default when no vars argument is passed).
        vars["user_id"] = auth.user.id
    # Run the task asynchronously
    db = current.db
    record = db.scheduler_task.insert(task_name=task,
                                      function_name=task,
                                      args=json.dumps(args),
                                      vars=json.dumps(vars),
                                      timeout=timeout)
    # Return record so that status can be polled
    return record
def checkLogin(encryptedMsg):
    # decrypt the message with the private key
    # Handle a client login: decrypt the request, validate the JSON payload,
    # verify credentials/version, then answer with a JSON response encrypted
    # under the client's dynamic public key (n, e).
    returnCode = -1
    message = ''
    computedNb = 0
    uploaderID = -1
    decryptedMsg = decryptFromClient(encryptedMsg)
    dataFromClient = ()
    if not len(decryptedMsg)>0:
        returnCode = -1
        message = 'Server Decrypt Error'
    else:
        # try to load into a json
        try:
            dataFromClient = json.loads(decryptedMsg)
        except:
            returnCode = -1
            message = 'Server Load Error'
        else:
            # Required payload fields.
            toFind = ['nickname','version','uploaderKey','n','e','randomNb']
            error = False
            for e in toFind:
                if e not in dataFromClient:
                    error = True
                    returnCode = -1
                    message = 'Server Elements Missing'
            if not error:
                # check if the client is using the last version
                if float(dataFromClient['version'])>=__version__:
                    returnCode, message, uploaderID = loginTest(dataFromClient['nickname'],dataFromClient['uploaderKey'])
                    # Challenge-response: hash chain over the client's
                    # random number proves server identity.
                    computedNb = base64.b16encode(sha512(sha256(str(compute(dataFromClient['randomNb'])))))
                else:
                    returnCode=1
                    message="A new version of Bobpic is available.\nYou need to upgrade."
    # prepare a json
    dataToSend = {
        'returnCode' : returnCode,
        'message' : message,
        'computedNb' : computedNb,
        'uploaderID' : uploaderID
    }
    # encrypt the json with the dynamic public key of the client
    # NOTE(review): when decryption/parsing failed above, dataFromClient is
    # still () so indexing 'n' raises, and the bare except below answers
    # with an unencrypted error payload instead.
    try:
        dataToSendStr = str(json.dumps(dataToSend, sort_keys=True, indent=4))
        data= encryptForClient(dataFromClient['n'], dataFromClient['e'], dataToSendStr)
    except :
        dataToSend = {
            'returnCode' : -2,
            'message' : message
        }
        return json.dumps(dataToSend, sort_keys=True, indent=4)
    else:
        return data
def index():
    # Tag search page: filter the company's tags by name/status, paginate,
    # and ship dimension/element metadata to the client as JSON.
    import gluon.contrib.simplejson as sj
    # Search condition echo: defaults to empty name, all statuses.
    cnd = dict(tagName='', status='all')
    status = [('all', '全部'), ('active','激活'),('not active', '未激活')]
    btn = INPUT(_value='搜索',_type='submit',_id='sub')
    form = SQLFORM.factory(Field('tagName'),
                           Field('status', requires = IS_IN_SET(status)),
                           buttons = [btn]
                           )
    # Always scope to the logged-in user's company.
    condition = (tag_db.Tag.company==auth.user.company)
    if form.accepts(request,session):
        if request.post_vars.status != 'all':
            condition = condition & (tag_db.Tag.status == request.post_vars.status)
            cnd['status'] = request.post_vars.status
        if request.post_vars.tagName:
            # Substring match on the tag name.
            condition = condition & (tag_db.Tag.name.like("%%%s%%" %request.post_vars.tagName))
            cnd['tagName'] = request.post_vars.tagName
    data = tag_db(condition).select(tag_db.Tag.id, tag_db.Tag.name,
                                    tag_db.Tag.json, tag_db.Tag.status,
                                    tag_db.Tag.tagDesc, tag_db.Tag.createDate,
                                    limitby=(0,LIST_NUM),
                                    orderby="createDate desc",
                                    )
    dc = tag_db(condition).count()
    pageCount = int(math.ceil(dc / float(LIST_NUM)))
    # Enabled dimensions as {symbol: name} for the client JS.
    dimData = dict((r.symbol, r.name) for r in tag_db((tag_db.Dimension.status=='enable'))\
                   .select(tag_db.Dimension.name, tag_db.Dimension.symbol))
    d = tag_db((tag_db.Element.status=='enable'))\
        .select(tag_db.Element.id, tag_db.Element.name, tag_db.Element.symbol,
                tag_db.Element.dbCheck, tag_db.Dimension.name,
                join=tag_db.Element.on(tag_db.Element.d_id==tag_db.Dimension.id))
    # Split elements by dimension: '时间维度' = time dimension,
    # '内容维度' = content dimension.
    eleDate = [r for r in d if r.Dimension.name=='时间维度']
    eleContent = dict((r.Element.dbCheck, r.Element.name ) for r in d if r.Dimension.name=='内容维度')
    #print dimData
    return dict(breadcrumb = breadcrumb, form = form, data = data,
                count = dc, pageCount = pageCount, find = cnd,
                dim = sj.dumps(dimData),
                eleContent = sj.dumps(eleContent),
                eleDate = eleDate
                )
def review_find():
    """we accept: ean/isxn, own barcode, rik(fbi)"""
    question = request.vars.q
    # imp_id is set only when the query matched exactly one impression.
    imp_id, candidates = fmt_impressions_by_usrid(question)
    imp, new = review_imp_book(imp_id)
    if not imp:
        # Ambiguous match: hand the candidate list back for user selection.
        wrapped = DIV(candidates, _class="well well-sm")
        return simplejson.dumps(('C', wrapped.xml()))
    # Single match: render the finished impression markup.
    finished = fmt_impression_plain(imp)
    return simplejson.dumps(('F', (new, finished.xml())))
def download():
    """Resolve a 56.com page URL into direct FLV stream metadata (JSON)."""
    if not common.check_verify(request, session, db):
        return redirect('/')
    c = {'url': request.vars.url or ''}
    if c['url']:
        headers = {'User-Agent': request.env.http_user_agent}
        http = httplib2.Http()
        try:
            resp, content = http.request(c['url'], headers=headers)
        except:
            return redirect('/')
        try:
            # The video id normally trails the last '-'; fall back to the
            # last path segment when that slice still looks like a URL.
            pid = c['url'].split('-')[-1].split('.')[0]
            if pid.count("http"):
                pid = c['url'].rsplit('/', 1)[-1].split('.')[0].split('_')[-1]
            if not pid:
                return sj.dumps(['error'])
            resp, content = http.request(
                'http://vxml.56.com/json/%s/?src=site' % (pid),
                headers=headers)
            c['title'] = re.findall('"Subject":"(.*)","textid"', content)[0]
            # Prefer the paired "normal" quality entries; otherwise take
            # every single-url match.
            movie = re.findall(
                '"url":"(http://.*\.flv)","type":"normal".*"url":"(http://.*\.flv)"',
                content)
            if movie:
                movie = movie[0]
            if not movie:
                movie = re.findall('"url":"(http://.*\.flv)","type"', content)
            for field in ('movies', 'screen', 'sound', 'container',
                          'encoding', 'rate'):
                c[field] = []
            for x in movie:
                c['movies'].append(x)
                c['screen'].append('576x432')
                c['sound'].append('stereo')
                c['container'].append('FLV')
                c['encoding'].append('H.264/AVC Video')
                c['rate'].append('48000 Hz')
            c['paster'] = 'true'
            return sj.dumps(c)
        except:
            return sj.dumps(['error'])
def download():
    # Scrape a Facebook video page and return FLV stream metadata as JSON.
    # Logs into Facebook first: video pages require an authenticated session.
    if not common.check_verify(request,session,db):
        return redirect('/')
    c = {}
    c['url'] = request.vars.url or ''
    c['movies'] = []
    c['screen'] = []
    c['sound'] = []
    c['container'] = []
    c['encoding'] = []
    c['rate'] = []
    if c['url']:
        try:
            # Cookie-aware opener so the login session persists across requests.
            cj = cookielib.CookieJar()
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            opener.addheaders = [('User-Agent', request.env.http_user_agent)]
            urllib2.install_opener(opener)
            # NOTE(review): credentials here are redacted placeholders.
            data = urllib.urlencode({'email':'*****@*****.**','pass':'******'})
            link = 'http://www.facebook.com/login.php'
            # First GET fetches the form/cookies; second POST submits the login.
            f = opener.open(link)
            f = opener.open(link,data)
        except:
            return redirect('/')
        f = opener.open(c['url'])
        content = f.read()
        #f.close()  # closing the handle here breaks the logged-in session
        try:
            data = lxml.etree.parse( cStringIO.StringIO(content) ,lxml.etree.HTMLParser(encoding='utf-8') )
            # Locate the inline <script> that embeds the 'video_src' URL.
            scripts = data.xpath("//script[contains(text(),'video_src')]",
                namespaces={"regxp": "http://exslt.org/regular-expressions"})
        except:
            return sj.dumps(['error'])
        title = data.xpath("//h3[@class='video_title datawrap']/text()")
        c['title'] = ''.join(title)
        output=''
        for x in scripts:
            output += lxml.etree.tostring(x)
        output = urllib2.unquote(output)
        # Slice the URL out of the script text: skip past "video_src" (+13
        # chars) up to just before the closing ')'.
        movie_url = output[output.find('video_src')+13:output.find(')',output.find('video_src')+9)-1]
        #return movie_url
        c['movies'].append(movie_url)
        c['screen'].append('375x500')
        c['sound'].append('stereo')
        c['container'].append('FLV')
        c['encoding'].append('MPEG-4 AVC(H.264)')
        c['rate'].append('44100 Hz')
        c['paster'] = 'true'
        return sj.dumps(c)
def download():
    """Scrape a page for its embedded 'vdoPath' FLV URL; return metadata JSON."""
    if not common.check_verify(request, session, db):
        return redirect('/')
    c = {'url': request.vars.url or ''}
    if c['url']:
        ua = {'User-Agent': request.env.http_user_agent}
        http = httplib2.Http()
        try:
            resp, content = http.request(c['url'], headers=ua)
        except:
            return redirect('/')
        try:
            parser = lxml.etree.HTMLParser(encoding='utf-8')
            data = lxml.etree.parse(cStringIO.StringIO(content), parser)
            # Locate the inline <script> that carries the 'vdoPath' value.
            scripts = data.xpath(
                "//script[contains(text(),'vdoPath')]",
                namespaces={"regxp": "http://exslt.org/regular-expressions"})
        except:
            return sj.dumps(['error'])
        # Page <title> is "<name> - <site>"; keep only the name part.
        c['title'] = ''.join(data.xpath("//title/text()")).rsplit('-', 1)[0].strip()
        output = ''
        for node in scripts:
            output += lxml.etree.tostring(node)
        output = urllib2.unquote(output)
        # Slice the stream URL: text between 'vdoPath=' and the next '"'.
        idx = output.find('vdoPath=')
        movie_url = output[idx + 8:output.find('"', idx)]
        c['movies'] = [movie_url]
        c['screen'] = ['320x239']
        c['sound'] = ['mono']
        c['container'] = ['FLV']
        c['encoding'] = ['MPEG 1 Audio, Layer 3 (MP3)']
        c['rate'] = ['22050 Hz']
        c['paster'] = 'true'
        return sj.dumps(c)
def printable_map_image_file(plugin, command, url_prefix, query_string, width, height):
    """Render a printable map image via an external renderer, with caching.

    The image file is keyed by an MD5 of (query_string, width, height);
    the renderer `command` is only invoked on a cache miss.
    """
    def generate_printable_map_image(file_path):
        # Build the print-mode URL for the renderer to load.
        url = url_prefix + "?" + query_string + "&display_mode=print"
        # PyQT4 signals don't like not being run in the main thread;
        # run in a subprocess to give it its own thread.
        subprocess_args = (
            #"xvfb-run",
            #'--server-args=-screen 0, 640x480x24',
            #'--auto-servernum',
            url,
            str(width),
            str(height),
            file_path)
        # SECURITY: quote each argument properly instead of the previous
        # naive "'%s'" wrapping, which broke (and allowed shell injection)
        # whenever an argument contained a single quote.
        import pipes
        os.system(command + " " + " ".join(map(pipes.quote, subprocess_args)))

    import md5
    import gluon.contrib.simplejson as JSON
    return get_cached_or_generated_file(
        md5.md5(JSON.dumps(
            [query_string, width, height],
            sort_keys=True,
        )).hexdigest() + ".png",
        generate_printable_map_image)
def json_response(message="", alert=0, value="", success=0):
    """Serialize the standard ajax response envelope to a JSON string."""
    return sj.dumps({
        'message': str(message),
        'success': success,
        'alert': alert,
        'value': value,
    })
def executor(queue,task, out):
    """ the background process

    Runs `task` with stdout captured, streaming output lines to `out`
    and the final TaskReport (COMPLETED or FAILED) to `queue`.
    """
    logging.debug(' task started')
    class LogOutput(object):
        """Facility to log output at intervals"""
        def __init__(self, out_queue):
            self.out_queue = out_queue
            # Hijack sys.stdout; keep the real stream to restore later.
            self.stdout = sys.stdout
            sys.stdout = self
            self.istr = ""
        def __del__(self):
            sys.stdout = self.stdout
        def write(self,data):
            # Forward each write to the queue AND accumulate for getvalue().
            self.out_queue.put(data)
            self.istr += data
        def getvalue(self):
            return self.istr
    #stdout, sys.stdout = sys.stdout, cStringIO.StringIO()
    stdout = LogOutput(out)
    try:
        if task.app:
            os.chdir(os.environ['WEB2PY_PATH'])
            from gluon.shell import env, parse_path_info
            from gluon.dal import BaseAdapter
            from gluon import current
            # Temporarily silence model-import logging while building the env.
            level = logging.getLogger().getEffectiveLevel()
            logging.getLogger().setLevel(logging.WARN)
            # Get controller-specific subdirectory if task.app is of
            # form 'app/controller'
            (a,c,f) = parse_path_info(task.app)
            _env = env(a=a,c=c,import_models=True)
            logging.getLogger().setLevel(level)
            scheduler = current._scheduler
            f = task.function
            functions = current._scheduler.tasks
            if not functions:
                #look into env
                _function = _env.get(f)
            else:
                _function = functions.get(f)
            if not isinstance(_function, CALLABLETYPES):
                raise NameError("name '%s' not found in scheduler's environment" % f)
            # Expose the web2py app environment to the task function.
            globals().update(_env)
            args = loads(task.args)
            vars = loads(task.vars, object_hook=_decode_dict)
            result = dumps(_function(*args,**vars))
        else:
            ### for testing purpose only
            result = eval(task.function)(
                *loads(task.args, object_hook=_decode_dict),
                **loads(task.vars, object_hook=_decode_dict))
        #stdout, sys.stdout = sys.stdout, stdout
        # Restore the real stdout before reporting.
        sys.stdout = stdout.stdout
        queue.put(TaskReport(COMPLETED, result,stdout.getvalue()))
    except BaseException,e:
        # Restore stdout even on failure, then report the traceback.
        sys.stdout = stdout.stdout
        tb = traceback.format_exc()
        queue.put(TaskReport(FAILED,tb=tb, output=stdout.getvalue()))
def check_season():
    """Ajax: return the JSON season status for (series_id, seasonnumber).

    Episodes inserted since the status row was last updated are folded
    into the status' 'missing' list before returning.
    """
    session.forget(response)
    series_id, seasonnumber = request.args(0), request.args(1)
    if not (series_id and seasonnumber):
        return json({})
    status = db(
        (db.seasons_settings.series_id == series_id) &
        (db.seasons_settings.seasonnumber == seasonnumber)
    ).select(db.seasons_settings.season_status,
             db.seasons_settings.updated_on).first()
    # BUGFIX: previously a missing settings row raised AttributeError on
    # None; return an empty status instead.
    if not status:
        return json({})
    if not status.updated_on:
        status.updated_on = request.now
    episodes = db(
        (db.series.id == series_id) &
        (db.episodes.seriesid == db.series.seriesid) &
        (db.episodes.seasonnumber == seasonnumber) &
        (db.episodes.inserted_on > status.updated_on)
    ).select(db.episodes.epnumber)
    rtn = status.season_status
    if len(episodes) > 0:
        # Merge newly-inserted episode numbers into the 'missing' list.
        st_ = sj.loads(status.season_status)
        missing = st_.get('missing', [])
        for ep in episodes:
            missing.append(ep.epnumber)
        st_['missing'] = missing
        rtn = sj.dumps(st_)
    return rtn
def call(self, method, *args):
    "JSON RPC communication (method invocation)"
    # A random id lets us verify the response belongs to this request.
    request_id = random.randint(0, sys.maxint)
    payload = {'id': request_id, 'method': method, 'params': args}
    if self.version:
        # mandatory key/value for jsonrpc2 validation, else err -32600
        payload['jsonrpc'] = self.version
    request = json.dumps(payload)
    # Perform the HTTP round-trip (transport retries if connection is lost).
    response = self.__transport.request(
        self.__host,
        self.__handler,
        request,
        verbose=self.__verbose
    )
    # Keep the raw request and response around for further debugging.
    self.json_request = request
    self.json_response = response
    # Expected shape:
    # {'version': '1.1', 'id': id, 'result': result, 'error': None}
    response = json.loads(response)
    self.error = response.get('error', {})
    if self.error and self.exceptions:
        raise JSONRPCError(self.error.get('code', 0),
                           self.error.get('message', ''),
                           self.error.get('data', None))
    if response['id'] != request_id:
        raise JSONRPCError(0, "JSON Request ID != Response ID")
    return response.get('result')
def executor(queue, task):
    """ the background process

    Runs `task` with stdout captured; pushes a TaskReport (COMPLETED with
    captured output, or FAILED with a traceback) onto `queue`.
    """
    logging.debug(' task started')
    # Swap stdout for a buffer; `stdout` keeps the real stream for now.
    stdout, sys.stdout = sys.stdout, cStringIO.StringIO()
    try:
        if task.app:
            os.chdir(os.environ['WEB2PY_PATH'])
            from gluon.shell import env
            from gluon.dal import BaseAdapter
            from gluon import current
            # Temporarily silence model-import logging while building the env.
            level = logging.getLogger().getEffectiveLevel()
            logging.getLogger().setLevel(logging.WARN)
            _env = env(task.app, import_models=True)
            logging.getLogger().setLevel(level)
            scheduler = current._scheduler
            scheduler_tasks = current._scheduler.tasks
            _function = scheduler_tasks[task.function]
            # Expose the web2py app environment to the task function.
            globals().update(_env)
            args = loads(task.args)
            vars = loads(task.vars, object_hook=_decode_dict)
            result = dumps(_function(*args, **vars))
        else:
            ### for testing purpose only
            result = eval(task.function)(*loads(task.args, object_hook=_decode_dict),
                                         **loads(task.vars, object_hook=_decode_dict))
        # Swap back: sys.stdout is restored, `stdout` now holds the buffer.
        stdout, sys.stdout = sys.stdout, stdout
        queue.put(TaskReport(COMPLETED, result, stdout.getvalue()))
    except BaseException, e:
        # On failure `stdout` still holds the original stream; restore it.
        # Note the captured output is discarded on this path.
        sys.stdout = stdout
        tb = traceback.format_exc()
        queue.put(TaskReport(FAILED, tb=tb))
def get_packages():
    """Return all packages (with descriptions and price lists) as a JSON string.

    Query var 'query' tweaks the status filter:
      'ANY'   -> all packages
      'FALSE' -> inactive packages only
      default -> active packages only
    On any database error the transaction is rolled back and "null" is
    returned (best-effort endpoint).
    """
    from gluon.contrib import simplejson
    data = None
    q = request.vars.query.upper() if request.vars.query else None
    try:
        p = db.packages
        pd = db.package_descriptions
        query = db.packages.status == True
        if q == 'ANY':
            query = db.packages.id > 0
        if q == 'FALSE':
            query = db.packages.status == False
        query &= p.id == pd.package_id
        data = db(query).select(p.id, p.code, p.alternative_code,
                                p.standard_cost, p.markup, p.status,
                                pd.name, pd.description).as_list()
        pl = db.package_price_lists  # hoisted: loop-invariant table reference
        for row in data:
            pkg_id = row['packages']['id']  # renamed: avoid shadowing builtin id()
            row['package_price_lists'] = db(pl.package_id == pkg_id).select(
                pl.id, pl.name, pl.price, pl.is_default,
                pl.status).as_list()
    except:
        # Deliberate best-effort: roll back and fall through to dump None.
        db.rollback()
    data = simplejson.dumps(data)
    return str(data)
def getFolders(): start_time = time.time() currentpath = request.vars.selectfolder if not currentpath: return sj.dumps(dict(currentpath="null")) import os inddfiles = [] child = [] # if currentpath == "/Volumes": # return sj.dumps( dict(currentpath="null") ) # if currentpath != "/Volumes/Public1": # child.append(dict(name="..", path="..")) try: oslistdir = os.listdir(currentpath) except Exception, e: return sj.dumps(dict(error=e))
def removeIdleTasks():
    """Run x_removeIdleTasks.jsx in InDesign via AppleScript.

    Picks the script folder based on the HTTP host and returns a JSON
    success/warning/message envelope.
    """
    import subprocess
    scriptName = 'x_removeIdleTasks.jsx'
    if (request.env.http_host == "localhost:8000") or (request.env.http_host == "127.0.0.1:8000"):
        scriptPath = 'Users:kwongsin:Google Drive:Adobe_Scripts:beta:'
    elif request.env.http_host == "inhouse1.skyyer.com:98":
        scriptPath = 'Users:skyyer_edit01:Desktop:adobe_script:'
    else:
        # BUGFIX: scriptPath was previously undefined for any other host,
        # raising NameError below. Fail gracefully instead.
        return sj.dumps(
            dict(success=False, warning=True,
                 message=request.function + " error: unknown host.",
                 id=request.vars.id))
    applescript = '''osascript<<END
    tell application id "com.adobe.InDesign"
        try
            set aScriptPath to "''' + scriptPath + scriptName + '''"
            do script aScriptPath language javascript
        on error errStr
            activate
            display alert errStr
        end try
    end tell
'''
    success = False
    warning = False
    message = ""
    # subprocess.call returns the shell's exit status; 0 means the
    # osascript invocation succeeded.
    if subprocess.call(applescript, shell=True) == 0:
        success = True
        message = request.function + " success."
    else:
        warning = True
        message = request.function + " error."
    return sj.dumps(
        dict(success=success, warning=warning, message=message,
             id=request.vars.id))
def save_student_grades():
    """
    Receives ``json`` data via ajax from the ``handsontable`` object and
    saves it back to the database.

    In this incarnation, all data is saved to the database regardless of
    if it has changed. This needs to be fixed so that only the changed
    data is saved. I need to check what parameters are sent to the
    relevant ``handsontable`` function.
    """
    # Only members of groups 2 or 3 may save grades.
    allowed = (auth.has_membership(3, auth.user_id) or
               auth.has_membership(2, auth.user_id))
    if not allowed:
        redirect(URL('default', 'index'))
    for key in request.vars.keys():
        student_grades = request.vars[key]
        # Entries arrive as (grade_id, score) pairs starting at index 2.
        for idx in range(2, len(student_grades), 2):
            try:
                grade_id = int(student_grades[idx])
                score = float(student_grades[idx + 1])
                db.student_grade[grade_id] = dict(student_score=score)
            except Exception:
                # Don't save the value, but the client side validation
                # should warn the user, so DRY.
                pass
    return dumps(dict())  # Return nothing, but make sure it's in json format.
def json(value, default=custom_json):
    """Serialize `value` to a JSON string safe for inline JavaScript.

    U+2028/U+2029 are legal inside JSON strings but are line terminators
    in JavaScript, so they must be escaped before embedding in <script>.
    http://timelessrepo.com/json-isnt-a-javascript-subset
    """
    value = json_parser.dumps(value, default=default, ensure_ascii=False)
    # BUGFIX: str.replace returns a NEW string — the original discarded the
    # result, so nothing was actually escaped. Also the second pattern was
    # ur'\2029' (a literal backslash sequence; \u escapes only work with \u
    # in ur-strings) instead of the U+2029 character.
    value = value.replace(u'\u2028', u'\\u2028').replace(u'\u2029', u'\\u2029')
    return value
def executor(queue,task): """ the background process """ logging.debug(' task started') stdout, sys.stdout = sys.stdout, cStringIO.StringIO() try: if task.app: os.chdir(os.environ['WEB2PY_PATH']) from gluon.shell import env from gluon.dal import BaseAdapter from gluon import current level = logging.getLogger().getEffectiveLevel() logging.getLogger().setLevel(logging.WARN) _env = env(task.app,import_models=True) logging.getLogger().setLevel(level) scheduler = current._scheduler scheduler_tasks = current._scheduler.tasks _function = scheduler_tasks[task.function] globals().update(_env) args = loads(task.args) vars = loads(task.vars, object_hook=_decode_dict) result = dumps(_function(*args,**vars)) else: ### for testing purpose only result = eval(task.function)( *loads(task.args, object_hook=_decode_dict), **loads(task.vars, object_hook=_decode_dict)) stdout, sys.stdout = sys.stdout, stdout queue.put(TaskReport(COMPLETED, result,stdout.getvalue())) except BaseException,e: sys.stdout = stdout tb = traceback.format_exc() queue.put(TaskReport(FAILED,tb=tb))
def rpc_call(self, searchlist):
    """Query opensubtitles' XML-RPC search API, with a 3-hour result cache."""
    base_api = "http://api.opensubtitles.org/xml-rpc"
    parts = ["hash=%s&size=%s&language=%s" % (a['moviehash'],
                                              a['moviebytesize'],
                                              a['sublanguageid'])
             for a in searchlist]
    key = base_api + '?' + '&'.join(parts)
    db = current.w2p_tvseries.database
    ct = db.urlcache
    cachekey = hashlib.md5(key).hexdigest()
    # Only cache entries newer than three hours count as fresh.
    timelimit = datetime.datetime.utcnow() - datetime.timedelta(hours=3)
    cached = db((ct.kkey == cachekey) &
                (ct.inserted_on > timelimit)).select().first()
    if cached:
        if self.verbose:
            self.log('rpc_call',
                     "Cache (%s): Getting url: %s" % (cachekey, key))
        return cached.value
    # Cache miss: hit the XML-RPC service and store the JSON-encoded result.
    server = ServerProxy(base_api)
    session = server.LogIn("", "", "en", "w2p_tvseries v0.2")
    token = session["token"]
    moviesList = server.SearchSubtitles(token, searchlist)
    content = simplejson.dumps(moviesList)
    self.put_in_cache(key, content)
    server.Logout(token)
    if self.verbose:
        self.log('rpc_call', "Internet: Getting url: %s" % (key))
    return content
def set_graph_data(map_id, hash, nodes, edges, origin):
    '''
    Set the graph data based on the incoming node and edge strings.

    Replaces all nodes/connections of the map, remaps the caller's
    client-side tokens to the new database ids, and stamps the map
    record. Returns dict(success=False) when the caller may not update,
    otherwise dict(success=True, node_ids=..., edge_ids=...).
    '''
    if not can_update(map_id, auth.user.id, hash):
        return dict(success=False)

    # Delete old nodes and edges
    db(db.Node.id_map == map_id).delete()
    db(db.Connection.id_map == map_id).delete()

    # Parse the input data
    nodes_to_add = json.loads(nodes)
    edges_to_add = json.loads(edges)
    origin = json.loads(origin)

    node_ids = {}
    edge_ids = {}
    for token, node in nodes_to_add.items():
        dim = node['dim']
        node_ids[token] = db.Node.insert(id_map=map_id,
                                         valence=node['valence'],
                                         x=dim['x'],
                                         y=dim['y'],
                                         width=dim['width'],
                                         height=dim['height'],
                                         name=node['text'],
                                         special=node['special'])
    for token, edge in edges_to_add.items():
        # Edge endpoints reference node tokens, so translate via node_ids.
        edge_ids[token] = db.Connection.insert(
            id_first_node=node_ids[edge['from']],
            id_second_node=node_ids[edge['to']],
            valence=edge['valence'],
            inner_points=json.dumps(edge['innerPoints']),
            id_map=map_id)

    # IMPROVED: one update instead of two back-to-back writes to the same
    # Map record (origin and the modification stamp were written separately).
    db.Map[map_id] = dict(originX=origin['x'],
                          originY=origin['y'],
                          date_modified=datetime.utcnow(),
                          modified_by=auth.user.email)

    return dict(success=True, node_ids=node_ids, edge_ids=edge_ids)
def _post_js(self, name, action, target):
    """Build the JS snippet that ajax-posts `action` for `name` into `target`."""
    payload = {self.keyword: name, '_action': action}
    url = URL(args=current.request.args, vars=current.request.get_vars)
    return """managed_html_ajax_page("%(url)s", %(data)s, "%(target)s"); """ % dict(
        url=url, data=json.dumps(payload), target=target)
def reuniao():
    """Meeting ('reuniao') detail page.

    Looks the meeting up by its URL hash, gathers participants and the
    pending topics for the selected agendas, and returns locals() to the
    view — so local variable names are part of the view contract.
    """
    from gluon.contrib import simplejson
    from myutils import regroup
    # grupo = grupo_record or redirect(URL('index'))
    reuniao = db(db.reuniao.hash==request.args(0)).select().first() or redirect(URL('default','index'))
    grupo = reuniao.grupo_id
    response.title = db.reuniao._format(reuniao)
    # if reuniao.termino:
    #     from reuniao_utils import reuniao_create_arquivo
    #     pdf = reuniao_create_arquivo(db,request,response,reuniao)
    #     response.headers['Content-Type']='application/pdf'
    #     response.headers['Content-Disposition'] = \
    #         'filename=%s_%s.pdf' %(reuniao.number(),reuniao.inicio.year)
    #     return pdf
    # if reuniao.termino:
    #     return redirect(URL('grupos','reunioes', args=(grupo.slug,reuniao.hash)) )
    participantes = reuniao.participantes_set().select()
    ## Configure which agendas ('pautas') should be shown
    # NOTE(review): a single 'pautas' query var arrives as a plain string,
    # and list(pautas_list) below would split it into characters — verify
    # how callers pass this parameter.
    pautas_list = request.get_vars.get('pautas',[reuniao.pauta_id])
    initial = simplejson.dumps({'pautas':pautas_list})
    # Pending topics: open topics of the group, tagged with one of the
    # selected agendas, and not already on this meeting's agenda.
    topicos_em_pauta = reuniao.topicos_set()._select(db.topico.id)
    topicos_pendentes = db((db.topico.grupo_id==grupo.id) &
                           (db.topico.encerrado==False) &
                           (db.topico_pautas.topico_id==db.topico.id) &
                           (db.pauta.id.belongs(list(pautas_list))) &
                           ~(db.topico.id.belongs(topicos_em_pauta))).select(db.topico.ALL,groupby=db.topico.id)
    return locals()
def openFile():
    """Open logs.indd in InDesign via AppleScript; return a JSON status envelope."""
    import subprocess
    if (request.env.http_host == "localhost:8000") or (request.env.http_host == "127.0.0.1:8000"):
        logsInddPath = '/Users/kwongsin/Desktop/logs.indd'
    elif request.env.http_host == "inhouse1.skyyer.com:98":
        logsInddPath = '/Users/skyyer_edit01/Desktop/logs.indd'
    else:
        # BUGFIX: logsInddPath was previously undefined for any other host,
        # raising NameError below. Fail gracefully instead.
        return sj.dumps(
            dict(success=False, warning=True,
                 message=request.function + " error: unknown host.",
                 id=request.vars.id))
    # The try/on-error retry inside the AppleScript works around InDesign
    # occasionally failing the first open attempt.
    applescript = '''osascript<<END
    tell application id "com.adobe.InDesign"
        try
            set myDocument to open "''' + logsInddPath + '''"
        on error
            set myDocument to open "''' + logsInddPath + '''"
        end try
    end tell
'''
    success = False
    warning = False
    message = ""
    # subprocess.call returns the shell's exit status; 0 means success.
    if subprocess.call(applescript, shell=True) == 0:
        success = True
        message = request.function + " success."
    else:
        warning = True
        message = request.function + " error."
    return sj.dumps(
        dict(success=success, warning=warning, message=message,
             id=request.vars.id))