def plugin_jqgrid(table, fieldname=None, fieldvalue=None, col_widths={},
                  _id=None, columns=None, col_width=80, width=700, height=300):
    """
    Use this to embed a jqGrid with ajax search capability and pagination

    {{=plugin_jqgrid(db.tablename)}}

    - table is the db.tablename
    - fieldname, fieldvalue are an optional filter (fieldname==fieldvalue)
    - _id is the "id" of the DIV that contains the jqGrid
    - columns is a list of column names to be displayed
    - col_width is the default width of each column
    - height is the height of the jqGrid
    """
    from gluon.serializers import json
    _id = _id or 'jqgrid_%s' % table
    columns = columns or [x for x in table.fields if table[x].readable]
    colnames = [x.replace('_', ' ').capitalize() for x in columns]
    colmodel = [{'name': x, 'index': x,
                 'width': col_widths.get(x, col_width),
                 'sortable': True}
                for x in columns if table[x].readable]
    callback = URL(r=request, c='plugin_jqgrid', f='data',
                   vars=dict(tablename=table._tablename,
                             columns=','.join(columns),
                             fieldname=fieldname or '',
                             fieldvalue=fieldvalue,
                             ))
    script = """
jQuery(document).ready(function(){jQuery("#%(id)s").jqGrid({
  url:'%(callback)s', datatype: "json",
  colNames: %(colnames)s, colModel: %(colmodel)s,
  rowNum:10, rowList:[20,50,100], pager: '#%(id)s_pager',
  viewrecords: true, height:%(height)s});
jQuery("#%(id)s").jqGrid('navGrid','#%(id)s_pager',
  {search:true,add:false,edit:false,del:false});
jQuery("#%(id)s").setGridWidth(%(width)s,false);});
""" % dict(callback=callback, colnames=json(colnames),
           colmodel=json(colmodel), id=_id, height=height, width=width)
    return TAG[''](TABLE(_id=_id), DIV(_id=_id + "_pager"), SCRIPT(script))
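# Hedged usage sketch for the helper above (db.product and the 'products'
# action are illustrative names, not from the original code): the helper only
# emits the grid markup plus the client-side script, so the controller drops
# its return value into the view; the matching plugin_jqgrid/data action is
# still expected to serve the JSON pages.
def products():
    grid = plugin_jqgrid(db.product,
                         fieldname='category', fieldvalue='books',
                         col_widths={'name': 200}, width=900)
    return dict(grid=grid)  # render with {{=grid}} in the view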
def cantidad_eventos_area():
    import operator
    from gluon.serializers import json
    query_desde_hasta = ((db.reclamo.horario_llamada >= request.args[0]) &
                         (db.reclamo.horario_llamada <= request.args[1]))
    areas_eventos_cantidades = []
    for area in [1, 2, 3, 4]:
        eventos = {}
        for evento in db().select(db.evento.nombre):
            cantidad = db((db.reclamo.area == area) &
                          (db.reclamo.evento == evento.nombre) &
                          query_desde_hasta).count()
            eventos[evento.nombre] = cantidad
        eventos_mayores = sorted(eventos.items(),
                                 key=operator.itemgetter(1), reverse=True)
        lista_eventos = []
        lista_cantidad = []
        # keep only the five most frequent events (guard against fewer than five)
        for item in range(min(5, len(eventos_mayores))):
            lista_eventos.append(eventos_mayores[item][0])
            lista_cantidad.append(eventos_mayores[item][1])
        areas_eventos_cantidades.append(
            [str(area), json(lista_eventos), json(lista_cantidad)])
    return dict(areas_eventos_cantidades=areas_eventos_cantidades)
def __orderby(self):
    """
    :raises HTTP: http.BAD_REQUEST
    :rtype: str
    """
    order_field = self.request_vars["orderby"]
    sort_order = self.request_vars['sort'] or 'ASC'
    if order_field:
        order_field = order_field.lower()
        # todo: is this the best way to do this?
        if order_field not in self.table.fields:
            headers = {"InvalidParameters": json(order_field)}
            raise HTTP(http.BAD_REQUEST,
                       "%s não é um campo válido para ordenação." % order_field,
                       **headers)
        if sort_order not in self.__sorting_options:
            headers = {"InvalidParameters": json(sort_order)}
            raise HTTP(http.BAD_REQUEST,
                       "%s não é uma ordenacão válida." % sort_order,
                       **headers)
        return "%s %s" % (self.table[order_field], sort_order)
    elif self.table._primarykey:
        return self.table._primarykey
    return self.table.fields[0]
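# Hedged sketch of how the "table.field ASC|DESC" string built by __orderby()
# can be consumed (db.thing is an illustrative table, not from the code above):
# web2py's DAL select() accepts a raw orderby string as well as Field objects,
# which is exactly what the method returns.
def list_things():
    orderby = "%s %s" % (db.thing.name, 'DESC')   # e.g. "thing.name DESC"
    rows = db(db.thing).select(orderby=orderby, limitby=(0, 20))
    return dict(rows=rows)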
def index():
    # Serves a badge SVG file if the badge argument is present
    if request.args(0) == 'badge':
        # Return to dashboard if project not found
        q = db(db.project.project_name == request.vars.id).select().first()
        if q is None:
            session.flash = T('Project not found')
            redirect(URL('default', 'index'))
        experiment = db(db.experiment.build_id == q.id).select(
            orderby=~db.experiment.id)[0]
        # Return to dashboard if not the owner of the project
        if db(db.build.id == experiment.build_id).select()[0].user_id != auth.user.id:
            session.flash = T('Permission denied')
            redirect(URL('default', 'index'))
        # Set the view to the SVG file
        response.view = 'default/badge.svg'
        return dict(status=experiment.status)
    # Fetch and return the project, build, and experiment lists in JSON format otherwise
    project_list = XML(json(load_projects()))
    build_list = XML(json(load_builds(request.vars.id)))
    experiment_list = XML(
        json(load_experiments(request.vars.id, request.vars.build)))
    validation_list = XML(
        json(load_validations(request.vars.id, request.vars.build,
                              request.vars.experiment)))
    print load_validations(request.vars.id, request.vars.build,
                           request.vars.experiment)
    return dict(project_list=project_list,
                build_list=build_list,
                experiment_list=experiment_list,
                validation_list=validation_list)
def admin_upload_file():
    """
    File upload handler for the ajax form of the plugin jquery-file-upload
    Return the response in JSON required by the plugin
    """
    try:
        import re
        from gluon.serializers import json
        resource_file = request.vars['files[]']
        resource_type = re.compile(
            '^(\s|\.|-|\w|á|é|í|ó|ú|ñ|Á|É|Í|Ó|Ú|Ñ){1,40}\.(pdf|epub|png)$')
        filename = resource_file.filename
        if resource_type.match(filename):
            # Store file
            id = db.resource.insert(
                resource=db.resource.resource.store(resource_file, filename),
                title=filename,
                category=request.vars['category'],
                rtype=request.vars['rtype'])
            response.flash = CAT(T("I uploaded resource named"), " ", filename)
            return response.json(json({"success": True}))
        else:
            response.flash = CAT(T("Error filename"), " ", filename)
            return response.json(json({"success": False}))
    except:
        response.flash = T("Failed uploading file")
        return response.json(json({"success": False}))
def editarUsuario():
    if request.vars:
        idusuario = request.vars.id
        nome = request.vars.nome
        sobrenome = request.vars.sobrenome
        email = request.vars.email
        senha = request.vars.senha
        if request.vars.tipo:
            tipo = request.vars.tipo
        else:
            tipo = 2
        from gluon.serializers import json
        try:
            db(db.auth_user.id == idusuario).update(first_name=nome,
                                                    last_name=sobrenome,
                                                    email=email,
                                                    password=senha,
                                                    senha=senha)
        except Exception as e:
            error = [{'code': 0, 'error': str(e)}]
            return XML(json(error))
        else:
            return XML(json([{'code': 1}]))
def op_status():
    session.forget(response)
    st = db2.scheduler_task
    sw = db2.scheduler_worker
    operations = db2(
        (st.task_name.startswith('now_or_never')) |
        (st.task_name.startswith('spec'))
    ).select()
    timelimit = request.now - datetime.timedelta(seconds=10)
    worker = db2(sw.last_heartbeat > timelimit).select(sw.id).first()
    if not operations.first():
        rtn = dict(status='complete', text='0/0', perc=0, worker=worker)
        return json(rtn)
    todo = operations.find(lambda row: (row.times_run == 0 and
                                        row.status not in ('RUNNING', 'FAILED')))
    todo = len(todo)
    operations = len(operations)
    text = "%s/%s" % (operations - todo, operations)
    perc = "%s%%" % ((operations - todo) * 1.0 / operations * 100)
    rtn = dict(status='loading', text=text, perc=perc, worker=worker)
    return json(rtn)
def login():
    if request.vars:
        from gluon.serializers import json
        email = request.vars.email
        senha = request.vars.senha
        usuarios = db(db.auth_user.id > 0).select()
        tam = len(usuarios)
        login = False
        usuarioLogin = None
        for usuario in usuarios:
            if email == usuario.email:
                if senha == usuario.senha:
                    login = True
                    usuarioLogin = usuario
        if login:
            resultado = [{'resultado': 1, 'dados': [usuarioLogin]}]
            return XML(json(resultado))
        else:
            resultado = [{'resultado': 0}]
            return XML(json(resultado))
def callback(self):
    from gluon.serializers import json
    if 'request_json' in self.request.vars and self.request.vars.request_json == 'True':
        query = None
        if 'prefetch' in self.request.vars and self.request.vars.prefetch == 'True':
            self.limitby = None
            if not self.query:
                query = self.fields[0]
            else:
                query = self.query
        if 'remote' in self.request.vars:
            if not self.query:
                query = self.fields[0].contains(self.request.vars.remote)
            else:
                query = self.query & self.fields[0].contains(
                    self.request.vars.remote)
        rows = self.db(query).select(orderby=self.orderby,
                                     limitby=self.limitby,
                                     distinct=self.distinct,
                                     *self.fields)
        if rows:
            raise HTTP(200, json(rows))
        else:
            raise HTTP(200, json([]))
def plugin_jqgrid(table, fieldname=None, fieldvalue=None, col_widths={},
                  _id=None, columns=None, col_width=80, width=700, height=300,
                  onselect=""):
    """
    Use this to embed a jqGrid with ajax search capability and pagination

    {{=plugin_jqgrid(db.tablename)}}

    - table is the db.tablename
    - fieldname, fieldvalue are an optional filter (fieldname==fieldvalue)
    - _id is the "id" of the DIV that contains the jqGrid
    - columns is a list of column names to be displayed
    - col_width is the default width of each column
    - height is the height of the jqGrid
    """
    from gluon.serializers import json
    _id = _id or 'jqgrid_%s' % table
    columns = columns or [x for x in table.fields if table[x].readable]
    colnames = [str(table[x].label) for x in columns]
    #colnames = [x.replace('_',' ').capitalize() for x in columns]
    colmodel = [{'name': x, 'index': x,
                 'width': col_widths.get(x, col_width),
                 'sortable': True}
                for x in columns if table[x].readable]
    callback = URL(r=request, c='plugin_jqgrid', f='data',
                   vars=dict(tablename=table._tablename,
                             columns=','.join(columns),
                             fieldname=fieldname or '',
                             fieldvalue=fieldvalue,
                             ))
    script = """
jQuery(document).ready(function(){jQuery("#%(id)s").jqGrid({
  url:'%(callback)s', datatype: "json",
  colNames: %(colnames)s, colModel: %(colmodel)s,
  rowNum:50, rowList:[20,50,100], pager: '#%(id)s_pager',
  viewrecords: true, height:%(height)s,
  onSelectRow: function(id){%(onselect)s}});
jQuery("#%(id)s").jqGrid('navGrid','#%(id)s_pager',
  {search:true,add:false,edit:false,del:false});
jQuery("#%(id)s").hideCol("id").setGridWidth(%(width)s,false);});
""" % dict(callback=callback, colnames=json(colnames),
           colmodel=json(colmodel), id=_id, height=height, width=width,
           onselect=onselect)
    return TAG[''](TABLE(_id=_id), DIV(_id=_id + "_pager"), SCRIPT(script))
def jqgrid(table, fieldname=None, fieldvalue=None, col_widths='',
           colnames=None, _id=None, fields='',
           col_width=80, width=700, height=300):
    """
    ## Embed a jqGrid plugin

    - ``table`` is the table name
    - ``fieldname``, ``fieldvalue`` are an optional filter (fieldname==fieldvalue)
    - ``_id`` is the "id" of the DIV that contains the jqGrid
    - ``fields`` is a list of columns names to be displayed
    - ``colnames`` is a list of column headers
    - ``col_width`` is the width of each column (default)
    - ``height`` is the height of the jqGrid
    - ``width`` is the width of the jqGrid
    """
    from gluon.serializers import json
    _id = _id or 'jqgrid_%s' % table
    if not fields:
        fields = [x.strip() for x in db[table].fields
                  if db[table][x.strip()].readable]
    elif isinstance(fields, str):
        fields = [x.strip() for x in fields.split(',')]
    if col_widths:
        col_widths = [x.strip() for x in col_widths.split(',')]
    else:
        col_widths = [col_width for x in fields]
    if not colnames:
        colnames = [(db[table][x].label or x) for x in fields]
    elif isinstance(colnames, str):
        colnames = [x.strip() for x in colnames.split(',')]
    colmodel = [{'name': x, 'index': x,
                 'width': col_widths[i],
                 'sortable': True}
                for i, x in enumerate(fields)]
    callback = URL('plugin_wiki', 'jqgrid',
                   vars=dict(tablename=table,
                             columns=','.join(fields),
                             fieldname=fieldname or '',
                             fieldvalue=fieldvalue,
                             ))
    script = """
jQuery(document).ready(function(){jQuery("#%(id)s").jqGrid({
  url:'%(callback)s', datatype: "json",
  colNames: %(colnames)s, colModel: %(colmodel)s,
  rowNum:10, rowList:[20,50,100], pager: '#%(id)s_pager',
  viewrecords: true, height:%(height)s});
jQuery("#%(id)s").jqGrid('navGrid','#%(id)s_pager',
  {search:true,add:false,edit:false,del:false});
jQuery("#%(id)s").setGridWidth(%(width)s,false);});
""" % dict(callback=callback, colnames=json(colnames),
           colmodel=json(colmodel), id=_id, height=height, width=width)
    return TAG[''](TABLE(_id=_id), DIV(_id=_id + "_pager"), SCRIPT(script))
def persona_ajax():
    query = str(request.vars.query)
    result = db(Persona.fsearch.lower().contains(query.lower()))
    if not result.isempty():
        return json([{'id': r.id, 'value': Persona._format % r.as_dict()}
                     for r in result.select()])
    else:
        return json([{'id': '',
                      'value': '<span style="color:red">Sin resultados</span>'}])
def jqgrid(table, fieldname=None, fieldvalue=None, col_widths='',
           colnames=None, _id=None, fields='',
           col_width=80, width=700, height=300, dbname='db'):
    """
    ## Embed a jqGrid plugin

    - ``table`` is the table name
    - ``fieldname``, ``fieldvalue`` are an optional filter (fieldname==fieldvalue)
    - ``_id`` is the "id" of the TABLE that contains the jqGrid
    - ``fields`` is a list of columns names to be displayed
    - ``colnames`` is a list of column headers
    - ``col_width`` is the width of each column (default)
    - ``height`` is the height of the jqGrid
    - ``width`` is the width of the jqGrid
    """
    from gluon.serializers import json
    _id = _id or 'jqgrid_%s' % table
    db = globals()[dbname]
    if not fields:
        fields = [x.strip() for x in db[table].fields
                  if db[table][x.strip()].readable]
    elif isinstance(fields, str):
        fields = [x.strip() for x in fields.split(',')]
    if col_widths:
        if isinstance(col_widths, (list, tuple)):
            col_widths = [str(x) for x in col_widths]
        else:
            col_widths = [x.strip() for x in col_widths.split(',')]
        if width == 'auto':
            width = sum([int(x) for x in col_widths])
    else:
        col_widths = [col_width for x in fields]
    if isinstance(colnames, str):
        colnames = [x.strip() for x in colnames.split(',')]
    else:
        colnames = [(db[table][x].label or x) for x in fields]
    colmodel = [{'name': x, 'index': x,
                 'width': col_widths[i],
                 'sortable': True}
                for i, x in enumerate(fields)]
    callback = URL('plugin_wiki', 'jqgrid',
                   vars=dict(dbname=dbname,
                             tablename=table,
                             columns=','.join(fields),
                             fieldname=fieldname or '',
                             fieldvalue=fieldvalue,
                             ),
                   hmac_key=auth.settings.hmac_key,
                   salt=auth.user_id)
    script = """
jQuery(document).ready(function(){jQuery("#%(id)s").jqGrid({
  url:'%(callback)s', datatype: "json",
  colNames: %(colnames)s, colModel: %(colmodel)s,
  rowNum:10, rowList:[20,50,100], pager: '#%(id)s_pager',
  viewrecords: true, height:%(height)s});
jQuery("#%(id)s").jqGrid('navGrid','#%(id)s_pager',
  {search:true,add:false,edit:false,del:false});
jQuery("#%(id)s").setGridWidth(%(width)s,false);
jQuery('select.ui-pg-selbox,input.ui-pg-input').css('width','50px');});
""" % dict(callback=callback, colnames=json(colnames),
           colmodel=json(colmodel), id=_id, height=height, width=width)
    return TAG[''](TABLE(_id=_id), DIV(_id=_id + "_pager"), SCRIPT(script))
def _sync(dataset, params, procedure):
    try:
        response_dataset = result = procedure.perform_work(dataset)
        if params['fields']:
            response_dataset = {k: v for k, v in result.iteritems()
                                if k in params['fields']}
        raise HTTP(http.CREATED, json(response_dataset))
    except ProcedureException as e:
        headers = {'error': e.cause}
        raise HTTP(http.INTERNAL_SERVER_ERROR, json(dataset), **headers)
def _grading(userId, course, exercise):
    response = dict()
    enrollment = db((db.enrollment.course == course) &
                    (db.enrollment.student == userId)).select()
    # no enrollment found
    if len(enrollment) < 1:
        response['error'] = T('You are not enrolled to the course you tried to get grades for')
        raise HTTP(422, XML(json(response)))
    exerciseAssignment = db((db.course_exercise.exercise == exercise) &
                            (db.course_exercise.course == course)).select()
    # no exercise assignment found
    if len(exerciseAssignment) < 1:
        response['error'] = T('There is no such exercise in this course')
        raise HTTP(422, XML(json(response)))
    exerciseAssignment = exerciseAssignment.first()
    grades = db((db.grading.enrollment == enrollment.first()) &
                (db.grading.exercise == exerciseAssignment.id)).select()
    # no grades found
    if len(grades) < 1:
        response['error'] = T('It seems like there is no grading for this exercise')
        raise HTTP(422, XML(json(response)))
    # an exercise may be assigned multiple times, so collect every grading
    response['grades'] = []
    for grade in grades:
        # get the point groups
        currentGrading = dict()
        pointGroups = db((db.points_grading.grading == grade.id)).select()
        currentGrading['overallPoints'] = 0
        currentGrading['pointGroups'] = []
        # iterate over them and build processable dicts
        for pointGroup in pointGroups:
            # fetch the referenced point group to gather data
            referencedPointGroup = db((db.points.id == pointGroup.points)).select().first()
            currentGrading['pointGroups'].append(
                dict(number=referencedPointGroup.number_of_points,
                     passed=pointGroup.succeeded))
            # sum up the results
            if pointGroup.succeeded:
                currentGrading['overallPoints'] += referencedPointGroup.number_of_points
        # put the grading into the array
        response['grades'].append(currentGrading)
    return response
def submit():
    project = request.vars.project
    course = request.vars.course
    main = request.vars.execute
    buildId = runsystem.generateBuildId(BUILD_ID_LENGTH)
    if len(course) == 0:
        raise HTTP(422, XML(json(dict(
            error=T('We can\'t do anything for you until you specify a course')))))
    try:
        runsystem.invokeBuild(mode='submit', buildId=buildId, project=project,
                              course=course, main=main, userId=auth.user_id)
    except Exception, e:
        raise HTTP(500, XML(json(dict(
            error=T('We got an error while trying to build the project: ') + T(str(e))))))
def message_updates(db):
    cursor = int(current.session.cursor or 0)
    q = db.chat.id > cursor
    rows = db(q).select()
    if not rows:
        new_message_event.wait()
        rows = db(q).select()
    if rows:  # This should always be True
        current.session.cursor = rows.last().id
        return json({'messages': rows.as_list()})
    # Should never happen. Just in case
    return json({'messages': []})
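# Hedged wiring sketch (the module-level threading.Event, the db.chat table and
# the _create_message helper are assumed from the surrounding chat code; the
# action names are illustrative): message_updates() blocks on new_message_event
# until message_new() stores a row and fires it, so the exposed actions can
# stay thin wrappers around the two helpers.
def updates():
    # long-poll endpoint: returns only messages newer than session.cursor
    return message_updates(db)

def post_message():
    # store the submitted message and wake every client waiting in updates()
    return message_new(db)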
def update_from_tvdb():
    series_id = request.args(0)
    if not series_id:
        return ''
    rtn = myscheduler.queue_task(
        'update_single_series', [series_id], {},
        task_name='spec:update_single_series',
        timeout=180,
        immediate=True)
    if rtn.id:
        return json(dict(result='ok'))
    else:
        return json(dict(result='error'))
def check():
    session.forget(response)
    if not request.vars.directory:
        return 0
    base, hint = os.path.split(request.vars.directory)
    if not base:
        return json([])
    if not os.access(base, os.R_OK):
        return json([])
    res = find_matching_subdir(base, hint)
    if res:
        return res
    else:
        return json([])
def upload_file():
    """
    File upload handler for the ajax form of the plugin jquery-file-upload
    Return the response in JSON required by the plugin
    """
    try:
        import re
        from gluon.serializers import json
        resource_file = request.vars['files[]']
        resource_type = re.compile(
            '^(\s|\.|-|\w|á|é|í|ó|ú|ñ|Á|É|Í|Ó|Ú|Ñ){1,40}\.(pdf|epub|png)$')
        filename = resource_file.filename
        if resource_type.match(filename):
            # Store file
            id = db.resource.insert(
                resource=db.resource.resource.store(resource_file, filename),
                title=filename)
            response.flash = CAT(T("I uploaded resource named "), filename)
            return response.json(
                json({
                    "name": filename,
                    "success": True,
                    "url": URL('resource', 'edites',
                               args=['edit', 'resource', id],
                               user_signature=True)
                }))
        else:
            return response.json(
                json({
                    "name": filename,
                    "success": False,
                    "message": T("Invalid file name")
                }))
    except Exception, e:
        response.flash = T("Failed uploading file")
        return response.json(
            json({
                "name": resource_file.filename,
                "success": False,
                "message": str(e)
            }))
def _validate_fields(self):
    """
    Checks whether the given parameters are valid. A dictionary with two
    keys is returned:

    `valid`: a list of fields whose names are contained in the column list
    of the requested table
    `special`: a list of fields whose names, with a valid suffix, are
    contained in the column list of the requested table

    :rtype : dict
    :return: A dictionary containing the valid fields
    """
    endpoint_fields = self.datasource[self.endpoint].fields
    fields = {"valid": [], "special": []}
    invalid_fields = []
    for k in self.lower_vars.keys():
        if k in endpoint_fields:
            fields['valid'].append(k)
        elif self._is_valid_field_with_sufix(k):
            fields['special'].append(k)
        else:
            if k not in self.valid_parameters:
                invalid_fields.append(k)
    if invalid_fields:
        headers = {"InvalidParameters": json(invalid_fields)}
        raise HTTP(http.BAD_REQUEST,
                   "Alguns parâmetros da requisição são incompatíveis.",
                   **headers)
    return fields
def form_archivo():
    from gluon.serializers import json
    solicitud = db(db.solicitud.estado_solicitud == "Pendiente").select()
    an = json(solicitud)
    return dict(an=an, solicitud=solicitud)
def index():
    last_time = session.refresh_log
    if not last_time or request.vars.refresh == '1':
        last_records = db(db.global_log.id > 0).select(
            orderby=~db.global_log.id, limitby=(0, 20))
        session.refresh_log_lastid = (last_records.first() and
                                      last_records.first().id or 0)
    else:
        last_records = db(db.global_log.id > session.refresh_log_lastid).select(
            orderby=~db.global_log.id, limitby=(0, 20))
        session.refresh_log_lastid = (last_records.first() and
                                      last_records.first().id or
                                      session.refresh_log_lastid)
    rtn = []
    for row in last_records:
        type = 'ok'
        trclass = 'info'
        if row.log_error:
            type = 'ko'
            trclass = 'error'
            row.log_operation = row.log_error
        rtn.append(
            str(TR(TD(SPAN(w2p_icon(type),
                           "%s: %s - %s " % (row.dat_insert, row.log_module,
                                             row.log_function))),
                   TD(SPAN(row.log_operation)),
                   _class=trclass)))
    rtn = json(rtn)
    session.refresh_log = 1
    return rtn
def history():
    if request.args(0) not in INSTANCE_VALID_ARG:
        non_valid_instance()
    curr_env = {
        'base_url': URL('console', 'history')
    }
    return dict(curr_env=json(curr_env))
def concat():
    if request.vars:
        a = request.vars.a
        b = request.vars.b
        result = {'Resultado': a + b}
        from gluon.serializers import json
        return XML(json(result))
def por_sistema():
    from gluon.serializers import json  # import json
    # fetch the records whose state is "Confeccion"
    setsoli = db(db.solicitud.estado_solicitud == "Confeccion").select()
    co = json(setsoli)  # serialize the record set to JSON
    return dict(co=co)
def add():
    rows = db(db.tpp_education_degrees.id > 0).select()
    degrees = TAG.select(_id='degree', _name='degree',
                         *[TAG.option(r['name'], _value=r['id']) for r in rows])
    rows2 = db((db.tpp_user_educations.school == db.tpp_schools.id) &
               (db.tpp_cities.id == db.tpp_schools.city) &
               (db.tpp_user_educations.degree == db.tpp_education_degrees.id) &
               (db.tpp_user_educations.created_by == auth.user_id)
               ).select(
        db.tpp_user_educations.start,
        db.tpp_user_educations.end,
        db.tpp_user_educations.note,
        db.tpp_user_educations.major,
        db.tpp_education_degrees.name,
        db.tpp_user_educations.id,
        db.tpp_schools.name,
        db.tpp_cities.name,
        db.tpp_school_majors.name,
        left=db.tpp_school_majors.on(
            db.tpp_user_educations.major == db.tpp_school_majors.id))
    temp = {}
    for edu in rows2:
        temp[edu.tpp_user_educations.id] = db.tpp_user_educations.major.represent(
            edu.tpp_user_educations.major)
    result = {}
    result['majors'] = temp
    result['edu'] = rows2
    return dict(degrees=degrees, education=json(result))
def get_data():
    import pandas
    import numpy as np
    from gluon.serializers import json
    import os
    path = os.path.join(request.folder, 'private', 'data', 'dataset_1',
                        'Video_Games_Sales_as_at_22_Dec_2016.csv')
    df = pandas.read_csv(path)
    publishersTop = df['Publisher'].value_counts().nlargest(10).axes[0].tolist()
    publishersTop.append('Bethesda Softworks')
    output = {}
    output['publishers'] = publishersTop
    for p in publishersTop:
        tempdf = df[df['Publisher'] == p]
        scatter_data = []
        pie_data = []
        line_data_labels = []
        line_data_values = []
        for i, row in tempdf.iterrows():
            scatter_data.append([row[5], row[6], str(row[0])])
            line_data_labels.append(str(row[0]))
            line_data_values.append(row[5])
            pie_data.append({'name': str(row[0]), 'value': row[5]})
        output[p] = ({'scatter_data': scatter_data},
                     {'pie_data': pie_data},
                     {'line_data': {'labels': line_data_labels,
                                    'values': line_data_values}})
    # import ipdb; ipdb.set_trace()
    return json(output)
def index():
    from gluon.serializers import json
    json_list = dict(xml_url='')
    locale = 'de_DE'
    if session.forced_language == 'en':
        locale = 'en_US'
    book_id = request.args[0]
    file_id = request.args[1]
    # check if it is xml
    if str(file_id).endswith('.xml'):
        query = ((db.submission_settings.submission_id == int(book_id)) &
                 (db.submission_settings.locale == locale))
        author_q = (db.authors.submission_id == book_id)
        authors_list = db(author_q).select(db.authors.first_name,
                                           db.authors.last_name)
        authors = ''
        for i in authors_list:
            authors += i.first_name + ' ' + i.last_name + ', '
        if authors.endswith(', '):
            authors = authors[:-2]
        return dict(json_list=XML(json(json_list)), authors=authors)
    else:
        path = os.path.join(request.folder, 'static/monographs', book_id,
                            'submission/', file_id)
        return response.stream(path)
def pesquisaAreaPorId():
    if request.vars:
        idArea = request.vars.id
        #area = db(Area.id == idArea).select()
        area = db.executesql(
            "SELECT a.id, a.setor as idsetor, s.numero as setor, "
            "a.coordenador as idcoordenador, au.first_name as coordenador "
            "FROM area as a "
            "INNER JOIN auth_user as au on au.id = a.coordenador "
            "INNER JOIN setor as s on s.id = a.setor "
            "WHERE a.id = %s" % idArea, as_dict=True)
        from gluon.serializers import json
        return XML(json(area))
def jqgrid():
    from gluon.serializers import json
    import cgi
    hash_vars = 'dbname|tablename|columns|fieldname|fieldvalue|user'.split('|')
    if not URL.verify(request, hmac_key=auth.settings.hmac_key,
                      hash_vars=hash_vars, salt=auth.user_id):
        raise HTTP(404)
    dbname = request.vars.dbname or 'db'
    tablename = request.vars.tablename or error()
    columns = (request.vars.columns or error()).split(',')
    rows = int(request.vars.rows or 25)
    page = int(request.vars.page or 0)
    sidx = request.vars.sidx or 'id'
    sord = request.vars.sord or 'asc'
    searchField = request.vars.searchField
    searchString = request.vars.searchString
    searchOper = {'eq': lambda a, b: a == b,
                  'nq': lambda a, b: a != b,
                  'gt': lambda a, b: a > b,
                  'ge': lambda a, b: a >= b,
                  'lt': lambda a, b: a < b,
                  'le': lambda a, b: a <= b,
                  'bw': lambda a, b: a.startswith(b),
                  'bn': lambda a, b: ~a.startswith(b),
                  'ew': lambda a, b: a.endswith(b),
                  'en': lambda a, b: ~a.endswith(b),
                  'cn': lambda a, b: a.contains(b),
                  'nc': lambda a, b: ~a.contains(b),
                  'in': lambda a, b: a.belongs(b.split()),
                  'ni': lambda a, b: ~a.belongs(b.split())}[request.vars.searchOper or 'eq']
    table = globals()[dbname][tablename]
    if request.vars.fieldname:
        names = request.vars.fieldname.split('|')
        values = request.vars.fieldvalue.split('|')
        query = reduce(lambda a, b: a & b,
                       [table[names[i]] == values[i] for i in range(len(names))])
    else:
        query = table.id > 0
    dbset = table._db(query)
    if searchField:
        dbset = dbset(searchOper(table[searchField], searchString))
    orderby = table[sidx]
    if sord == 'desc':
        orderby = ~orderby
    limitby = (rows * (page - 1), rows * page)
    fields = [table[f] for f in columns]
    records = dbset.select(orderby=orderby, limitby=limitby, *fields)
    nrecords = dbset.count()
    items = {}
    items['page'] = page
    items['total'] = int((nrecords + (rows - 1)) / rows)
    items['records'] = nrecords
    readable_fields = [f.name for f in fields if f.readable]

    def f(value, fieldname):
        r = table[fieldname].represent
        if r:
            value = r(value)
        try:
            return value.xml()
        except:
            return cgi.escape(str(value))

    items['rows'] = [{'id': r.id, 'cell': [f(r[x], x) for x in readable_fields]}
                     for r in records]
    return json(items)
def listaSetores():
    #setores = db(Setor.id > 0).select()
    setores = db.executesql(
        "SELECT s.id, s.numero, s.coordenador as idcoordenador, "
        "au.first_name as coordenador "
        "FROM setor as s INNER JOIN auth_user as au on au.id = s.coordenador",
        as_dict=True)
    from gluon.serializers import json
    return XML(json(setores))
def extract_examles():
    """Extract examples from the dictionary"""
    db = current.db
    slovar = db.slovar
    i, j, k, l = 0, 0, 0, 0
    rows = db((slovar.is_example == False) & (slovar.processed == False))
    n = rows.count()
    for x in rows.iterselect(slovar.id, slovar.perevod):
        i += 1
        updated, inserted = extract_save_examples(x.perevod, x.id)
        j += updated + inserted
        k += updated
        l += inserted
        msg = json(dict(id=x.id,
                        progress=round(float(i) / n * 100, 2),
                        founded=j,
                        updated=k,
                        inserted=l))
        current.W2P_TASK.logger.write('!clear!%s' % msg)
        x.processed = True
        x.update_record()
        # if j%10000==0: db.commit()  # commit every 10000 updates
    db.commit()
    return "Complite"
def createlinks():
    """Create links between dictionary entries"""
    db = current.db
    slovar = db.slovar
    reg_ref = re.compile(r"\[ref\](.*?)\[/ref\]")
    i, j = 0, 0
    n = db(slovar.id > 0).count()
    for x in db(slovar.id > 0).iterselect():
        i += 1
        for slovlnk in reg_ref.findall(x.perevod):
            row = db(slovar.slovo == slovlnk).select().first()
            if row == None:
                continue
            tolist = x.linksto if x.linksto != None else []
            if row.id not in tolist:
                tolist.append(row.id)
                x.update_record(linksto=tolist)
            fromlist = row.linksfrom if row.linksfrom != None else []
            if x.id not in fromlist:
                fromlist.append(x.id)
                row.update_record(linksfrom=fromlist)
            j += 1
            if j % 1000 == 0:
                db.commit()  # commit every 1000 inserts
        msg = json(dict(founded=j,
                        progress=round(float(i) / n * 100, 2)))
        current.W2P_TASK.logger.write('!clear!%s' % msg)
    db.commit()
    return "Complite"
def f(_action=action, *a, **b):
    request.is_restful = True
    env = request.env
    is_json = env.content_type == 'application/json'
    method = env.request_method
    if len(request.args) and '.' in request.args[-1]:
        request.args[-1], _, request.extension = request.args[-1].rpartition('.')
        current.response.headers['Content-Type'] = \
            contenttype('.' + request.extension.lower())
    rest_action = _action().get(method, None)
    if not (rest_action and method == method.upper() and callable(rest_action)):
        raise HTTP(405, "method not allowed")
    try:
        res = rest_action(*request.args, **request.vars)
        if is_json and not isinstance(res, str):
            res = json(res)
        return res
    except TypeError, e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        if len(traceback.extract_tb(exc_traceback)) == 1:
            raise HTTP(400, "invalid arguments")
        else:
            raise
def sozdanie_bazy(file, truncate=False):
    """Populate the database from a dictionary file in DSL format."""
    if not os.path.exists(file):
        file = os.path.normpath(os.path.join(current.request.folder, file))
        if not os.path.exists(file):
            return "Файл не найден"
    # convertfile(file)  # re-save the file as utf-8
    db = current.db
    slovar = db.slovar
    if truncate:
        slovar.truncate()  # empty the database table if requested
    inserted, updated = 0, 0  # counters for inserted and updated records
    parser = dsl_parser(r"(?m)^([^\s].*?)\n\s(.*?)\n\s(.*?)$")
    for blocktext, progress in read_by_blocks(file):
        for slovo, pinyin, perevod in parser.send(blocktext):
            try:
                slovar.insert(slovo=slovo, pinyin=pinyin, perevod=perevod)
                inserted += 1
            except:
                db.commit()
                row = db(slovar.slovo == slovo).select().first()
                if not row:
                    continue
                row.pinyin = pinyin
                row.perevod = perevod
                row.update_record()
                updated += 1
            if inserted % 5000 == 0:
                db.commit()
        msg = dict(inserted=inserted, updated=updated, progress=progress)
        current.W2P_TASK.logger.write('!clear!%s' % json(msg))
    parser.close()
    db.commit()
def f(_action=action, *a, **b):
    request.is_restful = True
    env = request.env
    is_json = env.content_type == "application/json"
    method = env.request_method
    if len(request.args) and "." in request.args[-1]:
        request.args[-1], _, request.extension = request.args[-1].rpartition(".")
        current.response.headers["Content-Type"] = \
            contenttype("." + request.extension.lower())
    rest_action = _action().get(method, None)
    if not (rest_action and method == method.upper() and callable(rest_action)):
        raise HTTP(405, "method not allowed")
    try:
        vars = request.vars
        if method == "POST" and is_json:
            body = request.body.read()
            if len(body):
                vars = sj.loads(body)
        res = rest_action(*request.args, **vars)
        if is_json and not isinstance(res, str):
            res = json(res)
        return res
    except TypeError, e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        if len(traceback.extract_tb(exc_traceback)) == 1:
            raise HTTP(400, "invalid arguments")
        else:
            raise
def check_season():
    session.forget(response)
    series_id, seasonnumber = request.args(0), request.args(1)
    if not (series_id and seasonnumber):
        return json({})
    status = db(
        (db.seasons_settings.series_id == series_id) &
        (db.seasons_settings.seasonnumber == seasonnumber)
    ).select(db.seasons_settings.season_status,
             db.seasons_settings.updated_on).first()
    if not status.updated_on:
        status.updated_on = request.now
    episodes = db(
        (db.series.id == series_id) &
        (db.episodes.seriesid == db.series.seriesid) &
        (db.episodes.seasonnumber == seasonnumber) &
        (db.episodes.inserted_on > status.updated_on)
    ).select(db.episodes.epnumber)
    rtn = status.season_status
    if len(episodes) > 0:
        st_ = sj.loads(status.season_status)
        missing = st_.get('missing', [])
        for ep in episodes:
            missing.append(ep.epnumber)
        st_['missing'] = missing
        rtn = sj.dumps(st_)
    return rtn
def jqgrid(): """ jqgrid callback retrieves records http://trirand.com/blog/jqgrid/server.php?q=1&_search=false&nd=1267835445772&rows=10&page=1&sidx=amount&sord=asc&searchField=&searchString=&searchOper= """ from gluon.serializers import json import cgi tablename = request.vars.tablename or error() columns = (request.vars.columns or error()).split(',') rows=int(request.vars.rows or 25) page=int(request.vars.page or 0) sidx=request.vars.sidx or 'id' sord=request.vars.sord or 'asc' searchField=request.vars.searchField searchString=request.vars.searchString searchOper={'eq':lambda a,b: a==b, 'nq':lambda a,b: a!=b, 'gt':lambda a,b: a>b, 'ge':lambda a,b: a>=b, 'lt':lambda a,b: a<b, 'le':lambda a,b: a<=b, 'bw':lambda a,b: a.like(b+'%'), 'bn':lambda a,b: ~a.like(b+'%'), 'ew':lambda a,b: a.like('%'+b), 'en':lambda a,b: ~a.like('%'+b), 'cn':lambda a,b: a.like('%'+b+'%'), 'nc':lambda a,b: ~a.like('%'+b+'%'), 'in':lambda a,b: a.belongs(b.split()), 'ni':lambda a,b: ~a.belongs(b.split())}\ [request.vars.searchOper or 'eq'] table=db[tablename] if request.vars.fieldname: names = request.vars.fieldname.split('|') values = request.vars.fieldvalue.split('|') query = reduce(lambda a,b:a&b, [table[names[i]]==values[i] for i in range(len(names))]) else: query = table.id>0 dbset = table._db(query) if searchField: dbset=dbset(searchOper(table[searchField],searchString)) orderby = table[sidx] if sord=='desc': orderby=~orderby limitby=(rows*(page-1),rows*page) fields = [table[f] for f in columns] records = dbset.select(orderby=orderby,limitby=limitby,*fields) nrecords = dbset.count() items = {} items['page']=page items['total']=int((nrecords+(rows-1))/rows) items['records']=nrecords readable_fields=[f.name for f in fields if f.readable] def f(value,fieldname): r = table[fieldname].represent if r: value=r(value) try: return value.xml() except: return cgi.escape(str(value)) items['rows']=[{'id':r.id,'cell':[f(r[x],x) for x in readable_fields]} \ for r in records] return json(items)
def jqgrid(): """ jqgrid callback retrieves records http://trirand.com/blog/jqgrid/server.php?q=1&_search=false&nd=1267835445772&rows=10&page=1&sidx=amount&sord=asc&searchField=&searchString=&searchOper= """ from gluon.serializers import json import cgi tablename = request.vars.tablename or error() columns = (request.vars.columns or error()).split(',') rows = int(request.vars.rows or 25) page = int(request.vars.page or 0) sidx = request.vars.sidx or 'id' sord = request.vars.sord or 'asc' searchField = request.vars.searchField searchString = request.vars.searchString searchOper={'eq':lambda a,b: a==b, 'nq':lambda a,b: a!=b, 'gt':lambda a,b: a>b, 'ge':lambda a,b: a>=b, 'lt':lambda a,b: a<b, 'le':lambda a,b: a<=b, 'bw':lambda a,b: a.like(b+'%'), 'bn':lambda a,b: ~a.like(b+'%'), 'ew':lambda a,b: a.like('%'+b), 'en':lambda a,b: ~a.like('%'+b), 'cn':lambda a,b: a.like('%'+b+'%'), 'nc':lambda a,b: ~a.like('%'+b+'%'), 'in':lambda a,b: a.belongs(b.split()), 'ni':lambda a,b: ~a.belongs(b.split())}\ [request.vars.searchOper or 'eq'] table = db[tablename] if request.vars.fieldname: dbset = table._db( table[request.vars.fieldname] == request.vars.fieldvalue) else: dbset = table._db(table.id > 0) if searchField: dbset = dbset(searchOper(table[searchField], searchString)) orderby = table[sidx] if sord == 'desc': orderby = ~orderby limitby = (rows * (page - 1), rows * page) fields = [table[f] for f in columns] records = dbset.select(orderby=orderby, limitby=limitby, *fields) nrecords = dbset.count() items = {} items['page'] = page items['total'] = int((nrecords + (rows - 1)) / rows) items['records'] = nrecords readable_fields = [f.name for f in fields if f.readable] def f(value, fieldname): r = table[fieldname].represent if r: value = r(value) try: return value.xml() except: return cgi.escape(str(value)) items['rows']=[{'id':r.id,'cell':[f(r[x],x) for x in readable_fields]} \ for r in records] return json(items)
def _serialize(data):
    if request.extension == 'json':
        return XML(json(data))
    elif request.extension == 'xml':
        return XML(xml(data))
    # use the html view
    return data
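# Hedged usage sketch (db.person is an illustrative table, not from the code
# above): _serialize() lets a single action answer .json, .xml or plain HTML
# depending on the extension requested by the client.
def people():
    rows = db(db.person).select().as_list()
    return _serialize(dict(people=rows))
# /app/default/people.json -> JSON body, /app/default/people.xml -> XML,
# /app/default/people      -> rendered by the usual html view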
def find_matching_subdir(base, hint):
    session.forget(response)
    topdirs = [name for name in os.listdir(base)
               if os.path.isdir(os.path.join(base, name))]
    res = []
    for a in topdirs:
        if a.startswith(hint):
            res.append(os.path.join(base, a, ''))
    return json(res)
def overview():
    if request.args(0) not in INSTANCE_VALID_ARG:
        non_valid_instance()
    curr_env = {
        'base_url': URL('console', 'overview'),
    }
    return dict(curr_env=json(curr_env))
def message_new(db):
    name = current.session.auth.user.first_name
    body = current.request.vars.body
    msg = _create_message(db, name, body)
    new_message_event.set()
    new_message_event.clear()
    return json(msg)
def _notify_clients(self, message, group='default'):
    try:
        websocket_send("http://%s:%s" % (self.host, self.port),
                       json(message), self.password, group)
    except IOError as e:
        if self.verbose:
            print("{notifier}: Unable to notify clients \"{exception}\". "
                  "Is the server up and running? Follow the instructions "
                  "at \"gluon/contrib/websocket_messaging.py\"".format(
                      notifier=self.__class__.__name__, exception=e.strerror))
def aadata(self, totalrows, displayrows, id, sEcho, flist,
           stringify=True, action_col=None, **attr):
    """
    Method to render the data into a json object

    @param totalrows: The total rows in the unfiltered query.
    @param displayrows: The total rows in the filtered query.
    @param id: The id of the table for which this ajax call will respond to.
    @param sEcho: An unaltered copy of sEcho sent from the client used
                  by dataTables as a draw count.
    @param flist: The list of fields
    @param attr: dictionary of attributes which can be passed in
        dt_action_col: The column where the action buttons will be placed
        dt_bulk_actions: list of labels for the bulk actions.
        dt_bulk_col: The column in which the checkboxes will appear,
                     by default it will be the column immediately
                     before the first data item
        dt_group_totals: The number of records in each group.
                         This will be displayed in parenthesis
                         after the group title.
    """
    data = self.data
    if not flist:
        flist = self.lfields
    start = self.start
    end = self.end
    if action_col is None:
        action_col = attr.get("dt_action_col", 0)
    structure = {}
    aadata = []
    for i in xrange(start, end):
        row = data[i]
        details = []
        for field in flist:
            if field == "BULK":
                details.append(
                    "<INPUT id='select%s' type='checkbox' class='bulkcheckbox'>" %
                    row[flist[action_col]])
            else:
                details.append(s3_unicode(row[field]))
        aadata.append(details)
    structure["dataTable_id"] = id
    structure["dataTable_filter"] = self.filterString
    structure["dataTable_groupTotals"] = attr.get("dt_group_totals", [])
    structure["dataTable_sort"] = self.orderby
    structure["aaData"] = aadata
    structure["iTotalRecords"] = totalrows
    structure["iTotalDisplayRecords"] = displayrows
    structure["sEcho"] = sEcho
    if stringify:
        from gluon.serializers import json
        return json(structure)
    else:
        return structure
def post():
    if request.extension == 'json' and 'X-Progress-ID' in request.get_vars:
        cache_key = 'X-Progress-ID:' + request.get_vars['X-Progress-ID']
        length = cache.ram(cache_key + ':length', lambda: 0, None)
        uploaded = cache.ram(cache_key + ':uploaded', lambda: 0, None)
        from gluon.serializers import json
        return json(dict(length=length, uploaded=uploaded))
    form = FORM(INPUT(_type='file', _name='file', requires=IS_NOT_EMPTY()),
                INPUT(_type='submit', _value='SUBMIT'))
    return dict(form=form, myuuid="1234567890abcdefgh")
def email():
    if session.Username:
        for user in db(db.EmailIds.Username == session.Username).select():
            ctr = 1
            if user['EID2']:
                ctr = ctr + 1
            if user['EID3']:
                ctr = ctr + 1
            if user['EID4']:
                ctr = ctr + 1
            if user['EID5']:
                ctr = ctr + 1
            emailids = []
            unreads = []
            unread_messages = []
            for i in range(ctr):
                i = i + 1
                emailid = 'EID' + str(i)
                pwdid = 'Pass' + str(i)
                eid = user[emailid]
                pwd = user[pwdid]
                emailids.append(eid)
                with MailBox(eid, pwd) as mbox:
                    unread = mbox.get_count()
                    #x = 'unread' + str(i)
                    unreads.append(unread)
                    unread_msg_ids = mbox.get_msg_ids()
                unread_msgs = []
                #print unread_msg_ids
                if unread_msg_ids != []:
                    ctr2 = 0
                    # loop variable renamed to msg_id to avoid shadowing the account index i
                    for msg_id in unread_msg_ids:
                        ctr2 = ctr2 + 1
                        if ctr2 == 11:
                            break
                        unread_msg = get_msg(eid, pwd, unread, msg_id)
                        unread_msgs.append(unread_msg)
                        #y = 'unread_msgs' + str(i)
                unread_messages.append(unread_msgs)
            return dict(ctr=ctr, emailids=json(emailids), unreads=json(unreads),
                        unread_messages=json(unread_messages))
            #return dict(unread1=unread1,unread_msg_ids=json(unread_msg_ids))
    else:
        redirect(URL('login'))
def get_chart1_data_json():
    '''
    Wrapper for get_chart1_data() to allow getting a json-formatted return val.
    '''
    user_id = request.vars['user_id'] if 'user_id' in request.vars else auth.user_id
    set = request.vars['set'] if 'set' in request.vars else None
    tag = request.vars['tag'] if 'tag' in request.vars else None
    chart1_data = get_chart1_data(user_id=user_id, set=set, tag=tag)
    return json(chart1_data['chart1_data'])
def result():
    data = db(db.current_builds.BuildId == request.vars.buildId).select().first()
    # if the data is not available throw an error
    if data is None:
        errorObject = {'message': T('Build ID does not exist')}
        raise HTTP(422, json(errorObject))
    # if data is available and the build has finished return the outputs
    if data.finished:
        out = dict(finished=data.finished, output=data.output, error=data.error)
        if data.buildError:
            raise HTTP(500, XML(json(out)))
        return out
    # if data is available and the build is still running tell the client to wait
    else:
        return dict(finished=data.finished, timeout=CLIENT_TIMEOUT)