Example #1
    def default(self, id=None):

        if cherrypy.request.method == "POST" and id is None:
            # Create a new process record.
            (pid, protocol) = simplejson.load(cherrypy.request.body)
            record = self.process_pool.create_process_record(pid, protocol)
            self.process_pool.create_job_for_process(record)

            return simplejson.dumps(record.as_descriptor())

        elif cherrypy.request.method == "GET" and id is None:
            # Return a list of process IDs.
            return simplejson.dumps(self.process_pool.get_process_ids())

        elif id is not None:
            # Return information about a running process (for debugging).
            record = self.process_pool.get_process_record(id)
            if record is None:
                raise cherrypy.HTTPError(404)
            elif cherrypy.request.method != "GET":
                raise cherrypy.HTTPError(405)
            else:
                return simplejson.dumps(record.as_descriptor())

        else:
            raise cherrypy.HTTPError(405)
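
For illustration, a client-side sketch of how this handler is exercised. The /processes mount point, the host, and the use of the requests library are assumptions, not part of the original snippet:

import json
import requests

BASE = "http://localhost:8080/processes"  # hypothetical mount point for the handler above

# POST with no id: the handler reads a JSON (pid, protocol) pair from the request body.
created = requests.post(BASE, data=json.dumps([1234, "myprotocol"])).json()

# GET with no id: returns the list of known process ids.
process_ids = requests.get(BASE).json()

# GET with an id: returns one record; 404 if unknown, 405 for any other method.
record = requests.get("%s/%s" % (BASE, process_ids[0])).json()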
Example #2
def create_json_error_answer(exception, status=200):
    from indico.core.config import Config
    from indico.core.errors import IndicoError, get_error_description
    if isinstance(exception, IndicoError):
        details = exception.toDict()
    else:
        exception_data = exception.__dict__
        try:
            _json.dumps(exception_data)
        except Exception:
            exception_data = {}
        details = {
            'code': type(exception).__name__,
            'type': 'noReport' if ((not session.user and isinstance(exception, Forbidden)) or
                                   _is_no_report_error(exception)) else 'unknown',
            'message': unicode(get_error_description(exception)),
            'data': exception_data,
            'requestInfo': get_request_info(),
            'inner': traceback.format_exc()
        }

    return current_app.response_class(dumps({
        'version': Config.getInstance().getVersion(),
        'result': None,
        'error': details
    }), mimetype='application/json', status=status)
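
For reference, every response built by this helper carries the same three-field envelope. For a non-IndicoError exception, a representative body (values purely illustrative) looks like:

{
    "version": "1.2.3",
    "result": null,
    "error": {
        "code": "ValueError",
        "type": "unknown",
        "message": "something went wrong",
        "data": {},
        "requestInfo": {},
        "inner": "Traceback (most recent call last): ..."
    }
}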
Example #3
    def get(self, agent_id):
        username = self.get_current_user()
        customer_name = get_current_customer_name(username)
        uri = self.request.uri
        method = self.request.method
        try:
            count = int(self.get_argument('count', 20))
            offset = int(self.get_argument('offset', 0))
            sort = self.get_argument('sort', 'desc')
            sort_by = self.get_argument('sort_by', OperationKey.CreatedTime)
            operations = (
                OperationRetriever(
                    username, customer_name,
                    uri, method, count, offset,
                    sort, sort_by
                )
            )

            results = operations.get_all_operations_by_agentid(agent_id)

            self.set_status(results['http_status'])
            self.set_header('Content-Type', 'application/json')
            self.write(json.dumps(results, indent=4))

        except Exception as e:
            results = (
                GenericResults(
                    username, uri, method
                ).something_broke('operation', 'search by oper type', e)
            )
            logger.exception(results)
            self.set_status(results['http_status'])
            self.set_header('Content-Type', 'application/json')
            self.write(json.dumps(results, indent=4))
Example #4
 def test_tuple_array_dump(self):
     t = (1, 2, 3)
     expect = json.dumps(list(t))
     # Default is True
     sio = StringIO()
     json.dump(t, sio)
     self.assertEqual(expect, sio.getvalue())
     sio = StringIO()
     json.dump(t, sio, tuple_as_array=True)
     self.assertEqual(expect, sio.getvalue())
     self.assertRaises(TypeError, json.dump, t, StringIO(),
                       tuple_as_array=False)
     # Ensure that the "default" does not get called
     sio = StringIO()
     json.dump(t, sio, default=repr)
     self.assertEqual(expect, sio.getvalue())
     sio = StringIO()
     json.dump(t, sio, tuple_as_array=True, default=repr)
     self.assertEqual(expect, sio.getvalue())
     # Ensure that the "default" gets called
     sio = StringIO()
     json.dump(t, sio, tuple_as_array=False, default=repr)
     self.assertEqual(
         json.dumps(repr(t)),
         sio.getvalue())
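
In short, the behaviour pinned down above is that simplejson serialises tuples as JSON arrays unless tuple_as_array=False forces them through the default hook; a compact illustration, mirroring the assertions in the test:

import simplejson as json

t = (1, 2, 3)
json.dumps(t)                                      # '[1, 2, 3]' (tuple_as_array defaults to True)
json.dumps(t, tuple_as_array=False, default=repr)  # '"(1, 2, 3)"' (falls back to default)
# json.dumps(t, tuple_as_array=False) raises TypeError because no default is given.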
Example #5
 def api(self, command, **args):
     try:
         if command == 'difficulty':
             diff_q = query_single('SELECT curr_diff FROM stats')
             difficulty = {'difficulty': diff_q[0]}
             return json.dumps(difficulty)
         elif command == 'totalmint':
             total_m = query_single('SELECT total_mint FROM stats')
             minted = {'total minted': total_m[0]}
             return json.dumps(minted)
         elif command == 'getsigs':
             if args.get('adm', 'False') == 'False':
                 signatures = query_multi(
                     "SELECT s.signerId, s.signature,a.alias FROM signatures s "
                     "LEFT JOIN cvnalias a on s.signerId = a.nodeId "
                     "where s.height = %s",
                     args.get('block', '-1'))
             else:
                 signatures = query_multi(
                     "SELECT s.adminId, s.signature, a.alias FROM adminSignatures s "
                     "LEFT JOIN cvnalias a on s.adminId = a.nodeId "
                     "where height = %s",
                     args.get('block', '-1'))
             data = []
             if signatures:
                 for row in signatures:
                     data.append({'signer': row[0], 'sig': row[1], 'alias': row[2]})
             else:
                 data.append({'signer': 'no data', 'sig': 'available'})
             return json.dumps(data)
         return json.dumps({'error':'invalid'})
     except Exception:
         raise cherrypy.HTTPError(503)
Example #6
def find_class(year, season, category, timeStart, days):
    connection = sqlite.connect(DATABASE_NAME)
    cursor = connection.cursor()
    
    # validate input data
    if re.search(r'[^a-zA-Z:0-9\s]+', year + season + category + timeStart + days) is not None:
        return json.dumps({'error': 'sql'})
    
    timeMatch = re.match(r'(?P<hour>\d+):(?P<minute>\d+)\s*(?P<ampm>[AMP]+)', timeStart)
    timeStart = '%02d:%02d %s' % (int(timeMatch.group('hour')), int(timeMatch.group('minute')), timeMatch.group('ampm'))
    
    # form the category filter
    cat_hum = ['AAS','AFRO','AFST','AIS','ANTH','ARCH','ART','ARTD','ARTE','ARTF','ARTH','ARTS','ASST','CHLH','CINE','CLCV','CMN','CW','CWL','EALC','EDPR','EIL','ENGL','ENVS','EOL','EPS','EPSY','ESL','EURO','FAA','GEOG','GER','GLBL','GMC','GS','GWS','HCD','HDES','HDFS','HIST','HRE','HUM','JOUR','JS','LAST','LLS','MDIA','MDVL','MUS','MUSE','NUTR','PHIL','PS','PSYC','REES','REHB','RHET','RLST','RSOC','RST','RUSS','SAME','SCAN','SCR','SLAV','SOC','SPAN','SPED','SWAH','TURK','UKR','WLOF','WRIT','YDSH','ZULU']
    cat_eng = ['ABE','ACES','AE','ASTR','BIOC','BIOE','BIOL','BIOP','BTW','CB','CDB','CEE','CHBE','CHEM','CPSC','CS','CSE','ECE','ECON','ENG','ENGH','ESE','GE','GEOG','GEOL','HORT','IB','IE','LIS','MATH','MCB','ME','MICR','MSE','NEUR','NPRE','NRES','PATH','PBIO','PHYS','PLPA','STAT','TE','TSM']
    catpred = ''
    
    if category == 'humanities':
        catpred = 'AND subject IN ("' + '","'.join(cat_hum) + '")'
    elif category == 'engineering':
        catpred = 'AND subject IN ("' + '","'.join(cat_eng) + '")'
    else:
        catpred = ''
    
    FIND_CLASS_QUERY = 'SELECT year, semester, subject, number, name FROM (uiuc_sections INNER JOIN uiuc_courses ON uiuc_sections.course_pk=uiuc_courses.pk) WHERE semester="%s" AND year="%s" AND time_start="%s" AND days="%s" %s GROUP BY name ORDER BY subject ASC'
    formed_query = FIND_CLASS_QUERY % (season, year, timeStart, days, catpred)
    
    cursor.execute(formed_query)
    res = cursor.fetchall()
    
    return json.dumps(res)
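
The query above is assembled with string formatting and guarded only by the regex whitelist. A parameterized variant is sketched below; it is not the original code, and catpred is still inlined because column predicates cannot be bound as parameters:

FIND_CLASS_QUERY = ('SELECT year, semester, subject, number, name '
                    'FROM (uiuc_sections INNER JOIN uiuc_courses ON uiuc_sections.course_pk=uiuc_courses.pk) '
                    'WHERE semester=? AND year=? AND time_start=? AND days=? %s '
                    'GROUP BY name ORDER BY subject ASC' % catpred)
cursor.execute(FIND_CLASS_QUERY, (season, year, timeStart, days))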
Example #7
def query_class(className):
	connection = sqlite.connect(DATABASE_NAME)
	cursor = connection.cursor()
		
	classToken = re.match('(?P<department>[a-zA-Z]+)(?P<number>\d+)', className)

	classDept = classToken.group('department').upper()
	classNumber = classToken.group('number')

	cursor.execute("SELECT * FROM %s WHERE subject=\"%s\" AND number=%s" % (COURSE_TABLE_NAME, classDept, classNumber))
	res = cursor.fetchall()

	if len(res) <= 0:
		return json.dumps({'error': 'class not found'})

	# For each <class> find the corresponding sections and place in section table
	sectionTable = {}

	for row in res:
		rowPk = int(row[0])
		cursor.execute("SELECT * FROM %s WHERE course_pk=%d" % (SECTION_TABLE_NAME, rowPk))
		sectionResult = cursor.fetchall()
		
		for section in sectionResult:
			timeStart = to_military(section[5])
			timeEnd = to_military(section[6])
			
			try:
				sectionTable[section[3]].append(timeStart)
			except KeyError:
				sectionTable[section[3]] = [timeStart]
				
	return json.dumps(sectionTable)
			
Example #8
def destroy(request, app_name, model_name, user, id=None):
    '''
    ' Receive a model_name and data object via ajax, and remove that item,
    ' returning either a success or error message.
    '''

    cls = apps.get_model(app_name, model_name)
    try:
        obj = cls.objects.get_editable_by_pk(user, id)
        if obj is None:
            transaction.rollback()
            error = "User %s does not have permission to delete this object." % user
            return HttpResponse(json.dumps({'errors': error}, indent=4), content_type="application/json")
    except Exception as e:
        transaction.rollback()
        error = "There was an error for user %s trying to delete this object: %s" % (user, str(e))
        return HttpResponse(json.dumps({'errors': error}, indent=4), content_type="application/json")

    try:
        obj.delete()
    except Exception as e:
        transaction.rollback()
        error = "Unexpected error deleting object: %s: %s" % (type(e), e)
        return HttpResponse(json.dumps({'errors': error}, indent=4), content_type="application/json")

    transaction.commit()
    dump = json.dumps({'success': 'Successfully deleted item with primary key: %s' % id}, indent=4)
    response = HttpResponse(dump, content_type="application/json")
    response.status_code = 201
    return response
Example #9
def text_to_transcript(text_file, output_file):
    text = open(text_file).read()

    filedir = os.path.dirname(os.path.realpath(__file__))
    schema_path = os.path.join(
        filedir, "alignment-schemas/transcript_schema.json")

    transcript_schema = json.load(open(schema_path))

    paragraphs = text.split("\n\n")
    out = []
    for para in paragraphs:
        para = para.replace("\n", " ")
        if para == "" or para.startswith("#"):
            continue

        line = {
            "speaker": "narrator",
            "line": para
        }
        out.append(line)

    jsonschema.validate(out, transcript_schema)
    if output_file is None:
        print json.dumps(out, indent=4)
    else:
        with open(output_file, 'w') as f:
            f.write(json.dumps(out, indent=4))
    return
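
For a text file containing two paragraphs, the transcript written above would look roughly like this (the speaker is always "narrator", with one entry per non-empty, non-comment paragraph; the paragraph text is invented):

[
    {
        "speaker": "narrator",
        "line": "First paragraph of the source text."
    },
    {
        "speaker": "narrator",
        "line": "Second paragraph of the source text."
    }
]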
Example #10
  def put(self):
    allowed_routes = ['CICERO_FUNCTION_NAME']
    function = self.request.get('f')
    input_source = self.request.get('input1')
    json_data = {'f':function, 'input1':input_source}

    output = ''
    if self.request.get('output') == '':
      key_length = 16  # for now, randomly generates keys 16 chars long
      json_data['output'] = os.urandom(key_length)  # TODO - does this work in app engine?
    else:
      json_data['output'] = str(self.request.get('output'))
    output = str(json_data['output'])

    if function in allowed_routes:
      url = '/' + function
      logging.debug('starting a request for url ' + url)
      new_task = taskqueue.add(url=url, params={'data': json.dumps(json_data)})
      # TODO - adding the task does not imply success - when does it not?
      result = {'result':'success', 'task_id':new_task.name, 'output':output, 'id':new_task.name}
      logging.debug('result of job with input data ' + str(json_data) + ' was ' + str(result))
      self.response.out.write(json.dumps(result))
    else:
      reason = 'Cannot add a task for function type ' + str(function)
      result ={'result':'failure', 'reason':reason}
      self.response.out.write(json.dumps(result))
Example #11
def main():
    prog, args = sys.argv[0], sys.argv[1:]
    if not args:
        print 'usage: %s username' % prog
        return
    tags = taglist(args[0])
    print simplejson.dumps(tags)
Example #12
 def _getLastEntry(self, channel):
     """fetche the json data from either a file stored from the
     last request (only 1 request per 15 minutes allowed) or
     makes a new request and stores the result in a file as well
     """
     filename = plugins.makeChannelFilename('bitcoin', channel)
     # if file older than 15 minutes -> new query
     if os.path.exists(filename):
         delta = datetime.timedelta(minutes=15)
         statbuf = os.stat(filename)
         now = datetime.datetime.now()
         modtime = datetime.datetime.fromtimestamp(statbuf.st_mtime) 
         if (now - delta) > modtime:
             data = self._fetchJsonData()
             self._writeToFile(simplejson.dumps(data), filename)
             log.info('new data')
             return simplejson.dumps(data)
         else:
             data = self._readFromFile(filename)
             log.info('old data')
             return  simplejson.dumps(data)
     else:
         data = self._fetchJsonData()
         self._writeToFile(simplejson.dumps(data), filename)     
         log.info('create new file and new data')
         return simplejson.dumps(data)
Example #13
 def test_encode_truefalse(self):
     self.assertEquals(json.dumps(
              {True: False, False: True}, sort_keys=True),
              '{"false": true, "true": false}')
     self.assertEquals(json.dumps(
             {2: 3.0, 4.0: 5L, False: 1, 6L: True, "7": 0}, sort_keys=True),
             '{"false": 1, "2": 3.0, "4.0": 5, "6": true, "7": 0}')
Example #14
def get_all_category():
    category_name = request.args.get('category_name', '').replace('$', '&')

    if category_name:
        query_field = mongo_util.generate_query(eval(request.args.get('field', '[]')))
        
        try:
            page = int(request.args.get('page', '1'))
        except:
            page = 1
        page = page if page > 0 else 1
        
        #get target collection from mongodb
        com_col = mongo_util.get_commodity_col()
        
        com_cursor = com_col.find(
            {'category': {'$elemMatch': {'$all': category_name.split('>')}}},
            query_field).skip((page - 1) * conf.ITEM_PER_PAGE).limit(conf.ITEM_PER_PAGE).batch_size(500)

        return json.dumps(map(lambda x:x, com_cursor), cls=ComplexEncoder)

    com_col = mongo_util.get_commodity_col()

    all_category = com_col.distinct('category.0')

    return json.dumps(map(lambda x: {'name': '>'.join(x)},
                      all_category), cls=ComplexEncoder)
Example #15
    def get(self):
        created_by = self.get_current_user().strip('"')
        dealer = self.get_argument("dealer")
        numqrs = self.get_argument("numqrs")

        try:
            numqrs = int(numqrs)
        except ValueError:
            # the front-end side interprets this as an error
            self.write(simplejson.dumps([1]))
            self.set_header("Content-Type", "application/json")
            self.finish()
            return
        try:
            connection = yield momoko.Op(self.db.getconn)
            with self.db.manage(connection):
                yield momoko.Op(connection.execute, "BEGIN")
                for _ in range(numqrs):
                    _uuid = qrgen.generate()
                    cursor = yield momoko.Op(self.db.execute,
                                             "INSERT INTO uuid_store (qr_uuid, user_id, \
                                              created_by, created_on) values (%s, %s, %s,\
                                              NOW())", (_uuid, dealer, created_by))
                yield momoko.Op(connection.execute, "COMMIT")
        except Exception as e:
            if connection:
                yield momoko.Op(connection.execute, "ROLLBACK")
            mylogger.error(e)
            self.render("error.html", **{})
        else:
            # pack all images up and send them by mail
            self.write(simplejson.dumps([0]))
            self.set_header("Content-Type", "application/json")
Example #16
def api_user_list(request):
    convert_session_id_to_user(request)

    if request.method != u'POST':
        return return_error_response()
    elif 'application/json' in request.META['CONTENT_TYPE'].split(';'):
        user = request.user
        if not user.is_staff:
            return HttpResponse(simplejson.dumps({'error': True, 'info': u'仅限管理员访问'}), content_type='application/json')
        else:
            profile_detail_list = ProfileDetail.objects.all()
            response_data = list()
            for profile_detail in profile_detail_list:
                data = {
                    'id': profile_detail.profile.id,
                    'username': profile_detail.profile.username,
                    'phone_number': profile_detail.phone_number,
                    'email': profile_detail.profile.email,
                    'community': profile_detail.community.title,
                    'floor': profile_detail.floor,
                    'gate_card': profile_detail.gate_card,
                    'address': profile_detail.address
                }
                response_data.append(data)
            return HttpResponse(simplejson.dumps(response_data), content_type='application/json')
    else:
        return return_404_response()
Example #17
def broadcastNewPrograms(channel, programs, new_channel=False, to_owner=True, token=None):
  programs = programs if isinstance(programs, types.ListType) else [programs]

  response = {}
  response['type'] = 'new_programs'
  response['channel_id'] = channel.id
  response['channel'] = channel.toJson() if new_channel else None
  response['programs'] = [p.toJson(False) for p in programs]
  channels = memcache.get('web_channels') or {}
  
  if channel.privacy != constants.Privacy.PUBLIC and to_owner:
    if channel.user and channel.user.id in channels.iterkeys():
      webchannel.send_message(channels.get(channel.user.id), simplejson.dumps(response))
    elif token:
      webchannel.send_message(channels.get(token), simplejson.dumps(response))
  
  if channel.privacy == constants.Privacy.FRIENDS:
    if channel.user:
      for fid in channel.user.friends:
        if fid in channels.iterkeys():
          webchannel.send_message(channels.get(fid), simplejson.dumps(response))
  
  if channel.privacy == constants.Privacy.PUBLIC:
    for client in channels.iterkeys():
      webchannel.send_message(channels.get(client), simplejson.dumps(response))
Example #18
def api_user_update(request):
    convert_session_id_to_user(request)
    if request.method != u'POST':
        return return_error_response()
    elif 'application/json' in request.META['CONTENT_TYPE'].split(';'):
        data = simplejson.loads(request.body)
        username = data.get(u'username', None)
        mobile = data.get(u'mobile', None)
        email = data.get(u'email', None)
        community_id = data.get(u'community', None)
        floor = data.get(u'floor', None)
        gate_card = data.get(u'gate_card', None)
        address = data.get(u'address', None)
        pattern = re.compile(r'^(1[0-9][0-9])\d{8}$')
        if not pattern.match(mobile):
            response_data = {'mobile_error': True, 'info': u'请输入正确的手机号码'}
            return HttpResponse(simplejson.dumps(response_data), content_type='application/json')
        if not validateEmail(email):
            response_data = {'email_error': True, 'info': u'请输入正确的邮箱地址'}
            return HttpResponse(simplejson.dumps(response_data), content_type='application/json')
        user = request.user
        user.email = email
        user.save()
        profile_detail = ProfileDetail.objects.get(profile=user)
        community = Community.objects.get(id=community_id)
        profile_detail.community = community
        profile_detail.floor = floor
        profile_detail.gate_card = gate_card
        profile_detail.address = address
        profile_detail.save()
        return HttpResponse(simplejson.dumps({'info': 'update profile detail successful'}),
                            content_type='application/json')
    else:
        return return_404_response()
Example #19
def api_user_change_password(request):
    convert_session_id_to_user(request)
    if request.method != u'POST':
        return return_error_response()
    elif 'application/json' in request.META['CONTENT_TYPE'].split(';'):
        data = simplejson.loads(request.body)
        old_password = data.get(u'old_password', None)
        new_password = data.get(u'new_password', None)
        repeat_password = data.get(u'repeat_password', None)
        user = request.user
        if new_password != repeat_password:
            response_data = {'error': True, 'info': u'两次密码不一致'}
            return HttpResponse(simplejson.dumps(response_data), content_type='application/json')
        if check_password(old_password, user.password):
            pattern = re.compile('\w{6,15}')
            match = pattern.match(new_password)
            if not match:
                response_data = {'error': True, 'info': u'密码长度为6-15位数字或字母'}
                return HttpResponse(simplejson.dumps(response_data), content_type='application/json')
            else:
                user.password = make_password(new_password, 'md5')
                user.save()
                Session.objects.get(session_key=request.META['HTTP_SESSIONID']).delete()
                return HttpResponse(simplejson.dumps({'error': False, 'info': u'密码更新成功'}),
                                    content_type='application/json')
        else:
            response_data = {'error': True, 'info': u'旧密码不正确'}
            return HttpResponse(simplejson.dumps(response_data), content_type='application/json')
    else:
        return return_404_response()
Example #20
def get_encoded_character(deviceid,text):
    avd_device = "adb -s %s" % deviceid
    start_app = "%s shell am start -an com.symbio.input.unicode/.Main" % avd_device
    click_dpad_down = "%s shell input keyevent KEYCODE_DPAD_DOWN" % avd_device
    click_dpad_enter = "%s shell input keyevent KEYCODE_ENTER" % avd_device
    click_dpad_space = "%s shell input keyevent KEYCODE_SPACE" % avd_device
    # log("%r"%text)
    run_cmd(start_app)
    time.sleep(2)
    text_list = text.split()
    log(text_list)
    text_list = [x.encode('utf8') if is_pure_alnum(x) else x for x in text_list]
    log(text_list)
    for t in text_list[:-1]:
        cmd = "%s shell input text %r"  % (avd_device, json.dumps(t)[1:-1])
        cmd = wrapper(cmd)
        run_cmd(cmd)
        run_cmd(click_dpad_space)
    cmd = "%s shell input text %r"  % (avd_device, json.dumps(text_list[-1])[1:-1])
    cmd = wrapper(cmd)
    log(cmd)
    run_cmd(cmd)

    run_cmd(click_dpad_down)
    if __DEBUG__:
        run_cmd(click_dpad_down)
    run_cmd(click_dpad_enter)
Example #21
def app(environ, start_response):
    '''Application'''

    if environ['REQUEST_METHOD'] != 'POST':
        start_response('405 Method Not Allowed', [('Content-type', 'application/json')])
        return ''

    try:
        req = json.load(environ['wsgi.input'])
    except Exception as e:
        start_response('400 Bad Request', [('Content-type', 'application/json')])
        return [json.dumps({'status': 'error', 'text': e.message})]
    url = req.get('url', None)
    if url is None:
        start_response('400 Bad Request', [('Content-type', 'application/json')])
        return [json.dumps({'status': 'error', 'text': "Missing 'url' parameter"})]

    res = extract_video_info(url)

    entries = res.get('entries')
    if entries is not None:
        for entry in entries:
            ret = add_stream(entry)
    else:
        ret = add_stream(res)
    start_response('200 OK', [('Content-type', 'application/json')])
    return [json.dumps({'status': 'success', 'text': 'Successfully added'})]
Example #22
    def handle(self, *args, **options):
        if len(args) != 0: raise CommandError("This command doesn't expect arguments!")
        flip_all = options['flip_all']
        code_red = options['code_red']

        es = get_es_new()
        es_indices = list(get_all_expected_es_indices())
        if code_red:
            if raw_input('\n'.join([
                'CODE RED!!!',
                'Really delete ALL the elastic indices and pillow checkpoints?',
                'The following indices will be affected:',
                '\n'.join([unicode(index_info) for index_info in es_indices]),
                'This is a PERMANENT action. (Type "code red" to continue):',
                '',
            ])).lower() == 'code red':
                for index_info in es_indices:
                    es.indices.delete(index_info.index)
                    print 'deleted elastic index: {}'.format(index_info.index)
            else:
                print 'Safety first!'
            return

        if flip_all:
            for index_info in es_indices:
                assume_alias(es, index_info.index, index_info.alias)
            print simplejson.dumps(es.indices.get_aliases(), indent=4)
Example #23
def search(request):
	img = []
	name = []
	price = []
	fatherid = []
	dict = {}
	if request.method == 'POST':
		req = simplejson.loads(request.raw_post_data)
		searchname = req['search']
		res = dish.objects.filter(name__contains = searchname)
		for i in res:
			img.append(i.img)
			name.append(i.name)
			price.append(i.price)
			fatherid.append(i.fatherid.id)
		dict['img'] = img
		dict['name'] = name
		dict['price'] = price
		dict['fatherid'] = fatherid
		dict['status'] = 'y'
		x  = simplejson.dumps(dict)
		return HttpResponse(x)
	dict['status'] = 'n'
	x = simplejson.dumps(dict)
	return HttpResponse(x)
Example #24
def onedish(request):
	dict = {}
	commentitem = []
	if request.method == 'POST':
		req = simplejson.loads(request.raw_post_data)
		fatherid = req['fatherid']
		dishname = req['dishname']
		onedish = dish.objects.filter(fatherid = fatherid,name = dishname)
		res = restaurant.objects.filter(id = fatherid)
		onedish_introduce = onedish[0].introduce
		onedish_price = onedish[0].price
		onedish_father = res[0].name
		onedish_img = onedish[0].img
		onedish_id = onedish[0].id
		onedish_mark = onedish[0].mark
		onedish_marknum = onedish[0].marknum

		commentlist = comment.objects.filter(dishid = onedish_id)
		for i in commentlist:
			commentitem.append(i.content)
		dict['commentlist'] = commentitem
		dict['onedish_id'] = onedish_id
		dict['onedish_marknum'] = onedish_marknum
		dict['onedish_mark'] = onedish_mark
		dict['onedish_introduce'] = onedish_introduce
		dict['onedish_price'] = onedish_price
		dict['onedish_img'] = onedish_img
		dict['onedish_father'] = onedish_father
		dict['status'] = 'y'
		x = simplejson.dumps(dict)
		return HttpResponse(x)
	dict['status'] = 'n'
	x = simplejson.dumps(dict)
	return HttpResponse(x)
Example #25
def sendmessage(request):
	dict = {}
	if request.method == 'POST':
		req = simplejson.loads(request.raw_post_data)
		message = req['message']
		phonenumber = req['phonenumber']
		sendCount = req['sendCount']
		sendSum = req['sendSum']
		sendCoupon = req['sendCoupon']
		sendUserId = req['sendUserId']
		dict['message'] = message
		dict['phonenumber'] = phonenumber
		dict['sendCoupon'] = sendCoupon
		dict['sendCount'] = sendCount
		dict['sendSum'] = sendSum
		dict['sendUserId'] = sendUserId
		userObject = user.objects.get(id = sendUserId)
		p = order(count= sendCount,number = sendSum,fatherid = userObject)
		p.save()
		if sendCoupon > 0:
			coupon.objects.get(id = sendCoupon).delete()
		# resp = requests.post(("https://sms-api.luosimao.com/v1/send.json"),auth=("api", "6bac001348b3495d558a8edffe0312bb"),data={"mobile": phonenumber,"message": message},timeout=3 , verify=False);
		# result = json.loads(resp.content)
		# dict['status'] = result['error']
		# x = simplejson.dumps(dict)
# do not send the SMS (sending is disabled)
		dict['status'] = "0"
		x = simplejson.dumps(dict)
		return HttpResponse(x)
	dict['status'] = '104'
	x = simplejson.dumps(dict)
	return HttpResponse(x)
Example #26
def retag_question(request, id):
    """retag question view
    """
    question = get_object_or_404(models.Post, id=id)

    try:
        request.user.assert_can_retag_question(question)
        if request.method == 'POST':
            form = forms.RetagQuestionForm(question, request.POST)

            if form.is_valid():
                if form.has_changed():
                    text = question.get_text_content(tags=form.cleaned_data['tags'])
                    if akismet_check_spam(text, request):
                        message = _('Spam was detected on your post, sorry if it was a mistake')
                        raise exceptions.PermissionDenied(message)

                    request.user.retag_question(question=question, tags=form.cleaned_data['tags'])
                if request.is_ajax():
                    response_data = {
                        'success': True,
                        'new_tags': question.thread.tagnames
                    }

                    if request.user.message_set.count() > 0:
                        #todo: here we will possibly junk messages
                        message = request.user.get_and_delete_messages()[-1]
                        response_data['message'] = message

                    data = simplejson.dumps(response_data)
                    return HttpResponse(data, content_type="application/json")
                else:
                    return HttpResponseRedirect(question.get_absolute_url())
            elif request.is_ajax():
                response_data = {
                    'message': format_errors(form.errors['tags']),
                    'success': False
                }
                data = simplejson.dumps(response_data)
                return HttpResponse(data, content_type="application/json")
        else:
            form = forms.RetagQuestionForm(question)

        data = {
            'active_tab': 'questions',
            'question': question,
            'form' : form,
        }
        return render(request, 'question_retag.html', data)
    except exceptions.PermissionDenied as e:
        if request.is_ajax():
            response_data = {
                'message': unicode(e),
                'success': False
            }
            data = simplejson.dumps(response_data)
            return HttpResponse(data, content_type="application/json")
        else:
            request.user.message_set.create(message = unicode(e))
            return HttpResponseRedirect(question.get_absolute_url())
Example #27
def login(request):
	dict= {}
	if request.method == 'POST':
		req = simplejson.loads(request.raw_post_data)
		rusername = req['username']
		rpassword = req['password']
		b = user.objects.filter(username=rusername)
		if(len(b) == 0):
			dict["status"] = '101'#用户名不存在返回101
		elif(b[0].password != rpassword):
			dict["status"] = '102'#密码错误返回102
		else:
			dict["status"] = 'yes'#验证成功返回yes
			dict['name']  = b[0].name
			dict['phonenumber'] = b[0].phonenumber
			dict['address'] = b[0].address
			dict['id'] = b[0].id

		x = simplejson.dumps(dict)
		return HttpResponse(x)

	else:
		dict['status'] = 'wa'
		x = simplejson.dumps(dict)
		return HttpResponse(x)
Example #28
    def send_command(self, command, arguments = None):
        if command not in self.cmd_list and command != 'command-list':
            raise SuricataCommandException("No such command: %s" % command)

        cmdmsg = {}
        cmdmsg['command'] = command
        if arguments is not None:
            cmdmsg['arguments'] = arguments
        if self.verbose:
            print("SND: " + json.dumps(cmdmsg))
        cmdmsg_str = json.dumps(cmdmsg) + "\n"
        if sys.version < '3':
            self.socket.send(cmdmsg_str)
        else:
            self.socket.send(bytes(cmdmsg_str, 'iso-8859-1'))

        ready = select.select([self.socket], [], [], 600)
        if ready[0]:
            cmdret = self.json_recv()
        else:
            cmdret = None

        if cmdret is None:
            raise SuricataReturnException("Unable to get message from server")

        if self.verbose:
            print("RCV: "+ json.dumps(cmdret))

        return cmdret
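
For context, the framing this method relies on is one JSON object per line over the already-connected socket. A minimal sketch of a round trip; the command name and the reply contents are illustrative, not taken from the original:

import json

request = json.dumps({"command": "command-list"}) + "\n"   # what send_command() writes
# self.socket.send(request) ... json_recv() then parses one reply line such as:
reply = json.loads('{"return": "OK", "message": {"commands": ["shutdown", "uptime"]}}')
assert reply["return"] == "OK"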
Example #29
    def bulk_index(self, index, docs, id_field="_id", parent_field="_parent"):
        chunks = []
        for doc in copy.deepcopy(docs):
            if "_type" not in doc:
                raise ValueError("document is missing _type field.")

            action = {"index": {"_index": index, "_type": doc.pop("_type")}}

            if doc.get(id_field) is not None:
                action["index"]["_id"] = doc[id_field]

            if doc.get(parent_field) is not None:
                action["index"]["_parent"] = doc.pop(parent_field)

            chunks.append(json.dumps(action))
            chunks.append(json.dumps(doc))

        payload = "\n".join(chunks) + "\n"
        url = self.build_url(index, None, "_bulk")
        asr = erequests.AsyncRequest("POST", url, self.session)
        asr.prepare(data=payload)

        r = self.map_one(asr)
        try:
            return r.json()
        except Exception as exc:
            raise ElasticSearchError(url) from exc
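
The loop above emits alternating action and document lines (newline-delimited JSON), which is the format the _bulk endpoint expects. Schematically, for two hypothetical input documents:

# Illustrative input; the index name and field values are invented.
docs = [
    {"_type": "article", "_id": "42", "title": "first doc"},
    {"_type": "article", "_parent": "42", "title": "child doc"},
]
# payload posted to <index>/_bulk (one JSON object per line, with a trailing newline):
#   {"index": {"_index": "articles", "_type": "article", "_id": "42"}}
#   {"_id": "42", "title": "first doc"}
#   {"index": {"_index": "articles", "_type": "article", "_parent": "42"}}
#   {"title": "child doc"}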
Example #30
def materias_view(request,pagina):
	if request.method == "POST":
		if "materia_id" in request.POST:
			try:
				id_mat = request.POST['materia_id']
				m = Materia.objects.get(pk=id_mat )
				mensaje = {"num_horas":"64","materia_id":m.id}
				m.delete()  # delete the Materia
				return HttpResponse(simplejson.dumps(mensaje),mimetype='application/json')
			except:
				mensaje = {"num_horas":"120"}
				return HttpResponse(simplejson.dumps(mensaje),mimetype='application/json')
	


	lista_mat = Materia.objects.get_query_set()  # roughly: select * from materias where horas=64
	paginator = Paginator(lista_mat,3)  # how many items you want per page = 3
	try:
		page = int(pagina)
	except:
		page = 1
	try:
		listaMaterias = paginator.page(page)
	except (EmptyPage,InvalidPage):
		listaMaterias = paginator.page(paginator.num_pages)

	ctx = {'listaMaterias':listaMaterias}
	return render_to_response('materias/listaMaterias.html', ctx, context_instance=RequestContext(request))
Example #31
def dumps_with_simplejson():
    simplejson.dumps(test_object)
Example #32
        project['projectBinaryData'] = decode_project_binary(
            project['projectBinaryData'])

    for source_key, target_key, custom_map in keys:

        try:

            val = dpath.util.get(project, source_key)
        except KeyError:
            # logger.debug("key %s is missing.  Defaulting to Null" % source_key)
            val = None

        if callable(custom_map):
            val = custom_map(val)

        mapped[target_key] = val

    return mapped


if __name__ == '__main__':

    with open('./project_samples_dynamojson/sample2.json', 'r') as fo:
        project = json.loads(fo.read())

        # mapped = map(project)
        mapped = map(flatten_dynamo_document(project))
        # print("mapped: %s" % mapped)
        with open("output.json", "w") as fo:
            fo.write(json.dumps(mapped))
Example #33
 def get(self):
     result = {'success':True}
     self.clear_cookie("user")
     self.write(json.dumps(result))
Example #34
 def __repr__(self):
     return json.dumps(self.config())
Example #35
def auth(environ, start_response):
    result = ""
    responseCode = "200 Ok"
    returnContentType = 'text/html'
    redirect = False

    request = environ['REQUEST_URI']
    method = environ['REQUEST_METHOD']

    log("%s %s" % (method, request))

    if method == "POST":
        options = environ['wsgi.input'].read()
        application = request.split("/")[-1]
    elif method == "GET":
        application, options = request.split("/")[-1].split("?")

    p = dict([x.split("=") for x in options.split("&")])

    m = md5()
    m.update(SALT)
    m.update(p.get('client_id', ''))

    decodedURI = urllib.unquote(p.get('redirect_uri', ''))
    if '?' in decodedURI:
        m.update(decodedURI.split("?")[0])
    elif '#' in decodedURI:
        m.update(decodedURI.split("#")[0])
    else:
        m.update(decodedURI)

    code = m.hexdigest()
    now = time.time()
    token = buildToken(now, code)

    keys = dict([
        (access[0], access[1:])
        for access in [line.strip().split(" ") for line in open(".auth", "r")]
    ])

    if application == "auth":
        #TODO: handle refresh tokens
        if code in keys:
            #TODO: at this point, we need a mechanism for the user to give permission to proceed. Not implemented yet.
            expiry, scope = keys[code]
            if p['response_type'] == "code":
                codeTokens[token] = (now, code)
                uri = decodedURI + "?code=%s&state=%s" % (token, p['state'])
            elif p['response_type'] == "token":
                accessTokens[token] = (now, code, scope)
                log("new token %s" % token)
                uri = "#access_token=%s&token_type=bearer&expires_in=%d&scope=%s&state=%s"
                uri = decodedURI + uri % (token, ACCESS_TOKEN_EXPIRY, "+".join(
                    keys[code]), p.get('state', ''))

            redirect = uri
            result = "Redirecting to %s." % uri
        else:
            result = MSG_AUTH_REQUEST
            result = result % (p['client_id'], decodedURI,
                               p.get('scope', 'Not specified'), 'state'
                               in p, code, p['client_id'], p['redirect_uri'],
                               p.get('state', ''), p['response_type'])


#    elif application == "token": # IS THIS EVEN USED?
#      if not "grant_type" in p:
#        return "parameter grant_type missing" #fail
#
#      if p['grant_type'] == "authorization_code" :
#        codeToken  = p['code']
#        if codeToken in codeTokens:
#          timestamp, client = codeTokens[codeToken]
#          if time.time()-timestamp > TOKEN_EXPIRY:
#            return "timeout" #fail
#          if code != client:
#            return "token does not match client" #fail
#
#          accessTokens[token] = (now, code)
#          refreshToken = buildToken(time.time(), SALT + code) #FIXME: use more variance?
#          refreshTokens[refreshToken] = (now, code)
#
#          returnContentType = "application/json;charset=UTF-8"
#          jsondata = {
#            "access_token": token,
#            "token_type": "bearer",
#            "expires_in": ACCESSTOKEN_EXPIRY,
#            "refresh_token": refreshToken,
#          }
#          result = json.dumps(jsondata)

    elif application == "vrfy":
        returnContentType = "application/json;charset=UTF-8"
        if p['access_type'] == "token":
            token = p['token']
            log("vrfy token %s is valid: %s" %
                (token, str(token in accessTokens)))
            if token in accessTokens:
                timestamp, code, scope = accessTokens[token]
                if time.time() > timestamp + ACCESS_TOKEN_EXPIRY:
                    result = json.dumps({})
                    responseCode = "401 Expired"
                else:
                    result = json.dumps({
                        "verified-for":
                        code,
                        "scope":
                        scope,
                        "expires":
                        timestamp + ACCESS_TOKEN_EXPIRY
                    })
            else:
                result = json.dumps({})
                responseCode = "401 Unauthorized"

    # TODO: Add headers if required
    headers = [
        ('Content-Type', returnContentType),
        ('Cache-Control', 'no-store'),
        ('Pragma', 'no-cache'),
    ]
    if redirect:
        headers.append(('Location', redirect))
        responseCode = "302 Granted"
    start_response(responseCode, headers)
    return result
Example #36
def dump(label, o):
    print('%s:\n%s' % (label, json.dumps(o, indent=2, separators=(',', ':'))))
Example #37
def dumps_sorted_with_simplejson():
    simplejson.dumps(test_object, sort_keys=True)
Example #38
def panel(trans_id):
    """
    This method calls index.html to render the data grid.

    Args:
        trans_id: unique transaction id
    """

    url_params = None
    if request.args:
        url_params = {k: v for k, v in request.args.items()}

    if request.form:
        url_params['title'] = request.form['title']
        if 'sql_filter' in request.form:
            url_params['sql_filter'] = request.form['sql_filter']
        if 'query_url' in request.form:
            url_params['query_url'] = request.form['query_url']

    # We need client OS information to render correct Keyboard shortcuts
    user_agent = UserAgent(request.headers.get('User-Agent'))
    """
    Animations and transitions are not automatically GPU accelerated and by
    default use the browser's slow rendering engine. We need to set the
    'translate3d' value of the '-webkit-transform' property in order to use
    the GPU. After applying this property under Linux, WebKit calculates the
    wrong position of the elements, so panel contents are not visible. To
    make it work, we need to explicitly set the '-webkit-transform' property
    to 'none' for the .ajs-notifier, .ajs-message and .ajs-modal classes.

    This issue occurs only with the Linux runtime application and is observed
    in the Query Tool and debugger. When we open the 'Open File' dialog, the
    whole Query Tool panel content is not visible, even though its HTML
    elements exist in the back end.

    The port number should have already been set by the runtime if we're
    running in desktop mode.
    """
    is_linux_platform = False

    from sys import platform as _platform
    if "linux" in _platform:
        is_linux_platform = True

    # Fetch the server details
    bgcolor = None
    fgcolor = None

    s = Server.query.filter_by(id=url_params['sid']).first()
    if s and s.bgcolor:
        # If the background is set to white, we do not have to change the
        # title background; otherwise change it to the user-specified
        # background.
        if s.bgcolor != '#ffffff':
            bgcolor = s.bgcolor
        fgcolor = s.fgcolor or 'black'

    layout = get_setting('SQLEditor/Layout')

    macros = get_user_macros()

    return render_template("datagrid/index.html",
                           _=gettext,
                           uniqueId=trans_id,
                           is_desktop_mode=app.PGADMIN_RUNTIME,
                           is_linux=is_linux_platform,
                           title=underscore_unescape(url_params['title']),
                           url_params=json.dumps(url_params),
                           client_platform=user_agent.platform,
                           bgcolor=bgcolor,
                           fgcolor=fgcolor,
                           layout=layout,
                           macros=macros)
Example #39
 def to_str(self):
     import simplejson as json
     return json.dumps(self.to_dict())
Example #40
def process_tweet(line, track_list, expand_url=False):

    tweet = simplejson.loads(line)

    if track_list:
        track_set = set(track_list)
    else:
        track_set = None
	# List of punct to remove from string for track keyword matching
    punct = re.escape('!"$%&\'()*+,-./:;<=>?@[\\]^`{|}~')


    if (tweet.has_key("entities") and "created_at" in tweet and "created_at" in tweet['user']):
        hashtag_num = 0
        tweet['hashtags'] = []
        tweet['mentions'] = []
        tweet['codes'] = []

        if "hashtags" in tweet['entities']:
			hashtag_num = len(tweet['entities']['hashtags'])
			for index in range(len(tweet['entities']['hashtags'])):
				tweet['hashtags'].append(tweet['entities']['hashtags'][index]['text'].lower())


        urls_num = 0
        coded_url_num = 0
        urls = []
        if "urls" in tweet['entities']:
			urls_num = len(tweet['entities']['urls'])
			if expand_url:
				for urls in tweet['entities']['urls']:
					url_code = None
					if 'long-url' in urls and urls['long-url'] is not None:
						url_code = ck_coded_url(urls['long-url'])
					elif "expanded_url" in urls and urls['expanded_url'] is not None:
						url_code = ck_coded_url(urls['expanded_url'])
					elif "url" in urls:
						url_code = ck_coded_url(urls['url'])
					if url_code:
						urls['code'] = url_code[0]
						urls['hashtag'] = url_code[1]
						tweet['codes'].append(url_code[0])
			coded_url_num = len(tweet['codes'])

        mentions_num = 0
        if "user_mentions" in tweet['entities']:
			mentions_num = len(tweet['entities']['user_mentions'])
			for index in range(len(tweet['entities']['user_mentions'])):
				if "screen_name" in tweet['entities']['user_mentions'][index]:
					tweet['mentions'].append(tweet['entities']['user_mentions'][index]['screen_name'].lower())

        tweet['counts'] = {
							'urls': urls_num,
							'hashtags': hashtag_num,
							'user_mentions': mentions_num,
							'coded_urls': coded_url_num
							};

        tweet['hashtags'].sort()
        tweet['mentions'].sort()

        tweet['text_hash'] = hashlib.md5(tweet['text'].encode("utf-8")).hexdigest()
	
	tweet["track_kw"] = {"org_tweet" : {}, "rt_tweet" : {}, "qt_tweet" : {}}
	
	#regular expression to delete emojis
        emoji_pattern = re.compile(u'([\U00002600-\U000027BF])|([\U0001f300-\U0001f64F])|([\U0001f680-\U0001f6FF])')
	
	# Check to see if we have a retweet
        if tweet.has_key("retweeted_status") and tweet['retweeted_status']['truncated']== True:

	    rt_hashtags = []
            rt_mentions = []
	    rt_urls = []

            for index in range(len(tweet['retweeted_status']['extended_tweet']['entities']['hashtags'])):
            	rt_hashtags.append(tweet['retweeted_status']['extended_tweet']['entities']['hashtags'][index]['text'].lower())
            for index in range(len(tweet['retweeted_status']['extended_tweet']['entities']['user_mentions'])):
            	rt_mentions.append(tweet['retweeted_status']['extended_tweet']['entities']['user_mentions'][index]['screen_name'].lower())
	    for index in range(len(tweet['retweeted_status']['extended_tweet']['entities']['urls'])):
                rt_urls.append(tweet['retweeted_status']['extended_tweet']['entities']['urls'][index]['expanded_url'].lower())

            if track_set:
                rt_hashtags = set([x.lower() for x in rt_hashtags])
                rt_mentions = set([x.lower() for x in rt_mentions])
                track_set = set([x.lower() for x in track_set])
		tweet["track_kw"]["rt_tweet"]["hashtags"]  = list(set(rt_hashtags).intersection(track_set)) 
		tweet["track_kw"]["rt_tweet"]["mentions"] = list(set(rt_mentions).intersection(track_set))
		rt_text = re.sub('[%s]' % punct, ' ', tweet['retweeted_status']['extended_tweet']['full_text'])
		rt_text = emoji_pattern.sub(r'', rt_text)
		rt_text = rt_text.lower().split()
		tweet["track_kw"]["rt_tweet"]["text"] = list(set(rt_text).intersection(track_set))
		tmpURLs = []
		for url in rt_urls:
			for x in track_set:
				if x in url:
					tmpURLs.append(url)
		tweet["track_kw"]["rt_tweet"]["urls"] = list(tmpURLs)

	#Check if is retweet and it is not truncated
	elif tweet.has_key("retweeted_status") and tweet['retweeted_status']['truncated']== False:
            rt_hashtags = []
            rt_mentions = []
            rt_urls = []

            for index in range(len(tweet['retweeted_status']['entities']['hashtags'])):
                rt_hashtags.append(tweet['retweeted_status']['entities']['hashtags'][index]['text'].lower())
            for index in range(len(tweet['retweeted_status']['entities']['user_mentions'])):
                rt_mentions.append(tweet['retweeted_status']['entities']['user_mentions'][index]['screen_name'].lower())
            for index in range(len(tweet['retweeted_status']['entities']['urls'])):
                rt_urls.append(tweet['retweeted_status']['entities']['urls'][index]['expanded_url'].lower())

            if track_set:
                rt_hashtags = set([x.lower() for x in rt_hashtags])
                rt_mentions = set([x.lower() for x in rt_mentions])
                track_set = set([x.lower() for x in track_set])
                tweet["track_kw"]["rt_tweet"]["hashtags"]  = list(set(rt_hashtags).intersection(track_set))  #list(rt_hashtags.intersection(track_set))
                tweet["track_kw"]["rt_tweet"]["mentions"] = list(set(rt_mentions).intersection(track_set))  #list(rt_mentions.intersection(track_set))
                rt_text = re.sub('[%s]' % punct, ' ', tweet['retweeted_status']['text'])
                rt_text = emoji_pattern.sub(r'', rt_text)
		rt_text = rt_text.lower().split()
                tweet["track_kw"]["rt_tweet"]["text"] = list(set(rt_text).intersection(track_set))
                tmpURLs = []
                for url in rt_urls:
                        for x in track_set:
                                if x in url:
                                        tmpURLs.append(url)
                tweet["track_kw"]["rt_tweet"]["urls"] = list(tmpURLs)


	#check if we have a quoted tweet and if it is truncated
	if tweet.has_key("quoted_status") and tweet['quoted_status']['truncated']== True :

            qt_hashtags = []
            qt_mentions = []
	    qt_urls = []

 	    for index in range(len(tweet['quoted_status']['extended_tweet']['entities']['hashtags'])):
                qt_hashtags.append(tweet['quoted_status']['extended_tweet']['entities']['hashtags'][index]['text'].lower())
            for index in range(len(tweet['quoted_status']['extended_tweet']['entities']['user_mentions'])):
                qt_mentions.append(tweet['quoted_status']['extended_tweet']['entities']['user_mentions'][index]['screen_name'].lower())
	    for index in range(len(tweet['quoted_status']['extended_tweet']['entities']['urls'])):
                qt_urls.append(tweet['quoted_status']['extended_tweet']['entities']['urls'][index]['expanded_url'].lower())

            
            if track_set:
                qt_hashtags = set([x.lower() for x in qt_hashtags])
                qt_mentions = set([x.lower() for x in qt_mentions])
                track_set = set([x.lower() for x in track_set])
                tweet["track_kw"]["qt_tweet"]["hashtags"]  = list(set(qt_hashtags).intersection(track_set))
                tweet["track_kw"]["qt_tweet"]["mentions"] = list(set(qt_mentions).intersection(track_set))
                qt_text = re.sub('[%s]' % punct, ' ', tweet['quoted_status']['extended_tweet']['full_text'])
		qt_text = emoji_pattern.sub(r'', qt_text)
                qt_text = qt_text.lower().split()
                tweet["track_kw"]["qt_tweet"]["text"] = list(set(qt_text).intersection(track_set))
		tmpURLs = []
                for url in qt_urls:
                        for x in track_set:
                                if x in url:
                                        tmpURLs.append(url)
                tweet["track_kw"]["qt_tweet"]["urls"] = list(tmpURLs)

	#Check if we have a quoted tweet and it is not truncated
	elif  tweet.has_key("quoted_status") and tweet['quoted_status']['truncated']== False :

            qt_hashtags = []
            qt_mentions = []
            qt_urls = []

            for index in range(len(tweet['quoted_status']['entities']['hashtags'])):
                qt_hashtags.append(tweet['quoted_status']['entities']['hashtags'][index]['text'].lower())
            for index in range(len(tweet['quoted_status']['entities']['user_mentions'])):
                qt_mentions.append(tweet['quoted_status']['entities']['user_mentions'][index]['screen_name'].lower())
            for index in range(len(tweet['quoted_status']['entities']['urls'])):
                qt_urls.append(tweet['quoted_status']['entities']['urls'][index]['expanded_url'].lower())


            if track_set:
                qt_hashtags = set([x.lower() for x in qt_hashtags])
                qt_mentions = set([x.lower() for x in qt_mentions])
                track_set = set([x.lower() for x in track_set])
                tweet["track_kw"]["qt_tweet"]["hashtags"]  = list(set(qt_hashtags).intersection(track_set))
                tweet["track_kw"]["qt_tweet"]["mentions"] = list(set(qt_mentions).intersection(track_set))
                qt_text = re.sub('[%s]' % punct, ' ', tweet['quoted_status']['text'])
		qt_text = emoji_pattern.sub(r'', qt_text)
                qt_text = qt_text.lower().split()
                tweet["track_kw"]["qt_tweet"]["text"] = list(set(qt_text).intersection(track_set))

                tmpURLs = []
                for url in qt_urls:
                        for x in track_set:
                                if x in url:
                                        tmpURLs.append(url)
                tweet["track_kw"]["qt_tweet"]["urls"] = list(tmpURLs)

	#Check Original tweets
        if track_set and tweet['truncated'] == False :

                        myURLs = []
			for index in range(len(tweet['entities']['urls'])):
				myURLs.append(tweet['entities']['urls'][index]['expanded_url'].lower())
			
			hashTags_set = set([x.lower() for x in tweet['hashtags']])
                        mentions_set = set([x.lower() for x in tweet['mentions']])
                        track_set = set([x.lower() for x in track_set])
                        tweet["track_kw"]["org_tweet"]["hashtags"] = list(set(hashTags_set).intersection(track_set))
			tweet["track_kw"]["org_tweet"]["mentions"] = list(set(mentions_set).intersection(track_set))

                        tweet_text = re.sub('[%s]' % punct, ' ', tweet['text'])
			tweet_text = emoji_pattern.sub(r'', tweet_text)
                        tweet_text = tweet_text.lower().split()
			tweet["track_kw"]["org_tweet"]["text"] = list(set(tweet_text).intersection(track_set))
			tmpURLs = []
			for url in myURLs:
                        	for x in track_set:
                                	if x in url:
                                        	tmpURLs.append(url)
                	tweet["track_kw"]["org_tweet"]["urls"] = list(tmpURLs)

	elif track_set and tweet['truncated'] == True :
                        ext_hashtags = []
			ext_mentions = []
			ext_urls = []

			for index in range(len(tweet['extended_tweet']['entities']['hashtags'])):
    				ext_hashtags.append(tweet['extended_tweet']['entities']['hashtags'][index]['text'].lower())
			for index in range(len(tweet['extended_tweet']['entities']['user_mentions'])):
    				ext_mentions.append(tweet['extended_tweet']['entities']['user_mentions'][index]['screen_name'].lower())
			for index in range(len(tweet['extended_tweet']['entities']['urls'])):
				ext_urls.append(tweet['extended_tweet']['entities']['urls'][index]['expanded_url'].lower())

                        hashTags_set = set([x.lower() for x in ext_hashtags])
                        mentions_set = set([x.lower() for x in ext_mentions])
                        track_set = set([x.lower() for x in track_set])
                        tweet["track_kw"]["org_tweet"]["hashtags"] = list(set(hashTags_set).intersection(track_set))
                        tweet["track_kw"]["org_tweet"]["mentions"] = list(set(mentions_set).intersection(track_set))
                        #tweet['track_kw']['hashtags'] = list(set(hashTags_set).intersection(track_set))
                        #tweet['track_kw']['mentions'] = list(set(mentions_set).intersection(track_set))
                        #---------------------------------End new code by Dani-------------------------------------------------
                        tweet_text = re.sub('[%s]' % punct, ' ', tweet['extended_tweet']['full_text'])
                        tweet_text = emoji_pattern.sub(r'', tweet_text)
                        tweet_text = tweet_text.lower().split()
                        tweet["track_kw"]["org_tweet"]["text"] = list(set(tweet_text).intersection(track_set))
                        tmpURLs = []
                        for url in ext_urls:
                                for x in track_set:
                                        if x in url:
                                                tmpURLs.append(url)
                        tweet["track_kw"]["org_tweet"]["urls"] = list(tmpURLs)


        # Convert dates 2012-09-22 00:10:46
        # Note that we convert these to a datetime object and then convert back to string
        # and update the tweet with the new string. We do this because we want to find
        # and log any process issues here, not when we do an insert.
        #
        #tweet['created_ts'] = to_datetime(tweet['created_at'])
        #tweet['user']['created_ts'] = to_datetime(tweet['user']['created_at'])
        t = to_datetime(tweet['created_at'])
        tweet['created_ts'] = t.strftime('%Y-%m-%d %H:%M:%S')

        t = to_datetime(tweet['user']['created_at'])
        tweet['user']['created_ts'] = t.strftime('%Y-%m-%d %H:%M:%S')

        #print tweet['created_ts']

    tweet_out_string = simplejson.dumps(tweet).encode('utf-8') + '\n'

    return tweet_out_string
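
The `to_datetime` helper used above is not shown in this snippet. A minimal sketch of one plausible implementation, assuming the timestamps use Twitter's standard `created_at` format (e.g. "Sat Sep 22 00:10:46 +0000 2012"):

from datetime import datetime

def to_datetime(created_at):
    # Hypothetical helper: parse Twitter's created_at string into a datetime.
    # The '+0000' offset is matched literally, so this works on Python 2 and 3.
    return datetime.strptime(created_at, '%a %b %d %H:%M:%S +0000 %Y')
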
Exemplo n.º 41
0
 def json_error(err_msg, status=400):
     result = {'error': err_msg}
     return HttpResponse(json.dumps(result), status=status,
                         content_type=content_type)
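
The snippet references a `content_type` name that it never defines; presumably it is a module-level constant in the original source. A self-contained sketch under that assumption, plus a call site:

import json
from django.http import HttpResponse

content_type = 'application/json'  # assumed: defined at module level in the original source

def json_error(err_msg, status=400):
    # Wrap an error message in a small JSON envelope with the given HTTP status.
    result = {'error': err_msg}
    return HttpResponse(json.dumps(result), status=status,
                        content_type=content_type)

# e.g. inside a view:  return json_error('missing "id" parameter')
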
Exemplo n.º 42
0
def harmonize_unii(out_file, product_file, unii_file, class_index_dir):
  out = open(out_file, 'w')
  meta_file = csv.DictReader(open(product_file, 'rb'), delimiter='\t')

  ndc_dict = {}
  for row in meta_file:

    # Build ndc_dict, the data structure the final output loop consumes.
    # The Product file can contain duplicate set_id entries, so we key the
    # dict by SPL set_id and merge each row's substance names into that
    # key's list, de-duplicating along the way.
    this_spl_id = row['PRODUCTID'].split('_')[1]
    this_substance = row['SUBSTANCENAME']

    if this_substance.strip() != '':
      if this_spl_id in ndc_dict:
        tmp_substance = [s.lstrip() for s in this_substance.split(';')]
        ndc_dict[this_spl_id] = set(tmp_substance + ndc_dict[this_spl_id])
        ndc_dict[this_spl_id] = list(ndc_dict[this_spl_id])
      else:
        ndc_dict[this_spl_id] = [s.lstrip() for s in this_substance.split(';')]



  pharma_xmls = []
  # Grab all of the xml files
  for root, _, filenames in os.walk(class_index_dir):
    for filename in fnmatch.filter(filenames, '*.xml'):
      pharma_xmls.append(os.path.join(root, filename))

  # call async worker
  pharma_rows = parallel_extract(pharma_xmls, harmonization_extract_worker)

  unii_rows = extract_unii.load_unii_from_csv(unii_file)

  combo = []

  # Loop over ndc_dict, split its key, look for each token as a separate
  # UNII element, if it is one, then add it to the unii_info dict for this
  # loop cycle, once done with all of the tokenized keys, then loop over each
  # set_id in the ndc_dict value list and push a combine record onto the
  # list that will be the output.
  # Loop handles the many-to-many relationship of ingredients to products.
  unii_pivot = {}
  for key, value in ndc_dict.iteritems():
    for substance_name in value:
      if substance_name.lower() in pharma_rows:
        if key in unii_pivot:
          unii_pivot[key].append(pharma_rows[substance_name.lower()])
        else:
          unii_pivot[key] = [pharma_rows[substance_name.lower()]]
      elif substance_name.lower() in unii_rows:
        if key in unii_pivot:
          unii_pivot[key].append(unii_rows[substance_name.lower()])
        else:
          unii_pivot[key] = [unii_rows[substance_name.lower()]]

  for key, value in unii_pivot.iteritems():
    output_dict = {}
    output_dict['spl_id'] = key
    output_dict['unii_indexing'] = value
    combo.append(output_dict)

  for row in combo:
    out.write(json.dumps(row) + '\n')
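
The pivot loop above joins substance names to UNII records per SPL id, then emits one JSON record per id. A toy, self-contained sketch of the same many-to-many pivot (all names and values below are hypothetical, not taken from the real data files):

# Toy illustration of the ndc_dict -> unii_pivot join above.
ndc_dict = {'spl-1': ['Aspirin', 'Caffeine'], 'spl-2': ['Caffeine']}
unii_rows = {'aspirin': {'unii': 'UNII-A'}, 'caffeine': {'unii': 'UNII-B'}}

unii_pivot = {}
for spl_id, substances in ndc_dict.items():
    for name in substances:
        if name.lower() in unii_rows:
            # Collect every matching UNII record under its SPL id.
            unii_pivot.setdefault(spl_id, []).append(unii_rows[name.lower()])

combo = [{'spl_id': k, 'unii_indexing': v} for k, v in unii_pivot.items()]
# combo -> one output record per SPL id, each carrying all matching UNII entries.
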
Exemplo n.º 43
0
def task(request):
    mturk_hitId = request.GET.get('hitId', '')
    mturk_assignmentId = request.GET.get('assignmentId', '')
    expId = request.GET.get('experiment', '')
    usrId = request.GET.get('researcher', '')
    taskName = request.GET.get('task', '')
    wid = request.GET.get('wid', '')
    isSandbox = request.GET.get('sandbox', '')
    n = int(request.GET.get('n', '1'))

    logger.info("get %d tasks for wid %s" % (n, wid))

    expModel = ExperimentModel.objects.filter(name=expId, username=usrId)[0]
    banned_wids = json.loads(expModel.banned)["ids"]
    if wid in banned_wids:
        return HttpResponse("Your WorkerID has been banned")

    exps = ExperimentFile.objects.filter(username=usrId,
                                         experiment__name=expId)
    if len(exps) == 0:
        return HttpResponse("No experiments with those specs found")

    exp = None
    for exp in reversed(exps):
        if exp.original_filename == expModel.config_file:
            break
    if not exp:
        return HttpResponse(
            "Not found: config for exp with wid=%s and exp=%s" % (wid, expId))

    EX = exp.experiment

    his = json.loads(EX.analytics)
    if "wids" not in his:
        his["wids"] = []

    return_tasks = []
    find_tasks = WorkerTask.objects.filter(name=taskName,
                                           wid=wid,
                                           experiment=EX)

    logger.info("found %d tasks in database" % len(find_tasks))
    logger.info(str(find_tasks))

    if len(find_tasks) == 0:
        data = json.loads(exp.docfile.read())
        logger.info(data["tasks"])

        for task in data["tasks"]:
            logger.info(
                "params for task name %s.  Checking if it is same as '%s'" %
                (task['name'], taskName))
            if task["name"] == taskName:

                param = {}
                gen = [{}]

                # Create all combinations of the parameters' values
                for p in task["params"]:
                    if p["type"] == "UniformChoice":
                        gen2 = []
                        for inProgress in gen:
                            for choice in p["options"]:
                                modify = copy(inProgress)
                                modify[p["name"]] = choice
                                gen2.append(modify)
                        gen = gen2

                param = gen[0]
                seed(abs(hash(wid)) % (10**8))
                shuffle(gen)

                while n > len(gen):
                    gen.append({})

                balanced_history = json.loads(EX.balanced_history)
                for p in task["params"]:
                    if p["type"] == "CountDownChoice":
                        if p["name"] not in balanced_history:
                            balanced_history[p["name"]] = {}

                            for i in range(0, len(p["options"])):
                                balanced_history[p["name"]][
                                    p["options"][i][1]] = p["options"][i][0]

                balanced_history = json.loads(json.dumps(balanced_history))
                pickedsofar = {}

                logger.info("generating %d tasks" % n)
                for i in range(0, n):
                    param = gen.pop()

                    for p in task["params"]:
                        if p["name"] not in pickedsofar:
                            pickedsofar[p["name"]] = []

                        if p["type"] == "CountDownChoice":
                            sorter = []
                            historical_data = balanced_history[p["name"]]
                            possibilities = historical_data.keys()
                            possible_values = []

                            for key in possibilities:
                                if historical_data[key] > 0:
                                    possible_values.append(key)

                            print(possible_values)

                            if len(possible_values) == 0:
                                historical_data = {}
                                for i in range(0, len(p["options"])):
                                    historical_data[p["options"][i]
                                                    [1]] = p["options"][i][0]
                                    possible_values.append(p["options"][i][1])

                            shuffle(possible_values)
                            picked = possible_values[0]

                            historical_data[picked] -= 1
                            balanced_history[p["name"]] = historical_data

                            param[p["name"]] = picked

                    random.seed(hash(wid + taskName + expId))
                    task_id = ''.join(
                        random.choice(string.ascii_uppercase + string.digits)
                        for _ in range(10))
                    NewTask = WorkerTask(name=taskName,
                                         wid=wid,
                                         experiment=EX,
                                         identifier=task_id,
                                         researcher=usrId,
                                         hitId=mturk_hitId,
                                         assignmentId=mturk_assignmentId)

                    param["identifier"] = task_id
                    NewTask.params = json.dumps(param)

                    history = json.loads(NewTask.history)
                    timestamp_string = format(datetime.datetime.now(), u'U')
                    event = {
                        "type": "changeStatus",
                        "newStatus": "Waiting",
                        "timestamp": timestamp_string
                    }
                    history["events"].append(event)
                    NewTask.history = json.dumps(history)

                    NewTask.isSandbox = isSandbox
                    NewTask.save()
                    return_tasks.append(NewTask)

                logger.info("created %d tasks" % len(return_tasks))
                EX.analytics = json.dumps(his)
                EX.balanced_history = json.dumps(balanced_history)
                EX.save()

    for workertask in find_tasks:
        return_tasks.append(workertask)

    params_list = []
    ncompleted = 0
    for task in return_tasks:
        params = json.loads(task.params)

        results = json.loads(task.results)
        if len(results["data"]) == 0 and task.currentStatus == "Waiting":
            params_list.append(params)
        else:
            ncompleted += 1

    response = dict(params=params_list,
                    pay=EX.per_task_payment,
                    bonus=EX.bonus_payment,
                    ncompleted=ncompleted)
    return HttpResponse(json.dumps(response))
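
The UniformChoice branch above builds every combination of option values by repeatedly copying partial dicts. A shorter, equivalent sketch of that combination step using itertools.product (the parameter names below are illustrative, not from the original config):

from itertools import product

params = [
    {"name": "color", "type": "UniformChoice", "options": ["red", "blue"]},
    {"name": "size", "type": "UniformChoice", "options": ["S", "M", "L"]},
]

uniform = [p for p in params if p["type"] == "UniformChoice"]
names = [p["name"] for p in uniform]
# Cartesian product of all option lists, one dict per combination.
gen = [dict(zip(names, combo))
       for combo in product(*(p["options"] for p in uniform))]
# gen == [{'color': 'red', 'size': 'S'}, {'color': 'red', 'size': 'M'}, ...]
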
Exemplo n.º 44
0
def list_users():
    init_sys_cfg()
    print json.dumps(map(hipchat.user.User.get_json, hipchat.user.User.list()))
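
This snippet is Python 2 (print statement, map() returning a list). Under Python 3, map() is lazy and json.dumps cannot serialize the iterator directly, so it has to be materialized first. A small illustration of the pitfall:

import json

names = map(str.upper, ['alice', 'bob'])   # lazy iterator on Python 3
# json.dumps(names) would raise TypeError: Object of type map is not JSON serializable
print(json.dumps(list(names)))             # ["ALICE", "BOB"]
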
Exemplo n.º 45
0
def perform_query(query, socket_path, key=None):
    livestatus_socket = LivestatusSocket(socket_path)
    answer = livestatus_socket.send_query_and_receive_json_answer(query)
    formatted_answer = format_answer(query, answer, key)

    return json.dumps(formatted_answer, sort_keys=False, indent=4)
Exemplo n.º 46
0
 def _put(self, key, val):
     js = json.dumps(val)
     self.r.set(key, js)
Exemplo n.º 47
0
 def serialize(self):
     return json.dumps(self._request_params, sort_keys=True, indent=4 * ' ')
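
Note that `indent=4 * ' '` passes a string. Python 3's json (and simplejson) accept either an int or a string for indent, and four literal spaces behave the same as `indent=4`; Python 2's stdlib json only accepts an int. A quick check under that Python 3 assumption:

import json

payload = {'b': 2, 'a': 1}
# A string of four spaces indents exactly like the integer 4.
assert json.dumps(payload, sort_keys=True, indent=4 * ' ') == \
       json.dumps(payload, sort_keys=True, indent=4)
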
Exemplo n.º 48
0
def saveDataxJob(request):
    user = request.user
    job_name = request.POST.get('job_name')
    description = request.POST.get('description')
    read_instance_id = request.POST.get('read_instance_id')  # reader instance name from the front-end form
    read_database = request.POST.get('read_database')
    read_sql = request.POST.get('read_sql')
    writer_instance_id = request.POST.get('writer_instance_id')
    writer_database = request.POST.get('writer_database')
    writer_table = request.POST.get('writer_table')
    writer_column = request.POST.get('writer_column')
    writer_preSql = request.POST.get('writer_preSql')
    writer_postSql = request.POST.get('writer_postSql')
    operation_type = request.POST.get('operation_type')
    job_id = request.POST.get('job_id')
    writer_column = writer_column.rstrip(',')
    print(writer_column)
    result = {'status': 0, 'msg': 'ok', 'data': {}}
    # writer_columns = writer_column.split(',')
    # while '' in writer_columns:
    #     writer_columns.remove('')
    # if len(writer_columns) == 0:
    #     writer_columns.append('*')
    # Determine the operation type
    if operation_type == 'add':
        # Reject duplicate job names
        job = DataXJob.objects.filter(job_name=job_name)
        if job.exists():
            result = {'status': 1, 'msg': '任务名称不能重复', 'data': {}}  # msg: job names must be unique
            return HttpResponse(json.dumps(result),
                                content_type='application/json')
        else:
            try:
                with transaction.atomic():

                    savejob = DataXJob()
                    savejob.job_name = job_name
                    savejob.job_description = description
                    savejob.read_instance_id = read_instance_id
                    savejob.read_database = read_database
                    savejob.read_sql = read_sql
                    savejob.writer_instance_id = writer_instance_id
                    savejob.writer_database = writer_database
                    savejob.writer_table = writer_table
                    savejob.writer_preSql = writer_preSql
                    savejob.writer_postSql = writer_postSql
                    savejob.crate_user = user.username
                    savejob.save()

                    saveColumn = DataXJobWriterColumn()
                    saveColumn.job = savejob
                    saveColumn.column_name = writer_column
                    saveColumn.save()
            except Exception as msg:
                connection.close()
                logger.error(msg)
                result['msg'] = str(msg)
            return HttpResponse(json.dumps(result),
                                content_type='application/json')
    elif operation_type == 'update':
        try:
            with transaction.atomic():
                jobdata = {
                    'job_name': job_name,
                    'job_description': description,
                    'read_instance_id': read_instance_id,
                    'read_database': read_database,
                    'read_sql': read_sql,
                    'writer_instance_id': writer_instance_id,
                    'writer_database': writer_database,
                    'writer_table': writer_table,
                    'writer_preSql': writer_preSql,
                    'writer_postSql': writer_postSql
                }
                DataXJob.objects.filter(
                    job_id=job_id).update(**jobdata, crate_user=user.username)

                cloumndata = {'column_name': writer_column}
                DataXJobWriterColumn.objects.filter(job_id=job_id).update(
                    **cloumndata)

        except Exception as msg:
            connection.close()
            logger.error(msg)
            result['msg'] = str(msg)
        return HttpResponse(json.dumps(result),
                            content_type='application/json')
    else:
        result = {'status': 1, 'msg': '操作类型不支持', 'data': {}}  # msg: unsupported operation type
        return HttpResponse(json.dumps(result),
                            content_type='application/json')
Exemplo n.º 49
0
 def _vm_create(self, vm_json, remote=False):
     return self._run_cmd('_vm_create', stdin=json.dumps(vm_json), remote=remote)
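
The `_run_cmd` helper is not shown here. A minimal sketch of how a JSON document might be handed to a subprocess over stdin (the helper name and call shape are assumptions, not the original implementation):

import json
import subprocess

def run_with_json_stdin(cmd, payload):
    # Hypothetical helper: feed a JSON document to a command on stdin
    # and return whatever the command prints on stdout.
    proc = subprocess.run(cmd, input=json.dumps(payload),
                          capture_output=True, text=True, check=True)
    return proc.stdout
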
Exemplo n.º 50
0
    def add(self, widget, label=None, rowspan=1, colspan=1):
        if self.columns and colspan > self.columns:
            colspan = self.columns

        label_size = label and 1 or 0

        if label_size == 1 and colspan == 2:
            colspan = 1

        if self.columns and colspan + self.x + label_size > self.columns:
            self.add_row()

        tr = self.table[-1]
        label_table = []
        if label:
            if colspan > 1:
                colspan -= 1
            attrs = {
                'class': 'label',
                'kind': getattr(widget, 'kind', None),
                'title': getattr(widget, 'help', None),
                'for': widget.name,
                'model': getattr(widget, 'model', None),
                'fname':getattr(widget, 'name', None),
                'widget_item': ({}, widget)
            }
            td = [attrs, label]

            if self.is_search:
                if colspan > 1:
                    attrs['colspan'] = colspan
                if getattr(widget, 'full_name', None):
                    attrs['class'] = attrs.get('class', 'label') + ' search_filters search_fields'
                    label_table = td
            else:
                attrs['width'] = '1%'
            tr.append(td)

        if isinstance(widget, TinyInputWidget) and hasattr(cherrypy.request, 'terp_validators'):
            self._add_validator(widget)

        attrs = self.base_widget_attrs(widget, colspan, rowspan)

        if not hasattr(widget, 'visible'):
            widget.visible = True

        # state change
        if getattr(widget, 'states', None):
            states = widget.states
            # convert into JS
            if isinstance(states, dict):
                states = dict([(k, dict(v)) for k, v in states.iteritems()])

            attrs.update(states=simplejson.dumps(states),
                         widget=widget.name)
            if not widget.visible:
                attrs['style'] = 'display: none'
            widget.visible = True

        if getattr(widget, "valign", None):
            attrs['valign'] = widget.valign

        # attr change
        if getattr(widget, 'attributes', None):
            attrs['attrs'] = str(widget.attributes)
            attrs['widget'] = widget.name

        if not isinstance(widget, (Char, Frame, Float, DateTime, Integer, Selection, Notebook, Separator, NewLine, Label)):
            from openerp.widgets.search import Filter
            if self.is_search \
               and (not (getattr(widget, 'kind', None) or widget._name)) \
               or (isinstance(widget, Filter) and widget.string):
                classes = [attrs.get('class', 'item'), 'search_filters']
                if isinstance(widget, Filter):
                    classes.append('group_box')
                    if widget.def_checked:
                        classes.append('grop_box_active')

                attrs['class'] = ' '.join(classes)
                attrs['nowrap'] = 'nowrap'

        attrs['class'] = attrs.get('class', '') + ' item-%s' % widget.__class__.__name__.lower()
        if self.columns and not attrs.has_key('width'):
            if label:
                attrs['width'] = str((100 / self.columns) * (colspan + 1) - 1) + '%'
            else:
                attrs['width'] = str((100 / self.columns) * colspan) + '%'

        td = [attrs, widget]
        if getattr(widget, 'full_name', None) and self.is_search and label_table:
            label_table[0]['widget_item'] = td
            label_table[0]['is_search'] = True
        else:
            tr.append(td)

        self.x += colspan + label_size
Exemplo n.º 51
0
 def json_encode(self, value):
     """simplejson.dumps without extra spaces.
     
     memcache doesn't like spaces in the key.
     """
     return simplejson.dumps(value, separators=(",", ":"))
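
The docstring's point is that the default separators insert spaces after commas and colons. A quick comparison using the standard json/simplejson behaviour:

import json

value = {'a': 1, 'b': [2, 3]}
print(json.dumps(value))                         # {"a": 1, "b": [2, 3]}  -- contains spaces
print(json.dumps(value, separators=(",", ":")))  # {"a":1,"b":[2,3]}      -- safe to use as a memcache key
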
Exemplo n.º 52
0
 def save(self, *args, **kwargs):
     if self._data is not None and self.serialized_data is None:
         self.serialized_data = json.dumps(self._data)
     super(UserLedger, self).save(*args, **kwargs)
Exemplo n.º 53
0
    def _moneris_s2s_send(self, cr, uid, values, cc_values, context=None):
        """
         .. versionadded:: pre-v8 saas-3
         .. warning::

            Experimental code. You should not use it before OpenERP v8 official
            release.
        """
        tx_id = self.create(cr, uid, values, context=context)
        tx = self.browse(cr, uid, tx_id, context=context)

        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer %s' % tx.acquirer_id._moneris_s2s_get_access_token()[tx.acquirer_id.id],
        }
        data = {
            'intent': 'sale',
            'transactions': [{
                'amount': {
                    'total': '%.2f' % tx.amount,
                    'currency': tx.currency_id.name,
                },
                'description': tx.reference,
            }]
        }
        if cc_values:
            data['payer'] = {
                'payment_method': 'credit_card',
                'funding_instruments': [{
                    'credit_card': {
                        'number': cc_values['number'],
                        'type': cc_values['brand'],
                        'expire_month': cc_values['expiry_mm'],
                        'expire_year': cc_values['expiry_yy'],
                        'cvv2': cc_values['cvc'],
                        'first_name': tx.partner_name,
                        'last_name': tx.partner_name,
                        'billing_address': {
                            'line1': tx.partner_address,
                            'city': tx.partner_city,
                            'country_code': tx.partner_country_id.code,
                            'postal_code': tx.partner_zip,
                        }
                    }
                }]
            }
        else:
            # TODO: complete redirect URLs
            data['redirect_urls'] = {
                # 'return_url': 'http://example.com/your_redirect_url/',
                # 'cancel_url': 'http://example.com/your_cancel_url/',
            }
            data['payer'] = {
                'payment_method': 'moneris',
            }
        data = json.dumps(data)

        request = urllib2.Request(
            'https://api.sandbox.moneris.com/v1/payments/payment', data,
            headers)
        result = self._moneris_try_url(request, tries=3, context=context)
        return (tx_id, result)
Exemplo n.º 54
0
    def json_encode(self, value):
        """simplejson.dumps without extra spaces and consistant ordering of dictionary keys.

        memcache doesn't like spaces in the key.
        """
        return simplejson.dumps(value, separators=(",", ":"), sort_keys=True)
Exemplo n.º 55
0
def _resize(ws):
    rows, cols = _pty_size()
    ws.send(json.dumps({'resize': {'width': cols, 'height': rows}}))
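
The `_pty_size` helper is not shown. A minimal sketch of how it might read the controlling terminal's dimensions on a POSIX system (this is an assumed implementation, not the original):

import fcntl
import struct
import sys
import termios

def _pty_size():
    # Hypothetical helper: ask the terminal for its window size via TIOCGWINSZ.
    packed = fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ, b'\x00' * 8)
    rows, cols, _, _ = struct.unpack('HHHH', packed)
    return rows, cols
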
Exemplo n.º 56
0
def importFitAndReturnJson(fileName, fieldName):
     fitfile = FitFile(fileName)
     return json.dumps(fitfile.get_messages(fieldName, False, True), sort_keys=True, default=str, iterable_as_array=True)
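
`iterable_as_array` is a simplejson-specific keyword (the standard-library json module does not accept it), so this snippet presumably imports simplejson as json; `default=str` is what lets non-JSON types such as datetime fall back to their string form. A small stdlib illustration of that fallback:

import datetime
import json

record = {'timestamp': datetime.datetime(2020, 1, 1, 12, 0), 'speed': 4.2}
# default=str converts anything json can't serialize (here, the datetime) to its str() form.
print(json.dumps(record, sort_keys=True, default=str))
# {"speed": 4.2, "timestamp": "2020-01-01 12:00:00"}
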
Exemplo n.º 57
0
 def get(self):
     self._add_CORS_header()
     self.write(simplejson.dumps(self.tabpy.get_endpoints()))
Exemplo n.º 58
0
    def moneris_form_generate_values(self,
                                     cr,
                                     uid,
                                     id,
                                     partner_values,
                                     tx_values,
                                     context=None):
        base_url = self.pool['ir.config_parameter'].get_param(
            cr, uid, 'web.base.url')
        acquirer = self.browse(cr, uid, id, context=context)

        moneris_tx_values = dict(tx_values)
        moneris_tx_values.update({
            'cmd': '_xclick',
            'business': acquirer.moneris_email_account,
            'item_name': tx_values['reference'],
            'item_number': tx_values['reference'],
            'amount': tx_values['amount'],
            'currency_code': tx_values['currency'] and tx_values['currency'].name or '',
            'address1': partner_values['address'],
            'city': partner_values['city'],
            'country': partner_values['country'] and partner_values['country'].name or '',
            'state': partner_values['state'] and partner_values['state'].name or '',
            'email': partner_values['email'],
            'zip': partner_values['zip'],
            'first_name': partner_values['first_name'],
            'last_name': partner_values['last_name'],
            'return': '%s' % urlparse.urljoin(base_url, MonerisController._return_url),
            'notify_url': '%s' % urlparse.urljoin(base_url, MonerisController._notify_url),
            'cancel_return': '%s' % urlparse.urljoin(base_url, MonerisController._cancel_url),
        })
        tx_ids = self.pool['payment.transaction'].search(
            cr,
            uid, [('reference', '=', tx_values['reference'])],
            context=context)
        for tx in tx_ids:
            tx = self.pool['payment.transaction'].browse(cr,
                                                         uid,
                                                         tx,
                                                         context=context)
            tx.write({'amount': tx_values['amount']})

        if acquirer.fees_active:
            moneris_tx_values['handling'] = '%.2f' % moneris_tx_values.pop(
                'fees', 0.0)
        if moneris_tx_values.get('return_url'):
            moneris_tx_values['custom'] = json.dumps(
                {'return_url': '%s' % moneris_tx_values.pop('return_url')})
        return partner_values, moneris_tx_values
Exemplo n.º 59
0
def save_many(query, comment=''):
    return request('/test/save_many', method='POST', data=urlencode({'query': simplejson.dumps(query), 'comment': comment}))
Exemplo n.º 60
0
import json

data = {}
data['people'] = []
data['people'].append({
    'name':'kim',
    'website':'naver.com',
    'from':'seoul'
})
data['people'].append({
    'name':'park',
    'website':'google.com',
    'from':'busan'
})
data['people'].append({
    'name':'lee',
    'website':'daum.net',
    'from':'iechone'
})

#print(data)

#data ={'people': [{'name': 'kim', 'website': 'naver.com', 'from': 'seoul'}, {'name': 'park', 'website': 'google.com', 'from': 'busan'}, {'name': 'lee', 'website': 'daum.net', 'from': 'iechone'}]}

#dict(json) -> str
e = json.dumps(data, indent=4)
# print(type(e))
# print(e)

#str -> dict(json)
d = json.loads(e)
# print(type(d))
# print(d)

with open('C:/python source/Section4/member.json','w') as outfile:
    outfile.write(e)


with open('C:/python source/Section4/member.json','r') as infile:
    r = json.loads(infile.read())
    print("=====")