Example no. 1
    def testArrayNumpyLabelled(self):
        input = {'a': []}
        output = ujson.loads(ujson.dumps(input), numpy=True, labelled=True)
        self.assertTrue((np.empty((1, 0)) == output[0]).all())
        self.assertTrue((np.array(['a']) == output[1]).all())
        self.assertTrue(output[2] is None)

        input = [{'a': 42}]
        output = ujson.loads(ujson.dumps(input), numpy=True, labelled=True)
        self.assertTrue((np.array([42]) == output[0]).all())
        self.assertTrue(output[1] is None)
        self.assertTrue((np.array(['a']) == output[2]).all())

        # py3 is non-deterministic on the ordering...
        if not py3compat.PY3:
            input = [{'a': 42, 'b':31}, {'a': 24, 'c': 99}, {'a': 2.4, 'b': 78}]
            output = ujson.loads(ujson.dumps(input), numpy=True, labelled=True)
            expectedvals = np.array([42, 31, 24, 99, 2.4, 78], dtype=int).reshape((3,2))
            self.assertTrue((expectedvals == output[0]).all())
            self.assertTrue(output[1] is None)
            self.assertTrue((np.array(['a', 'b']) == output[2]).all())


            input = {1: {'a': 42, 'b':31}, 2: {'a': 24, 'c': 99}, 3: {'a': 2.4, 'b': 78}}
            output = ujson.loads(ujson.dumps(input), numpy=True, labelled=True)
            expectedvals = np.array([42, 31, 24, 99, 2.4, 78], dtype=int).reshape((3,2))
            self.assertTrue((expectedvals == output[0]).all())
            self.assertTrue((np.array(['1','2','3']) == output[1]).all())
            self.assertTrue((np.array(['a', 'b']) == output[2]).all())
Example no. 2
def show_windows(hWndList):
    for h in hWndList:
        # title = show_window_attr(h)
        windowTitle = mUtil.getWindowText(h)
        # log.log('Window title: %s' % str(title))
        if "资金余额" in str(windowTitle):
            findTitle = find_text_for_index(hWndList, h)
            SingleUserInfo.set_capital_balance(findTitle)
        if "总 资 产" in str(windowTitle):
            findTitle = find_text_for_index(hWndList, h)
            SingleUserInfo.set_total_assets(findTitle)
        if "股票市值" in str(windowTitle):
            findTitle = find_text_for_index(hWndList, h)
            SingleUserInfo.set_stock_market_value(findTitle)
        if "可取金额" in str(windowTitle):
            findTitle = find_text_for_index(hWndList, h)
            SingleUserInfo.set_advisable_fundse(findTitle)
        if "冻结金额" in str(windowTitle):
            findTitle = find_text_for_index(hWndList, h)
            SingleUserInfo.set_frozen_fundse(findTitle)
        if "可用金额" in str(windowTitle):
            findTitle = find_text_for_index(hWndList, h)
            SingleUserInfo.set_available_funds(findTitle)

    SingleUserInfo.__dict__ = json.loads(
        json.dumps(SingleUserInfo.__dict__).replace("\u0000", ""))
    log.log(json.dumps(SingleUserInfo.__dict__))
Example no. 3
    def testArrayNumpyLabelled(self):
        input = {'a': []}
        output = ujson.loads(ujson.dumps(input), numpy=True, labelled=True)
        self.assertTrue((np.empty((1, 0)) == output[0]).all())
        self.assertTrue((np.array(['a']) == output[1]).all())
        self.assertTrue(output[2] is None)

        input = [{'a': 42}]
        output = ujson.loads(ujson.dumps(input), numpy=True, labelled=True)
        self.assertTrue((np.array([42]) == output[0]).all())
        self.assertTrue(output[1] is None)
        self.assertTrue((np.array([u('a')]) == output[2]).all())

        # py3 is non-deterministic on the ordering...
        if not compat.PY3:
            input = [{'a': 42, 'b':31}, {'a': 24, 'c': 99}, {'a': 2.4, 'b': 78}]
            output = ujson.loads(ujson.dumps(input), numpy=True, labelled=True)
            expectedvals = np.array([42, 31, 24, 99, 2.4, 78], dtype=int).reshape((3,2))
            self.assertTrue((expectedvals == output[0]).all())
            self.assertTrue(output[1] is None)
            self.assertTrue((np.array([u('a'), 'b']) == output[2]).all())


            input = {1: {'a': 42, 'b':31}, 2: {'a': 24, 'c': 99}, 3: {'a': 2.4, 'b': 78}}
            output = ujson.loads(ujson.dumps(input), numpy=True, labelled=True)
            expectedvals = np.array([42, 31, 24, 99, 2.4, 78], dtype=int).reshape((3,2))
            self.assertTrue((expectedvals == output[0]).all())
            self.assertTrue((np.array(['1','2','3']) == output[1]).all())
            self.assertTrue((np.array(['a', 'b']) == output[2]).all())
Example no. 4
 def dump_message(self, seq, rv, err):
     try:
         dumped = json.dumps([seq, rv, err])
     except Exception as e:  # coerce
         print 'Websocket coerces exception:'
         print_exc(e)
         dumped = json.dumps([seq, str(rv), err])
     self.write_message(dumped)
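
A standalone illustration (not from the original handler) of the fallback above: json.dumps raises TypeError for objects it cannot serialize, so the method retries with str(rv).

import json

class Unserializable(object):
    pass

payload = [7, Unserializable(), None]
try:
    dumped = json.dumps(payload)
except TypeError:
    # fall back to the string representation, as dump_message does
    dumped = json.dumps([7, str(payload[1]), None])
print(dumped)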
Example no. 5
def get_ratios(id1, id2, direction, ops, bins=1, bins_col=1, jsonit=True):
    """
  Get the ratios for the overview or the aggregated results for the middle view
  :param id1:
  :param id2:
  :param direction:
  :param ops:
  :param bins:
  :param bins_col:
  :param jsonit: If True return result as json string. Otherwise return Python object.
  :return:
  """
    hashname = create_hashname(id1, id2, bins, bins_col, direction, ops)
    json_ratios = get_diff_cache(hashname)

    if json_ratios is None:
        # we calculate the new one
        # get the detail diff
        t4 = timeit.default_timer()

        diffobj = get_diff_table(id1, id2, direction, ops, False)

        t5 = timeit.default_timer()
        _log.debug("TIMER: get diff in get ratios ", t5 - t4)
        # calculate the ratios for the overview
        t1 = timeit.default_timer()

        ratios = diffobj.aggregate(bins, bins_col)

        t2 = timeit.default_timer()
        _log.debug("TIMER: time to aggregate with ", bins, bins_col, t2 - t1)
        # todo find a better solution for this serialize thing :|
        # bin == 1 -> timeline bar chart
        # bin == -1 -> 2d ratio plot
        if bins == 1 or bins == -1:
            json_ratios = ujson.dumps(ratios.serialize())
        # bin > 1 -> 2d ratio histogram
        else:
            json_ratios = ujson.dumps(ratios)

        # cache this as overview
        set_diff_cache(hashname, json_ratios)

        if not jsonit:
            return ratios

    if not jsonit:
        t0 = timeit.default_timer()
        rj = ratio_from_json(json_ratios)
        t3 = timeit.default_timer()
        _log.debug("TIMER: time ratio from json", bins, bins_col, t3 - t0)
        return rj

    return json_ratios
Example no. 6
def seedupload(request):
    obj = dict()
    if request.method == 'POST':
        images = request.FILES.getlist('images')
        if images:
            for image in images:
                image.name = ImageTool.get_new_random_file_name(image.name)
                seed = models.Seed(userid=1, img=image)
                res = seed.save()
                return HttpResponse(json.dumps(res))
        else:
            obj['error'] = '没有上传的文件'
        return HttpResponse(json.dumps(obj))
    else:
        return render(request, "setting.html")
Example no. 7
    def _writecells_xls(self, cells, sheet_name, startrow, startcol):
        if sheet_name in self.sheets:
            wks = self.sheets[sheet_name]
        else:
            wks = self.book.add_sheet(sheet_name)
            self.sheets[sheet_name] = wks

        style_dict = {}

        for cell in cells:
            val = _conv_value(cell.val)

            num_format_str = None
            if isinstance(cell.val, datetime.datetime):
                num_format_str = "YYYY-MM-DD HH:MM:SS"
            if isinstance(cell.val, datetime.date):
                num_format_str = "YYYY-MM-DD"

            stylekey = json.dumps(cell.style)
            if num_format_str:
                stylekey += num_format_str

            if stylekey in style_dict:
                style = style_dict[stylekey]
            else:
                style = CellStyleConverter.to_xls(cell.style, num_format_str)
                style_dict[stylekey] = style

            if cell.mergestart is not None and cell.mergeend is not None:
                wks.write_merge(startrow + cell.row,
                                startrow + cell.mergestart,
                                startcol + cell.col, startcol + cell.mergeend,
                                val, style)
            else:
                wks.write(startrow + cell.row, startcol + cell.col, val, style)
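
The stylekey logic above uses json.dumps(cell.style) as a dictionary key so each distinct style is converted only once. A minimal standalone sketch of that caching idea; convert_style is a stand-in for the (assumed expensive) real conversion:

import json

style_cache = {}

def convert_style(style_spec):
    # placeholder for the expensive conversion step (assumption, not the real converter)
    return object()

def get_style(style_spec):
    # dicts are unhashable, so serialize to a stable string to use as the cache key
    key = json.dumps(style_spec, sort_keys=True)
    if key not in style_cache:
        style_cache[key] = convert_style(style_spec)
    return style_cache[key]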
Example no. 8
def search_ajax(request):
    data = request.GET
    print(data)
    number = int(data.get('number'))
    title = "'%" + data.get('title') + "%'"
    if request.is_ajax():
        db = MySQL()
        sql = "select blog_id, blog_title, blog_date_time, blog_view, blog_content from blog_blog where blog_title like %s limit %d, %d"
        data = (title, int(number), 10)
        blogs = db.ExecDataQuery(sql, data)
        blogs_list = []
        for row in blogs:
            blogs = {
                'blog_id': row[0],
                'blog_title': row[1],
                'blog_date_time': str(row[2]),
                'blog_view': row[3],
                'blog_content': row[4].replace('�', '').replace(' ', '')[0:100]
            }
            blogs_list.append(blogs)
        # print(blog_time_list)
        jsonDate = json.dumps(blogs_list, ensure_ascii=False)
        return HttpResponse(jsonDate, content_type='application/json')
    else:
        return HttpResponse('N')
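
The views in this group pass ensure_ascii=False to json.dumps. A standalone example with a made-up row showing the difference: the default escapes non-ASCII text, while ensure_ascii=False keeps it readable.

import json

row = {'blog_title': '随笔', 'blog_view': 3}
print(json.dumps(row))                      # {"blog_title": "\u968f\u7b14", "blog_view": 3}
print(json.dumps(row, ensure_ascii=False))  # {"blog_title": "随笔", "blog_view": 3}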
Example no. 9
def check_user():
    haveregisted = userInfoTable.query.filter_by(username=request.form['username']).all()
    if len(haveregisted) != 0:  # check whether the username has already been registered
        passwordRight = userInfoTable.query.filter_by(username=request.form['username'],
                                                      password=request.form['password']).all()
        if len(passwordRight) != 0:
            obj = userInfoTable.query.filter_by(username=request.form['username']).first()
            rootdir = obj.pictureUrl
            phonenum = obj.user_phone_number
            files = os.listdir(rootdir)  # list all directories and files under the folder
            info_dict = {"info": []}
            for i in range(0, len(files)):
                picture_file_name = files[i]
                # image file
                path = os.path.join(rootdir, picture_file_name)
                if os.path.isfile(path):
                    # with open(path, 'rb') as f:
                    #     image_byte = base64.b64encode(f.read())
                    #     trainer_image_string = image_byte.decode('ascii')  # convert bytes to str
                    trainer_name = picture_file_name.split(".")[0]
                    obj = trainerInfoTable.query.filter_by(trainer_name=trainer_name).first()
                    trainer_intro = obj.trainer_intro
                    print("trainer_name:",trainer_name)
                    trainer_image_url = "http://10.0.2.2:8080/userPicture?filename="+phonenum+"/"+picture_file_name
                    data = {"trainer_image_url": trainer_image_url,"trainer_name":trainer_name,"trainer_intro":trainer_intro}
                    info_dict.get("info").append(data)

            jsoninfo = json.dumps(info_dict)
            print("jsoninfo:",jsoninfo)
            return jsoninfo
        else:
            return '1'
    else:
        return '0'
Example no. 10
def blogs_category(request):
    data = request.GET
    print(data)
    number = int(data.get('number'))
    category = data.get('category')
    print(number, type(number))
    if request.is_ajax():
        db = MySQL()
        sql = "select blog_id, blog_title, blog_date_time, blog_view, blog_content from blog_blog where blog_category = '%s'order by blog_date_time desc limit %d, %d"
        data = (category, int(number), 10)
        blogs = db.ExecDataQuery(sql, data)
        blogs_list = []
        for row in blogs:
            blogs = {
                'blog_id': row[0],
                'blog_title': row[1],
                'blog_date_time': str(row[2]),
                'blog_view': row[3],
                'blog_content': row[4].replace('�', '').replace(' ', '')[0:100]
            }
            blogs_list.append(blogs)
        jsonDate = json.dumps(blogs_list, ensure_ascii=False)
        return HttpResponse(jsonDate, content_type='application/json')
    else:
        return HttpResponse('N')
Example no. 11
def saveTransitionToDb(cursor,transition,gameId,orderInGame):
    query = "INSERT INTO `state` (`json`, `game_id`,`order_in_game`,`terminal`) VALUES (%s, %s,%s, %s ) ; "


    cursor.execute(query,(json.dumps(transition),str(gameId),str(orderInGame),str(transition['next_terminal'])))
    idState = int(cursor.lastrowid)
    return idState
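
A hedged usage sketch for the helper above; the pymysql connection details, the `state` table and the transition dict are assumptions and not part of the original code.

import pymysql  # any DB-API driver with %s placeholders works the same way

conn = pymysql.connect(host='localhost', user='user', password='secret', db='games')
try:
    with conn.cursor() as cursor:
        transition = {'board': [0, 1, 2], 'action': 1, 'next_terminal': False}
        state_id = saveTransitionToDb(cursor, transition, gameId=7, orderInGame=0)
    conn.commit()
finally:
    conn.close()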
Example no. 12
def series_json(series):
    data = {}
    data['data'] = series.values
    data['index'] = series.index
    data['name'] = series.name
    data['_pandas_type'] = 'series'
    data['__repr__'] = repr(series)
    return json.dumps(data)
Example no. 13
def get_diff_table(id1, id2, direction, ops, jsonit=True):
    """
  Get cached data for the diff table and if not available calculate and cache it
  :param id1:
  :param id2:
  :param direction:
  :param ops:
  :param jsonit: If True return result as json string. Otherwise return Python object.
  :return:
  """

    # HACK: force calculation of everything. then we only do it once and use the cache in the future
    all_ops = "structure,content,merge,reorder"

    hash_name = create_hashname(id1, id2, 0, 0, direction, all_ops)

    t1 = timeit.default_timer()
    json_result = get_diff_cache(hash_name)
    t2 = timeit.default_timer()
    _log.debug("TIMER: get diff: cache (json)", t2 - t1)
    diffobj = None

    if json_result is None:
        # get one for the detail
        t3 = timeit.default_timer()
        diffobj = calc_diff(id1, id2, direction, all_ops)
        t4 = timeit.default_timer()
        _log.debug("TIMER: get diff: calc diff ", t4 - t3)

        if isinstance(diffobj, Diff):
            # log the detail
            json_result = ujson.dumps(diffobj.serialize())
            set_diff_cache(hash_name, json_result)
        else:
            # todo later find a way to send the error
            # e.g. there's no matching column in this case
            json_result = ujson.dumps(diffobj)  # which is {} for now!
            set_diff_cache(hash_name, json_result)

    elif jsonit is False:
        diffobj = Diff().unserialize(ujson.loads(json_result))

    if jsonit:
        return json_result
    else:
        return diffobj
Example no. 14
def dataframe_json(df):
    data = {}
    for k, v in df.iteritems():
        data[k] = v.values
    data['index'] = df.index
    data['_pandas_type'] = 'dataframe'
    data['__repr__'] = repr(df)
    return json.dumps(data)
Example no. 15
def dataframe_json(df, obj_name):
    data = {}
    for k, v in df.iteritems():
        data[k] = v.values
    data['index'] = df.index
    data['__repr__'] = repr(df)
    data['__obj_name__'] = obj_name
    return json.dumps(data)
Example no. 16
def saveTransitionToDb(cursor, transition, gameId, orderInGame):
    query = "INSERT INTO `state` (`json`, `game_id`,`order_in_game`,`terminal`) VALUES (%s, %s,%s, %s ) ; "

    cursor.execute(query,
                   (json.dumps(transition), str(gameId), str(orderInGame),
                    str(transition['next_terminal'])))
    idState = int(cursor.lastrowid)
    return idState
Example no. 17
def dataframe_json(df):
    data = {}
    for k, v in list(df.items()):
        data[k] = v.values
    data['index'] = df.index
    data['_pandas_type'] = 'dataframe'
    data['__repr__'] = repr(df)
    return json.dumps(data)
Example no. 18
def series_json(series):
    data = {}
    data['data'] = series.values
    data['index'] = series.index
    data['name'] = series.name
    data['_pandas_type'] = 'series'
    data['__repr__'] = repr(series)
    return json.dumps(data)
Example no. 19
 def sex():
     sql = "select user_sex, count(*) from blog_user group by user_sex"
     sexes = db.ExecQuery(sql)
     sex_list = []
     for row in sexes:
         if not row[0] == '':
             sex_dict = {'sex': row[0], 'number': row[1]}
             sex_list.append(sex_dict)
     return json.dumps(sex_list, ensure_ascii=False)
Example no. 20
def redis_set_add(pool_id, key, item):
    try:
        if isinstance(item, dict):
            item = json.dumps(item)
        r = redis.Redis(connection_pool=poolList[pool_id])
        return r.sadd(key, item)
    except Exception as e:
        print('[ERROR] redis sadd Error!', e)
        traceback.print_exc()
Example no. 21
 def blog_time():
     db = MySQL()
     sql = "select date_format (blog_date_time, '%H'), count(*) from blog_blog group by date_format (blog_date_time, '%H')"
     blog_time = db.ExecQuery(sql)
     blog_time_list = []
     for row in blog_time:
         blog_time = {'time': row[0], 'number': row[1]}
         blog_time_list.append(blog_time)
     return json.dumps(blog_time_list, ensure_ascii=False)
Example no. 22
def delete_cart(request):
    user_id = request.session.get('user_id')
    cart_id = request.GET.get('cart_id')
    try:
        delcart = CartInfo.objects.filter(user_id=user_id, id=cart_id)
        delcart.delete()
    except BaseException as e:
        logging.warning(e)
    content = {'status': 'ok', 'text': '删除成功'}
    return HttpResponse(json.dumps(content))
Example no. 23
def MHistAvg(json_str,
             plot_in_file=False,
             out_path=None,
             dmin=None,
             last_ts=-1):
    """
    Read a JSON file or string and return a quantile-based threshold for one day.
    This is the synchronous variant, i.e. the calculation timestamps are included in the JSON.
    The accepted JSON format is explained in the MSJSONReader method of the preprocessor module.

    Parameters
    ----------
    json_str: string
        JSON string or filepath

    plot_in_file: boolean
        plot raw data into *.png file or not

    out_path: string
        path where plot file should be created

    dmin: integer
        number of minutes to aggregate raw data

    last_ts: integer
        timestamp of calculation Unix-time

    Returns
    -------
    outjson_str: string
        dynamic threshold in JSON string format, i.e. [[timestamp, lower, upper]...]
    """
    dynout_pattern = dynthresh()
    outjson_str = ""
    autothresh = []
    monid, sensitivity, calc_ts, rarray, errm1, status = MSJSONReader(json_str)
    if status == 0:
        for i in range(len(monid)):
            tseries, errm0, status0 = Preprocessor(rarray[i])
            if status0 == 0:
                autothresh = autothresh + [
                    dynthresh(monid[i], tseries, rarray[i], plot_in_file,
                              out_path, dmin, sensitivity[i], calc_ts[i])
                ]
                autothresh[i]["metric_id"] = monid[i]
            else:
                autothresh = autothresh + [dynout_pattern]
                autothresh[i]["metric_id"] = monid[i]
                autothresh[i]["errMsg"] = errm0
    else:
        autothresh = autothresh + [dynout_pattern]
        autothresh[0]["metric_id"] = ""
        autothresh[0]["errMsg"] = errm1
    outjson_str = dumps(autothresh)
    return outjson_str
Example no. 24
 def star():
     db = MySQL()
     sql = "select user_star, count(*) from blog_user group by user_star"
     star = db.ExecQuery(sql)
     star_list = []
     for row in star:
         if not row[0] == '保密':
             star_dict = {'star': row[0], 'number': row[1]}
             star_list.append(star_dict)
     jsonDate = json.dumps(star_list, ensure_ascii=False)
     return jsonDate
Example no. 25
 def blog():
     db = MySQL()
     sql = "select blog_category, count(*) from blog_blog group by blog_category"
     blogs = db.ExecQuery(sql)
     blogs_list = []
     for row in blogs:
         blogs = {'category': row[0], 'number': row[1]}
         blogs_list.append(blogs)
     # print(blog_time_list)
     jsonDate = json.dumps(blogs_list, ensure_ascii=False)
     return jsonDate
Example no. 26
def HistAvg(json_str,
            plot_in_file=False,
            out_path=None,
            dmin=None,
            sens="low",
            last_ts=-1):
    """
    Read a JSON file or string and return a quantile-based threshold for one day.
    The accepted JSON format is explained in the SJSONReader method of the preprocessor module.

    Parameters
    ----------
    json_str: string
        JSON string or filepath

    plot_in_file: boolean
        plot raw data into *.png file or not

    out_path: string
        path where plot file should be created

    dmin: integer
        number of minutes to aggregate raw data

    sens: string
        one of three values string ["low", "medium", "high"]

    last_ts: integer
        timestamp of calculation Unix-time

    Returns
    -------
    outjson_str: string
        dynamic threshold in JSON string format, i.e. [[timestamp, lower, upper]...]
    """
    dynout_pattern = dynthresh()
    outdict = []
    monid = None
    monid, rarray, errm, status = SJSONReader(json_str)
    if status == 0:
        tseries, errm, status = Preprocessor(rarray)
    else:
        outdict = dynout_pattern
        outdict["errMsg"] = errm
    if status == 0:
        outdict = dynthresh(monid, tseries, rarray, plot_in_file, out_path,
                            dmin, sens, last_ts)
    else:
        outdict = dynout_pattern
        outdict["errMsg"] = errm
    if monid is not None:
        outdict["monitor_id"] = monid
    outjson_str = dumps(outdict)
    return outjson_str
Example no. 27
 def address():
     db = MySQL()
     sql = "select user_address, count(*) from blog_user group by user_address"
     address = db.ExecQuery(sql)
     address_list = []
     for row in address:
         if not row[0] == '保密':
             address_dict = {'address': row[0], 'number': row[1]}
             address_list.append(address_dict)
     jsonDate = json.dumps(address_list, ensure_ascii=False)
     return jsonDate
Example no. 28
def main(unused_args):
    config = SmallConfig()
    model_path = FLAGS.model
    output_path = FLAGS.output
    np.random.seed()

    # data_loader
    if FLAGS.num_sample:
        config.batch_size = FLAGS.num_sample

    with tf.Graph().as_default(), tf.Session() as session:
        config.num_steps = 1
        model, model_validate = build_model(session, config, model_path)
        sequence_list = simulate_sequence(session, model_validate)
        if output_path:
            ensure_base_dir(output_path)
            with open(output_path, 'w') as f:
                json.dump(sequence_list, f)
        else:
            json.dumps(sequence_list)
Example no. 29
 def tall():
     db = MySQL()
     sql = "select count(*) from blog_user"
     user_number = db.ExecQuery(sql)
     sql = "select count(*) from blog_blog"
     blog_number = db.ExecQuery(sql)
     sex_list = []
     sex_dict = {'number': blog_number[0][0]}
     sex_list.append(sex_dict)
     sex_dict = {'number': user_number[0][0]}
     sex_list.append(sex_dict)
     return json.dumps(sex_list, ensure_ascii=False)
Example no. 30
def search(request):
    title = ''  # default so a plain GET request does not hit an undefined name below
    if request.method == 'POST':
        print("the POST method")
        concat = request.POST
        print(concat)
        title = concat['title']
    jsonData = json.dumps({'title': title}, ensure_ascii=False)
    topic_blog = models.blog.objects.all().order_by('-blog_view')[0:5]
    return render(request, 'search.html', {
        'topic_blog': topic_blog,
        'title': jsonData
    })
Example no. 31
def DNN_judge():
    http = urllib3.PoolManager()
    url = "https://aip.baidubce.com/rpc/2.0/nlp/v2/dnnlm_cn?access_token=24.622919d482aad3d8ff925c11144238bf.2592000.1550716334.282335-11177554"
    data = {"text": "良好的管理能力"}
    encode_data = json.dumps(data).encode('GBK')
    # JSON: when making the request, an already-encoded JSON payload can be sent by setting the body parameter and the Content-Type header.
    request = http.request('POST',
                           url,
                           body=encode_data,
                           headers={'Content-Type': 'application/json'})
    result = str(request.data, 'GBK')
    print(result)
    return result
Example no. 32
 def blog_year():
     db = MySQL()
     sql = "select year (blog_date_time), month (blog_date_time), count(*) from blog_blog group by  year (blog_date_time), month (blog_date_time)"
     blog_num = db.ExecQuery(sql)
     blog_num_list = []
     for row in blog_num:
         blog_num = {
             'year_month': str(row[0]) + ' ' + str(row[1]),
             'number': row[2]
         }
         blog_num_list.append(blog_num)
     jsonDate = json.dumps(blog_num_list, ensure_ascii=False)
     return jsonDate
Example no. 33
    def initUserInfo(self):
        log.log("===================================================")
        log.log("开始填充用户数据.......")
        log.log("===================================================")
        # bring the program window to the foreground
        win32gui.SetForegroundWindow(self.xiadanH)
        self.clickBroadF2()
        self.clickBroadF4()
        childWindows = SingleUtil.findChildWindows(self.xiadanH)
        for childHw in childWindows:
            # title = show_window_attr(h)
            windowTitle = SingleUtil.getWindowText(childHw)
            # log.log('Window title: %s' % str(title))
            if "资金余额" in str(windowTitle):
                findTitle = self.find_text_for_index(childWindows, childHw)
                SingleUserInfo.set_capital_balance(findTitle)
            if "总 资 产" in str(windowTitle):
                findTitle = self.find_text_for_index(childWindows, childHw)
                SingleUserInfo.set_total_assets(findTitle)
            if "股票市值" in str(windowTitle):
                findTitle = self.find_text_for_index(childWindows, childHw)
                SingleUserInfo.set_stock_market_value(findTitle)
            if "可取金额" in str(windowTitle):
                findTitle = self.find_text_for_index(childWindows, childHw)
                SingleUserInfo.set_advisable_fundse(findTitle)
            if "冻结金额" in str(windowTitle):
                findTitle = self.find_text_for_index(childWindows, childHw)
                SingleUserInfo.set_frozen_fundse(findTitle)
            if "可用金额" in str(windowTitle):
                findTitle = self.find_text_for_index(childWindows, childHw)
                SingleUserInfo.set_available_funds(findTitle)

        SingleUserInfo.__dict__ = json.loads(
            json.dumps(SingleUserInfo.__dict__).replace("\u0000", ""))
        log.log("===================================================")
        log.log("用户信息资金信息:")
        log.log(json.dumps(SingleUserInfo.__dict__))
        log.log("===================================================")
Example no. 34
def _to_json(obj_name):
    obj = globals()[obj_name]
    if isinstance(obj, pd.DataFrame):
        return dataframe_json(obj, obj_name)
    if isinstance(obj, dict):
        jdict = {}
        for k, v in obj.iteritems():
            jdict[k] = _to_json(v)
        return json_dict(jdict)

    if hasattr(obj, 'to_json'):
        return obj.to_json()

    return json.dumps(obj)
Example no. 35
    def test_encodeNullCharacter(self):
        input = "31337 \x00 1337"
        output = ujson.encode(input)
        self.assertEquals(input, json.loads(output))
        self.assertEquals(output, json.dumps(input))
        self.assertEquals(input, ujson.decode(output))

        input = "\x00"
        output = ujson.encode(input)
        self.assertEquals(input, json.loads(output))
        self.assertEquals(output, json.dumps(input))
        self.assertEquals(input, ujson.decode(output))

        self.assertEquals('"  \\u0000\\r\\n "', ujson.dumps(u("  \u0000\r\n ")))
        pass
Example no. 36
 def active_user():
     db = MySQL()
     sql = """select user_name, user_blog from (
     select user_name, count(*) as user_blog
     from blog_user, blog_blog 
     where user_id = blog_user_id 
     group by user_name) as active_user
     order by user_blog desc limit 10"""
     active_user = db.ExecQuery(sql)
     active_user_list = []
     for row in active_user:
         active_user_dict = {'user': row[0], 'number': row[1]}
         active_user_list.append(active_user_dict)
     jsonDate = json.dumps(active_user_list, ensure_ascii=False)
     return jsonDate
Example no. 37
    def test_encodeNullCharacter(self):
        input = "31337 \x00 1337"
        output = ujson.encode(input)
        self.assertEqual(input, json.loads(output))
        self.assertEqual(output, json.dumps(input))
        self.assertEqual(input, ujson.decode(output))

        input = "\x00"
        output = ujson.encode(input)
        self.assertEqual(input, json.loads(output))
        self.assertEqual(output, json.dumps(input))
        self.assertEqual(input, ujson.decode(output))

        self.assertEqual('"  \\u0000\\r\\n "', ujson.dumps("  \u0000\r\n "))
        pass
Example no. 38
def getChart(request):
    fuzzId = request.GET.get("fuzzId")
    result = getfuzzresult(fuzzId)
    chart = {
        "legendData": [
            'erase bytes', 'insert bytes', 'change byte',
            'insert repeated bytes', 'change ascii integer', 'change bit',
            'white noise', 'rotate', 'scale', 'triangular matrix',
            'kernel matrix'
        ],
        "seriesData": [{
            "value": result.mutate_erase_bytes,
            "name": 'erase bytes'
        }, {
            "value": result.mutate_insert_bytes,
            "name": 'insert bytes'
        }, {
            "value": result.mutate_change_byte,
            "name": 'change byte'
        }, {
            "value": result.mutate_insert_repeated_bytes,
            "name": 'insert repeated bytes'
        }, {
            "value": result.mutate_change_ascii_integer,
            "name": 'change ascii integer'
        }, {
            "value": result.mutate_change_bit,
            "name": 'change bit'
        }, {
            "value": result.mutate_white_noise,
            "name": 'white noise'
        }, {
            "value": result.mutate_rotate,
            "name": 'rotate'
        }, {
            "value": result.mutate_scale,
            "name": 'scale'
        }, {
            "value": result.mutate_triangular_matrix,
            "name": 'triangular matrix'
        }, {
            "value": result.mutate_kernel_matrix,
            "name": 'kernel matrix'
        }]
    }
    return HttpResponse(json.dumps(chart))
Example no. 39
    def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0):
        # Write the frame cells using xlwt.

        sheet_name = self._get_sheet_name(sheet_name)

        if sheet_name in self.sheets:
            wks = self.sheets[sheet_name]
        else:
            wks = self.book.add_sheet(sheet_name)
            self.sheets[sheet_name] = wks

        style_dict = {}

        for cell in cells:
            val = _conv_value(cell.val)

            num_format_str = None
            if isinstance(cell.val, datetime.datetime):
                num_format_str = self.datetime_format
            elif isinstance(cell.val, datetime.date):
                num_format_str = self.date_format
            elif isinstance(cell.val, datetime.time):
                num_format_str = self.time_format

            stylekey = json.dumps(cell.style)
            if num_format_str:
                stylekey += num_format_str

            if stylekey in style_dict:
                style = style_dict[stylekey]
            else:
                style = self._convert_to_style(cell.style, num_format_str)
                style_dict[stylekey] = style

            if cell.mergestart is not None and cell.mergeend is not None:
                wks.write_merge(startrow + cell.row,
                                startrow + cell.mergestart,
                                startcol + cell.col,
                                startcol + cell.mergeend,
                                val, style)
            else:
                wks.write(startrow + cell.row,
                          startcol + cell.col,
                          val, style)
Example no. 40
def to_json(obj, *args, **kwargs):
  """
  convert the given object to json using the extensible encoder
  :param obj:
  :param args:
  :param kwargs:
  :return:
  """
  #try:
  #  doesn't work since we can't convert numpy arrays
  #  import ujson
  #  return ujson.dumps(obj, cls=JSONExtensibleEncoder, *args, **kwargs)
  #except ImportError:
  if 'allow_nan' in kwargs:
    del kwargs['allow_nan']
  if 'indent' in kwargs:
    del kwargs['indent']
  kwargs['ensure_ascii'] = False
  return ujson.dumps(obj, *args, **kwargs)
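
A minimal call sketch for the wrapper above (the payload is made up). allow_nan and indent are stripped before the call, presumably because the ujson build in use rejects them, and ensure_ascii is forced to False so non-ASCII text stays readable.

payload = {'label': 'Überblick', 'values': [1.5, 2.5]}
# indent is accepted by the wrapper but removed before reaching ujson.dumps
print(to_json(payload, indent=2))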
Example no. 41
 def run(self):
     enabled = collections.defaultdict(int)
     disabled = collections.defaultdict(int)
     wiki = self.getWiki()
     for res in wiki.queryPages(generator='allpages', gaplimit='max', gapnamespace='480', prop='revisions', rvprop='content'):
         data = api.parseJson(res.revisions[0]['*'])
         if 'country' in data:
             # enabled by default
             if 'enabled' not in data or data.enabled:
                 enabled[data.country] += 1
             else:
                 disabled[data.country] += 1
     for state, vals in {'Enabled':enabled, 'Disabled':disabled}.iteritems():
         text = json.dumps([{"code":k, "val":v} for k,v in vals.iteritems()])
         wiki(
             'edit',
             title='Data:Json:StatsByCountry-' + state,
             summary='updating - %d countries' % (sum(vals.values())),
             text=text,
             token=wiki.token()
         )
Example no. 42
    def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0):
        # Write the frame cells using xlsxwriter.

        sheet_name = self._get_sheet_name(sheet_name)

        if sheet_name in self.sheets:
            wks = self.sheets[sheet_name]
        else:
            wks = self.book.add_worksheet(sheet_name)
            self.sheets[sheet_name] = wks

        style_dict = {}

        for cell in cells:
            num_format_str = None
            if isinstance(cell.val, datetime.datetime):
                num_format_str = "YYYY-MM-DD HH:MM:SS"
            if isinstance(cell.val, datetime.date):
                num_format_str = "YYYY-MM-DD"

            stylekey = json.dumps(cell.style)
            if num_format_str:
                stylekey += num_format_str

            if stylekey in style_dict:
                style = style_dict[stylekey]
            else:
                style = self._convert_to_style(cell.style, num_format_str)
                style_dict[stylekey] = style

            if cell.mergestart is not None and cell.mergeend is not None:
                # merge_range takes (first_row, first_col, last_row, last_col, data, format)
                wks.merge_range(startrow + cell.row,
                                startcol + cell.col,
                                startrow + cell.mergestart,
                                startcol + cell.mergeend,
                                cell.val, style)
            else:
                wks.write(startrow + cell.row,
                          startcol + cell.col,
                          cell.val, style)
Example no. 43
def show_details(show_details_id):

        cur, conn = mysql_connect("leo_markt")

        cur.execute(""" SELECT name, description, price, category, availability FROM products WHERE id = %s """,
                    (show_details_id,))
        for i in cur:
            items_dict = {
                "name": i[0],
                "description": i[1],
                "price": '{:,.2f}'.format(i[2]).replace(",", "X").replace(".", ",").replace("X", ".") + "€",
                # price = price.decode('utf-8')
                "category": i[3],
                "availability": i[4]
            }

            show_details_json = json.dumps(items_dict)

            cur.close()
            conn.close()

            return show_details_json
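
The price formatting above produces a European-style number by formatting with US separators first and then swapping them through a temporary placeholder. A standalone illustration with a made-up price:

price = 1234.5
formatted = '{:,.2f}'.format(price)  # '1,234.50'
formatted = formatted.replace(",", "X").replace(".", ",").replace("X", ".")
print(formatted + "€")  # 1.234,50€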
Example no. 44
    def _writecells_xls(self, cells, sheet_name, startrow, startcol):
        if sheet_name in self.sheets:
            wks = self.sheets[sheet_name]
        else:
            wks = self.book.add_sheet(sheet_name)
            self.sheets[sheet_name] = wks

        style_dict = {}

        for cell in cells:
            val = _conv_value(cell.val)

            num_format_str = None
            if isinstance(cell.val, datetime.datetime):
                num_format_str = "YYYY-MM-DD HH:MM:SS"
            if isinstance(cell.val, datetime.date):
                num_format_str = "YYYY-MM-DD"

            stylekey = json.dumps(cell.style)
            if num_format_str:
                stylekey += num_format_str

            if stylekey in style_dict:
                style = style_dict[stylekey]
            else:
                style = CellStyleConverter.to_xls(cell.style, num_format_str)
                style_dict[stylekey] = style

            if cell.mergestart is not None and cell.mergeend is not None:
                wks.write_merge(
                    startrow + cell.row,
                    startrow + cell.mergestart,
                    startcol + cell.col,
                    startcol + cell.mergeend,
                    val,
                    style,
                )
            else:
                wks.write(startrow + cell.row, startcol + cell.col, val, style)
Example no. 45
def to_json(obj):
    if isinstance(obj, pd.DataFrame):
        return dataframe_json(obj)

    if isinstance(obj, pd.Series):
        return series_json(obj)

    if isinstance(obj, list):
        jlist = []
        for v in obj:
            jlist.append(to_json(v))
        return json_list(jlist)

    if isinstance(obj, dict):
        jdict = {}
        for k, v in obj.iteritems():
            jdict[k] = to_json(v)
        return json_dict(jdict)

    if hasattr(obj, 'to_json'):
        return obj.to_json()

    return json.dumps(obj)
Example no. 46
 def test_encodeDecodeLongDecimal(self):
     sut = {u('a'): -528656961.4399388}
     encoded = ujson.dumps(sut, double_precision=15)
     ujson.decode(encoded)
Example no. 47
    def testArrayNumpyExcept(self):

        input = ujson.dumps([42, {}, 'a'])
        try:
            ujson.decode(input, numpy=True)
            assert False, "Expected exception!"
        except(TypeError):
            pass
        except:
            assert False, "Wrong exception"

        input = ujson.dumps(['a', 'b', [], 'c'])
        try:
            ujson.decode(input, numpy=True)
            assert False, "Expected exception!"
        except(ValueError):
            pass
        except:
            assert False, "Wrong exception"

        input = ujson.dumps([['a'], 42])
        try:
            ujson.decode(input, numpy=True)
            assert False, "Expected exception!"
        except(ValueError):
            pass
        except:
            assert False, "Wrong exception"

        input = ujson.dumps([42, ['a'], 42])
        try:
            ujson.decode(input, numpy=True)
            assert False, "Expected exception!"
        except(ValueError):
            pass
        except:
            assert False, "Wrong exception"

        input = ujson.dumps([{}, []])
        try:
            ujson.decode(input, numpy=True)
            assert False, "Expected exception!"
        except(ValueError):
            pass
        except:
            assert False, "Wrong exception"

        input = ujson.dumps([42, None])
        try:
            ujson.decode(input, numpy=True)
            assert False, "Expected exception!"
        except(TypeError):
            pass
        except:
            assert False, "Wrong exception"

        input = ujson.dumps([{'a': 'b'}])
        try:
            ujson.decode(input, numpy=True, labelled=True)
            assert False, "Expected exception!"
        except(ValueError):
            pass
        except:
            assert False, "Wrong exception"

        input = ujson.dumps({'a': {'b': {'c': 42}}})
        try:
            ujson.decode(input, numpy=True, labelled=True)
            assert False, "Expected exception!"
        except(ValueError):
            pass
        except:
            assert False, "Wrong exception"

        input = ujson.dumps([{'a': 42, 'b': 23}, {'c': 17}])
        try:
            ujson.decode(input, numpy=True, labelled=True)
            assert False, "Expected exception!"
        except(ValueError):
            pass
        except:
            assert False, "Wrong exception"