def get_block_data(self):
    block_dict = Filler.get_serializable_dict(
        self.w3.eth.getBlock(self.current_status['block_height']))
    block_dict['difficulty'] = Int64(block_dict['difficulty'])
    # gasLimit (and gasUsed) can overflow Int64, so store them as strings
    block_dict['gasLimit'] = str(block_dict['gasLimit'])
    block_dict['gasUsed'] = str(block_dict['gasUsed'])
    block_dict['number'] = Int64(block_dict['number'])
    block_dict['size'] = Int64(block_dict['size'])
    block_dict['timestamp'] = Int64(block_dict['timestamp'])
    block_dict['totalDifficulty'] = Int64(block_dict['totalDifficulty'])
    block_dict['extraData'] = block_dict.pop('proofOfAuthorityData')
    block_dict['receiptRoot'] = block_dict.pop('receiptsRoot')
    return block_dict
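# Why the gas fields above become strings rather than Int64: BSON integers
# top out at 8 bytes, and PyMongo raises OverflowError beyond that. A minimal
# standalone sketch (not part of the filler) of the limit:
import bson
from bson import Int64

bson.encode({'n': Int64(2**63 - 1)})  # largest representable BSON int64
try:
    bson.encode({'n': 2**63})  # one past the limit
except OverflowError:
    pass  # hence str() for values that may exceed int64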
def resolve(self, value):
    """Resolve the BSON `value` by converting integer floats into
    :class:`bson.Int64`.
    """
    value = super(IntegerField, self).resolve(value)
    if isinstance(value, float) and value.is_integer():
        value = Int64(value)
    return value
def get_tx_data(self, tx_hash):
    tx_dict = Filler.get_serializable_dict(
        self.w3.eth.getTransaction(tx_hash))
    tx_dict = {key: tx_dict[key] for key in
               ['hash', 'nonce', 'value', 'gasPrice', 'gas', 'input',
                'r', 's', 'v']}
    tx_receipt_dict = Filler.get_serializable_dict(
        self.w3.eth.getTransactionReceipt(tx_hash))
    tx_receipt_dict = {key: tx_receipt_dict[key] for key in
                       ['blockHash', 'blockNumber', 'transactionIndex',
                        'from', 'to', 'gasUsed', 'cumulativeGasUsed',
                        'logs', 'logsBloom', 'contractAddress']}
    # contractAddress is None unless the transaction created a contract
    if not tx_receipt_dict['contractAddress']:
        tx_receipt_dict.pop('contractAddress')
    tx_dict.update(tx_receipt_dict)
    tx_dict['nonce'] = Int64(tx_dict['nonce'])
    tx_dict['value'] = Int64(tx_dict['value'])
    tx_dict['gasPrice'] = Int64(tx_dict['gasPrice'])
    tx_dict['gas'] = str(tx_dict['gas'])
    tx_dict['blockNumber'] = Int64(tx_dict['blockNumber'])
    tx_dict['gasUsed'] = str(tx_dict['gasUsed'])
    tx_dict['cumulativeGasUsed'] = str(tx_dict['cumulativeGasUsed'])
    return tx_dict
async def create_task(self, db, data: schemas.UpdateTask):
    # Incrementing the id by reading the current max may race; good enough for now
    last_id = db.task_schedule.find_one(sort=[('id', -1)])
    if not last_id:
        return False
    get_suite = db.test_suite.find_one({'p_key': data.suite_name})
    res = db.task_schedule.insert_one({
        'id': Int64(last_id['id'] + 1),
        'update_time': datetime.datetime.today().strftime('%Y-%m-%d %H:%M'),
        'total': Int64(get_suite['total']),
        'task_name': data.task_name,
        'suite_name': get_suite['suite_name'],
        'operator': data.operator,
        'version': data.version,
        'status': 'idle',
        'f_key': data.suite_name,
        'is_delete': False
    })
    if not res:
        return False
    p_key = str(res.inserted_id)
    update = db.task_schedule.update_one(
        {'_id': res.inserted_id}, {'$set': {'p_key': p_key}})
    if update.modified_count != 1:
        return False
    return True
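# The comment above concedes that the read-max-then-insert id scheme can race.
# A common alternative is an atomic $inc on a counter document; this is only a
# sketch, and the 'counters' collection name is an assumption, not something
# this codebase defines:
from pymongo import ReturnDocument

def next_task_id(db):
    # find_one_and_update executes atomically on the server, so concurrent
    # callers can never be handed the same sequence value.
    counter = db.counters.find_one_and_update(
        {'_id': 'task_schedule'},
        {'$inc': {'seq': 1}},
        upsert=True,
        return_document=ReturnDocument.AFTER,
    )
    return counter['seq']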
def order_book_to_dict(order_book, interval):
    # Price levels are stored as ints scaled by 10^4; divide to recover decimals.
    return {
        "instrument": order_book.instrument,
        "bids": [{
            "price": price / 10000,
            "orders": [order_to_dict(order) for order in orders]
        } for price, orders in sorted(order_book.bid_book.items(),
                                      reverse=True)],
        "asks": [{
            "price": price / 10000,
            "orders": [order_to_dict(order) for order in orders]
        } for price, orders in sorted(order_book.ask_book.items())],
        "interval_multiple": Int64(_round_up(order_book.last_time, interval)),
        "last_sod_offset": order_book.last_sod_offset
    }
def resolve(self, value):
    """Resolve the BSON `value` by converting strings and string-like
    types into numbers.
    """
    value = super(NumberField, self).resolve(value)
    if isinstance(value, (bytearray, bytes, string_type)):
        # Try the integer parse first; fall back to float, then give up.
        try:
            value = Int64(value)
        except ValueError:
            try:
                value = float(value)
            except ValueError:
                pass
    return value
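# Rough illustration of the two resolve() hooks above; the constructor kwargs
# mirror the tests further below, so treat the exact mongomodels API as an
# assumption rather than documented behavior:
from bson import Int64
from mongomodels.fields import IntegerField, NumberField

assert IntegerField(nullable=False).resolve(3.0) == Int64(3)   # integer float
assert IntegerField(nullable=False).resolve(3.5) == 3.5        # left for validate()
assert NumberField(nullable=False).resolve('42') == Int64(42)  # integer string
assert NumberField(nullable=False).resolve('4.2') == 4.2       # falls back to float
assert NumberField(nullable=False).resolve('abc') == 'abc'     # unparseable: unchanged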
def make_value_safe(val, fmt: Safe_Formats = Safe_Formats.BSON) -> object:
    """Make a value safe for use in BSON (Mongo) or JSON."""
    if fmt == Safe_Formats.BSON:
        if isinstance(val, np.generic):
            val = val.item()  # unbox numpy scalars to native Python types
        if isinstance(val, int):
            # Promote anything outside the int32 range to an explicit Int64.
            if not -2**31 <= val <= 2**31 - 1:
                return Int64(val)
            return int(val)
        return val
    elif fmt == Safe_Formats.JSON:
        if isinstance(val, np.generic):
            return val.item()
        return val
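# Quick checks of make_value_safe's promotion rules, assuming the Safe_Formats
# enum defined alongside it:
import numpy as np
from bson import Int64

assert make_value_safe(np.int64(7)) == 7                      # numpy scalar unboxed
assert type(make_value_safe(100)) is int                      # int32 range: plain int
assert isinstance(make_value_safe(2**40), Int64)              # wide int: explicit Int64
assert make_value_safe(np.float32(1.5), Safe_Formats.JSON) == 1.5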
def get_comments(db, post):
    db.comments_col()
    # Materialize the iterator so it can be both counted and iterated.
    comments = list(post.get_comments())
    for c in comments:
        query = {'id': Int64(c.id)}
        if not db.find_one(query):
            comment = {
                'id': c.id,
                'InfoUpdateDate': datetime.datetime.utcnow(),
                'InsPageLink': const.IG_PROFILE + post.owner_username,
                'InsPostlink': const.IG_URL + post.shortcode,
                'PostRelaseDate': post.date_utc,
                'CommentDate': c.created_at_utc,
                'CommetDescription': c.text,
                'CommentLike': c.likes_count,
                'ReplyCount': len(comments) + 1,
            }
            db.insert_one(comment)
async def update_task(self, db, data: schemas.UpdateTask):
    get_suite = db.test_suite.find_one({'p_key': data.suite_name})
    res = db.task_schedule.update_one({'id': data.id}, {
        '$set': {
            'update_time': datetime.datetime.today().strftime('%Y-%m-%d %H:%M'),
            'total': Int64(get_suite['total']),
            'task_name': data.task_name,
            'suite_name': get_suite['suite_name'],
            'operator': data.operator,
            'version': data.version,
            'status': data.status,
            'f_key': data.suite_name,
            'is_delete': False
        }
    })
    return res.modified_count == 1
import pytest
from bson import Int64

from mongomodels.fields import NumberField, IntegerField, RealNumberField


@pytest.mark.parametrize('value', [1, 0, -1, 1.1, Int64(1)])
def test_validate_number(value):
    NumberField(nullable=False).validate(value)


@pytest.mark.parametrize('value', [[], '', '1', {'a': 1}])
def test_fails_validate_number(value):
    with pytest.raises(TypeError):
        NumberField(nullable=False).validate(value)


@pytest.mark.parametrize('value', [1, 0, -1, Int64(1)])
def test_validate_integer(value):
    IntegerField(nullable=False).validate(value)


@pytest.mark.parametrize('value', [1.1, [], '', '1', {'a': 1}])
def test_fails_validate_integer(value):
    with pytest.raises(TypeError):
        IntegerField(nullable=False).validate(value)


@pytest.mark.parametrize('value', [0.0, 1.1])
def test_validate_real_number(value):
    RealNumberField(nullable=False).validate(value)
import json

import pymongo
from bson import Int64
from bson.objectid import ObjectId

client = pymongo.MongoClient("128.239.20.76")
c_det = client.asdf.det

branch = "master"

id_list = [
    # "5ac39c01c15e003de25a9fce"
]

for request_id in id_list:
    path = ("/sciclone/aiddata10/geo/{0}/outputs/det/results/{1}/"
            "request_details.json").format(branch, request_id)
    request = json.load(open(path))
    request["status"] = Int64(1)
    request["stage"][3]["time"] = 1522850400
    request["_id"] = ObjectId(request["_id"])
    # Strip Angular's $$hashKey artifacts before reinserting.
    for ix in range(len(request['release_data'])):
        del request['release_data'][ix]['$$hashKey']
    for ix in range(len(request['raster_data'])):
        del request['raster_data'][ix]['$$hashKey']
        for ixf in range(len(request['raster_data'][ix]['files'])):
            del request['raster_data'][ix]['files'][ixf]['$$hashKey']
    c_det.insert_one(request)
    # from pprint import pprint
    # pprint(request)
MIN_INT32 = -(2 ** 31)
UNDER_INT32_VALUE = MIN_INT32 - 1
MAX_INT32 = 2 ** 31 - 1
OVER_INT32_VALUE = MAX_INT32 + 1

sample_datetime = datetime.now()

type_test_data = [
    # Simple types
    TypeTestCase(int, "int", 15),
    TypeTestCase(int, "int", MIN_INT32),
    TypeTestCase(int, "int", MAX_INT32),
    TypeTestCase(int, "long", UNDER_INT32_VALUE),
    TypeTestCase(int, "long", OVER_INT32_VALUE),
    TypeTestCase(Int64, "long", 13),
    TypeTestCase(Int64, "long", Int64(13)),
    TypeTestCase(str, "string", "foo"),
    TypeTestCase(float, "double", 3.14),
    TypeTestCase(Decimal, "decimal", Decimal("3.14159265359")),
    TypeTestCase(
        Decimal, "decimal", "3.14159265359"
    ),  # TODO split tests for odmantic type inference
    TypeTestCase(Decimal128, "decimal", Decimal128(Decimal("3.14159265359"))),
    TypeTestCase(Dict, "object", {"foo": "bar", "fizz": {"foo": "bar"}}),
    TypeTestCase(bool, "bool", False),
    TypeTestCase(Pattern, "regex", re.compile(r"^.*$")),
    TypeTestCase(Pattern, "regex", re.compile(r"^.*$", flags=re.IGNORECASE)),
    TypeTestCase(
        Pattern, "regex", re.compile(r"^.*$", flags=re.IGNORECASE | re.MULTILINE)
    ),
    TypeTestCase(Regex, "regex", Regex(r"^.*$", flags=32)),
]
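# The int32/int64 boundary these cases probe can be confirmed directly against
# PyMongo's codec; a standalone sketch:
import bson
from bson import Int64

# Fits in int32: round-trips as a plain Python int.
assert type(bson.decode(bson.encode({'v': MAX_INT32}))['v']) is int
# One past the boundary: stored as BSON int64 and decoded as bson.Int64.
assert isinstance(bson.decode(bson.encode({'v': OVER_INT32_VALUE}))['v'], Int64)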
def change_bind_information(cfg_file, col_name, read_datas=read_datas,
                            skip_first=False, clean=True):
    datas = read_file(cfg_file, read_datas, skip_first)
    aircraft_conn = current_app.mongodb['aircraft_information']
    count = 0
    for data in datas:
        collection = 'time_control_unit_y5b'
        mxp_conn = current_app.mongodb['time_control_unit_y5b']
        # '时寿件' ("life-limited part") items live in a separate collection.
        if data['category'] == '时寿件'.encode('utf-8'):
            collection = 'life_control_unit_y5b'
            mxp_conn = current_app.mongodb['life_control_unit_y5b']
        inst = mxp_conn.find_one({
            "id": data['mxp_id'],
            "pieceNo": data['mxp_id']
        })
        if not inst:
            logging.warning('The mxp does not exist. Mxp: %s' % data['planeId'])
            logging.warning('The mxp does not exist. Mxp: %s' % data['serialNumber'])
            print(data['mxp_id'])
            continue
        ref_id = DBRef(collection, inst['_id'])
        tmp_item = aircraft_conn.find_one(
            {'id': data['planeId'], 'boundedItems.refId': ref_id},
            {'boundedItems.$': 1})
        if not tmp_item:
            logging.warning('The bind information does not exist. Mxp: %s' % data['planeId'])
            logging.warning('The bind information does not exist. Mxp: %s' % data['mxp_id'].decode('utf-8'))
            logging.warning('The bind information does not exist. Mxp: %s' % data['completeDate'])
            continue
        tmp = tmp_item['boundedItems'][0]
        tmp['ellapsedTimes'] = data['ellapsedTimes']
        tmp['completeDate'] = Int64(data['completeDate'])
        tmp['ellapsedHours'] = float(data['ellapsedHours'])
        tmp['engineTime'] = float(data['engineTime'])
        tmp['trace'] = data['trace']
        # tmp['serialNumber'] = data['serialNumber']
        for i in range(len(data['serialNumber'])):
            tmp['serialNumber'] = data['serialNumber'][i]
            if i < 1:
                # The first serial number updates the matched bound item in place.
                aa = aircraft_conn.update(
                    {'id': data['planeId'], 'boundedItems.refId': ref_id},
                    {'$set': {'boundedItems.$': tmp}})
            else:
                # Subsequent serial numbers are appended as new bound items.
                tmp['boundedId'] = str(ObjectId())
                aa = aircraft_conn.update(
                    {'id': data['planeId'], 'boundedItems.refId': ref_id},
                    {'$addToSet': {'boundedItems': tmp}})
            count = count + aa['n']
    return count
def _setup():
    global db
    global raw_bson
    global large_doc_keys

    db = pymongo.MongoClient().bsonnumpy_test
    small = db[collection_names[SMALL]]
    small.drop()
    print("%d small docs, %d bytes each with 3 keys" % (
        N_SMALL_DOCS,
        len(BSON.encode({'_id': ObjectId(), 'x': 1, 'y': math.pi}))))
    small.insert_many([
        collections.OrderedDict([('x', 1), ('y', math.pi)])
        for _ in range(N_SMALL_DOCS)])
    dtypes[SMALL] = np.dtype([('x', np.int64), ('y', np.float64)])

    large = db[collection_names[LARGE]]
    large.drop()
    # 2600 keys: 'a', 'aa', 'aaa', .., 'zz..z'
    large_doc_keys = [c * i for c in string.ascii_lowercase
                      for i in range(1, 101)]
    large_doc = collections.OrderedDict([(k, math.pi) for k in large_doc_keys])
    print("%d large docs, %dk each with %d keys" % (
        N_LARGE_DOCS,
        len(BSON.encode(large_doc)) // 1024,
        len(large_doc_keys)))
    large.insert_many([large_doc.copy() for _ in range(N_LARGE_DOCS)])
    dtypes[LARGE] = np.dtype([(k, np.float64) for k in large_doc_keys])

    # Ignore for now that the first batch defaults to 101 documents.
    raw_bson_docs_small = [{'x': 1, 'y': math.pi} for _ in range(N_SMALL_DOCS)]
    raw_bson_small = BSON.encode({
        'ok': 1,
        'cursor': {
            'id': Int64(1234),
            'ns': 'db.collection',
            'firstBatch': raw_bson_docs_small
        }
    })
    raw_bson_docs_large = [large_doc.copy() for _ in range(N_LARGE_DOCS)]
    raw_bson_large = BSON.encode({
        'ok': 1,
        'cursor': {
            'id': Int64(1234),
            'ns': 'db.collection',
            'firstBatch': raw_bson_docs_large
        }
    })
    raw_bsons[SMALL] = raw_bson_small
    raw_bsons[LARGE] = raw_bson_large