Example #1
def _parse_canonical_int64(doc):
    """Decode a JSON int64 to bson.int64.Int64."""
    l_str = doc['$numberLong']
    if len(doc) != 1:
        raise TypeError('Bad $numberLong, extra field(s): %s' % (doc, ))
    return Int64(l_str)
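
A minimal usage sketch for the decoder above (not part of the original snippet; it assumes bson.int64.Int64 is imported). In MongoDB Extended JSON the $numberLong value arrives as a string:

# Hypothetical call, for illustration only
doc = {'$numberLong': '9223372036854775807'}
value = _parse_canonical_int64(doc)
assert isinstance(value, Int64)
assert value == 9223372036854775807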
Example #2
def test_weighted_4():
    data = Int64(8)
    assert Weighted(data) == (2, data)
Example #3
    return doc


_CANONICAL_JSON_TABLE = {
    frozenset(['$oid']):
    lambda d, _: ObjectId(d['$oid']),
    frozenset(['$numberDecimal']):
    lambda d, _: Decimal128(d['$numberDecimal']),
    frozenset(['$symbol']):
    lambda d, _: text_type(d['$symbol']),
    frozenset(['$numberInt']):
    lambda d, _: int(d['$numberInt']),
    frozenset(['$numberDouble']):
    lambda d, _: float(d['$numberDouble']),
    frozenset(['$numberLong']):
    lambda d, _: Int64(d['$numberLong']),
    frozenset(['$date']):
    _get_date,
    frozenset(['$minKey']):
    lambda dummy0, dummy1: MinKey(),
    frozenset(['$maxKey']):
    lambda dummy0, dummy1: MaxKey(),
    frozenset(['$undefined']):
    lambda dummy0, dummy1: None,
    frozenset(['$dbPointer']):
    _get_dbpointer,
    frozenset(['$ref', '$id']):
    lambda d, _: DBRef(d.pop('$ref'), d.pop('$id'), **d),
    frozenset(['$ref', '$id', '$db']):
    lambda d, _: DBRef(d.pop('$ref'), d.pop('$id'), d.pop('$db'), **d),
    frozenset(['$regex', '$options']):
Example #4
 def convert_timestamp(value: str) -> Int64:
     try:
         return Int64(value)
     except (TypeError, ValueError):
         # Return None when the value cannot be converted to a 64-bit integer.
         return None
Example #5
def main():
    # Start the mock Mongo server
    print("Attivazione Server....")
    server = MockupDB(port='27017')
    server.run()
    print("Server Mongo in esecuzione all'indirizzo "+str(server.address))

    # Added: logging configuration
    LOG_FILENAME = 'log_file.out'
    logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
    logging.debug('File di Log per Mockup Mongo')

    print("----------------Inizializzazione------------------------------")

    getlast=0
    ismaster=0
    buildinfo=0
    saslStart=0
    saslContinue = 0
    folder = "/MockupFolder/Config"
    for conf in os.listdir(folder):
        if getlast == 0 and "getlasterror" in str(conf):
            getlast = getlast + 1
            with open(str(folder)+'/'+str(conf)) as file_data:
                data = json.load(file_data)
                #getlasterror_reply = OpReply(data["reply_data"])
                opmsgreply_getlasterror = mockupdb.make_op_msg_reply(data["reply_data"])
                server.autoresponds('getlasterror', opmsgreply_getlasterror)

        if ismaster == 0 and "ismaster" in str(conf):
            ismaster = ismaster + 1
            with open(str(folder)+'/'+str(conf)) as file_data:
                data = json.load(file_data)
                #ismaster_reply = OpReply(data["reply_data"])
                opmsgreply_ismaster = mockupdb.make_op_msg_reply(data["reply_data"]["sections"][0]["payload"])
                server.autoresponds('ismaster', opmsgreply_ismaster)

        if buildinfo == 0 and (("buildInfo" in str(conf)) or ("buildinfo" in str(conf))):
            buildinfo = buildinfo + 1
            with open(str(folder)+'/'+str(conf)) as file_data:
                data = json.load(file_data)
                #buildinfo_reply = OpReply(data["reply_data"])
                opmsgreply_buildinfo = mockupdb.make_op_msg_reply(data["reply_data"])
                server.autoresponds('buildInfo', opmsgreply_buildinfo)
                server.autoresponds('buildinfo', opmsgreply_buildinfo)

        if saslStart == 0 and "saslStart" in str(conf):
            saslStart = saslStart + 1
            with open(str(folder)+'/'+str(conf)) as file_data:
                data = json.load(file_data)
                bin_data = "cj07JVxUUDM8TXVvd08hZXE9cWondHBSZElpSmZOb21mOHluUitjMmR0Y0x3RGtYSE5pWjVXWU9SZSxzPVBzdEJqdWpXQjZEdkR6Kzk2LysxR0E9PSxpPTEwMDAw".encode("ascii")
                #data["reply_data"]["payload"] = int(text_to_bits(bin_data))
                data["reply_data"]["payload"] = base64.decodebytes(bin_data)
                saslStart_reply = OpReply(data["reply_data"])
                server.autoresponds('saslStart', saslStart_reply)

        if saslContinue == 0 and "saslContinue" in str(conf):
            saslContinue = saslContinue + 1
            with open(str(folder)+'/'+str(conf)) as file_data:
                data = json.load(file_data)
                if str(data["reply_data"]["done"]) == "false":
                    bin_data = ''.encode("ascii")
                else:
                    bin_data = "dj0zb1A2enh5anBXSW5xc25nWllzN2lYZWJ3S289".encode("ascii")
                #data["reply_data"]["payload"] = int(text_to_bits(bin_data))
                data["reply_data"]["payload"] = base64.decodebytes(bin_data)
                saslContinue_reply = OpReply(data["reply_data"])
                server.autoresponds('saslContinue', saslContinue_reply)


        if getlast == 1 and ismaster == 1 and buildinfo == 1 and saslContinue == 1 and saslStart == 1:
            break


    if ismaster == 0:
        print("ismaster automatico")
        opmsgreply = mockupdb.make_op_msg_reply(OrderedDict([('maxWireVersion', 6), ('minWireVersion', 0), ('ok', 1.0)]))
        server.autoresponds('ismaster',opmsgreply)
    if (buildinfo == 0):
        print("buildinfo automatico")
        server.autoresponds('buildInfo')
        server.autoresponds('buildinfo')
    if (getlast == 0):
        print("getlasterror automatico")
        server.autoresponds('getlasterror')
    if (saslStart == 0):
        print("saslstart automatico")
        server.autoresponds('saslStart')
    if (saslContinue == 0):
        print("saslcontinue automatico")
        server.autoresponds('saslContinue')
    server.autoresponds('ping')


    # During initialization the client may also send commands such as find, delete and insert.
    # They do not always arrive in the same order, so the replies are stored in data structures first.
    # Whenever one of these commands arrives, it is answered with the reply saved in the corresponding structure.
    # The replies are read from any report files saved in the dedicated CMDs folder of MockupFolder.

    find_list = []
    insert_list = []
    delete_list = []
    folder = "/MockupFolder/CMDs"
    for command in os.listdir(folder):
        if "find" in str(command):
            with open(str(folder)+'/'+str(command)) as file_data:
                data = json.load(file_data)
                data["reply_data"]["cursor"]["id"] = Int64(0)
                find_list.append(mockupdb.make_op_msg_reply(data["reply_data"]))
        if "insert" in str(command):
            with open(str(folder)+'/'+str(command)) as file_data:
                data = json.load(file_data)
                insert_list.append(mockupdb.make_op_msg_reply(data["reply_data"]))
        if "delete" in str(command):
            with open(str(folder)+'/'+str(command)) as file_data:
                data = json.load(file_data)
                delete_list.append(mockupdb.make_op_msg_reply(data["reply_data"]))

    count_find = 0
    count_insert = 0
    count_delete = 0
    while not (count_delete == len(delete_list) and count_find == len(find_list) and count_insert == len(insert_list)):
        cmd = server.receives(timeout=3000)
        if str(cmd.command_name) == "find":
            cmd.replies(find_list[count_find])
            count_find = count_find + 1
        elif str(cmd.command_name) == "insert":
            cmd.replies(insert_list[count_insert])
            count_insert = count_insert + 1
        elif str(cmd.command_name) == "delete":
            cmd.replies(delete_list[count_delete])
            count_delete = count_delete + 1
        else:
            general_reply = mockupdb.make_op_msg_reply(OrderedDict([('n', 1), ('ok', 1.0)]))
            cmd.replies(general_reply)

    print("----------------Inizializzazione Terminata------------------------------")

    print("in attesa di comando----MODIFICA 2.0")
    cmd = server.receives(timeout=100000)
    print("ricevuto: " + str(cmd))
    while True:
        # Now receive requests from the application, check that they are correct, and send the replies
        # while not os.path.exists('ActualFileTest.txt'):
        #     cmd.command_err(errmsg='unrecognized request')
        #     print("in attesa di comando")
        #     cmd = server.receives(timeout=100000)
        file = open('ActualFileTest.txt', 'r')
        folder_name = file.read()
        folder_name = folder_name.strip('\n')
        print(folder_name)
        folder_name2 = folder_name
        file.close()
        num_test=int(re.search(r'\d+', folder_name2).group())
        print("-----------------TEST " + str(num_test) + "----------------------")
        #os.remove("ActualFileTest.txt")
        print(folder_name)
        folder = str(folder_name)+"CMDs"
        controllo = False
        for command in sorted(os.listdir(folder), key=numericalSort):
            # Check that the content of the request is correct
            controllo = True
            with open(str(folder)+'/'+str(command)) as file_data:
                print("-------------------"+str(command))
                data = json.load(file_data)
                print("stampo campo command name: " + str(cmd.command_name))
                # In the report file (for op_msg in newer MongoDB versions) there is a "sections" field
                # that can contain several elements holding the various fields to be tested
                if 'sections' in data["request_data"]:
                    len_str = len(data["request_data"]["sections"])
                    string_report = ''
                    for i in range(0,len_str):
                        payload = str(data["request_data"]["sections"][i]["payload"])
                        # If it is not the first JSON object, drop the opening brace so that
                        # everything merges into a single JSON object
                        if i > 0:
                            payload = payload[1:]
                        # If it is not the last JSON object, replace the closing brace with a comma to separate it from the next one
                        if  i < (len_str-1):
                            payload = payload[:-1]+","
                        string_report = string_report + payload
                    string_report = string_report.replace("'", '"')
                    string_report = string_report.replace("True", "true")
                    string_report = string_report.replace("False", "false")
                    data_report = json.loads(string_report)
                else:
                    data_report = data["request_data"]

                if 'sections' in data["reply_data"]:
                    len_str = len(data["reply_data"]["sections"])
                    string_reply = ''
                    for i in range(0, len_str):
                        payload =  str(data["reply_data"]["sections"][i]["payload"])
                        # If it is not the first JSON object, drop the opening brace so that
                        # everything merges into a single JSON object
                        if i > 0:
                            payload = payload[1:]
                        # If it is not the last JSON object, replace the closing brace with a comma to separate it from the next one
                        if i < (len_str - 1):
                            payload = payload[:-1] + ","
                        string_reply = string_reply + payload
                    string_reply = string_reply.replace("True", "true")
                    string_reply = string_reply.replace("False", "false")
                    string_reply = string_reply.replace("'", '"')
                    data_reply = json.loads(string_reply)
                else:
                    data_reply = data["reply_data"]

                #------------ request comparison starts here -------------
                request = json.loads(str(cmd))
                print("Controllo correttezza richiesta...")
                print("da file ho letto:" + str(data_report))
                print("da comando ricevuto:" + str(request))
                dispatcher = {'update': cmd_update, 'insert':cmd_insert, 'delete':cmd_delete, 'find':cmd_find, 'count': cmd_count}
                ris = dispatcher[cmd.command_name](data_report,request)
                print(ris)

                # Addition
                try:
                    logging.debug("-----------------TEST " + str(num_test) + "----------------------")
                    assert ris == True
                    logging.debug('Success!')
                    print("Success!")
                    print("considero ------> " + str(data_reply))
                    # If the comparison succeeded, prepare the reply
                    if cmd.command_name == "find" and 'cursor' in data_reply and 'id' in data_reply["cursor"] \
                            and '$numberLong' in data_reply["cursor"]["id"]:
                        number_long = data_reply["cursor"]["id"]["$numberLong"]
                        data_reply["cursor"]["id"] = Int64(number_long)
                    if 'cursor' in data_reply and 'firstBatch' in data_reply["cursor"]:
                        len_firstbatch = len(data_reply["cursor"]["firstBatch"])
                        for i in range(0, len_firstbatch):
                            if 'lastSeen' in data_reply["cursor"]["firstBatch"][i] and "$date" in \
                                    data_reply["cursor"]["firstBatch"][i]["lastSeen"]:
                                data = json.loads(
                                    str(data_reply["cursor"]["firstBatch"][i]["lastSeen"]).replace("'", '"'))
                                d = dateutil.parser.parse(data["$date"])
                                data_reply["cursor"]["firstBatch"][i]["lastSeen"] = d
                            if '_id' in data_reply["cursor"]["firstBatch"][i] and "date" in \
                                    data_reply["cursor"]["firstBatch"][i]["_id"] and "$date" in \
                                    data_reply["cursor"]["firstBatch"][i]["_id"]["date"]:
                                data = json.loads(
                                    str(data_reply["cursor"]["firstBatch"][i]["_id"]["date"]).replace("'", '"'))
                                d = dateutil.parser.parse(data["$date"])
                                data_reply["cursor"]["firstBatch"][i]["_id"]["date"] = d
                            if 'scheduledNotifications' in data_reply["cursor"]["firstBatch"][i] \
                                    and 'REMIND' in data_reply["cursor"]["firstBatch"][i]["scheduledNotifications"] \
                                    and 'lastNotified' in \
                                    data_reply["cursor"]["firstBatch"][i]["scheduledNotifications"]["REMIND"] \
                                    and "$date" in \
                                    data_reply["cursor"]["firstBatch"][i]["scheduledNotifications"]["REMIND"][
                                        "lastNotified"]:
                                print("entro per modificare la data")
                                data = json.loads(str(
                                    data_reply["cursor"]["firstBatch"][i]["scheduledNotifications"]["REMIND"][
                                        "lastNotified"]).replace("'", '"'))
                                d = dateutil.parser.parse(data["$date"])
                                data_reply["cursor"]["firstBatch"][i]["scheduledNotifications"]["REMIND"][
                                    "lastNotified"] = d
                    # If correct, send the reply contained in the report file
                    response = mockupdb.make_op_msg_reply(data_reply)
                    cmd.replies(response)
                except AssertionError:
                    logging.error(traceback.format_exc())
                    logging.debug("da file ho letto:" + str(data_report))
                    logging.debug("da comando ho ricevuto:" + str(request))
                    cmd.command_err(errmsg='unrecognized request')
                # End of addition
            print("in attesa di comando")
            cmd = server.receives(timeout=100000)
            print("ricevuto: " + str(cmd))
        if not controllo:
            logging.debug("ERROR: Nessuna risposta al comando")
            cmd.command_err(errmsg='unrecognized request')
            print("in attesa di comando")
            cmd = server.receives(timeout=100000)
            print("ricevuto: " + str(cmd))
Example #6
def edit_schedule(alias, user=None):
    schedule = db.schedules.find_one({'alias': alias})
    if schedule is None:
        flash(error_schedule_not_found, 'warning')
        return redirect(url_for('web.home')), 404

    if user is None or (ObjectId(user['_id']) != schedule['creator'] and
                        ObjectId(user['_id']) not in schedule['moderators']):
        flash('Нехватает прав на редактирование этого расписания', 'danger')
        return redirect(url_for('web.home')), 401

    # Editing the permanent schedule
    if request.method == 'POST':
        schedule_id = schedule['_id']
        if len(request.form['schedule_name']) < 1:
            return jsonify({'result': 'error', 'field': 'schedule_name'}), 400

        schedule_name = request.form['schedule_name']
        alias = utils.gen_schedule_alias()
        if 'alias' in request.form:
            alias = request.form['alias']
            schedule = db.schedules.find_one({'alias': alias})
            if schedule is not None and schedule['_id'] != ObjectId(
                    schedule_id):
                return jsonify({'result': 'error', 'field': 'alias'}), 400
            if len(alias) == 0:
                alias = utils.gen_schedule_alias()
                while db.schedules.find_one({'alias': alias}) is not None:
                    alias = utils.gen_schedule_alias()

        availability = request.form['availability']

        if availability not in ['public', 'private']:
            return jsonify({'result': 'error', 'field': 'availability'}), 400

        first_day = request.form['first_day']
        day_schedule = literal_eval(request.form['schedule'])

        if len(day_schedule) > 1 and not re.match(r'\d{2}\.\d{2}\.\d{2}',
                                                  first_day):
            return jsonify({'result': 'error', 'field': 'first_day'}), 400

        db.schedules.update_one({'_id': ObjectId(schedule_id)}, {
            '$set': {
                'name': schedule_name,
                'alias': alias,
                'availability': availability,
                'first_day': first_day,
                'schedule': day_schedule
            }
        })
        return jsonify({'result': 'success'}), 201

    # Adding a change for a specific date
    if request.method == 'PUT':
        if 'change_date' not in request.form:
            return jsonify({'result': 'error', 'field': 'change_date'}), 400
        if 'lessons' not in request.form:
            return jsonify({'result': 'error', 'field': 'lessons'}), 400
        if 'override' not in request.form:
            return jsonify({'result': 'error'}), 400

        change_date_str = request.form['change_date']
        if not re.match(r'\d{2}\.\d{2}\.\d{2}', change_date_str):
            return jsonify({'result': 'error', 'field': 'change_date'}), 400

        override = request.form['override'] == 'true'
        change_date_millis = utils.date_in_millis(request.form['change_date'])
        try:
            lessons = literal_eval(request.form['lessons'])
        except Exception:
            return jsonify({'result': 'error', 'field': 'lessons'}), 400

        for change in schedule['changes']:
            if change[
                    'change_date_millis'] == change_date_millis and not override:
                return jsonify({'result': 'error'}), 409
        db.schedules.update_one(
            {'_id': ObjectId(schedule['_id'])},
            {'$pull': {
                'changes': {
                    'change_date': Int64(change_date_millis)
                }
            }})
        db.schedules.update_one({'_id': ObjectId(schedule['_id'])}, {
            '$push': {
                'changes': {
                    '$each': [{
                        'change_date_millis': Int64(change_date_millis),
                        'change_date_str': change_date_str,
                        'lessons': lessons,
                    }],
                    '$sort': {
                        'change_date_millis': 1
                    }
                }
            }
        })
        return jsonify({'result': 'success'}), 201

    user = db.users.find_one({'_id': ObjectId(user['_id'])})
    return render_template('manage_schedule.html',
                           title='Редактирование',
                           user=user,
                           schedule=schedule)
Example #7
def object_hook(dct):
    if "$oid" in dct:
        return ObjectId(str(dct["$oid"]))
    if "$ref" in dct:
        return DBRef(dct["$ref"], dct["$id"], dct.get("$db", None))
    if "$date" in dct:
        dtm = dct["$date"]
        # mongoexport 2.6 and newer
        if isinstance(dtm, string_type):
            # Parse offset
            if dtm[-1] == 'Z':
                dt = dtm[:-1]
                offset = 'Z'
            elif dtm[-3] == ':':
                # (+|-)HH:MM
                dt = dtm[:-6]
                offset = dtm[-6:]
            elif dtm[-5] in ('+', '-'):
                # (+|-)HHMM
                dt = dtm[:-5]
                offset = dtm[-5:]
            elif dtm[-3] in ('+', '-'):
                # (+|-)HH
                dt = dtm[:-3]
                offset = dtm[-3:]
            else:
                dt = dtm
                offset = ''

            aware = datetime.datetime.strptime(
                dt, "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=utc)

            if not offset or offset == 'Z':
                # UTC
                return aware
            else:
                if len(offset) == 6:
                    hours, minutes = offset[1:].split(':')
                    secs = (int(hours) * 3600 + int(minutes) * 60)
                elif len(offset) == 5:
                    secs = (int(offset[1:3]) * 3600 + int(offset[3:]) * 60)
                elif len(offset) == 3:
                    secs = int(offset[1:3]) * 3600
                if offset[0] == "-":
                    secs *= -1
                return aware - datetime.timedelta(seconds=secs)
        # mongoexport 2.6 and newer, time before the epoch (SERVER-15275)
        elif isinstance(dtm, collections.Mapping):
            secs = float(dtm["$numberLong"]) / 1000.0
        # mongoexport before 2.6
        else:
            secs = float(dtm) / 1000.0
        return EPOCH_AWARE + datetime.timedelta(seconds=secs)
    if "$regex" in dct:
        flags = 0
        # PyMongo always adds $options but some other tools may not.
        for opt in dct.get("$options", ""):
            flags |= _RE_OPT_TABLE.get(opt, 0)
        return Regex(dct["$regex"], flags)
    if "$minKey" in dct:
        return MinKey()
    if "$maxKey" in dct:
        return MaxKey()
    if "$binary" in dct:
        if isinstance(dct["$type"], int):
            dct["$type"] = "%02x" % dct["$type"]
        subtype = int(dct["$type"], 16)
        if subtype >= 0xffffff80:  # Handle mongoexport values
            subtype = int(dct["$type"][6:], 16)
        return Binary(base64.b64decode(dct["$binary"].encode()), subtype)
    if "$code" in dct:
        return Code(dct["$code"], dct.get("$scope"))
    if "$uuid" in dct:
        return uuid.UUID(dct["$uuid"])
    if "$undefined" in dct:
        return None
    if "$numberLong" in dct:
        return Int64(dct["$numberLong"])
    if "$timestamp" in dct:
        tsp = dct["$timestamp"]
        return Timestamp(tsp["t"], tsp["i"])
    return dct
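
One way to exercise a hook like the one above is to hand it to the standard-library JSON parser. This sketch is not part of the original snippet and assumes the module-level names the hook relies on (Int64, ObjectId, and so on) are imported:

import json

raw = '{"views": {"$numberLong": "1200"}, "_id": {"$oid": "5f6bd0f85cac5a450e8eb9e8"}}'
doc = json.loads(raw, object_hook=object_hook)
# doc["views"] is now Int64(1200) and doc["_id"] is an ObjectId instance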
Example #8
def _object_hook(dct):
    if 'length' in dct:
        dct['length'] = Int64(dct['length'])
    return object_hook(dct)
Example #9
    def result(self, query, request_env, mindsdb_env, session):
        models = mindsdb_env['mindsdb_native'].get_models()
        model_names = [x['name'] for x in models]
        table = query['find']
        where_data = query.get('filter', {})
        print(f'\n\n\nOperating on models: {models}\n\n\n')
        if table == 'predictors':
            data = [{
                'name':
                x['name'],
                'status':
                x['status'],
                'accuracy':
                str(x['accuracy']) if x['accuracy'] is not None else None,
                'predict':
                ', '.join(x['predict']),
                'select_data_query':
                '',
                'external_datasource':
                '',
                'training_options':
                ''
            } for x in models]
        elif table in model_names:
            # prediction
            model = mindsdb_env['mindsdb_native'].get_model_data(
                name=query['find'])

            columns = []
            columns += model['columns']
            columns += [f'{x}_original' for x in model['predict']]
            for col in model['predict']:
                if model['data_analysis_v2'][col]['typing'][
                        'data_type'] == 'Numeric':
                    columns += [f"{col}_min", f"{col}_max"]
                columns += [f"{col}_confidence"]
                columns += [f"{col}_explain"]

            columns += [
                'when_data', 'select_data_query', 'external_datasource'
            ]

            where_data_list = where_data if isinstance(where_data,
                                                       list) else [where_data]
            for statement in where_data_list:
                if isinstance(statement, dict):
                    for key in statement:
                        if key not in columns:
                            columns.append(key)

            if 'select_data_query' in where_data:
                integrations = mindsdb_env['config']['integrations'].keys()
                connection = where_data.get('connection')
                if connection is None:
                    if 'default_mongodb' in integrations:
                        connection = 'default_mongodb'
                    else:
                        for integration in integrations:
                            if integration.startswith('mongodb_'):
                                connection = integration
                                break

                if connection is None:
                    raise Exception(
                        "Can't find connection from which fetch data")

                ds_name = 'temp'

                ds, ds_name = mindsdb_env['data_store'].save_datasource(
                    name=ds_name,
                    source_type=connection,
                    source=where_data['select_data_query'])
                where_data = mindsdb_env['data_store'].get_data(
                    ds_name)['data']
                mindsdb_env['data_store'].delete_datasource(ds_name)

            if 'external_datasource' in where_data:
                ds_name = where_data['external_datasource']
                if mindsdb_env['data_store'].get_datasource(ds_name) is None:
                    raise Exception(f"Datasource {ds_name} does not exist")
                where_data = mindsdb_env['data_store'].get_data(
                    ds_name)['data']

            prediction = mindsdb_env['mindsdb_native'].predict(
                name=table, when_data=where_data)

            predicted_columns = model['predict']

            data = []
            keys = [x for x in list(prediction._data.keys()) if x in columns]
            min_max_keys = []
            for col in predicted_columns:
                if model['data_analysis_v2'][col]['typing'][
                        'data_type'] == 'Numeric':
                    min_max_keys.append(col)

            length = len(prediction._data[predicted_columns[0]])
            for i in range(length):
                row = {}
                explanation = prediction[i].explain()
                for key in keys:
                    row[key] = prediction._data[key][i]

                for key in predicted_columns:
                    row[key + '_confidence'] = explanation[key]['confidence']
                    row[key + '_explain'] = explanation[key]
                for key in min_max_keys:
                    row[key + '_min'] = min(
                        explanation[key]['confidence_interval'])
                    row[key + '_max'] = max(
                        explanation[key]['confidence_interval'])
                data.append(row)

        else:
            # probably wrong table name. Mongo in this case returns empty data
            data = []

        if 'projection' in query and len(data) > 0:
            true_filter = []
            false_filter = []
            for key, value in query['projection'].items():
                if helpers.is_true(value):
                    true_filter.append(key)
                else:
                    false_filter.append(key)

            keys = list(data[0].keys())
            del_id = '_id' in false_filter
            if len(true_filter) > 0:
                for row in data:
                    for key in keys:
                        if key != '_id':
                            if key not in true_filter:
                                del row[key]
                        elif del_id:
                            del row[key]
            else:
                for row in data:
                    for key in false_filter:
                        if key in row:
                            del row[key]

        db = mindsdb_env['config']['api']['mongodb']['database']

        cursor = {
            'id': Int64(0),
            'ns': f"{db}.$cmd.{query['find']}",
            'firstBatch': data
        }
        return {'cursor': cursor, 'ok': 1}
Example #10
            u"localField": u"superheroes_power_matrix.Name",
            u"from": u"characters_stats",
            u"foreignField": u"Name",
            u"as": u"characters_stats"
        }
    },
    {
        u"$unwind": {
            u"path": u"$characters_stats"
        }
    },
    {
        u"$match": {
            u"superheroes_power_matrix.Flight": False,
            u"characters_stats.Intelligence": {
                u"$gt": Int64(50)
            }
        }
    },
    {
        u"$sort": SON([ (u"characters_stats.Intelligence", -1) ])
    },
    {
        u"$project": {
            u"superheroes_power_matrix.Name": u"$superheroes_power_matrix.Name",
            u"characters_stats.Intelligence": u"$characters_stats.Intelligence",
        }
    }
]

cursor = collection.aggregate(
Example #11
             addresses=[
                 dict(
                     street="212 Rue de Tolbiac",
                     city="Paris",
                     state="Ile de France",
                     zip="75013",
                 )
             ],
         ),
     ),
     (MAIN_TWITTER_USER, dict(following=[str(u.id)
                                         for u in TWITTER_USERS])),
     (
         FullBsonModel(
             objectId_=ObjectId("5f6bd0f85cac5a450e8eb9e8"),
             long_=Int64(258),
             decimal_=Decimal128("256.123457"),
             # TODO: document some bytes value might be rejected because of utf8
             # encoding: encode in base64 before
             binary_=Binary(b"\x48\x49"),
             regex_=Regex(r"^.*$"),
         ),
         dict(
             objectId_="5f6bd0f85cac5a450e8eb9e8",
             long_=258,
             decimal_=256.123457,
             binary_=b"\x48\x49".decode(),
             regex_=r"^.*$",
         ),
     ),
 ),
Example #12
def _parse_canonical_int64(doc: Any) -> Int64:
    """Decode a JSON int64 to bson.int64.Int64."""
    l_str = doc["$numberLong"]
    if len(doc) != 1:
        raise TypeError("Bad $numberLong, extra field(s): %s" % (doc, ))
    return Int64(l_str)
Example #13
 def convert_ip(value: str) -> Int64:
     try:
         ip = IPv4Address(value)
         return Int64(ip)
     except ValueError:
         # Return None when the value is not a valid IPv4 address.
         return None
Example #14
 def transform_python(self, value):
     return Int64(value.value)
Example #15
 def __init__(self, flags, cursor_id, number_returned, documents):
     self.flags = flags
     self.cursor_id = Int64(cursor_id)
     self.number_returned = number_returned
     self.documents = documents
Example #16
    def check_events(self, test, listener, session_ids):
        res = listener.results
        if not len(test['expectations']):
            return

        # Give a nicer message when there are missing or extra events
        cmds = decode_raw([event.command for event in res['started']])
        self.assertEqual(
            len(res['started']), len(test['expectations']), cmds)
        for i, expectation in enumerate(test['expectations']):
            event_type = next(iter(expectation))
            event = res['started'][i]

            # The tests substitute 42 for any number other than 0.
            if (event.command_name == 'getMore'
                    and event.command['getMore']):
                event.command['getMore'] = Int64(42)
            elif event.command_name == 'killCursors':
                event.command['cursors'] = [Int64(42)]
            elif event.command_name == 'update':
                # TODO: remove this once PYTHON-1744 is done.
                # Add upsert and multi fields back into expectations.
                updates = expectation[event_type]['command']['updates']
                for update in updates:
                    update.setdefault('upsert', False)
                    update.setdefault('multi', False)

            # Replace afterClusterTime: 42 with actual afterClusterTime.
            expected_cmd = expectation[event_type]['command']
            expected_read_concern = expected_cmd.get('readConcern')
            if expected_read_concern is not None:
                time = expected_read_concern.get('afterClusterTime')
                if time == 42:
                    actual_time = event.command.get(
                        'readConcern', {}).get('afterClusterTime')
                    if actual_time is not None:
                        expected_read_concern['afterClusterTime'] = actual_time

            recovery_token = expected_cmd.get('recoveryToken')
            if recovery_token == 42:
                expected_cmd['recoveryToken'] = CompareType(dict)

            # Replace lsid with a name like "session0" to match test.
            if 'lsid' in event.command:
                for name, lsid in session_ids.items():
                    if event.command['lsid'] == lsid:
                        event.command['lsid'] = name
                        break

            for attr, expected in expectation[event_type].items():
                actual = getattr(event, attr)
                expected = wrap_types(expected)
                if isinstance(expected, dict):
                    for key, val in expected.items():
                        if val is None:
                            if key in actual:
                                self.fail("Unexpected key [%s] in %r" % (
                                    key, actual))
                        elif key not in actual:
                            self.fail("Expected key [%s] in %r" % (
                                key, actual))
                        else:
                            self.assertEqual(val, decode_raw(actual[key]),
                                             "Key [%s] in %s" % (key, actual))
                else:
                    self.assertEqual(actual, expected)
Example #17
 def to_python(self, value):
     try:
         return Int64(value)
     except ValueError:
         return value
Example #18
def _get_int64(data, view, position, dummy0, dummy1, dummy2):
    """Decode a BSON int64 to bson.int64.Int64."""
    return Int64(_UNPACK_LONG_FROM(data, position)[0]), position + 8
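
In PyMongo, _UNPACK_LONG_FROM is a precompiled struct unpacker for a little-endian signed 64-bit integer (the BSON int64 encoding). The following self-contained sketch (not from the original source; standard library plus bson only) shows the same decoding step:

import struct
from bson.int64 import Int64

_UNPACK_LONG_FROM = struct.Struct("<q").unpack_from  # little-endian signed int64

def get_int64(data, position):
    # Read 8 bytes at `position` and wrap the result in Int64, as the helper above does.
    return Int64(_UNPACK_LONG_FROM(data, position)[0]), position + 8

value, next_pos = get_int64(b"\x2a\x00\x00\x00\x00\x00\x00\x00", 0)
assert value == Int64(42) and next_pos == 8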
Example #19
    }
}, {
    u"$lookup": {
        u"localField": u"characters_stats.non_existing_field",
        u"from": u"characters_info",
        u"foreignField": u"non_existing_field",
        u"as": u"characters_info"
    }
}, {
    u"$unwind": {
        u"path": u"$characters_info"
    }
}, {
    u"$match": {
        u"characters_stats.Speed": {
            u"$gt": Int64(40)
        },
        u"characters_info.HairColor": {
            u"$ne": u""
        }
    }
}, {
    u"$group": {
        u"_id": {},
        u"COUNT(*)": {
            u"$sum": 1
        }
    }
}, {
    u"$project": {
        u"mayor_cuarenta": u"$COUNT(*)",
Example #20
 def transaction_id(self):
     """Monotonically increasing positive 64-bit integer."""
     self._transaction_id += 1
     return Int64(self._transaction_id)
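
A self-contained sketch of the same pattern (a hypothetical holder class, not the original one; in a real driver the counter usually lives on a session object):

from bson.int64 import Int64

class TxnCounter:
    def __init__(self):
        self._transaction_id = 0

    def transaction_id(self):
        """Monotonically increasing positive 64-bit integer."""
        self._transaction_id += 1
        return Int64(self._transaction_id)

counter = TxnCounter()
assert counter.transaction_id() == Int64(1)
assert counter.transaction_id() == Int64(2)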
Example #21
 def to_foreign(self, obj, name, value):  # pylint:disable=unused-argument
     return Int64(int(value))
Example #22
 def transaction_id(self):
     """Positive 64-bit integer."""
     return Int64(self._transaction_id)
Example #23
 def migrate(self):
     bulk = [
         UpdateOne(
             {"_id": bson.ObjectId("5b6d6819d706360001a0b716")},
             {
                 "$set": {
                     "name": "Group",
                     "uuid": UUID("8874518c-effd-41fe-81bf-d67f1519ccf2"),
                     "description": "Grouping element",
                     "single_service": False,
                     "single_client": False,
                     "allow_children": True,
                     "bi_id": Int64(3859384814270643576),
                 }
             },
             upsert=True,
         ),
         UpdateOne(
             {"_id": bson.ObjectId("5b6d6c9fd706360001f5c053")},
             {
                 "$set": {
                     "name": "Network | Controller",
                     "uuid": UUID("bcf7ad57-81a4-4da0-8e6d-e429c9e21532"),
                     "description": "Controller - CPE relation",
                     "service_model": "sa.ManagedObject",
                     "client_model": "sa.ManagedObject",
                     "single_service": False,
                     "single_client": True,
                     "allow_children": False,
                     "bi_id": Int64(2204453448135692504),
                 }
             },
             upsert=True,
         ),
         UpdateOne(
             {"_id": bson.ObjectId("5b6dbbefd70636000170b980")},
             {
                 "$set": {
                     "name": "Object Group",
                     "uuid": UUID("f4c6d51d-d597-4183-918e-23efd748fd12"),
                     "description": "Arbitrary group of Managed Objects",
                     "service_model": "sa.ManagedObject",
                     "single_service": False,
                     "single_client": False,
                     "allow_children": False,
                     "bi_id": Int64(4062440225872880146),
                 }
             },
             upsert=True,
         ),
         UpdateOne(
             {"_id": bson.ObjectId("5b6d6be1d706360001f5c04e")},
             {
                 "$set": {
                     "name": "Network | IPoE Termination",
                     "uuid": UUID("ef42d9fe-d217-4754-b628-a1f71f6159da"),
                     "description":
                     "IPoE Temination (access equipment -> BRAS)",
                     "service_model": "sa.ManagedObject",
                     "client_model": "sa.ManagedObject",
                     "single_service": False,
                     "single_client": False,
                     "allow_children": False,
                     "bi_id": Int64(4546441601898809637),
                 }
             },
             upsert=True,
         ),
         UpdateOne(
             {"_id": bson.ObjectId("5b6d6beed706360001f5c04f")},
             {
                 "$set": {
                     "name": "Network | PPPoE Termination",
                     "uuid": UUID("a8ddcd67-d8c4-471d-9a9b-9f4749e09011"),
                     "description":
                     "PPPoE Temination (access equipment -> BRAS)",
                     "service_model": "sa.ManagedObject",
                     "client_model": "sa.ManagedObject",
                     "single_service": False,
                     "single_client": False,
                     "allow_children": False,
                     "bi_id": Int64(3384545658468911814),
                 }
             },
             upsert=True,
         ),
         UpdateOne(
             {"_id": bson.ObjectId("5b6d6c56d706360001f5c052")},
             {
                 "$set": {
                     "name": "Network | PPTP Termination",
                     "uuid": UUID("8ce08fc8-a5b1-448d-9c2c-ac1419ad9816"),
                     "description":
                     "PPTP Temination (access equipment -> BRAS)",
                     "service_model": "sa.ManagedObject",
                     "client_model": "sa.ManagedObject",
                     "single_service": False,
                     "single_client": False,
                     "allow_children": False,
                     "bi_id": Int64(2085768785416150430),
                 }
             },
             upsert=True,
         ),
         UpdateOne(
             {"_id": bson.ObjectId("5b6e785ed70636000170b9a6")},
             {
                 "$set": {
                     "name": "Voice | SIP Termination",
                     "uuid": UUID("3e15a3ea-f4c1-49a1-a183-d61dd79531c2"),
                     "description":
                     "SIP Temination (media gateway -> softswitch)",
                     "service_model": "sa.ManagedObject",
                     "client_model": "sa.ManagedObject",
                     "single_service": False,
                     "single_client": False,
                     "allow_children": False,
                     "bi_id": Int64(4632306658633376591),
                 }
             },
             upsert=True,
         ),
     ]
     self.mongo_db.technologies.bulk_write(bulk)
Example #24
def _get_int64(data, position, dummy0, dummy1, dummy2):
    """Decode a BSON int64 to bson.int64.Int64."""
    end = position + 8
    return Int64(_UNPACK_LONG(data[position:end])[0]), end
Example #25
def object_hook(dct, json_options=DEFAULT_JSON_OPTIONS):
    if "$oid" in dct:
        return ObjectId(str(dct["$oid"]))
    if "$ref" in dct:
        return DBRef(dct["$ref"], dct["$id"], dct.get("$db", None))
    if "$date" in dct:
        dtm = dct["$date"]
        # mongoexport 2.6 and newer
        if isinstance(dtm, string_type):
            # Parse offset
            if dtm[-1] == 'Z':
                dt = dtm[:-1]
                offset = 'Z'
            elif dtm[-3] == ':':
                # (+|-)HH:MM
                dt = dtm[:-6]
                offset = dtm[-6:]
            elif dtm[-5] in ('+', '-'):
                # (+|-)HHMM
                dt = dtm[:-5]
                offset = dtm[-5:]
            elif dtm[-3] in ('+', '-'):
                # (+|-)HH
                dt = dtm[:-3]
                offset = dtm[-3:]
            else:
                dt = dtm
                offset = ''

            aware = datetime.datetime.strptime(
                dt, "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=utc)

            if offset and offset != 'Z':
                if len(offset) == 6:
                    hours, minutes = offset[1:].split(':')
                    secs = (int(hours) * 3600 + int(minutes) * 60)
                elif len(offset) == 5:
                    secs = (int(offset[1:3]) * 3600 + int(offset[3:]) * 60)
                elif len(offset) == 3:
                    secs = int(offset[1:3]) * 3600
                if offset[0] == "-":
                    secs *= -1
                aware = aware - datetime.timedelta(seconds=secs)

            if json_options.tz_aware:
                if json_options.tzinfo:
                    aware = aware.astimezone(json_options.tzinfo)
                return aware
            else:
                return aware.replace(tzinfo=None)
        # mongoexport 2.6 and newer, time before the epoch (SERVER-15275)
        elif isinstance(dtm, collections.Mapping):
            millis = int(dtm["$numberLong"])
        # mongoexport before 2.6
        else:
            millis = int(dtm)
        return bson._millis_to_datetime(millis, json_options)
    if "$regex" in dct:
        flags = 0
        # PyMongo always adds $options but some other tools may not.
        for opt in dct.get("$options", ""):
            flags |= _RE_OPT_TABLE.get(opt, 0)
        return Regex(dct["$regex"], flags)
    if "$minKey" in dct:
        return MinKey()
    if "$maxKey" in dct:
        return MaxKey()
    if "$binary" in dct:
        if isinstance(dct["$type"], int):
            dct["$type"] = "%02x" % dct["$type"]
        subtype = int(dct["$type"], 16)
        if subtype >= 0xffffff80:  # Handle mongoexport values
            subtype = int(dct["$type"][6:], 16)
        data = base64.b64decode(dct["$binary"].encode())
        # special handling for UUID
        if subtype == OLD_UUID_SUBTYPE:
            if json_options.uuid_representation == CSHARP_LEGACY:
                return uuid.UUID(bytes_le=data)
            if json_options.uuid_representation == JAVA_LEGACY:
                data = data[7::-1] + data[:7:-1]
            return uuid.UUID(bytes=data)
        if subtype == UUID_SUBTYPE:
            return uuid.UUID(bytes=data)
        return Binary(data, subtype)
    if "$code" in dct:
        return Code(dct["$code"], dct.get("$scope"))
    if "$uuid" in dct:
        return uuid.UUID(dct["$uuid"])
    if "$undefined" in dct:
        return None
    if "$numberLong" in dct:
        return Int64(dct["$numberLong"])
    if "$timestamp" in dct:
        tsp = dct["$timestamp"]
        return Timestamp(tsp["t"], tsp["i"])
    if "$numberDecimal" in dct:
        return Decimal128(dct["$numberDecimal"])
    return dct
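
In practice a hook like this is usually reached through bson.json_util.loads, which wires it into the JSON parser together with a JSONOptions instance. A short sketch of that public API (an illustration, not part of the original snippet):

from bson.int64 import Int64
from bson.json_util import loads, JSONOptions

opts = JSONOptions(tz_aware=True)
doc = loads('{"n": {"$numberLong": "42"}, "when": {"$date": "2020-01-02T03:04:05.000Z"}}',
            json_options=opts)
assert doc["n"] == Int64(42)
# doc["when"] is a timezone-aware datetime in UTC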