Example No. 1
    def post(self, request):
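        # Assumed context: this is a POST handler on a class-based view; the snippet
        # relies on pandas as pd, sklearn's preprocessing and train_test_split,
        # xgboost's XGBClassifier, os, a JSON module aliased as j, the framework's
        # response helper, and a project-specific modelfit() helper (not shown here).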

        # persist the raw request body to disk so it can be read back as a CSV
        with open('data.csv', 'w') as file:
            file.write(request.body.decode())
        try:
            train = pd.read_csv('data.csv', header=0, encoding='latin1')
            train.drop(['id', 'Unnamed: 0', 'index', 'conv_rate'], axis=1, inplace=True)
            train['bounce_rate'] = train['bounce_rate'].astype(float)


            # min-max scale every column to [0, 1], then split 60/40 into train and test sets
            x = train.values
            x_scaled = preprocessing.MinMaxScaler().fit_transform(x)
            df = pd.DataFrame(x_scaled, columns=train.columns)
            train, test = train_test_split(df, test_size=0.4)

            target = 'with_conversion'
            IDcol = 'id'

            xgb4 = XGBClassifier(
                    learning_rate=0.01,
                    n_estimators=5000,
                    max_depth=2,
                    min_child_weight=1,
                    gamma=0,
                    subsample=0.85,
                    colsample_bytree=0.9,
                    reg_alpha=0.005,
                    objective='binary:logistic',
                    nthread=4,
                    scale_pos_weight=1,
                    seed=27)

            predictors = [x for x in train.columns if x not in [target, IDcol]]

            # fit the model, clean up the temporary CSV, and return the results
            data = modelfit(xgb4, train, test, predictors)
            os.remove('data.csv')
            return response.json(j.dumps(data))

        except Exception:
            return response.json(j.dumps('Something went wrong'), status=500)
Example No. 2
async def create_payloadtype(request, user):
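    # register a new payload type from the submitted JSON, create its directory
    # structure on disk, and store any uploaded payload code files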
    if user['auth'] not in ['access_token', 'apitoken']:
        abort(status_code=403,
              message="Cannot access via Cookies. Use CLI or access via JS in browser")
    # this needs to know the name of the type, everything else is done for you
    if request.form:
        data = js.loads(request.form.get('json'))
    else:
        data = request.json
    try:
        if "ptype" not in data:
            return json({
                'status': 'error',
                'error': '"ptype" is a required field and must be unique'
            })
        if "file_extension" not in data:
            data["file_extension"] = ""
        elif "." not in data['file_extension'] and data['file_extension'] != "":
            data['file_extension'] = "." + data['file_extension']
        if 'wrapper' not in data:
            data['wrapper'] = False
        if "command_template" not in data:
            data['command_template'] = ""
        if 'supported_os' not in data:
            return json({
                'status': 'error',
                'error': 'must specify "supported_os" list'
            })
        if 'execute_help' not in data:
            data['execute_help'] = ""
        if 'external' not in data:
            data['external'] = False
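        # associate the requesting operator with the new payload type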
        query = await db_model.operator_query()
        operator = await db_objects.get(query, username=user['username'])
        if data['wrapper']:
            if "wrapped_payload_type" not in data:
                return json({
                    'status': 'error',
                    'error': '"wrapped_payload_type" is required for a wrapper type payload'
                })
            try:
                query = await db_model.payloadtype_query()
                wrapped_payload_type = await db_objects.get(
                    query, ptype=data['wrapped_payload_type'])
            except Exception as e:
                print(e)
                return json({
                    'status': 'error',
                    'error': "failed to find that wrapped payload type"
                })
            payloadtype = await db_objects.create(
                PayloadType,
                ptype=data['ptype'],
                operator=operator,
                file_extension=data['file_extension'],
                wrapper=data['wrapper'],
                wrapped_payload_type=wrapped_payload_type,
                supported_os=",".join(data['supported_os']),
                execute_help=data['execute_help'],
                external=data['external'])
        else:
            payloadtype = await db_objects.create(
                PayloadType,
                ptype=data['ptype'],
                operator=operator,
                file_extension=data['file_extension'],
                wrapper=data['wrapper'],
                command_template=data['command_template'],
                supported_os=",".join(data['supported_os']),
                execute_help=data['execute_help'],
                external=data['external'])
        os.mkdir("./app/payloads/{}".format(
            payloadtype.ptype))  # make the directory structure
        os.mkdir("./app/payloads/{}/payload".format(
            payloadtype.ptype))  # make the directory structure
        os.mkdir("./app/payloads/{}/commands".format(
            payloadtype.ptype))  # make the directory structure
        if request.files:
            # write the first uploaded code file into the new payload directory
            code = request.files['upload_file'][0].body
            with open("./app/payloads/{}/payload/{}".format(
                    payloadtype.ptype,
                    request.files['upload_file'][0].name), "wb") as code_file:
                code_file.write(code)
            # additional uploads are submitted as upload_file_1, upload_file_2, ...
            for i in range(1, int(request.form.get('file_length'))):
                code = request.files['upload_file_' + str(i)][0].body
                with open("./app/payloads/{}/payload/{}".format(
                        payloadtype.ptype,
                        request.files['upload_file_' + str(i)][0].name), "wb") as code_file:
                    code_file.write(code)
    except Exception as e:
        print(e)
        return json({
            'status': 'error',
            'error': 'failed to create new payload type: ' + str(e)
        })
    status = {'status': 'success'}
    ptype_json = payloadtype.to_json()
    # make sure a file exists in the right location with the right name
    if not os.path.exists("./app/payloads/{}/payload/{}{}".format(
            payloadtype.ptype, payloadtype.ptype, payloadtype.file_extension)):
        with open(
                "./app/payloads/{}/payload/{}{}".format(
                    payloadtype.ptype, payloadtype.ptype,
                    payloadtype.file_extension), 'wb'):
            pass
    return json({**status, **ptype_json})
Example No. 3
async def get_full_timeline_json(request, user):
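    # build a timeline of the current operation keyed by timestamp (callbacks,
    # tasks, and optionally responses, artifacts, and ATT&CK mappings), write it
    # to a JSON file on disk, and register the file as a FileMeta entry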
    if user['auth'] not in ['access_token', 'apitoken']:
        abort(status_code=403, message="Cannot access via Cookies. Use CLI or access via JS in browser")
    try:
        if user['current_operation'] != "":
            data = {}
            data['cmd_output'] = False
            data['strict'] = "task"
            data['artifacts'] = False
            data['attack'] = False
            if request.method == "POST":
                config = request.json
                if 'cmd_output' in config:
                    data['cmd_output'] = config['cmd_output']
                if 'strict' in config:
                    data['strict'] = config['strict']
                if 'artifacts' in config:
                    data['artifacts'] = config['artifacts']
                if 'attack' in config:
                    data['attack'] = config['attack']
            try:
                query = await operation_query()
                operation = await db_objects.get(query, name=user['current_operation'])
                all_data = {}
                query = await callback_query()
                callbacks = await db_objects.execute(query.where(Callback.operation == operation).order_by(Callback.id))
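                # key each callback by its initial check-in time, then key each of its
                # tasks (and optionally their responses) by timestamp so the dumped
                # dictionary reads as a chronological timeline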
                for c in callbacks:
                    c_json = c.to_json()
                    all_data[c_json['init_callback']] = {"callback": c_json}
                    query = await task_query()
                    tasks = await db_objects.prefetch(query.where(Task.callback == c).order_by(Task.id), Command.select())
                    for t in tasks:
                        t_json = t.to_json()
                        if data['attack']:
                            query = await attacktask_query()
                            attacks = await db_objects.execute(query.where(ATTACKTask.task == t))
                            attack_list = []
                            for a in attacks:
                                attack_list.append({"attack": a.attack.t_num, "attack_name": a.attack.name})
                            t_json['attack'] = attack_list
                        if data['artifacts']:
                            query = await taskartifact_query()
                            artifacts = await db_objects.execute(query.where(TaskArtifact.task == t))
                            artifacts_list = []
                            for a in artifacts:
                                artifacts_list.append(a.to_json())
                            t_json['artifacts'] = artifacts_list
                        all_data[t_json['status_timestamp_preprocessing']] = {"task": t_json}
                        if data['cmd_output']:
                            query = await response_query()
                            responses = await db_objects.execute(query.where(Response.task == t))
                            if data['strict'] == "time":
                                # this will get output as it happened, not grouped with the corresponding command
                                for r in responses:
                                    r_json = r.to_json()
                                    all_data[r_json['timestamp']] = {"response": r_json}
                            elif data['strict'] == "task":
                                # this will group output with the corresponding task, like we see it in the operator view
                                response_data = []
                                for r in responses:
                                    r_json = r.to_json()
                                    response_data.append({"response": r_json['response'], "id": r_json['id'], "timestamp": r_json['timestamp']})
                                # now that it's all grouped together into a dictionary, associate it with the task
                                all_data[t_json['status_timestamp_preprocessing']] = {"task": t_json, "responses": response_data}
                save_path = "./app/files/{}/full_timeline_json.json".format(user['current_operation'])
                count = 1
                while os.path.exists(save_path):
                    save_path = "./app/files/{}/full_timeline_json{}.json".format(user['current_operation'], str(count))
                    count += 1
                query = await operator_query()
                operator = await db_objects.get(query, username=user['username'])
                filemeta = await db_objects.create(FileMeta, total_chunks=1, operation=operation, path=save_path,
                                                   operator=operator, complete=True)
                with open(save_path, 'w') as file:
                    file.write(js.dumps(all_data, indent=4, sort_keys=True))
                filemeta.md5 = await hash_MD5(js.dumps(all_data))
                filemeta.sha1 = await hash_SHA1(js.dumps(all_data))
                await db_objects.update(filemeta)
                return json({'status': 'success', **filemeta.to_json()})
            except Exception as e:
                print(str(sys.exc_info()[-1].tb_lineno) + " " + str(e))
                return json({'status': 'error', 'error': str(e)})
        else:
            return json({'status': 'error', 'error': 'Must select an operation as your current operation'})
    except Exception as e:
        print(str(sys.exc_info()[-1].tb_lineno) + " " + str(e))
        return json({'status': 'error', 'error': str(e)})