Example #1
 def test_clone(self):
     saved_locale = LocalizedTextFactory()
     cloned_locale = saved_locale.clone()
     self.assertEqual(
         omit(saved_locale.__dict__, ['_state', 'id', 'created_at', 'internal_reference_id']),
         omit(cloned_locale.__dict__, ['_state', 'id', 'created_at', 'internal_reference_id'])
     )
     self.assertNotEqual(saved_locale.id, cloned_locale.id)
     self.assertIsNone(cloned_locale.internal_reference_id)
Example #2
async def handle_request(request: web.Request, service: object, endpoint_cacher: object):
    req_ctx = {
        'method': request.method,
        'url': service['targets'][service['cur_target_index']],
        'params': dict(request.rel_url.query),
        'data': await request.text(),
        'cookies': dict(request.cookies),
        'headers': pydash.omit(dict(request.headers), 'Host'),
    }
    req = None
    req_cache = None
    req_ctx_hash = None

    if not pydash.is_empty(endpoint_cacher):
        req_ctx_hash = Hasher.hash_sha_256(json.dumps(req_ctx))
        req_cache = await EndpointCacher.get_cache(req_ctx_hash, DB.get_redis(request))

    if pydash.is_empty(req_cache):
        req = await Api.call(**req_ctx)
        if pydash.is_empty(req_ctx_hash):
            req_ctx_hash = Hasher.hash_sha_256(json.dumps(req_ctx))
        if not pydash.is_empty(endpoint_cacher):
            queue_async_func.s({
                'func': 'EndpointCacher.set_cache',
                'args': [req_ctx_hash, req, int(endpoint_cacher['timeout']), 'redis'],
                'kwargs': {}
            }).apply_async()
    else:
        req = json.loads(req_cache)

    cache_hit = not pydash.is_empty(req_cache)
    return req, cache_hit
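The cache key above is just the SHA-256 digest of the JSON-serialized request context. A minimal standalone sketch of that idea (sort_keys is added here for a deterministic digest; whether Hasher.hash_sha_256 does the same is an assumption):

import hashlib
import json

def request_cache_key(req_ctx: dict) -> str:
    # Identical request contexts must serialize identically to produce a cache hit
    serialized = json.dumps(req_ctx, sort_keys=True)
    return hashlib.sha256(serialized.encode('utf-8')).hexdigest()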
Example #3
def main():
    p = get_cli_args(args)
    x_train, y_train, qid_train = load_svmlight_file(
        p.train.xgboost_train_path, query_id=True)  # pylint: disable=unbalanced-tuple-unpacking
    x_test, y_test, qid_test = load_svmlight_file(p.train.xgboost_test_path,
                                                  query_id=True)  # pylint: disable=unbalanced-tuple-unpacking
    x_train = x_train.todense()
    x_train = np.concatenate([
        x_train, x_train[:, -2] / x_train[:, 2], x_train[:, -1] / x_train[:, 4]
    ], 1)
    x_test = x_test.todense()
    x_test = np.concatenate(
        [x_test, x_test[:, -2] / x_test[:, 2], x_test[:, -1] / x_test[:, 4]],
        1)
    train_dmatrix = DMatrix(x_train, y_train)
    test_dmatrix = DMatrix(x_test, y_test)
    train_dmatrix.set_group([len(list(g)) for __, g in groupby(qid_train)])
    test_dmatrix.set_group([len(list(g)) for __, g in groupby(qid_test)])
    params = {
        'objective': 'rank:pairwise',
        'eval_metric': ['error', 'map@1'],
        'tree_method': 'exact',
        'eta': 0.1,
        'gamma': 1.0,
        'min_child_weight': 0.1,
        'max_depth': 6
    }
    xgb_model = xgb.train(params,
                          train_dmatrix,
                          num_boost_round=100,
                          evals=[(test_dmatrix, 'validation')])
    xgb_train_str = items_to_str(_.omit(params, 'objective',
                                        'eval_metric').items(),
                                 sort_by=itemgetter(0))
    xgb_model.save_model(xgb_train_str + '_model.xgb')
Example #4
 def populate(self, **kwargs):
     """Extended ndb.Model populate method, so it can ignore properties, which are not
     defined in model class without throwing error
     """
     kwargs = _.omit(kwargs, Base.PUBLIC_PROPERTIES + ['key', 'id'])  # We don't want to populate those properties
     kwargs = _.pick(kwargs, _.keys(self._properties))  # We want to populate only real model properties
     super(Base, self).populate(**kwargs)
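A self-contained sketch of the same two-step sanitization on plain dicts (the property map and PUBLIC_PROPERTIES values below are made up for illustration):

import pydash as _

PUBLIC_PROPERTIES = ['created', 'modified']        # hypothetical
model_properties = {'name': None, 'email': None}   # hypothetical stand-in for self._properties

kwargs = {'name': 'Ada', 'email': 'a@example.com', 'id': 7, 'created': 'now', 'extra': 1}
kwargs = _.omit(kwargs, PUBLIC_PROPERTIES + ['key', 'id'])  # drop protected keys
kwargs = _.pick(kwargs, _.keys(model_properties))           # keep only real model properties
assert kwargs == {'name': 'Ada', 'email': 'a@example.com'}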
Example #5
    def test_project_update(self):
        # create instance
        new_project = ProjectFactory.create(
            visibility=VisibilityCharChoices.PUBLIC)

        # authenticate
        self.authenticate()

        new_project = pydash.omit(
            new_project,
            [
                "_state", "modified_at", "user", "event", "dtype",
                "regional_project"
            ],
        )
        # update project name
        new_project_name = "Mock Project for Update API Test"
        new_country = country.CountryFactory()
        new_district = district.DistrictFactory(country=new_country)
        new_project["name"] = new_project_name
        new_project["reporting_ns"] = new_country.id
        new_project["project_country"] = new_country.id
        new_project["event"] = new_project["event_id"]
        new_project["project_districts"] = [new_district.id]

        # submit update request
        response = self.client.put(f"/api/v2/project/{new_project['id']}/",
                                   new_project,
                                   format='json')

        # check response
        self.assert_200(response)
        self.assertMatchSnapshot(json.loads(response.content))
        self.assertTrue(Project.objects.get(name=new_project_name))
Example #6
def get_optim(cls, optim_param):
    '''Helper to parse optim param and construct optim for net'''
    optim_param = optim_param or {}
    OptimClass = getattr(torch.optim, _.get(optim_param, 'name', 'Adam'))
    optim_param = _.omit(optim_param, 'name')
    optim = OptimClass(cls.parameters(), **optim_param)
    return optim
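Usage is then a single spec dict: 'name' selects the optimizer class from torch.optim (falling back to Adam), and the remaining keys become constructor kwargs. A sketch, assuming the helper is called with a module whose parameters it should optimize:

import torch
import torch.nn as nn

net = nn.Linear(4, 2)
optim = get_optim(net, {'name': 'Adam', 'lr': 1e-3})
# equivalent to torch.optim.Adam(net.parameters(), lr=1e-3)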
Example #7
def cli(ctx, config_path, broadcast: bool):
    config = parse_config(ctx, config_path, Config)
    config.contract_bin = StrictStr(
        file_validator(config.contract_bin, config_path))
    constructor_data = ""
    if config.constructor_types and config.constructor_values:
        constructor_types = yaml.full_load(config.constructor_types)
        constructor_values = yaml.full_load(config.constructor_values)
        constructor_data = eth_abi.encode_data(constructor_types,
                                               constructor_values)[2:]
    if ctx.obj.get("config"):
        # noinspection PyUnresolvedReferences
        print_json(config.dict() | {"constructor_data": constructor_data})
        exit(0)

    tx_params: Any = pydash.omit(config.dict(), "node", "contract_bin",
                                 "constructor_types", "constructor_values")

    tx_params["data"] = config.contract_bin + constructor_data

    if tx_params["nonce"] is None:
        tx_params["nonce"] = get_nonce_or_exit(
            config.node, eth_account.private_to_address(config.private_key))

    signed_tx = eth_tx.sign_tx(**tx_params)

    if broadcast:
        res = eth_rpc.eth_send_raw_transaction(config.node, signed_tx.raw_tx)
        print_json({"result": res.ok_or_error})
    else:
        decoded = eth_utils.to_human_readable_tx(
            eth_tx.decode_raw_tx(signed_tx.raw_tx))
        print_json(signed_tx.dict() | {"decoded": decoded})
Example #8
async def proxy(request: web.Request, handler: web.RequestHandler):
    try:
        req_start_time = time()
        if pydash.starts_with(request.path_qs, '/raven'):
            return await handler(request)

        service = Regex.best_match(await Regex.get_matched_paths(request.path, DB.get(request, service_controller.table)))
        await handle_service(service, request.remote)

        rate_limiter_rules = await RateLimiter.get_rule_by_service_id(str(service['_id']), DB.get_redis(request))
        rate_limiter_rule = rate_limiter_rules[0] if rate_limiter_rules else None
        await handle_rate_limiter(request, str(service['_id']), rate_limiter_rule)

        breakers = await CircuitBreaker.get_by_service_id(str(service['_id']), DB.get(request, circuit_breaker_controller.table))
        breaker = breakers[0] if breakers else None

        request_validators = await RequestValidator.get_by_service_id(str(service['_id']), DB.get(request, request_validator_controller.table))
        request_validator = request_validators[0] if request_validators else None

        endpoint_cachers = None
        if not pydash.is_empty(service):
            endpoint_cachers = await EndpointCacher.get_by_service_id(str(service['_id']), DB.get_redis(request))
        endpoint_cacher = endpoint_cachers[0] if endpoint_cachers else None

        await handle_request_validator(request_validator, json.loads(await request.text()), request.method)
        req, req_cache_hit = await handle_request(request, service, endpoint_cacher)

        checks = []

        if not pydash.is_empty(breaker) and breaker['status'] == CircuitBreakerStatus.ON.name:
            if req['status'] in breaker['status_codes']:
                checks.append(handle_circuit_breaker(
                    breaker, service, request, req))
            else:
                await CircuitBreaker.incr_count(str(breaker['_id']), DB.get_redis(request))

        queue_async_func.s({
            'func': 'Service.advance_target',
            'args': [str(service['_id']), f'mongo:{service_controller.table}'],
            'kwargs': {}
        }).apply_async()
        req_finish_time = time()
        req_elapsed_time = req_finish_time - req_start_time
        checks.append(handle_insights(request, req, str(
            service['_id']), req_elapsed_time, req_cache_hit))
        await Async.all(checks)

        return web.Response(
            body=Bytes.decode_bytes(
                req['body_bytes']),
            status=req['status'],
            content_type=req['content_type'],
            headers=CIMultiDict(
                pydash.omit(
                    req['headers'],
                    'Content-Type',
                    'Transfer-Encoding',
                    'Content-Encoding')))
    except Exception as err:
        return Error.handle(err)
Example #9
def get_loss_fn(cls, loss_param):
    '''Helper to parse loss param and construct loss_fn for net'''
    loss_param = loss_param or {}
    loss_fn = getattr(F, _.get(loss_param, 'name', 'mse_loss'))
    loss_param = _.omit(loss_param, 'name')
    if not _.is_empty(loss_param):
        loss_fn = partial(loss_fn, **loss_param)
    return loss_fn
Example #10
def get_object_from_id(oid, pid):
  hit_oid = py_.find(objects['thing-descriptions'], {'oid': oid})
  if hit_oid:
    hit_values = py_.find(hit_oid['values'], {'pid': pid})
    return jsonify(py_.omit(hit_values,'pid'))
  else:
    abort(404, description='Device or property not found')
Example #11
def get_lr_scheduler(cls, lr_scheduler_spec):
    '''Helper to parse lr_scheduler param and construct Pytorch optim.lr_scheduler'''
    if ps.is_empty(lr_scheduler_spec):
        lr_scheduler = NoOpLRScheduler()
    else:
        LRSchedulerClass = getattr(torch.optim.lr_scheduler, lr_scheduler_spec['name'])
        lr_scheduler_spec = ps.omit(lr_scheduler_spec, 'name')
        lr_scheduler = LRSchedulerClass(cls.optim, **lr_scheduler_spec)
    return lr_scheduler
Example #12
def test_update(bibliography_repository):
    bibliography_entry = BibliographyEntryFactory.build()
    updated_entry = pydash.omit({**bibliography_entry, "title": "New Title"}, "PMID")
    bibliography_repository.create(bibliography_entry)
    bibliography_repository.update(updated_entry)

    assert (
        bibliography_repository.query_by_id(bibliography_entry["id"]) == updated_entry
    )
Example #13
    def format_document(document: object) -> object:
        """
        formats mongo document

        @param document: (object) document to format
        """
        formatted = pydash.omit(document, '_id')
        formatted['_id'] = document['_id']['$oid']
        return formatted
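For example (a sketch with a made-up ObjectId), a Mongo extended-JSON document is flattened so '_id' becomes a plain string:

doc = {'_id': {'$oid': '5f1b2c3d4e5f6a7b8c9d0e1f'}, 'name': 'my-service'}
assert format_document(doc) == {'name': 'my-service', '_id': '5f1b2c3d4e5f6a7b8c9d0e1f'}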
Example #14
def get_optim(net, optim_spec):
    '''Helper to parse optim param and construct optim for net'''
    OptimClass = getattr(torch.optim, optim_spec['name'])
    optim_spec = ps.omit(optim_spec, 'name')
    if torch.is_tensor(net):  # for non-net tensor variable
        optim = OptimClass([net], **optim_spec)
    else:
        optim = OptimClass(net.parameters(), **optim_spec)
    return optim
Example #15
 def __init__(self, n_estimators=24, max_depth=10, **kwargs):
     super().__init__()
     self.kwargs = {
         **self.xgb_defaults,
         'n_estimators': n_estimators,
         'max_depth':    max_depth,
         **kwargs,
     }
     if self.kwargs.get('booster') == 'gblinear':
         self.kwargs = pydash.omit(self.kwargs, *['max_depth'])
Example #16
  def ActiveDiscovery(self):
    self._logger.info("Active discovery - (" + str(len(objects['thing-descriptions'])) + ' nodes)')
         
    # Send request to agent
    r = requests.post(globals.config['agent_endpoint'] + '/agent/objects', \
      data=json.dumps(py_.omit(objects, 'values')), \
      headers={"Content-Type": "application/json"})     

    # ToDo - Handle response (for potential errors)
    pass
Example #17
 async def generate_token(payload: dict, db):
     sanitized_payload = pydash.merge(
         pydash.omit(payload, 'password', 'token'),
         {'timestamp': repr(time.time())}
     )
     token = Token.generate(sanitized_payload)
     update_ctx = {
         'token': token
     }
     await Admin.update(payload['_id'], update_ctx, db)
Example #18
    async def set_cache(_hash: str, ctx: object, timeout: int, db: AioRedis):
        """
        sets cache

        @param _hash: (str) hash of request
        @param ctx: (object) body of response
        @param timeout: (int) cache expiry in seconds
        @param db: redis instance
        """
        omit_keys = list(filter(lambda key: ctx[key] is None, ctx.keys()))
        await db.set(_hash, json.dumps(pydash.omit(ctx, *omit_keys)))
        await db.expire(_hash, timeout)
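The omit_keys filter strips None-valued fields before serializing, so the cached JSON stays compact. The same idea on a plain dict:

import pydash

ctx = {'status': 200, 'body': 'ok', 'error': None}
omit_keys = [key for key in ctx if ctx[key] is None]
assert pydash.omit(ctx, *omit_keys) == {'status': 200, 'body': 'ok'}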
Example #19
def update_object_property(oid, pid):
  body = request.json
  hit_oid = py_.find(objects['thing-descriptions'], {'oid': oid})
  if hit_oid:
    hit_values = py_.find(hit_oid['values'], {'pid': pid})
    hit_values['value'] = body['value']
    hit_values['timestamp'] = datetime.datetime.now().isoformat()     
    return jsonify(py_.omit(hit_values,'pid'))
  else:
    abort(404, description='please move along: nothing to see here')
Example #20
 def __init__(self, *args, **kwargs):
     """
     this class will be used to pass allong values to the argparse add_argument function
     """
     #group is seperated out because it is not an original argument to the argparse add_argument function call
     #seperating it out allows us to select a group for our argument to be placed in if it is indicated
     group = get(kwargs, 'group')
     if group:
         self.group = group
     self.args = args
     self.kwargs = omit(kwargs, 'group')
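A hypothetical usage sketch (the class name Argument is assumed; only 'group' is held back from argparse):

arg = Argument('--verbose', action='store_true', group='logging')
# arg.group  == 'logging'
# arg.args   == ('--verbose',)
# arg.kwargs == {'action': 'store_true'}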
Example #21
def response_for_exception(request, exception):
    response = base_response_for_exception(request, exception)

    if isinstance(exception, ValidationError):
        dont_flash = [
            'password',
            'password_confirmation',
        ]

        old_input = pydash.merge(parser.parse(request.GET.urlencode()),
                                 parser.parse(request.POST.urlencode()))
        errors = exception.message_dict if hasattr(
            exception, 'error_dict') else {
                '__all__': exception.messages
            }

        response = HttpRedirector(request) \
            .back() \
            .with_input(pydash.omit(old_input, dont_flash)) \
            .with_errors(errors)

    if request.match('api/*') and not isinstance(response, JsonResponse):
        message = str(exception)
        data = {}

        if isinstance(exception, ValidationError):
            status_code = 422
            message = _('Invalid data!')
            data['errors'] = exception.message_dict if hasattr(
                exception, 'error_dict') else {
                    '__all__': exception.messages
                }
        elif isinstance(exception, Http404):
            status_code = 404
        elif isinstance(exception, BadRequest):
            status_code = 400
        elif isinstance(exception, AuthenticationException):
            status_code = 401
        elif isinstance(exception, PermissionDenied):
            status_code = 403
        else:
            status_code = 500

            if not settings.DEBUG:
                message = _('Something went wrong')

        if len(message.strip()) == 0:
            message = _(status_codes._codes[status_code][0])

        return JsonResponse(pydash.merge({'message': message}, data),
                            status=status_code)

    return response
Example #22
async def patch_handler(request: web.Request):
    try:
        ctx = json.loads(await request.text())
        circuit_breaker_id = request.rel_url.query['id']
        Validate.validate_schema(ctx, circuit_breaker_validator)
        Validate.validate_object_id(circuit_breaker_id)
        await CircuitBreaker.update(circuit_breaker_id, pydash.omit(ctx, 'id'), DB.get(request, table))
        return web.json_response({
            'message': 'Circuit breaker updated',
        })
    except Exception as err:
        return Error.handle(err)
Example #23
def main():
    p = get_cli_args(args)
    try:
        open('train.bin').close()
        open('eval.bin').close()
        lgb_train = lgb.Dataset('train.bin')
        lgb_eval = lgb.Dataset('eval.bin', reference=lgb_train)
    except Exception:  # cached binaries missing or unreadable; rebuild them
        x_train, y_train, qid_train = load_svmlight_file(
            p.train.xgboost_train_path, query_id=True)  # pylint: disable=unbalanced-tuple-unpacking
        x_test, y_test, qid_test = load_svmlight_file(
            p.train.xgboost_test_path, query_id=True)  # pylint: disable=unbalanced-tuple-unpacking
        x_train = x_train.todense()
        x_test = x_test.todense()
        lgb_train = lgb.Dataset(
            np.array(x_train),
            np.array(y_train.squeeze()),
            group=[len(list(g)) for __, g in groupby(qid_train)])
        lgb_eval = lgb.Dataset(
            np.array(x_test),
            np.array(y_test.squeeze()),
            reference=lgb_train,
            group=[len(list(g)) for __, g in groupby(qid_test)])
        lgb_train.save_binary("train.bin")
        lgb_eval.save_binary("eval.bin")

    params = {
        'boosting_type': 'gbdt',
        'objective': 'lambdarank',
        'metric': {'ndcg'},
        'ndcg_eval_at': [1],
        'metric_freq': 1,
        'max_bin': 255,
        'num_trees': 100,
        'num_leaves': 100,
        'learning_rate': 0.1,
        'num_iterations': 100,
        'num_threads': 8,
        'feature_fraction': 1.0,
        'bagging_fraction': 0.9,
        'bagging_freq': 1,
        'verbose': 0,
    }
    gbm = lgb.train(params,
                    lgb_train,
                    num_boost_round=100,
                    valid_sets=lgb_eval)
    xgb_train_str = items_to_str(_.omit(params, 'objective',
                                        'eval_metric').items(),
                                 sort_by=itemgetter(0))
    preds = gbm.predict(np.array(x_test))  # Booster.predict expects raw features, not a Dataset
    print((y_test != preds).nonzero())
    gbm.save_model('model' + xgb_train_str + '.light')
Example #24
async def patch_handler(request: web.Request):
    try:
        ctx = json.loads(await request.text())
        _id = request.rel_url.query.get('id')
        Validate.validate_schema(ctx, endpoint_cache_validator)
        Validate.validate_object_id(_id)
        await EndpointCacher.update(_id, pydash.omit(ctx, 'service_id', 'response_codes'), DB.get_redis(request))
        return web.json_response({
            'message': 'Endpoint cache updated',
            'status_code': 200
        })
    except Exception as err:
        return Error.handle(err)
Example #25
def get_lr_scheduler(optim, lr_scheduler_spec):
    '''Helper to parse lr_scheduler param and construct Pytorch optim.lr_scheduler'''
    if ps.is_empty(lr_scheduler_spec):
        lr_scheduler = NoOpLRScheduler(optim)
    elif lr_scheduler_spec['name'] == 'LinearToZero':
        LRSchedulerClass = getattr(torch.optim.lr_scheduler, 'LambdaLR')
        frame = float(lr_scheduler_spec['frame'])
        lr_scheduler = LRSchedulerClass(optim, lr_lambda=lambda x: 1 - x / frame)
    else:
        LRSchedulerClass = getattr(torch.optim.lr_scheduler, lr_scheduler_spec['name'])
        lr_scheduler_spec = ps.omit(lr_scheduler_spec, 'name')
        lr_scheduler = LRSchedulerClass(optim, **lr_scheduler_spec)
    return lr_scheduler
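A sketch of the custom 'LinearToZero' branch (net and optim are stand-ins): since LambdaLR multiplies the base lr by the lambda's value, the rate decays linearly and reaches zero after frame steps:

import torch
import torch.nn as nn

net = nn.Linear(4, 2)
optim = torch.optim.Adam(net.parameters(), lr=1e-3)
scheduler = get_lr_scheduler(optim, {'name': 'LinearToZero', 'frame': 1e6})
# after k calls to scheduler.step(): lr == 1e-3 * (1 - k / 1e6)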
Example #26
async def patch_handler(request: web.Request):
    try:
        ctx = json.loads(await request.text())
        service_id = request.rel_url.query['id']
        Validate.validate_object_id(service_id)
        Validate.validate_schema(ctx, insights_validator)
        await Insights.update(service_id, pydash.omit(ctx, 'id'),
                              DB.get(request, table))
        return web.json_response({
            'message': 'insight updated',
        })
    except Exception as err:
        return Error.handle(err)
Example #27
 def connect(self):
     """Connect to database"""
     if self._conx:
         raise ex.ConnectionError('Database already connected')
     if 'file_options' in self.connect_args:
         file = 'file:{}?{}'.format(self.file,
                                    self.connect_args['file_options'])
     else:
         file = self.file
     # self._conx = sqlite3.connect(file, isolation_level=None, **pydash.omit(kwargs, 'file_options'))
     self._conx = sqlite3.connect(
         file, **pydash.omit(self.connect_args, 'file_options'))
     self._c = self._conx.cursor()
     return self
Example #28
    def put(self):
        data = request.get_json(force=True)

        objToPut = []

        for item in data['body']:
            filters = self.makeFilter(item)

            item = omit(item, ['_id', 'updated_at'])
            item = map_values_deep(item, updaterIds)

            objToPut.append({'filter': filters, 'data': item})

        return self.entity().batch_process(objToPut)
Example #29
async def login_handler(request: web.Request):
    try:
        ctx = json.loads(await request.text())
        verified = await Admin.verify_password(ctx['username'],
                                               ctx['password'],
                                               DB.get(request, table))
        if not verified:
            raise Exception({'message': 'Unauthorized', 'status_code': 401})
        admin = await Admin.get_by_username(ctx['username'],
                                            DB.get(request, table))
        sanitized_admin = pydash.omit(admin, 'password')
        return web.json_response(
            {'data': DB.format_document(Bson.to_json(sanitized_admin))})
    except Exception as err:
        return Error.handle(err)
Example #30
def get_data(col_name, output='df', **kwargs):
    """
    从 mongodb 下载 股票 日数据

    mongolib需要知道column

    :param col_name:
    :return:
            list: OCLH
    """
    query = {}
    date_query = {}
    if 'fromdate' in kwargs.keys() and kwargs['fromdate']:
        date_query['$gte'] = kwargs['fromdate']

    if 'todate' in kwargs.keys() and kwargs['todate']:
        date_query['$lt'] = kwargs['todate']

    if date_query:
        query['date'] = date_query

    # print('query', query)

    col = db[col_name]

    cursor = col.find(query)

    if output == 'df':
        df = pd.DataFrame(list(cursor))
        del df['_id']
        del df['code']
        df['date'] = pd.to_datetime(df['date'])
        df = df.set_index('date')
        cols = ['open', 'high', 'close', 'low', 'volume']
        df = df.loc[:, cols]  # .ix was removed from modern pandas
        return df
    elif output == 'obj':
        obj_list = list(cursor)

        return [pydash.omit(item, '_id', 'code') for item in obj_list]

    elif output == 'list':
        obj_list = list(cursor)
        return [[
            item['date'], item['open'], item['close'], item['low'],
            item['high'], item['volume']
        ] for item in obj_list]
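A usage sketch (the collection name and date formats are made up; they depend on how the database was populated):

df = get_data('sz000001', output='df', fromdate='2019-01-01', todate='2020-01-01')
rows = get_data('sz000001', output='list')  # [[date, open, close, low, high, volume], ...]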
Example #31
    def test_project_create(self):
        # authenticate
        new_user = UserFactory.create()
        self.authenticate(new_user)

        # create project
        new_project_name = "Mock Project for Create API Test"
        new_project = ProjectFactory.stub(
            name=new_project_name,
            visibility=VisibilityCharChoices.PUBLIC,
            user=new_user,
        )
        new_country = country.CountryFactory()
        new_district = district.DistrictFactory(country=new_country)
        new_project = pydash.omit(
            new_project,
            [
                "user",
                "reporting_ns",
                "project_country",
                "event",
                "dtype",
                "regional_project",
            ],
        )
        new_project["reporting_ns"] = new_country.id
        new_project["project_country"] = new_country.id
        new_project["project_districts"] = [new_district.id]

        # submit create request
        response = self.client.post("/api/v2/project/",
                                    new_project,
                                    format='json')

        # check response
        self.assert_201(response)
        self.assertMatchSnapshot(json.loads(response.content))
        self.assertTrue(Project.objects.get(name=new_project_name))
Example #32
 def set(self, obj):
     source.set(self.collection, obj['id'], omit(obj, 'id'))
Example #33
def get_loss_fn(cls, loss_spec):
    '''Helper to parse loss param and construct loss_fn for net'''
    LossClass = getattr(nn, loss_spec['name'])
    loss_spec = ps.omit(loss_spec, 'name')
    loss_fn = LossClass(**loss_spec)
    return loss_fn
Example #34
def get_optim(cls, optim_spec):
    '''Helper to parse optim param and construct optim for net'''
    OptimClass = getattr(torch.optim, optim_spec['name'])
    optim_spec = ps.omit(optim_spec, 'name')
    optim = OptimClass(cls.parameters(), **optim_spec)
    return optim
Example #35
def test_omit(case, expected):
    assert _.omit(*case) == expected
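Illustrative (case, expected) pairs such a parametrized test might use; these are sketches, not the project's actual fixtures:

import pytest
import pydash as _

@pytest.mark.parametrize('case,expected', [
    (({'a': 1, 'b': 2}, 'a'), {'b': 2}),
    (({'a': 1, 'b': 2}, 'a', 'b'), {}),
    (({'a': 1, 'b': 2}, 'missing'), {'a': 1, 'b': 2}),
])
def test_omit(case, expected):
    assert _.omit(*case) == expected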