Example #1
def discover_activities(extension_mod_name: str,
                        activity_type: str) -> DiscoveredActivities:
    """
    Discover exported activities from the given extension module name.
    """
    try:
        mod = importlib.import_module(extension_mod_name)
    except ImportError:
        raise DiscoveryFailed("could not import extension module '{m}'".format(
            m=extension_mod_name))

    activities = []
    try:
        exported = getattr(mod, "__all__")
    except AttributeError:
        logger.warn("'{m}' does not expose the __all__ attribute. "
                    "It is required to determine what functions are actually "
                    "exported as activities.".format(m=extension_mod_name))
        return activities

    funcs = inspect.getmembers(mod, inspect.isfunction)
    for (name, func) in funcs:
        if exported and name not in exported:
            # do not return "private" functions
            continue

        sig = inspect.signature(func)
        activity = {
            "type": activity_type,
            "name": name,
            "mod": mod.__name__,
            "doc": inspect.getdoc(func),
            "arguments": []
        }

        if sig.return_annotation is not inspect.Signature.empty:
            activity["return_type"] = portable_type_name(sig.return_annotation)

        for param in sig.parameters.values():
            if param.kind in (param.KEYWORD_ONLY, param.VAR_KEYWORD):
                continue

            arg = {
                "name": param.name,
            }

            if param.default is not inspect.Parameter.empty:
                arg["default"] = param.default
            if param.annotation is not inspect.Parameter.empty:
                arg["type"] = portable_type_name(param.annotation)
            activity["arguments"].append(arg)

        activities.append(activity)

    return activities
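
A minimal usage sketch (the module name "myext.probes" and the activity type are assumptions, not from the source; DiscoveryFailed is raised if the import fails):

# Hypothetical call: collect every exported function of a module as "probe" activities.
activities = discover_activities("myext.probes", "probe")
for activity in activities:
    print(activity["name"], [arg["name"] for arg in activity["arguments"]])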
Example #2
    def index_remove_coin(self, coin):

        if self.coin_supported_check(coin.upper()):
            if DatabaseManager.delete_index_coin_model(coin.upper()):
                DatabaseManager.delete_realized_gain_model(coin.upper())
                logger.info("Coin " + coin.upper() + " removed from index")
            else:
                # Coin was not in the index, nothing to delete
                logger.warn("Coin not in index")
        else:
            logger.warn("Coin not supported")
Example #3
    def is_cookie_ok(self):
        url = 'http://photo.weibo.com/albums/get_all?uid=6069778559&page=1&count=20'
        print(url)
        rs = self.sess.get(url, headers=self.get_headers)
        if rs.history and rs.history[0].status_code == 302:
            # if not rs.status_code == 200:
            log.warn('session Expired, relogin.')
            self.login()
            os._exit(-1)
        else:
            log.debug('session is ok!!!')
Example #4
    def parse(self, response):

        # Number of main pages scraped
        logger.warn('Parse page ({})')

        # Get the url of each reference on the current page
        links = get_info.get_article_urls(response)

        # Get pagination information
        next_page = get_info.get_next_page_of_articles(response)
        next_page_number = get_info.get_next_page_number(next_page)
Example #5
def run(args):
    cli = Cli("example")
    num1 = int(args["NUM1"])
    num2 = int(args["NUM2"])

    total = num1 + num2
    total_minus_one = total - 1

    logger.info(f"{num1} plus {num2} is {total}")
    logger.info(f"minus 1 that's {total_minus_one}")
    logger.warn("QUICK MATHS")
Example #6
    def is_configured(self, config):
        if not config['func_name']:
            logger.error('No function assigned for %s/parser_config.json' %
                         self.raw_dir)
            return False

        if not config['func_hash']:
            logger.warn('No hash for function %s. Assigning current hash' %
                        config['func_name'])
            return True
        return True
Example #7
    async def get_water_level(self):
        try:
            response = await self._bus.req('tank.water', {'command': 'get'})
            if response['status'] != 'ok':
                logger.warn("Cannot get 'tank.water' status: %s",
                            response['message'])
                return None
            return response['water']
        except futures.TimeoutError:
            logger.warn("Cannot get 'tank.water' status: request timeout")
            return None
Example #8
    def is_cookie_ok(self):
        url = 'http://photo.weibo.com/albums/get_all?uid={}&page=1&count=20'.format(
            self.my_info.get('uid'))
        # log.info('TEST of access to {}'.format(url))

        rs = self.sess.get(url, headers=self.get_headers)
        if rs.history and rs.history[0].status_code == 302:
            log.warn('✘ [session Expired], re-login.')
            base.force_quit()
        else:
            log.info('✔ [Web session] is ok!!!')
Example #9
async def start_kernel(msg, send, context):
    logger.info('start_kernel')
    logger.warn(msg)
    context.realtime_evaluation_mode = msg['realtimeEvaluation']
    set_state(context, RESTARTING)
    await stop_kernel(msg, send, context)
    context.a_queued = context.b_queued = context.realtime_evaluation_mode
    context.kernel_manager.start_kernel()
    context.jupyter_client = context.kernel_manager.client()
    context.iopub_listener = asyncio.create_task(iopub_listener(send, context))
    asyncio.create_task(wait_until_kernel_ready(send, context))
Example #10
    def clip_gradient(self):
        if self.gradient_clip_value is not None:
            max_norm = max(self.gradient_norm_queue)
            total_norm = torch.nn.utils.clip_grad_norm_(
                self.model.parameters(), max_norm * self.gradient_clip_value)
            self.gradient_norm_queue.append(
                min(total_norm, max_norm * 2.0, 1.0))
            if total_norm > max_norm * self.gradient_clip_value:
                logger.warn(
                    F'Clipping gradients with total norm {round(total_norm, 5)} '
                    F'and max norm {round(max_norm, 5)}')
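
A minimal sketch of how the attributes used above might be initialized; the deque length and the default clip factor are assumptions, not the original configuration:

from collections import deque

class TrainerSketch:
    # Hypothetical setup for clip_gradient(): a rolling window of recent gradient
    # norms plus a relative clip factor (None disables clipping, matching the guard above).
    def __init__(self, model, gradient_clip_value=1.0):
        self.model = model
        self.gradient_clip_value = gradient_clip_value
        self.gradient_norm_queue = deque([1.0], maxlen=5)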
Example #11
    def push_last(self, item):
        with self.cond:
            ret = None
            if len(self) + 1 >= self.max_size:
                ret = self.arr.pop()
                logger.warn('current list size out of max size, remove %s',
                            ret)
            self.arr.append(item)
            if len(self) == 1:
                self.cond.notify()
            return ret
Example #12
def load_sources(dataset_name: str) -> Optional[Sources]:
    """Load a file with sources for a standard GEM dataset (attempt download), return None if not present.
    Note that it can be the same files as for references -- the difference is in the source/target fields inside the JSON structure."""
    if dataset_name in _DATASET_SOURCES_URLS:
        try:
            dataset_file = ensure_download('references', dataset_name + '.json', _DATASET_SOURCES_URLS[dataset_name])
            return Sources(dataset_file)
        except Exception as e:
            logger.warn(f'Could not download sources for {dataset_name}: {str(e)}')
            return None
    return None
Example #13
    def __init__(self, directory_path):
        self.path = directory_path
        self.output_file_dict = AnalysisDirectory._validator.collect_output_file_dict(self.path)
        try:
            AnalysisDirectory._validator.complete(self.path, self.output_file_dict)
            self.is_complete = True
        except RuntimeError as e:
            logger.warn(str(e))
            self.is_complete = False

        if self.is_complete:
            self._completion_time = AnalysisDirectory._validator.completion_time(self.path, self.is_complete, self.output_file_dict)
Example #14
    async def get_temperature(self):
        try:
            response = await self._bus.req('output.temperature',
                                           {'command': 'get'})
            if response['status'] != 'ok':
                logger.warn("Cannot get output temperature: %s",
                            response['message'])
                return None
            return response['temperature']
        except futures.TimeoutError:
            logger.warn("Cannot get output temperature: request timeout")
            return None
Example #15
    def index_rebalance_tick_update(self, tickcount):
        if isinstance(int(tickcount), (float, int, complex)):
            DatabaseManager.update_index_info_model(
                indexInfo.Active, indexInfo.TotalBTCVal, indexInfo.TotalUSDVal,
                indexInfo.TotalRealizedGain, indexInfo.TotalUnrealizedGain,
                round(float(percentage), 2), indexInfo.OrderTimeout,
                indexInfo.OrderRetryAmount, int(tickcount))
            logger.info("Index rebalance time set to " + str(tickcount) +
                        " minutes.")
        else:
            logger.warn("Tick count isn't a number")
Example #16
def is_r_installed():
    installed = True
    FNULL = open(os.devnull, 'w')
    try:
        sub.call(['Rscript'], stdout=FNULL, stderr=sub.STDOUT)
    except sub.CalledProcessError:
        installed = False
    except OSError:
        installed = False
        logging.warn(
            'R or Rscript not installed. Will not be able to use linear model')
    return installed
Example #17
def stress_vmss_instance_cpu(filter: str = None,
                             duration: int = 120,
                             timeout: int = 60,
                             instance_criteria: Iterable[Mapping[str,
                                                                 any]] = None,
                             configuration: Configuration = None,
                             secrets: Secrets = None):
    logger.warn("Deprecated usage of activity 'stress_vmss_instance_cpu'."
                " Please use activity 'stress_cpu' in favor since this"
                " activity will be removed in a future release.")
    return stress_cpu(filter, duration, timeout, instance_criteria,
                      configuration, secrets)
Example #18
def node_failure_in_rabbit_cluster(configuration: Configuration = None,
                                   secrets: Secrets = None):
    rmq_client = rmq_client_connect(configuration, secrets)
    client = ssh.connect(secrets)
    stdin, stdout, stderr = client.exec_command(
        'sudo /usr/sbin/rabbitmqctl stop_app')
    o, r = stdout.read().decode('utf-8'), stderr.read().decode('utf-8')
    client.close()
    logger.info(o)

    if r != "":
        logger.warn(r)
Example #19
    def _assign_param_grid(self) -> None:
        assert self.context.node_index == 0

        param_grids = []
        for i in range(len(self.context.nodes)):
            param_grids.append([])

        node_assignments = cycle(range(len(self.context.nodes)))

        Cs = [0.01, 0.1, 1, 10]
        gammas = [0.001, 0.01, 0.1, 1, 10]

        for C in Cs:
            param_grids[next(node_assignments)].append({
                'C': [C],
                'kernel': ['linear']
            })

        if not self._linear_only:
            for C in Cs:
                for gamma in gammas:
                    param_grids[next(node_assignments)].append({
                        'C': [C],
                        'gamma': [gamma],
                        'kernel': ['rbf']
                    })

        self._set_param_grid_inner(param_grids[0])

        for i in range(1, len(self.context.nodes)):
            while True:
                try:
                    message = Any()
                    message.Pack(
                        SVMTrainerMessage(setParamGrid=SetParamGrid(
                            grid=json.dumps(param_grids[i]))))
                    self.context.nodes[i].internal.MessageInternal(
                        InternalMessage(searchId=self._search_id,
                                        trainerIndex=self._trainer_index,
                                        message=message))
                    break
                except Exception as e:
                    logger.warn(
                        'Could not set param grid - node {} might still not be up'
                        .format(self.context.nodes[i].url))
                    logger.exception(e)
                    time.sleep(5)

            logger.info('Set param grid on node {}'.format(
                self.context.nodes[i].url))

        self._param_grid_event.set()
Example #20
def accimage_loader(path):
    import accimage
    try:
        start = time.time()
        img = accimage.Image(path)
        end = time.time()
        logger.debug('accimage decode {:.3f} ms'.format(1000 * (end - start)))

        return img
    except IOError:
        logger.warn('accimage failed to decode {}'.format(path))
        # Potentially a decoding problem, fall back to PIL.Image
        return pil_loader(path)
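
The pil_loader fallback is not shown in this snippet; a plausible sketch, assuming the usual Pillow-based loader:

from PIL import Image

def pil_loader(path):
    # Open with Pillow and convert to RGB so the file handle can be closed safely.
    with open(path, 'rb') as f:
        img = Image.open(f)
        return img.convert('RGB')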
Example #21
def facebook_postback_handler(event, page=None):
    """Handle facebook postbacks."""
    # For now only deal with GET_STARTED postback
    if event.postback["payload"] != "GET_STARTED":
        # pylint: disable=deprecated-method
        logger.warn(f"Postback {event.postback['payload']} not supported")
        return
    user = User.get_or_create_facebook_user(page, event.sender_id)
    Log.log_message(user, f"POSTBACK: {event.postback['payload']}")
    logger.info(f"Postback {event.postback['payload']} received "
                f"from user {user.id}")
    if not facebook_handle_maintenance(page, event):
        handle_event(page, user, event)
Example #22
    def should_parse(self, config):
        if self.force:
            logger.warn('Force flag detected. Ignoring hash and parsing %s' %
                        self.raw_dir)
            return True

        if config['func_hash'] != self.get_function_hash(config['func_name']):
            logger.warn('Function hash in %s is different, should parse' %
                        self.raw_dir)
            self.set_config(func_name=config['func_name'])
            return True
        logger.info('Parser did not change since last run')
        return False
Example #23
def load_references(dataset_name: str) -> Optional[References]:
    """Load a file with references for a standard GEM dataset (attempt download), return None if not present."""
    if dataset_name in _DATASET_REFERENCES_URLS:
        try:
            dataset_file = ensure_download(
                'references', dataset_name + '.json',
                _DATASET_REFERENCES_URLS[dataset_name])
            return References(dataset_file)
        except Exception as e:
            logger.warn(
                f'Could not download references for {dataset_name}: {str(e)}')
            return None
    return None
Example #24
def handle_set(prefix, topic, payload):
    """
    <peripheral type="button" alternative_press="">
        <output type="integer" value="1" destination="/set/BAR/markers/capture1"/>
    </peripheral>
    <peripheral type="button" alternative_press="">
        <output type="integer" value="+=1" destination="/set/BAR/markers/capture1"/>
    </peripheral>
    """
    clobber = True
    payload = payload.decode()
    address = topic.replace(prefix, "")
    if address.startswith("/"):
        address = address[1:]
    logger.info("set {}{}{}".format(prefix, topic, payload))

    address, sub_address = address.split("/")
    value = None
    symbol = None
    if "+=" in payload:
        symbol = "+="
    elif "-=" in payload:
        symbol = "-="
    elif "*=" in payload:
        symbol = "*="

    if symbol is None:
        r.hmset("state:{}".format(address), dict({sub_address: payload}))
    else:
        try:
            try:
                stored = int(r.hget("state:{}".format(address), sub_address))
            except Exception as ex:
                logger.warn(ex)
                if clobber:
                    stored = 0
                else:
                    return
            value = int(payload.partition(symbol)[-1])
            if symbol == "+=":
                stored += value
            elif symbol == "-=":
                stored -= value
            elif symbol == "*=":
                stored *= value
            #elif symbol == "++":
            #string concat
            #    stored += value
            r.hmset("state:{}".format(address), dict({sub_address: stored}))
        except Exception as ex:
            logger.warn(ex)
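
In isolation, the arithmetic branch above works like this (hypothetical values, no Redis involved):

# Standalone walk-through of the "+=" handling, with made-up numbers.
stored, payload, symbol = 3, "+=2", "+="
value = int(payload.partition(symbol)[-1])   # -> 2
if symbol == "+=":
    stored += value
elif symbol == "-=":
    stored -= value
elif symbol == "*=":
    stored *= value
print(stored)  # 5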
Example #25
def probe_app_can_connect_and_send_message_to_rabbit(
        configuration: Configuration = None, secrets: Secrets = None):

    if not secrets:
        raise ActivityFailed(
            "Please set the secrets entry to specify the SSH client settings")

    # Create ssh client
    client = ssh.connect(secrets)

    # Get RabbitMQ connection params from secrets
    rmq_api_rest_endpoint = os.getenv(secrets.get("rabbitmq_restendpoint"))
    rmq_username = os.getenv(secrets.get("rabbitmq_username"))
    rmq_password = os.getenv(secrets.get("rabbitmq_password"))
    rmq_vhost_url = os.getenv(
        secrets.get("rabbitmq_host")) + "/api/healthchecks/node"
    rabbit_creds = rmq_username + ":" + rmq_password

    wait_time = int(os.getenv('WAIT_TIME'))
    if wait_time > 0:
        logger.info("Waiting for " + str(wait_time) +
                    " secs before connecting to rabbit")
        time.sleep(wait_time)

    rabbitmq_connect_curl = 'curl -s -u {rabbitmq_creds} {rabbitmq_host}'.format(
        rabbitmq_creds=rabbit_creds, rabbitmq_host=rmq_vhost_url)

    stdin, stdout, stderr = client.exec_command(rabbitmq_connect_curl)
    o, r = stdout.read().decode('utf-8'), stderr.read().decode('utf-8')
    client.close()

    logger.info(o)

    error = False

    if r != "":
        logger.warn(r)
        error = True

    rmq_client = rmq_client_connect(configuration, secrets)

    rmq_client.create_vhost("ce_vhost")
    rmq_client.create_exchange("ce_vhost", "ce_exc", "direct")
    rmq_client.create_queue("ce_vhost", "ce_que")
    rmq_client.create_binding("ce_vhost", "ce_exc", "ce_que", "ce.rtkey")
    if not rmq_client.publish('ce_vhost', 'ce_exc', 'ce.rtkey',
                              'chaos experiment message'):
        error = True
    rmq_client.delete_vhost("ce_vhost")

    return not error
Example #26
    def __call__(self, item):
        rv, jpg_arr = cv2.imencode('.jpg', item.array)
        assert rv, "Fail to re-encode into jpg"

        file_like = io.BytesIO(jpg_arr.tostring())
        try:
            r = self.request_session.post(self.detect_url,
                                          files={'image': file_like})
            assert r.ok
            result = r.json()
            if result['success']:
                detections = result

                count_hit = 0

                vis_arr = item.array

                for box, score, class_name in zip(
                        detections['detection_boxes'],
                        detections['detection_scores'],
                        detections['detection_names']):
                    if score < self.confidence:
                        break
                    for t in self.targets:
                        if t in class_name:
                            # hit
                            count_hit += 1
                            if s3dexp.config.VISUALIZE_RESULT:
                                h, w = vis_arr.shape[:2]
                                top, left, bottom, right = box  # TF return between 0~1
                                top, bottom = top * h, bottom * h
                                left, right = left * w, right * w
                                top, left, bottom, right = map(
                                    int, (top, left, bottom, right))
                                vis_arr = cv2.rectangle(
                                    vis_arr, (left, top), (right, bottom),
                                    (0, 255, 0), 3)

                if count_hit > 0:
                    if s3dexp.config.VISUALIZE_RESULT:
                        vis_path = 'vis-detect-{}-{}.jpg'.format(
                            self.targets[0], os.path.basename(item.src))
                        logger.warn("Saving visualized detection to {}".format(
                            vis_path))
                        cv2.imwrite(vis_path, vis_arr)
                    return True

        except:
            raise

        return False
Example #27
    def load_module(self, module_name: str, module_path: str) -> bool:
        if os.path.exists(module_path):
            spec = importlib.util.spec_from_file_location("Module", module_path)
            if spec is None:
                logger.warn(f"The module {module_name} needs to have a class called Module")
                return False

            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
            self.loaded_modules[module_name] = module.Module()
            return True
        else:
            logger.warn(f'Module: {module_name} not found')
            return False
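
A hedged usage sketch; the manager instance, module name and path are hypothetical, and the target file is expected to define a class named Module:

# Hypothetical call: load a plugin file that defines class Module.
if manager.load_module("greeter", "/path/to/plugins/greeter.py"):
    plugin = manager.loaded_modules["greeter"]  # the instantiated Module()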
Example #28
def run_steady_state_hypothesis(experiment: Experiment,
                                configuration: Configuration, secrets: Secrets,
                                dry: bool = False):
    """
    Run all probes in the hypothesis and fail the experiment as soon as any of
    the probes fails or is outside the tolerance zone.
    """
    state = {
        "steady_state_met": None,
        "probes": []
    }
    hypo = experiment.get("steady-state-hypothesis")
    if not hypo:
        logger.info(
            "No steady state hypothesis defined. That's ok, just exploring.")
        return

    logger.info("Steady state hypothesis: {h}".format(h=hypo.get("title")))

    probes = hypo.get("probes", [])
    for activity in probes:
        run = execute_activity(
            activity, configuration=configuration, secrets=secrets, dry=dry)

        state["probes"].append(run)

        if run["status"] == "failed":
            run["tolerance_met"] = False
            state["steady_state_met"] = False
            logger.warn("Probe terminated unexpectedly, "
                        "so its tolerance could not be validated")
            return state

        run["tolerance_met"] = True

        if dry:
            # do not check for tolerance when dry mode is on
            continue

        tolerance = activity.get("tolerance")
        logger.debug("allowed tolerance is {t}".format(t=str(tolerance)))
        if not within_tolerance(tolerance, run["output"]):
            run["tolerance_met"] = False
            state["steady_state_met"] = False
            return state

    state["steady_state_met"] = True
    logger.info("Steady state hypothesis is met!")

    return state
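
For reference, a minimal experiment fragment of the shape consumed above, sketched with illustrative values (the probe name, URL and tolerance are assumptions):

# Hypothetical steady-state-hypothesis block with a single HTTP probe.
experiment = {
    "steady-state-hypothesis": {
        "title": "Service still responds",
        "probes": [
            {
                "type": "probe",
                "name": "api-must-respond",
                "tolerance": 200,
                "provider": {"type": "http", "url": "http://example.com/health"},
            }
        ],
    }
}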
Example #29
def disk_read(base_dir, disk, ext='jpg', sort_inode=False, store_result=True):
    logger.warn("Make sure you cleaned the OS page buffer!")
    base_dir = os.path.realpath(base_dir)
    paths = list(recursive_glob(base_dir, '*.{}'.format(ext)))

    if sort_inode:
        paths = sorted(paths, key=lambda p: os.stat(p).st_ino)
        logger.info("Sort by inode num.")
    else:
        # deterministic pseudo-random
        random.seed(42)
        random.shuffle(paths)

    results = []

    for p in paths:
        tic = time.time()

        fd = os.open(p, os.O_RDONLY)
        size = os.path.getsize(p)
        buf = os.read(fd, size)
        os.close(fd)
        elapsed = time.time() - tic

        logger.debug("{}: {} bytes {} ms".format(p, len(buf), elapsed * 1000))

        vals_dict = {'size': size}
        if sort_inode:
            vals_dict['seq_read_ms'] = elapsed * 1000
        else:
            vals_dict['rand_read_ms'] = elapsed * 1000

        results.append({
            'keys_dict': {
                'path': p,
                'disk': disk
            },
            'vals_dict': vals_dict
        })

    if store_result:
        logger.info("Going to write {} results to DB".format(len(results)))
        sess = dbutils.get_session()
        for r in results:
            dbutils.insert_or_update_one(sess,
                                         models.DiskReadProfile,
                                         keys_dict=r['keys_dict'],
                                         vals_dict=r['vals_dict'])
        sess.commit()
        sess.close()
Example #30
    def fix_missed(blizzard_characters: List):
        db = blizzard_db()
        q = DatabaseUtils.core_query(db.query(CharacterModel)).all()
        for character in q:
            found = False
            for b_ch in blizzard_characters:
                if b_ch['character']['name'] == character.name:
                    found = True
            if not found:
                logger.warn(character.name + " escape guild")
                db.query(CharacterModel).filter(
                    CharacterModel.wow_id == character.wow_id).update(
                        {'state': 0})
                db.commit()