Example #1
    def test_custom_colwidth(self):
        f = io.BytesIO()
        tbl = TableLogger(file=f, border=False, colwidth={0: 30})
        tbl('col1')
        self.assertEqual(len(f.getvalue()) - 1, 30)

        f = io.BytesIO()
        tbl = TableLogger(file=f, border=False, colwidth={0: 30, 1: 20})
        tbl('col1', 345)
        self.assertEqual(len(f.getvalue()) - 2, 30 + 20)

        f = io.BytesIO()
        tbl = TableLogger(file=f,
                          border=False,
                          columns=['col1'],
                          colwidth={'col1': 30})
        tbl('value')
        self.assertEqual(len(f.getvalue().decode('utf-8').split('\n')[1]), 30)

        f = io.BytesIO()
        tbl = TableLogger(file=f,
                          border=False,
                          columns=['c1', 'c2'],
                          colwidth={
                              'c1': 30,
                              'c2': 20
                          })
        tbl('col1', 345)
        self.assertEqual(
            len(f.getvalue().decode('utf-8').split('\n')[1]) - 1, 30 + 20)
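
The assertions above exercise both key styles for colwidth: positional indices when no column names are declared, and column names when they are. A minimal sketch of the two forms, assuming the package's usual from table_logger import TableLogger import (the widths and values are illustrative):

from table_logger import TableLogger

# Keys are positional indices when no column names are declared...
tbl = TableLogger(border=False, colwidth={0: 30, 1: 20})
tbl('col1', 345)

# ...or declared column names when columns are given explicitly.
tbl = TableLogger(border=False,
                  columns=['c1', 'c2'],
                  colwidth={'c1': 30, 'c2': 20})
tbl('value1', 345)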
Example #2
    def test_float_formatting(self):
        val = 0.777777
        f = io.BytesIO()
        tbl = TableLogger(file=f,
                          border=False,
                          float_format='{:.3}'.format,
                          default_colwidth=7)
        tbl(val)
        self.assertEqual('  0.778\n', f.getvalue().decode('utf-8'))

        # test np.float32
        f = io.BytesIO()
        tbl = TableLogger(file=f,
                          border=False,
                          float_format='{:.3}'.format,
                          default_colwidth=7)
        tbl(np.float32(val))
        self.assertEqual('  0.778\n', f.getvalue().decode('utf-8'))

        # test np.float64
        f = io.BytesIO()
        tbl = TableLogger(file=f,
                          border=False,
                          float_format='{:.3}'.format,
                          default_colwidth=7)
        tbl(np.float64(val))
        self.assertEqual('  0.778\n', f.getvalue().decode('utf-8'))

        f = io.BytesIO()
        tbl = TableLogger(file=f,
                          border=False,
                          float_format='{:.5}'.format,
                          default_colwidth=6)
        tbl(0.333333333333)
        self.assertEqual('0.3...\n', f.getvalue().decode('utf-8'))
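
The last assertion above shows that float_format controls how float cells are rendered and that a formatted value wider than its column is truncated with an ellipsis. A minimal sketch of that behaviour, assuming the usual from table_logger import TableLogger import:

from table_logger import TableLogger

# '{:.5}'.format(0.333333333333) yields '0.33333' (7 characters); with a
# column width of 6 the cell is cut to '0.3...', as asserted above.
tbl = TableLogger(border=False,
                  float_format='{:.5}'.format,
                  default_colwidth=6)
tbl(0.333333333333)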
Example #3
    def test_default_colwidth(self):
        f = io.BytesIO()
        tbl = TableLogger(file=f, border=False, default_colwidth=5)
        tbl('col1')
        self.assertEqual('col1 \n', f.getvalue().decode('utf-8'))

        f = io.BytesIO()
        tbl = TableLogger(file=f, border=False, default_colwidth=5)
        tbl('col1', 'col2')
        self.assertEqual('col1  col2 \n', f.getvalue().decode('utf-8'))
Example #4
    def test_int_formatting(self):
        val = 123
        f = io.BytesIO()
        tbl = TableLogger(file=f, border=False, default_colwidth=7)
        tbl(val)
        self.assertEqual('    123\n', f.getvalue().decode('utf-8'))

        f = io.BytesIO()
        tbl = TableLogger(file=f, border=False, default_colwidth=7)
        tbl(np.int32(val))
        self.assertEqual('    123\n', f.getvalue().decode('utf-8'))
Example #5
    def test_columns(self):
        t = TableLogger(columns=['a', 'b'])
        self.assertEqual(t.columns, ['a', 'b'])

        t = TableLogger(columns='a,b')
        self.assertEqual(t.columns, ['a', 'b'])

        t = TableLogger()
        self.assertEqual(t.columns, [])

        self.assertRaises(ValueError, lambda: TableLogger(columns=''))
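
The test above shows the two equivalent ways to declare column names, a list or a comma-separated string, and that an empty string raises ValueError. A minimal usage sketch, assuming the usual from table_logger import TableLogger import (the column names and row values are illustrative):

from table_logger import TableLogger

tbl = TableLogger(columns=['name', 'size'])   # list form
tbl = TableLogger(columns='name,size')        # comma-separated form, same columns
tbl('example.txt', 1024)                      # each call writes one row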
Example #6
 def test_file(self):
     temp_dir = tempfile.mkdtemp(prefix='table-logger-temp-dir')
     out_file = os.path.join(temp_dir, 'out.log')
     try:
         t = TableLogger(file=out_file, border=False, default_colwidth=2)
         t(1, 'ü', 3)
         t.close()
         self.assertTrue(t.file.closed)
         with open(out_file, 'rb') as f:
             self.assertEqual(f.read().decode('utf-8'), ' 1 ü   3\n')
     finally:
         shutil.rmtree(temp_dir)
Example #7
 def __init__(self):
     """Just to intialize the class,can be use for optimizations """
     self.logger = TableLogger(columns='data',
                               rownum=False,
                               time_delta=True,
                               timestamp=False,
                               colwidth={
                                   'data': 150,
                                   'rownum': 3,
                                   'time_delta': 6
                               })
     self.ast = wf.wrapperFactory("asterixWrapper",
                                  env='remote',
                                  tablelogger=self.logger)
     self.pgget = wf.wrapperFactory('postgresWrapper',
                                    env='remote',
                                    tablelogger=self.logger)
     self.unfold = Mediator()
     self.asinfile = "asincache.pkl"
     self.asin_df = None
     self.sourcedict = None
     with open('sourcedictionary.json', 'r') as f:
         sourcedict = json.load(f)
     self.sourcedict = sourcedict
     self.datalog_parsed = None
     self.logger("QPE initialized")
Example #8
File: cli.py Project: swpecht/zpy
def list_sims():
    """list datasets

    List sims from backend.
    """
    from cli.sims import fetch_sims

    try:
        with Loader("Fetching sims..."):
            sims = fetch_sims()
        click.echo("Fetched sims succesfully.")
    except requests.exceptions.HTTPError as e:
        click.secho(f"Failed to fetch sims {e}.", fg="red", err=True)
        return

    tbl = TableLogger(columns="name,state,zpy_version,blender_version,created",
                      default_colwidth=30)
    for s in sims:
        tbl(
            s["name"],
            s["state"],
            s["zpy_version"],
            s["blender_version"],
            s["created_at"],
        )
Example #9
 def test_print_to_file(self):
     f = io.BytesIO()
     tbl = TableLogger(file=f, colwidth={0: 2, 1: 5})
     tbl('12', '12345')
     tbl('ab', 'cdefg')
     self.assertEqual('+----+-------+\n| 12 | 12345 |\n| ab | cdefg |\n',
                      f.getvalue().decode('utf-8'))
Example #10
 def test_timestamp_column(self):
     f = io.BytesIO()
     tbl = TableLogger(file=f, timestamp=True, border=False)
     tbl()
     val = datetime.datetime.strptime(
         ' '.join(f.getvalue().decode('utf-8').split()[-2:]),
         '%Y-%m-%d %H:%M:%S.%f')
     self.assertTrue((datetime.datetime.now() - val).total_seconds() < 1)
Example #11
def print_file_info():
    """Prints file details in the current directory"""
    tpl = TableLogger(columns='file,created,modified,size')
    for f in os.listdir('.'):
        size = os.stat(f).st_size
        date_created = datetime.fromtimestamp(os.path.getctime(f))
        date_modified = datetime.fromtimestamp(os.path.getmtime(f))
        tpl(f, date_created, date_modified, size)
Example #12
def print_time_delta():
    tpl = TableLogger(columns='data',
                      rownum=True,
                      time_delta=True,
                      timestamp=True)
    for e in 'abcde':
        time.sleep(random.randint(0, 3))
        tpl(e)
Example #13
    def test_rownum_column(self):
        f = io.BytesIO()
        tbl = TableLogger(file=f, rownum=True, border=False)

        for i in range(1, 10):
            tbl()
            val = int(f.getvalue().split()[-1])
            self.assertEqual(i, val)
Example #14
 def _maybe_initialize_logger(self, metrics):
     if not hasattr(self, '_logger'):
         columns = ["Epoch"]
         self._names = [name for name in metrics]
         columns += ["T {}".format(name) for name in metrics]
         columns += ["V {}".format(name) for name in metrics]
         columns += ["Best {}".format(self._key_metric)]
         columns += ["Time"]
         self._logger = TableLogger(columns=columns,
                                    float_format='{:.3f}'.format,
                                    default_colwidth=10)
Example #15
def watch(name=None,
          namespace=None,
          timeout_seconds=600,
          version=constants.KFSERVING_V1BETA1_VERSION):
    """Watch the created or patched InferenceService in the specified namespace"""

    if namespace is None:
        namespace = utils.get_default_target_namespace()

    if version != 'v1beta1':
        raise RuntimeError(
            "The watch API only support v1beta1, the v1alpha2 will be deprecated."
        )

    tbl = TableLogger(columns='NAME,READY,PREDICTOR_CANARY_TRAFFIC,URL',
                      colwidth={
                          'NAME': 20,
                          'READY': 10,
                          'PREDICTOR_CANARY_TRAFFIC': 25,
                          'URL': 65
                      },
                      border=False)

    stream = k8s_watch.Watch().stream(
        client.CustomObjectsApi().list_namespaced_custom_object,
        constants.KFSERVING_GROUP,
        version,
        namespace,
        constants.KFSERVING_PLURAL,
        timeout_seconds=timeout_seconds)

    for event in stream:
        isvc = event['object']
        isvc_name = isvc['metadata']['name']
        if name and name != isvc_name:
            continue
        else:
            if isvc.get('status', ''):
                url = isvc['status'].get('url', '')
                traffic_percent = isvc['status'].get('components', {}).get(
                    'predictor', {}).get('trafficPercent', '')
                status = 'Unknown'
                for condition in isvc['status'].get('conditions', {}):
                    if condition.get('type', '') == 'Ready':
                        status = condition.get('status', 'Unknown')
                tbl(isvc_name, status, traffic_percent, url)
            else:
                tbl(isvc_name, 'Unknown', '', '')
                # Sleep for 2 seconds to give the status section time to be generated.
                time.sleep(2)
                continue

            if name == isvc_name and status == 'True':
                break
Example #16
def isvc_watch(name=None, namespace=None, timeout_seconds=600):
    """Watch the created or patched InferenceService in the specified namespace"""

    if namespace is None:
        namespace = utils.get_default_target_namespace()

    tbl = TableLogger(columns='NAME,READY,PREV,LATEST,URL',
                      colwidth={
                          'NAME': 20,
                          'READY': 10,
                          'PREV': 25,
                          'LATEST': 25,
                          'URL': 65
                      },
                      border=False)

    stream = k8s_watch.Watch().stream(
        client.CustomObjectsApi().list_namespaced_custom_object,
        constants.KFSERVING_GROUP,
        constants.KFSERVING_V1BETA1_VERSION,
        namespace,
        constants.KFSERVING_PLURAL,
        timeout_seconds=timeout_seconds)

    for event in stream:
        isvc = event['object']
        isvc_name = isvc['metadata']['name']
        if name and name != isvc_name:
            continue
        else:
            if isvc.get('status', ''):
                url = isvc['status'].get('url', '')
                traffic = isvc['status'].get('components',
                                             {}).get('predictor',
                                                     {}).get('traffic', [])
                traffic_percent = 100
                for t in traffic:
                    if t["latestRevision"]:
                        traffic_percent = t["percent"]
                status = 'Unknown'
                for condition in isvc['status'].get('conditions', {}):
                    if condition.get('type', '') == 'Ready':
                        status = condition.get('status', 'Unknown')
                tbl(isvc_name, status, 100 - traffic_percent, traffic_percent,
                    url)
            else:
                tbl(isvc_name, 'Unknown', '', '', '')
                # Sleep for 2 seconds to give the status section time to be generated.
                time.sleep(2)
                continue

            if name == isvc_name and status == 'True':
                break
Example #17
def fetch_datasets(endpoint, token):
    """ fetch all datasets in ragnarok """
    u_datasets = fetch_uploaded_datasets(endpoint, token)
    g_datasets = fetch_generated_datasets(endpoint, token)
    j_datasets = fetch_job_datasets(endpoint, token)
    tbl = TableLogger(columns='state,type,name,timestamp,id',
                      default_colwidth=30)
    for d in u_datasets:
        tbl(d['state'], 'UPLOADED', d['name'], d['created_at'], d['id'])
    for d in g_datasets:
        tbl(d['state'], 'GENERATED', d['name'], d['created_at'], d['id'])
    for d in j_datasets:
        tbl(d['state'], 'JOB', d['name'], d['created_at'], d['id'])
Example #18
    def test_border(self):
        f = io.BytesIO()
        tbl = TableLogger(file=f, colwidth={0: 1, 1: 1})
        tbl(1, 1)
        self.assertEqual('+---+---+\n| 1 | 1 |\n',
                         f.getvalue().decode('utf-8'))

        f = io.BytesIO()
        tbl = TableLogger(file=f, colwidth={0: 1, 1: 1}, border=False)
        tbl(1, 1)
        self.assertEqual('1 1\n', f.getvalue().decode('utf-8'))

        f = io.BytesIO()
        tbl = TableLogger(file=f,
                          colwidth={
                              0: 1,
                              1: 1
                          },
                          columns=['a', 'b'],
                          border=False)
        tbl(1, 1)
        self.assertEqual('a b\n1 1\n', f.getvalue().decode('utf-8'))
Example #19
def watch(name,
          plural,
          namespace=None,
          timeout_seconds=600,
          version=constants.TEKTON_VERSION):
    """Watch the created or patched tekton objects in the specified namespace"""

    if namespace is None:
        namespace = utils.get_default_target_namespace()

    tbl = TableLogger(columns='NAME,SUCCEEDED,REASON,STARTED,COMPLETED',
                      colwidth={
                          'NAME': 20,
                          'SUCCEEDED': 20,
                          'REASON': 20,
                          'STARTED': 20,
                          'COMPLETED': 20
                      },
                      border=False)

    stream = k8s_watch.Watch().stream(
        client.CustomObjectsApi().list_namespaced_custom_object,
        constants.TEKTON_GROUP,
        version,
        namespace,
        plural,
        timeout_seconds=timeout_seconds)

    for event in stream:
        tekton = event['object']
        tekton_name = tekton['metadata']['name']
        if name and name != tekton_name:
            continue
        else:
            if tekton.get('status', ''):
                status = ''
                reason = ''
                startTime = tekton['status'].get('startTime', '')
                completionTime = tekton['status'].get('completionTime', '')
                for condition in tekton['status'].get('conditions', {}):
                    status = condition.get('status', '')
                    reason = condition.get('reason', '')
                tbl(tekton_name, status, reason, startTime, completionTime)
            else:
                tbl(tekton_name, '', '', '', '')
                # Sleep for 2 seconds to give the status section time to be generated.
                time.sleep(2)
                continue

            if name == tekton_name and status != 'Unknown':
                break
Example #20
    def test_custom_formatters(self):
        f = io.BytesIO()
        tbl = TableLogger(file=f,
                          border=False,
                          formatters={
                              0: '{:,.2f}'.format,
                              1: '{:%Y-%m-%d}'.format
                          })
        tbl(12345.1234, datetime.date(2013, 12, 25))
        self.assertEqual('12,345.12 2013-12-25',
                         f.getvalue().decode('utf-8').strip())

        f = io.BytesIO()
        tbl = TableLogger(file=f,
                          border=False,
                          columns=['number', 'datetime'],
                          formatters={
                              'number': '{:,.2f}'.format,
                              'datetime': '{:%Y-%m-%d}'.format
                          })
        tbl(12345.1234, datetime.date(2013, 12, 25))
        self.assertEqual('12,345.12 2013-12-25',
                         f.getvalue().decode('utf-8').split('\n')[1].strip())
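
Like colwidth, the formatters mapping can be keyed by positional index or by declared column name, and each value is a callable that renders the cell. A minimal sketch of the named form, taken from the test above and assuming the usual from table_logger import TableLogger import:

import datetime

from table_logger import TableLogger

# One formatter per column: thousands separator for the float,
# date-only rendering for the datetime column.
tbl = TableLogger(border=False,
                  columns=['number', 'datetime'],
                  formatters={'number': '{:,.2f}'.format,
                              'datetime': '{:%Y-%m-%d}'.format})
tbl(12345.1234, datetime.date(2013, 12, 25))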
Example #21
def fetch_jobs(url, auth_headers):
    """ fetch all datasets in ragnarok """
    endpoint = f'{url}/api/v1/jobs/'
    r = requests.get(endpoint, headers=auth_headers)
    if r.status_code != 200:
        log.warning('Unable to fetch jobs')
        return
    jobs = json.loads(r.text)['results']
    tbl = TableLogger(columns='state,name,operation,created',
                      default_colwidth=30)
    if len(jobs) == 0:
        log.info('No jobs found')
    for j in jobs:
        tbl(j['state'], j['name'], j['operation'], j['created_at'])
Example #22
File: scenes.py Project: kant/zpy
def fetch_scenes(endpoint, token):
    """ fetch all datasets in ragnarok """
    endpoint = f'{endpoint}/api/v1/scenes/'
    r = requests.get(endpoint, headers=auth_headers(token))
    if r.status_code != 200:
        log.warning('Unable to fetch scenes')
        return
    scenes = json.loads(r.text)['results']
    tbl = TableLogger(columns='state,name,zpy_version,blender_version,created',
                      default_colwidth=30)
    if len(scenes) == 0:
        log.info('No scenes found')
    for s in scenes:
        tbl(s['state'], s['name'], s['zpy_version'], s['blender_version'],
            s['created_at'])
Example #23
    def test_time_delta_column(self):
        f = io.BytesIO()
        tbl = TableLogger(file=f, time_delta=True, border=False)
        tbl()
        val = float(f.getvalue().split()[-1])
        self.assertAlmostEqual(0, val, places=1)

        time.sleep(1)
        tbl()
        val = float(f.getvalue().split()[-1])
        self.assertAlmostEqual(1, val, places=1)

        time.sleep(3)
        tbl()
        val = float(f.getvalue().split()[-1])
        self.assertAlmostEqual(3, val, places=1)
Example #24
def watch(name=None, namespace=None, timeout_seconds=600):
    """Watch the created or patched InferenceService in the specified namespace"""

    if namespace is None:
        namespace = utils.get_default_target_namespace()

    tbl = TableLogger(columns='NAME,READY,DEFAULT_TRAFFIC,CANARY_TRAFFIC,URL',
                      colwidth={
                          'NAME': 20,
                          'READY': 10,
                          'DEFAULT_TRAFFIC': 15,
                          'CANARY_TRAFFIC': 15,
                          'URL': 50
                      },
                      border=False)

    stream = k8s_watch.Watch().stream(
        client.CustomObjectsApi().list_namespaced_custom_object,
        constants.KFSERVING_GROUP,
        constants.KFSERVING_VERSION,
        namespace,
        constants.KFSERVING_PLURAL,
        timeout_seconds=timeout_seconds)

    for event in stream:
        isvc = event['object']
        isvc_name = isvc['metadata']['name']
        if name and name != isvc_name:
            continue
        else:
            if isvc.get('status', ''):
                url = isvc['status'].get('url', '')
                default_traffic = isvc['status'].get('traffic', '')
                canary_traffic = isvc['status'].get('canaryTraffic', '')
                status = 'Unknown'
                for condition in isvc['status'].get('conditions', {}):
                    if condition.get('type', '') == 'Ready':
                        status = condition.get('status', 'Unknown')
                tbl(isvc_name, status, default_traffic, canary_traffic, url)
            else:
                tbl(isvc_name, 'Unknown', '', '', '')
                # Sleep for 2 seconds to give the status section time to be generated.
                time.sleep(2)
                continue

            if name == isvc_name and status == 'True':
                break
Example #25
File: cli.py Project: swpecht/zpy
def list_datasets():
    """list datasets

    List datasets from backend.
    """
    from cli.datasets import fetch_datasets

    try:
        with Loader("Fetching datasets..."):
            datasets = fetch_datasets()
        click.echo("Fetched datasets succesfully.")
    except requests.exceptions.HTTPError as e:
        click.secho(f"Failed to fetch datasets {e}.", fg="red", err=True)
        return

    tbl = TableLogger(columns="name,state,type,created,id",
                      default_colwidth=30)
    for d in datasets:
        tbl(d["name"], d["state"].lower(), d["type"], d["created_at"], d["id"])
Example #26
File: cli.py Project: swpecht/zpy
def list_jobs():
    """list jobs

    List jobs from backend.
    """
    from cli.jobs import fetch_jobs

    try:
        with Loader("Fetching jobs..."):
            jobs = fetch_jobs()
        click.echo("Fetched jobs succesfully.")
    except requests.exceptions.HTTPError as e:
        click.secho(f"Failed to fetch jobs {e}.", fg="red", err=True)
        return

    tbl = TableLogger(columns="state,name,operation,created",
                      default_colwidth=30)
    for j in jobs:
        tbl(j["state"], j["name"], j["operation"], j["created_at"])
Example #27
def prepare_reports(reports, attrs_to_string=False):
    bare_reports = []
    labels = []
    for rep in reports:
        if attrs_to_string:
            bare = convert_to_string(rep['report'])
            bare_reports.append(bare)
        else:
            bare_reports.append(rep['report'])

        if rep['label'] == 'malware':
            labels.append(1)
        else:
            labels.append(0)

    _, counts = np.unique(labels, return_counts=True)
    logger.info('   Dataset statistics')
    tbl = TableLogger(columns='Class,# of reports')
    tbl('malware', counts[1])
    tbl('benignware', counts[0])
    print('+----------------------+----------------------+')
    return bare_reports, labels
Example #28
    def __init__(self, scan_result):
        self.logger = logging.getLogger(__name__)
        self.logger.info('Initializing new burner device')

        super().__init__(scan_result)

        self.tbl = TableLogger(
            columns=
            'timestamp,FlowDes,Flow,Return,Flame,Power,PowKW,WaterPres,PumpPow,Status01,Status02,SetModeR,m3',
            formatters={
                'timestamp': '{:%Y-%m-%d %H:%M:%S}'.format,
                'FlowDes': '{:,.2f}'.format,
                'Flow': '{:,.2f}'.format,
                'Return': '{:,.2f}'.format,
                'Flame': '{:3}'.format,
                'Power': '{:,.2f}'.format,
                'PowKW': '{:,.2f}'.format,
                'PumpPow': '{:,.2f}'.format,
                'WaterPres': '{:,.3f}'.format,
                'm3': '{:,.3f}'.format,
            },
            colwidth={
                'Flow': 6,
                'FlowDes': 6,
                'Return': 6,
                'Flame': 5,
                'Power': 5,
                'PowKW': 5,
                'WaterPres': 9,
                'PumpPow': 7,
                'Status01': 25,
                'Status02': 20,
                'SetModeR': 27,
                'm3': 9
            })

        self.power_max_hc_kw = float(self.read_0('PartloadHcKW'))
        self.flame = self.read_0('Flame')
        self.pump_power = self.read_0('PumpPower')
Example #29
    def __init__(self, scan_result):
        self.logger = logging.getLogger(__name__)
        self.logger.info('Initializing new temperature regulator device')

        super().__init__(scan_result)

        self.tbl = TableLogger(
            columns='timestamp,FlowDes,RoomDes,Room,Day,Night,Out',
            formatters={
                'timestamp': '{:%Y-%m-%d %H:%M:%S}'.format,
                'FlowDes': '{:,.2f}'.format,
                'RoomDes': '{:,.2f}'.format,
                'Room': '{:,.2f}'.format,
                'Day': '{:,.2f}'.format,
                'Night': '{:,.2f}'.format,
                'Out': '{:,.2f}'.format
            },
            colwidth={
                'FlowDes': 6, 'RoomDes': 6, 'Room': 6,
                'Day': 6, 'Night': 6, 'Out': 6
            })

        self.temp_room_des = self.read_0('z1ActualRoomTempDesired')
Example #30
def watch(name=None, namespace=None, timeout_seconds=600):
    """Watch the created or patched KFService in the specified namespace"""

    if namespace is None:
        namespace = utils.get_default_target_namespace()

    tbl = TableLogger(
        columns='NAME,READY,DEFAULT_TRAFFIC,CANARY_TRAFFIC,URL',
        colwidth={'NAME': 20, 'READY': 10, 'DEFAULT_TRAFFIC': 15, 'CANARY_TRAFFIC': 15, 'URL': 50},
        border=False)

    stream = k8s_watch.Watch().stream(
        client.CustomObjectsApi().list_namespaced_custom_object,
        constants.KFSERVING_GROUP,
        constants.KFSERVING_VERSION,
        namespace,
        constants.KFSERVING_PLURAL,
        timeout_seconds=timeout_seconds)

    for event in stream:
        kfservice = event['object']
        kfsvc_name = kfservice['metadata']['name']
        if name and name != kfsvc_name:
            continue
        else:
            url = kfservice['status'].get('url', '')
            default_traffic = kfservice['status'].get('default', {}).get('traffic', '')
            canary_traffic = kfservice['status'].get('canary', {}).get('traffic', '')
            status = 'Unknown'
            for condition in kfservice['status'].get('conditions', {}):
                if condition.get('type', '') == 'Ready':
                    status = condition.get('status', 'Unknown')
            tbl(kfsvc_name, status, default_traffic, canary_traffic, url)

            if name == kfsvc_name and status == 'True':
                break