Example No. 1
async def new_chat_member(message: types.Message, chat: Chat):
    if not chat.join_filter:
        return False

    if message.date < datetime.datetime.now() - datetime.timedelta(minutes=30):
        logger.warning(
            "Join message {message} in chat {chat} is too old. Skip filtering. (Age: {age})",
            message=message.message_id,
            chat=chat.id,
            age=datetime.datetime.now() - message.date,
        )
        return False

    if message.from_user not in message.new_chat_members:
        logger.opt(lazy=True).info(
            "User {user} add new members to chat {chat}: {new_members}",
            user=lambda: message.from_user.id,
            chat=lambda: message.chat.id,
            new_members=lambda: ", ".join([str(u.id) for u in message.new_chat_members]),
        )
        # TODO: Validate that the user adding new members is an admin
    else:
        logger.opt(lazy=True).info(
            "New chat members in chat {chat}: {new_members}",
            chat=lambda: message.chat.id,
            new_members=lambda: ", ".join([str(u.id) for u in message.new_chat_members]),
        )

    users = {}
    for new_member in message.new_chat_members:
        try:
            chat_member = await message.chat.get_member(new_member.id)
            if chat_member.status == "restricted":
                return False  # Ignore users who are already restricted, to prevent captcha abuse.
            else:
                await message.chat.restrict(
                    new_member.id, permissions=types.ChatPermissions(can_send_messages=False)
                )
                users[new_member.id] = new_member.get_mention()
        except BadRequest as e:
            logger.error(
                "Cannot restrict chat member {user} in chat {chat} with error: {error}",
                user=new_member.id,
                chat=chat.id,
                error=e,
            )
            continue

    buttons = [
        types.InlineKeyboardButton(_("I'm bot"), callback_data=cb_join_list.new(answer="bot")),
        types.InlineKeyboardButton(_("I'm pet"), callback_data=cb_join_list.new(answer="pet")),
        types.InlineKeyboardButton(
            _("I'm spammer"), callback_data=cb_join_list.new(answer="spammer")
        ),
        types.InlineKeyboardButton(
            _("I'm scammer"), callback_data=cb_join_list.new(answer="scammer")
        ),
    ]
    random.shuffle(buttons)
    buttons.insert(
        random.randint(1, len(buttons)),
        types.InlineKeyboardButton(_("I'm human"), callback_data=cb_join_list.new(answer="human")),
    )
    msg = await message.reply(
        _(
            "{users}, Welcome to the chat. \n"
            "Please confirm that you are a human. "
            "User filter is enabled in this chat, so if you don't answer my question, "
            "I will be forced to remove you from this chat."
        ).format(users=", ".join(users.values())),
        reply_markup=types.InlineKeyboardMarkup(row_width=3).add(*buttons),
    )
    await join_list.create_list(
        chat_id=message.chat.id, message_id=msg.message_id, users=users.keys()
    )
    return True
Example No. 2
def test_colors_with_level(writer, colorize):
    logger.add(writer, format="{message}", colorize=colorize)
    logger.level("DEBUG", color="<green>")
    logger.opt(colors=True).debug("a <level>level</level> b")
    assert writer.read() == parse("a <green>level</green> b\n",
                                  strip=not colorize)
Example No. 3
def test_colored_string_used_as_spec(writer, colorize):
    logger.add(writer,
               colorize=colorize,
               format="{level.no:{message}} <red>{message}</red>")
    logger.opt(colors=True).log(30, "03d")
    assert writer.read() == parse("030 <red>03d</red>\n", strip=not colorize)
Example No. 4
def test_record_in_kwargs_too(writer):
    logger.add(writer, catch=False)

    with pytest.raises(TypeError, match=r"The message can't be formatted"):
        logger.opt(record=True).info("Foo {record}", record=123)
Example No. 5
def test_colors_nested(writer, colorize):
    logger.add(writer, format="(<red>[{message}]</red>)", colorize=colorize)
    logger.opt(colors=True).debug("A<green>B</green>C<blue>D</blue>E")
    assert writer.read() == parse(
        "(<red>[A<green>B</green>C<blue>D</blue>E]</red>)\n",
        strip=not colorize)
Example No. 6
            if SMSRunner.get_by_port(port) is None:
                SMSRunner(port).start()
    elif btn == 'disconnect':
        for index in values['thread_table']:
            selected_port = all_port[index]
            runner = SMSRunner.get_by_port(selected_port)
            if runner:
                threading.Thread(target=runner.disconnect).start()
    elif btn == 'restart':
        for index in values['thread_table']:
            selected_port = all_port[index]
            runner = SMSRunner.get_by_port(selected_port)
            if runner:
                threading.Thread(target=runner.restart).start()
    elif btn == 'ussd':
        cmd = sg.PopupGetText('USSD Command')
        for index in values['thread_table']:
            selected_port = all_port[index]
            runner = SMSRunner.get_by_port(selected_port)
            if runner:
                threading.Thread(target=runner.run_ussd, args=(cmd, )).start()
    btn, values = window.Read(timeout=2000)
    if btn is None:
        break
    try:
        update_table()
    except TclError:
        # Ignore Tcl/Tk errors raised while refreshing the table.
        pass
    except Exception:
        logger.opt(exception=True).error("Table error")
Example No. 7
    def on_task_output(self, task, config):
        config = self.prepare_config(config)
        # don't add when learning
        if task.options.learn:
            return
        if not config['enabled']:
            return
        # Do not run if there is nothing to do
        if not task.accepted:
            return
        if self.client is None:
            self.client = self.create_rpc_client(config)
            if self.client:
                logger.debug('Successfully connected to transmission.')
            else:
                raise plugin.PluginError("Couldn't connect to transmission.")
        session_torrents = self.client.get_torrents()
        for entry in task.accepted:
            if task.options.test:
                logger.info('Would {} {} in transmission.', config['action'],
                            entry['title'])
                continue
            # Compile user options into appropriate dict
            options = self._make_torrent_options_dict(config, entry)
            torrent_info = None
            for t in session_torrents:
                if t.hashString.lower() == entry.get(
                        'torrent_info_hash',
                        '').lower() or t.id == entry.get('transmission_id'):
                    torrent_info = t
                    logger.debug(
                        'Found {} already loaded in transmission as {}',
                        entry['title'],
                        torrent_info.name,
                    )
                    break

            if not torrent_info:
                if config['action'] != 'add':
                    logger.warning(
                        'Cannot {} {} because it is not loaded in transmission.',
                        config['action'],
                        entry['title'],
                    )
                    continue
                downloaded = not entry['url'].startswith('magnet:')

                # Check that file is downloaded
                if downloaded and 'file' not in entry:
                    entry.fail('`file` field missing?')
                    continue

                # Verify the temp file exists
                if downloaded and not os.path.exists(entry['file']):
                    tmp_path = os.path.join(task.manager.config_base, 'temp')
                    logger.debug('entry: {}', entry)
                    logger.debug('temp: {}', ', '.join(os.listdir(tmp_path)))
                    entry.fail("Downloaded temp file '%s' doesn't exist!?" %
                               entry['file'])
                    continue

                try:
                    if downloaded:
                        with open(entry['file'], 'rb') as f:
                            filedump = base64.b64encode(
                                f.read()).decode('utf-8')
                        torrent_info = self.client.add_torrent(
                            filedump, 30, **options['add'])
                    else:
                        if options['post'].get('magnetization_timeout', 0) > 0:
                            options['add']['paused'] = False
                        torrent_info = self.client.add_torrent(
                            entry['url'], timeout=30, **options['add'])
                except TransmissionError as e:
                    logger.opt(exception=True).debug('TransmissionError')
                    logger.debug('Failed options dict: {}', options['add'])
                    msg = 'Error adding {} to transmission. TransmissionError: {}'.format(
                        entry['title'], e.message or 'N/A')
                    logger.error(msg)
                    entry.fail(msg)
                    continue
                logger.info('"{}" torrent added to transmission',
                            entry['title'])
                # The info returned by the add call is incomplete, refresh it
                torrent_info = self.client.get_torrent(torrent_info.id)
            else:
                # Torrent already loaded in transmission
                if options['add'].get('download_dir'):
                    logger.verbose('Moving {} to "{}"', torrent_info.name,
                                   options['add']['download_dir'])
                    # Move the data even if the currently reported torrent location already
                    # matches the new location: transmission may fail to move a completed file
                    # to its final location yet keep reporting the final location instead of
                    # the real one. In that case this call forces transmission to actually move
                    # the data; if the data is already at the new location, transmission simply
                    # ignores the command.
                    self.client.move_torrent_data(
                        torrent_info.id, options['add']['download_dir'], 120)

            try:
                total_size = torrent_info.totalSize
                main_id = None
                find_main_file = (options['post'].get('main_file_only')
                                  or 'content_filename' in options['post'])
                skip_files = options['post'].get('skip_files')
                # We need to index the files if any of the following are defined
                if find_main_file or skip_files:
                    file_list = self.client.get_files(
                        torrent_info.id)[torrent_info.id]

                    if options['post'].get('magnetization_timeout',
                                           0) > 0 and not file_list:
                        logger.debug(
                            'Waiting {} seconds for "{}" to magnetize',
                            options['post']['magnetization_timeout'],
                            entry['title'],
                        )
                        for _ in range(
                                options['post']['magnetization_timeout']):
                            sleep(1)
                            file_list = self.client.get_files(
                                torrent_info.id)[torrent_info.id]
                            if file_list:
                                total_size = self.client.get_torrent(
                                    torrent_info.id,
                                    ['id', 'totalSize']).totalSize
                                break
                        else:
                            logger.warning(
                                '"{}" did not magnetize before the timeout elapsed, file list unavailable for processing.',
                                entry['title'],
                            )

                    # Find files based on config
                    dl_list = []
                    skip_list = []
                    main_list = []
                    ext_list = ['*.srt', '*.sub', '*.idx', '*.ssa', '*.ass']

                    main_ratio = config['main_file_ratio']
                    if 'main_file_ratio' in options['post']:
                        main_ratio = options['post']['main_file_ratio']

                    for file_id, file in enumerate(file_list):
                        # No need to set main_id if we're not going to need it
                        if find_main_file and file.size > total_size * main_ratio:
                            main_id = file_id

                        if 'include_files' in options['post']:
                            if any(
                                    fnmatch(file.name, mask) for mask in
                                    options['post']['include_files']):
                                dl_list.append(file_id)
                            elif options['post'].get('include_subs') and any(
                                    fnmatch(file.name, mask)
                                    for mask in ext_list):
                                dl_list.append(file_id)

                        if skip_files:
                            if any(
                                    fnmatch(file.name, mask)
                                    for mask in skip_files):
                                skip_list.append(file_id)

                    if main_id is not None:
                        # Look for files matching main ID title but with a different extension
                        if options['post'].get('rename_like_files'):
                            for file_id, file in enumerate(file_list):
                                # If this filename matches the main filename, rename it as well.
                                fs = os.path.splitext(file.name)
                                if fs[0] == os.path.splitext(
                                        file_list[main_id].name)[0]:
                                    main_list.append(file_id)
                        else:
                            main_list = [main_id]

                        if main_id not in dl_list:
                            dl_list.append(main_id)
                    elif find_main_file:
                        logger.warning(
                            'No files in "{}" are > {:.0f}% of content size, no files renamed.',
                            entry['title'],
                            main_ratio * 100,
                        )

                    # If we have a main file and want to rename it and associated files
                    if 'content_filename' in options[
                            'post'] and main_id is not None:
                        if 'download_dir' not in options['add']:
                            download_dir = self.client.get_session(
                            ).download_dir
                        else:
                            download_dir = options['add']['download_dir']

                        # Get new filename without ext
                        file_ext = os.path.splitext(file_list[main_id].name)[1]
                        file_path = os.path.dirname(
                            os.path.join(download_dir,
                                         file_list[main_id].name))
                        filename = options['post']['content_filename']
                        if config['host'] == 'localhost' or config[
                                'host'] == '127.0.0.1':
                            counter = 1
                            while os.path.exists(
                                    os.path.join(file_path,
                                                 filename + file_ext)):
                                # Try appending a (#) suffix till a unique filename is found
                                filename = f'{options["post"]["content_filename"]}({counter})'
                                counter += 1
                        else:
                            logger.debug(
                                'Cannot ensure content_filename is unique '
                                'when adding to a remote transmission daemon.')

                        for file_id in main_list:
                            file_ext = os.path.splitext(
                                file_list[file_id].name)[1]
                            logger.debug(
                                'File {} renamed to {}',
                                file_list[file_id].name,
                                filename + file_ext,
                            )
                            # Switch to the call below once set_files allows setting the name;
                            # a single call would be more efficient.
                            # fl[index]['name'] = os.path.basename(pathscrub(filename + file_ext).encode('utf-8'))
                            try:
                                self.client.rename_torrent_path(
                                    torrent_info.id,
                                    file_list[file_id].name,
                                    os.path.basename(
                                        str(pathscrub(filename + file_ext))),
                                )
                            except TransmissionError:
                                logger.error(
                                    'content_filename only supported with transmission 2.8+'
                                )

                    if options['post'].get(
                            'main_file_only') and main_id is not None:
                        # Set Unwanted Files
                        options['change']['files_unwanted'] = [
                            x for x in range(len(file_list))
                            if x not in dl_list
                        ]
                        options['change']['files_wanted'] = dl_list
                        logger.debug(
                            'Downloading {} of {} files in torrent.',
                            len(options['change']['files_wanted']),
                            len(file_list),
                        )
                    elif (not options['post'].get('main_file_only')
                          or main_id is None) and skip_files:
                        # If no main file and we want to skip files

                        if len(skip_list) >= len(file_list):
                            logger.debug(
                                'skip_files filter would cause no files to be downloaded; '
                                'including all files in torrent.')
                        else:
                            options['change']['files_unwanted'] = skip_list
                            options['change']['files_wanted'] = [
                                x for x in range(len(file_list))
                                if x not in skip_list
                            ]
                            logger.debug(
                                'Downloading {} of {} files in torrent.',
                                len(options['change']['files_wanted']),
                                len(file_list),
                            )

                # Set any changed file properties
                if list(options['change'].keys()):
                    self.client.change_torrent(torrent_info.id, 30,
                                               **options['change'])

                start_torrent = partial(self.client.start_torrent,
                                        [torrent_info.id])

                if config['action'] == 'add':
                    # If add_paused was explicitly set to False, start the torrent;
                    # this prevents downloading data before the wanted files are set.
                    start_paused = (
                        options['post']['paused']
                        if 'paused' in options['post'] else
                        not self.client.get_session().start_added_torrents)
                    if start_paused:
                        self.client.stop_torrent(torrent_info.id)
                    else:
                        self.client.start_torrent(torrent_info.id)
                elif config['action'] in ('remove', 'purge'):
                    self.client.remove_torrent(
                        [torrent_info.id],
                        delete_data=config['action'] == 'purge')
                    logger.info('{}d {} from transmission', config['action'],
                                torrent_info.name)
                elif config['action'] == 'pause':
                    self.client.stop_torrent([torrent_info.id])
                    logger.info('paused {} in transmission', torrent_info.name)
                elif config['action'] == 'resume':
                    start_torrent()
                    logger.info('resumed {} in transmission',
                                torrent_info.name)
                elif config['action'] == 'bypass_queue':
                    start_torrent(bypass_queue=True)
                    logger.info('resumed (bypass queue) {} in transmission',
                                torrent_info.name)

            except TransmissionError as e:
                logger.opt(exception=True).debug('TransmissionError')
                logger.debug('Failed options dict: {}', options)
                msg = 'Error trying to {} {}, TransmissionError: {}'.format(
                    config['action'], entry['title'], e.message or 'N/A')
                logger.error(msg)
                continue
Example No. 8
def test_raw_with_record(writer):
    logger.add(writer, format="Nope\n")
    logger.opt(raw=True, record=True).debug("Raw in '{record[function]}'\n")
    assert writer.read() == "Raw in 'test_raw_with_record'\n"
Example No. 9
def test_before_bind(writer):
    logger.add(writer, format="{message}")
    logger.opt(record=True).bind(key="value").info("{record[level]}")
    assert writer.read() == "INFO\n"
Example No. 10
def test_raw_with_format_function(writer):
    logger.add(writer, format=lambda _: "{time} \n")
    logger.opt(raw=True).debug("Raw {message} bis", message="message")
    assert writer.read() == "Raw message bis"
Example No. 11
def test_raw_with_ansi(writer, colorize):
    logger.add(writer, format="XYZ", colorize=colorize)
    logger.opt(raw=True,
               ansi=True).info("Raw <red>colors</red> and <lvl>level</lvl>")
    assert writer.read() == parse("Raw <red>colors</red> and <b>level</b>",
                                  colorize=colorize)
Example No. 12
def test_raw(writer):
    logger.add(writer, format="", colorize=True)
    logger.opt(raw=True).info("Raw {}", "message")
    logger.opt(raw=True).log(30, " + The end")
    assert writer.read() == "Raw message + The end"
Example No. 13
def test_ansi_message_getitem(writer, colorize):
    logger.add(writer, colorize=colorize, format="<red>{message[0]}</red>")
    logger.opt(ansi=True).info("ABC")
    assert writer.read() == parse("<red>A</red>\n", colorize=colorize)
Example No. 14
def test_ansi_message_used_as_spec(writer, colorize):
    logger.add(writer,
               colorize=colorize,
               format="{level.no:{message}} <red>{message}</red>")
    logger.opt(ansi=True).log(30, "03d")
    assert writer.read() == parse("030 <red>03d</red>\n", colorize=colorize)
Example No. 15
# -*- coding: UTF-8 -*-
"""
Simple client for testing the connection
"""
import socket
import sys
import uuid
from pathlib import Path
import requests
from loguru import logger
from file_hashing import get_hash_md5

logger.add("./log/client/debug.log", format="{time} {level} {message}",
           level="DEBUG",
           rotation="10KB")
logger = logger.opt(colors=True)


def send_non_http_request(host_addr: str = 'localhost', port: int = 9000,
                          chunk_size: int = 1024) -> str:
    """
    Lets the client "say hello" to the server and receive its reply
    """
    server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_sock.connect((host_addr, port))

    server_sock.send("Hello world".encode())
    server_response = server_sock.recv(chunk_size)

    server_sock.close()
    # Decode and return the reply so the declared `-> str` return type is honoured.
    return server_response.decode()
Example No. 16
def test_lazy(writer):
    counter = 0

    def laziness():
        nonlocal counter
        counter += 1
        return counter

    logger.add(writer, level=10, format="{level.no} => {message}")

    logger.opt(lazy=True).log(10, "1: {lazy}", lazy=laziness)
    logger.opt(lazy=True).log(5, "2: {0}", laziness)

    logger.remove()

    logger.opt(lazy=True).log(20, "3: {}", laziness)

    a = logger.add(writer, level=15, format="{level.no} => {message}")
    b = logger.add(writer, level=20, format="{level.no} => {message}")

    logger.log(17, "4: {}", counter)
    logger.opt(lazy=True).log(14, "5: {lazy}", lazy=lambda: counter)

    logger.remove(a)

    logger.opt(lazy=True).log(16, "6: {0}", lambda: counter)

    logger.opt(lazy=True).info("7: {}", laziness)
    logger.debug("7: {}", counter)

    assert writer.read() == "10 => 1: 1\n17 => 4: 1\n20 => 7: 2\n"
Example No. 17
def stream_process_output(process: ProcessToStream):
    for line in process.stdout:
        logline = line.strip('\n')
        logger.opt(colors=True).info(
            f"[{process.name}] {colorize(logline, process.colour)}")
Example No. 18
Example:
    $ python miners/text/sgmoe_validator.py --logging.debug

"""
import bittensor
import math
import torch
import wandb
import pandas
from termcolor import colored
from functools import partial

from torch.nn.utils import clip_grad_norm_
import torch.nn.functional as F
from qqdm import qqdm, format_str
from loguru import logger; logger = logger.opt(colors=True)
def run( config , validator, subtensor, wallet, metagraph, dataset, device, uid, dendrite):
    
    print(config)
    config.to_defaults()
    validator = validator.to(device)
    optimizer = torch.optim.SGD(
        validator.parameters(),
        lr = config.neuron.learning_rate,
        momentum = config.neuron.momentum,
    )
    if config.wandb.api_key != 'default':
        # Create wandb for telemetry.
        bittensor.wandb(
            config = config,
            cold_pubkey = wallet.coldkeypub.ss58_address,
Example No. 19
    def on_task_input(self, task, config):
        config = self.prepare_config(config)
        if not config['enabled']:
            return

        if not self.client:
            self.client = self.create_rpc_client(config)
        entries = []

        session = self.client.get_session()

        for torrent in self.client.get_torrents():
            seed_ratio_ok, idle_limit_ok = self.check_seed_limits(
                torrent, session)
            if config['only_complete'] and not (seed_ratio_ok and idle_limit_ok
                                                and torrent.progress == 100):
                continue
            entry = Entry(
                title=torrent.name,
                url='',
                torrent_info_hash=torrent.hashString,
                content_size=torrent.totalSize / (1024 * 1024),
            )
            # Location of torrent is only valid if transmission is on same machine as flexget
            if config['host'] in ('localhost', '127.0.0.1'):
                entry['location'] = torrent.torrentFile
                entry['url'] = 'file://' + torrent.torrentFile
            for attr in [
                    'id',
                    'comment',
                    'desiredAvailable',
                    'downloadDir',
                    'isFinished',
                    'isPrivate',
                    'leftUntilDone',
                    'ratio',
                    'status',
                    'date_active',
                    'date_added',
                    'date_done',
                    'date_started',
                    'errorString',
                    'priority',
                    'progress',
                    'secondsDownloading',
                    'secondsSeeding',
                    'torrentFile',
            ]:
                try:
                    entry['transmission_' + attr] = getattr(torrent, attr)
                except Exception:
                    logger.opt(exception=True).debug(
                        'error when requesting transmissionrpc attribute {}',
                        attr)
            # Availability in percent
            entry['transmission_availability'] = ((torrent.desiredAvailable /
                                                   torrent.leftUntilDone) if
                                                  torrent.leftUntilDone else 0)

            entry['transmission_trackers'] = [
                t['announce'] for t in torrent.trackers
            ]
            entry['transmission_seed_ratio_ok'] = seed_ratio_ok
            entry['transmission_idle_limit_ok'] = idle_limit_ok
            st_error_to_desc = {
                0: 'OK',
                1: 'tracker_warning',
                2: 'tracker_error',
                3: 'local_error',
            }
            entry['transmission_error_state'] = st_error_to_desc[torrent.error]
            # The built-in done_date doesn't work when the user adds an already completed file to transmission
            if torrent.progress == 100:
                entry['transmission_date_done'] = datetime.fromtimestamp(
                    max(torrent.addedDate, torrent.doneDate))
            entries.append(entry)
        return entries
Example No. 20
import time

from loguru import logger

from system import clear
from system.lib import config, Console, locale, refill_menu, menu
from system.lib.features.initialization import initialize


if __name__ == '__main__':
    if not config.initialized:
        config.change_language(locale.change())

    if not config.initialized:
        initialize(True)
        exit()

    refill_menu()

    while True:
        try:
            handler = menu.choice()
            if handler is not None:
                start_time = time.time()
                with logger.catch():
                    handler()
                logger.opt(colors=True).info(f'<green>{locale.done % (time.time() - start_time)}</green>')
                input(locale.to_continue)
            clear()
        except KeyboardInterrupt:
            if Console.question(locale.want_exit):
                clear()
                break
Example No. 21
 def a():
     logger.opt(depth=1).debug("Test 1")
     logger.opt(depth=0).debug("Test 2")
     logger.opt(depth=1).log(10, "Test 3")
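For reference, here is a minimal self-contained sketch of what depth changes (the sink, format string, and function names are illustrative, not part of the example above): depth=0 attributes the record to the function that calls the logger, while depth=1 attributes it one frame further up the call stack.

from loguru import logger

# Replace the default sink with one that prints the attributed call site.
logger.remove()
logger.add(lambda msg: print(msg, end=""), format="{function}:{line} | {message}", level="DEBUG")

def helper():
    logger.opt(depth=0).debug("attributed to helper() itself")
    logger.opt(depth=1).debug("attributed to helper()'s caller")

def caller():
    helper()

caller()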
Example No. 22
 def fetch(self, url):
     logger.opt(ansi=True).info(f'<b>Fetch</b>: <blue>{url}</blue>')
     self.result = requests.get(url)
Example No. 23
def test_colors_not_colorize(writer):
    logger.add(writer, format="<red>a</red> {message}", colorize=False)
    logger.opt(colors=True).debug("<blue>b</blue>")
    assert writer.read() == parse("<red>a</red> <blue>b</blue>\n", strip=True)
Example No. 24
 def fetchApodArchive(self):
     logger.opt(ansi=True).info(f'<b><yellow>Fetching Archive</yellow></b>')
     self.fetch(self.apod_archive)
Example No. 25
def test_colors_with_args(writer, colorize):
    logger.add(writer, format="=> {message} <=", colorize=colorize)
    logger.opt(colors=True).debug("the {0}test{end}", "<red>", end="</red>")
    assert writer.read() == "=> the <red>test</red> <=\n"
Example No. 26
 def emit(self, record):
     # Retrieve the context where the logging call occurred; it happens to be six frames up.
     logger_opt = logger.opt(depth=6, exception=record.exc_info)
     logger_opt.log(log_level_to_name(record.levelno),
                    record.getMessage())
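The emit override above only does something once it is installed as a handler on the standard logging module. Below is a minimal, self-contained sketch of that wiring, following the interception pattern from the loguru documentation; the class name and the level-mapping logic are illustrative and not taken from the example's own codebase.

import logging

from loguru import logger


class InterceptHandler(logging.Handler):
    """Forward records from the standard logging module to loguru."""

    def emit(self, record):
        # Map the stdlib level name to a loguru level when one exists,
        # otherwise fall back to the numeric level.
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            level = record.levelno
        # depth=6 keeps the reported call site at the original logging call, as in the
        # example above; exception=record.exc_info preserves tracebacks.
        logger.opt(depth=6, exception=record.exc_info).log(level, record.getMessage())


# Route everything emitted through the stdlib logging module into loguru.
logging.basicConfig(handlers=[InterceptHandler()], level=0)
logging.getLogger("some.library").warning("this now goes through loguru")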
Example No. 27
def test_colors_multiple_calls(writer, colorize):
    logger.add(writer, format="{message}", colorize=colorize)
    logger.opt(colors=True).debug("a <red>foo</red> b")
    logger.opt(colors=True).debug("a <red>foo</red> b")
    assert writer.read() == parse("a <red>foo</red> b\na <red>foo</red> b\n",
                                  strip=not colorize)
Example No. 28
def generate(dest_dir: Path):
    template_dir = icedata_dir / "docs" / "templates"
    template_images_dir = Path(template_dir) / "images"
    datasets_dir = dest_dir / "datasets"

    # Recreate dest_dir from scratch, removing it first if it already exists
    if os.path.exists(dest_dir):
        print("Removing sources folder:", dest_dir)
        logger.opt(colors=True).log(
            "INFO",
            "️<magenta><bold>\nRemoving sources folder: {}</></>",
            dest_dir,
        )
        shutil.rmtree(dest_dir)
    os.makedirs(dest_dir)

    # Create datasets_dir
    os.makedirs(datasets_dir)
    logger.opt(colors=True).log(
        "INFO",
        "️<green><bold>\nCreating datasets folder: {}</></>",
        datasets_dir,
    )

    # Copy images folder from root folder to the template images folder
    copy_tree(str(icedata_dir / "images"), str(template_images_dir))
    from_to = f"root/images -> docs/images"
    logger.opt(colors=True).log(
        "INFO",
        "️<green><bold>\nCopying images folder: {}</></>",
        from_to,
    )

    # Generate APIs Documentation
    doc_generator = keras_autodoc.DocumentationGenerator(
        pages=PAGES,
        project_url="https://github.com/airctic/icedata/blob/master",
        template_dir=template_dir,
    )
    doc_generator.generate(dest_dir)

    # Copy CNAME file
    # shutil.copyfile(icedata_dir / "CNAME", dest_dir / "CNAME")

    # Copy web manifest
    shutil.copyfile("manifest.webmanifest", dest_dir / "manifest.webmanifest")
    from_to = f"root/manifest.webmanifest -> docs/manifest.webmanifest"
    logger.opt(colors=True).log(
        "INFO",
        "️<green><bold>\nCopying webmanifest file: {}</></>",
        from_to,
    )

    # Auto generate the index.md file using the README.md file and the index.md file in templates folder
    readme = (icedata_dir / "README.md").read_text()

    # Search for the beginning and the end of the installation procedure to hide in Docs to avoid duplication
    start = readme.find("<!-- Not included in docs - start -->")
    end = readme.find("<!-- Not included in docs - end -->")

    readme = readme.replace(readme[start:end], "")
    index = (template_dir / "index.md").read_text()
    index = index.replace("{{autogenerated}}", readme[readme.find("##") :])
    (dest_dir / "index.md").write_text(index, encoding="utf-8")

    # Copy static .md files from the root folder
    dir_to_search = icedata_dir
    fnamelist = [
        filename for filename in os.listdir(dir_to_search) if filename.endswith(".md")
    ]
    logger.opt(colors=True).log(
        "INFO",
        "️<green><bold>\nCopying .md files root folder: {}</></>",
        fnamelist,
    )

    for fname in fnamelist:
        fname_src = icedata_dir / fname
        fname_dst = dest_dir / fname.lower()
        shutil.copyfile(fname_src, fname_dst)
        from_to = f"{fname} -> {fname.lower()}"
        logger.opt(colors=True).log(
            "INFO",
            "️<light-blue><bold>file: {}</></>",
            from_to,
        )

    # Copy static .md files from the docs folder
    dir_to_search = icedata_dir / "docs"
    fnamelist = [
        filename for filename in os.listdir(dir_to_search) if filename.endswith(".md")
    ]
    logger.opt(colors=True).log(
        "INFO",
        "️<green><bold>\nCopying .md files from the docs folder: {}</></>",
        fnamelist,
    )
    for fname in fnamelist:
        fname_src = dir_to_search / fname
        fname_dst = dest_dir / fname.lower()
        shutil.copyfile(fname_src, fname_dst)
        from_to = f"{fname} -> {fname.lower()}"
        logger.opt(colors=True).log(
            "INFO",
            "️<light-blue><bold>Copying files: {}</></>",
            from_to,
        )

    # shutil.copyfile(icedata_dir / "docs/INSTALL.md", dest_dir / "install.md")
    # shutil.copyfile(icedata_dir / "docs/HOW-TO.md", dest_dir / "how-to.md")
    # shutil.copyfile(icedata_dir / "docs/ABOUT.md", dest_dir / "about.md")

    # shutil.copyfile(icedata_dir / "docs/README.md", dest_dir / "readme_mkdocs.md")

    # shutil.copyfile(
    #     icedata_dir / "docs/CHANGING-THE-COLORS.md",
    #     dest_dir / "changing_the_colors.md",
    # )

    ## Add each dataset README
    dir_to_search = icedata_dir / "icedata/datasets"
    ds_dirlist = sorted(
        [
            filename
            for filename in os.listdir(dir_to_search)
            if (
                os.path.isdir(os.path.join(dir_to_search, filename))
                and not filename.startswith("_")
            )
        ]
    )

    logger.opt(colors=True).log(
        "INFO",
        "️<green><bold>\nList of datasets: {}</></>",
        ds_dirlist,
    )

    for i, directory in enumerate(ds_dirlist):
        fname_src = f"icedata/datasets/{directory}/README.md"
        fname_dst = f"{directory}.md"
        from_to = f"{fname_src} -> {fname_dst}"
        shutil.copyfile(icedata_dir / fname_src, dest_dir / fname_dst)
        logger.opt(colors=True).log(
            "INFO",
            "<light-blue><bold>Copying files: {}</></>",
            from_to,
        )

    # Copy images folder from the template folder to the destination folder
    # print("Template folder: ", template_images_dir)
    dest_images_dir = Path(dest_dir) / "images"

    # Copy images folder
    copy_tree(str(template_images_dir), str(dest_images_dir))
    from_to = f"{template_images_dir} -> {dest_images_dir}"
    logger.opt(colors=True).log(
        "INFO",
        "️<green><bold>Copying Images: {}</></>",
        from_to,
    )

    # Copy css folder
    css_dir_src = str(icedata_dir / "docs/css")
    css_dir_dest = str(str(dest_dir / "css"))
    copy_tree(css_dir_src, css_dir_dest)
    from_to = f"{css_dir_src} -> {css_dir_dest}"
    logger.opt(colors=True).log(
        "INFO",
        "️<green><bold>Copying CSS files: {}</></>",
        from_to,
    )

    # Copy js folder
    # copy_tree(str(icedata_dir / "docs/js"), str(dest_dir / "js"))
    js_dir_src = str(icedata_dir / "docs/js")
    js_dir_dest = str(str(dest_dir / "js"))
    copy_tree(js_dir_src, js_dir_dest)
    from_to = f"{js_dir_src} -> {js_dir_dest}"
    logger.opt(colors=True).log(
        "INFO",
        "️<green><bold>Copying JS files: {}</></>",
        from_to,
    )

    # Generate .md files form Jupyter Notebooks located in the /ipynb folder
    nb_to_md(dest_dir)

    ## generate - Datasets Navigation Items
    # Search for the beginning and the end of the installation procedure to hide in Docs to avoid duplication
    mkdocs_yml = (icedata_dir / "docs/mkdocs.yml").read_text()
    start = mkdocs_yml.find("- Datasets")
    end = mkdocs_yml.find("  - API Documentation")

    ds_nav = "- Datasets:\n"
    for directory in ds_dirlist:
        ds_nav = (
            ds_nav + f"    - {directory[:1].upper() + directory[1:]}: {directory}.md\n"
        )
    ds_nav += "\n"

    mkdocs_yml = mkdocs_yml.replace(mkdocs_yml[start:end], ds_nav)
    (icedata_dir / "docs/mkdocs.yml").write_text(mkdocs_yml, encoding="utf-8")

    logger.opt(colors=True).log(
        "INFO",
        "️<fg #FFC000><bold>\nCreating datasets navigation bar: \n{}</></>",
        ds_nav,
    )
Example No. 29
def test_colored_string_getitem(writer, colorize):
    logger.add(writer, colorize=colorize, format="<red>{message[0]}</red>")
    logger.opt(colors=True).info("ABC")
    assert writer.read() == parse("<red>A</red>\n", strip=not colorize)
Example No. 30
 def emit(self, record):
     logger.opt(depth=6,
                exception=record.exc_info).log(record.levelno,
                                               record.getMessage())