Exemplo n.º 1
0
def simulator_main():
    """Run the bot through _main.main in simulator mode.

    Colours are disabled (and re-enabling is stubbed out), and the IRC
    base class of _main.pybot is swapped for the mock for the duration
    of the run.
    """
    _main.configure_logger = configure_logger
    color.disable_colors()
    color.enable_colors = lambda: None

    bases_patch = mock.patch.object(
        _main.pybot, '__bases__', (SingleServerIRCBot_mock, ))
    with bases_patch:
        # Patching __bases__ requires is_local on the patcher.
        bases_patch.is_local = True
        _main.main(debug_mode=True)
Exemplo n.º 2
0
    all_known_file_hashs = {
        processed_file.hash
        for m in meta_manager.meta_items
        for processed_file in m.processed_files.values()
    }
    unlinked_files = (f for f in meta_manager.processed_files_manager.scan if f.file_no_ext and f.file_no_ext not in all_known_file_hashs)

    count = 0
    for unlinked_file in unlinked_files:
        if kwargs.get('dryrun'):
            print(unlinked_file.relative)
        else:
            os.remove(unlinked_file.absolute)
        count += 1
    log.info('Cleaned up - {} files'.format(count))

# Main -------------------------------------------------------------------------

def additional_arguments(parser):
    """Register the --dryrun flag (list files instead of deleting them)."""
    parser.add_argument(
        '--dryrun',
        action='store_true',
        default=False,
        help='',
    )


if __name__ == "__main__":
    from _main import main
    # Shared CLI bootstrap: command name, worker function, version,
    # plus the extra argparse options registered above.
    main(
        'cleanup_media',
        cleanup_media,
        version=VERSION,
        additional_arguments_function=additional_arguments,
    )
Exemplo n.º 3
0
                and (f.file == filename or f.stats.st_mtime == mtime)  # TODO: Depricate this!
                and str(f.hash) == scan_data["hash"]
            ):
                log.warning(
                    "Associating found missing file %s to %s - this should not be a regular occurance, move/rename this so it is grouped effectivly",
                    f.relative,
                    m.name,
                )
                m.associate_file(f)
                break

        # 5c.)
        # We have done our best at locating missing files
        # Remove them from the tracked list of files.
        m.unlink_unassociated_files()

    log.info("6.) Remove unmatched meta entrys")
    for m in [m for m in meta.meta.values() if not m.file_collection]:
        log.info("Removing meta %s", m.name)
        meta.delete(m.name)

    # (If processed data already exisits, it will be relinked at the encode level)

    meta.save_all()


if __name__ == "__main__":
    from _main import main

    # Shared CLI bootstrap; mtime_path="source" selects the folder whose
    # mtimes drive change detection.
    main("scan_media", scan_media, mtime_path="source", version=VERSION)
Exemplo n.º 4
0
def main():
    """Entry point: hand off to _main.main with debugging disabled."""
    import _main as entry
    entry.main(debug_mode=False)
Exemplo n.º 5
0
def main(*args):
    """Thin pass-through to _main.main, preserving its return value."""
    outcome = _main.main(*args)
    return outcome
Exemplo n.º 6
0

# Main -------------------------------------------------------------------------


def additional_arguments(parser):
    """Register the API host and summary-size options."""
    parser.add_argument(
        '--api_host',
        action='store',
        default='localhost:6543',
        help='',
    )
    parser.add_argument(
        '--stat_limit',
        type=int,
        default=100,
        help='Max number of metanames to display in summary before replacing them with a count',
    )


if __name__ == "__main__":
    from _main import main
    # Run the importer through the shared CLI bootstrap; main returns
    # per-category stats collections.
    stats = main(
        'import_media',
        import_media,
        mtime_path='meta',
        additional_arguments_function=additional_arguments,
    )
    # Collapse any category larger than --stat_limit to its count so the
    # summary stays readable.
    pprint({
        k: len(v) if len(v) > import_media.calling_kwargs['stat_limit'] else v
        for k, v in stats.items()
    })
Exemplo n.º 7
0
                m.associate_file(f)
                break

        # 5c.)
        # We have done our best at locating missing files
        # Remove them from the tracked list of files.
        m.unlink_unassociated_files()

    log.info('6.) Remove unmatched meta entrys')
    for m in [m for m in meta.meta.values() if not m.file_collection]:
        log.info('Removing meta %s', m.name)
        meta.delete(m.name)

    # (If processed data already exisits, it will be relinked at the encode level)

    meta.save_all()


# Main -------------------------------------------------------------------------

def additional_arguments(parser):
    """Register the --disable_meta_write_safety escape hatch.

    Fixes multiple misspellings in the user-facing help text
    ("process's conflicting", "propergate", "require by").
    """
    parser.add_argument(
        '--disable_meta_write_safety',
        action='store_true',
        default=False,
        help=(
            "To prevent multiple processes conflicting, we keep track of meta/*.json "
            "file mtimes. If these files are modified by another process, we defensively "
            "don't overwrite these changes. This option is required by Windows docker "
            "volume mounts as the files take time to propagate to the Windows filesystem "
            "and this upsets the defensive mtime protection"
        ),
    )


if __name__ == "__main__":
    from _main import main
    # Shared CLI bootstrap; source-folder mtimes drive change detection.
    main(
        'scan_media', scan_media, folder_type_to_derive_mtime='source', version=VERSION,
        additional_arguments_function=additional_arguments,
    )
Exemplo n.º 8
0
        target_file.copy(source_file)

        return target_file.exists


# Main -------------------------------------------------------------------------


def additional_arguments(parser):
    """Expose the processing-order strategy as a CLI choice."""
    parser.add_argument(
        "--process_order_function",
        choices=PROCESS_ORDER_FUNCS.keys(),
        default=DEFAULT_ORDER_FUNC,
        help="",
    )


def process_arguments(kwargs):
    """Replace the order-function name in kwargs with its callable, in place."""
    name = kwargs["process_order_function"]
    kwargs["process_order_function"] = PROCESS_ORDER_FUNCS[name]


if __name__ == "__main__":
    from _main import main

    # Shared CLI bootstrap; process_arguments converts the order-function
    # name from the command line into its callable before encode_media runs.
    main(
        "encode_media",
        encode_media,
        mtime_path="meta",
        version=VERSION,
        additional_arguments_function=additional_arguments,
        additional_arguments_processing_function=process_arguments,
    )
Exemplo n.º 9
0
        target_file.copy(source_file)

        return target_file.exists


# Main -------------------------------------------------------------------------


def additional_arguments(parser):
    """Expose the processing-order strategy as a CLI choice."""
    parser.add_argument(
        '--process_order_function',
        choices=PROCESS_ORDER_FUNCS.keys(),
        default=DEFAULT_ORDER_FUNC,
        help='',
    )


def process_arguments(kwargs):
    """Replace the order-function name in kwargs with its callable, in place."""
    chosen = kwargs['process_order_function']
    kwargs['process_order_function'] = PROCESS_ORDER_FUNCS[chosen]


if __name__ == "__main__":
    from _main import main
    # Shared CLI bootstrap; process_arguments converts the order-function
    # name from the command line into its callable before encode_media runs.
    main(
        'encode_media',
        encode_media,
        mtime_path='meta',
        version=VERSION,
        additional_arguments_function=additional_arguments,
        additional_arguments_processing_function=process_arguments,
    )
Exemplo n.º 10
0
            mtime = scan_data['mtime']
            for f in folder_structure.scan(
                    lambda f: not IGNORE_SEARCH_EXTS_REGEX.search(f.file) and
                (f.file == filename or f.stats.st_mtime == mtime
                 )  # TODO: Depricate this!
                    and str(f.hash) == scan_data['hash']):
                log.warning(
                    'Associating found missing file %s to %s - this should not be a regular occurance, move/rename this so it is grouped effectivly',
                    f.relative, m.name)
                m.associate_file(f)
                break

        # 5c.)
        # We have done our best at locating missing files
        # Remove them from the tracked list of files.
        m.unlink_unassociated_files()

    log.info('6.) Remove unmatched meta entrys')
    for m in [m for m in meta.meta.values() if not m.file_collection]:
        log.info('Removing meta %s', m.name)
        meta.delete(m.name)

    # (If processed data already exisits, it will be relinked at the encode level)

    meta.save_all()


if __name__ == "__main__":
    from _main import main
    # Shared CLI bootstrap for the scanner; source-folder mtimes drive
    # change detection.
    main('scan_media', scan_media, mtime_path='source', version=VERSION)
Exemplo n.º 11
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from _main import main

# main() returns an application object; main2() runs its second stage.
# NOTE(review): semantics of main/main2 are not visible here - confirm in _main.
one = main()
one.main2()
Exemplo n.º 12
0
Arquivo: main.py Projeto: xgabst/pybot
def main():
    """Entry point: delegate to _main.main with default settings."""
    import _main as bootstrap
    bootstrap.main()
Exemplo n.º 13
0
def additional_arguments(parser):
    """Register metaviewer CLI options.

    ``name_regex`` carried ``default=''`` but was declared as a required
    positional, so the default could never apply (argparse only honours a
    positional's default with ``nargs='?'``). Making it optional lets the
    intended empty pattern (match everything) take effect; explicit callers
    are unaffected.
    """
    parser.add_argument('name_regex', nargs='?', default='', help='regex for names')
    parser.add_argument('--hidesource', action='store_true')
    parser.add_argument('--hideprocessed', action='store_true')
    parser.add_argument('--showmissing', action='store_true')
    #parser.add_argument('--raw', action='store_true')
    #parser.add_argument('--pathstyle', choices=('relative', 'absolute'), default='relative')


def _metaviewer(*args, **kwargs):
    """Print meta details matching name_regex; with showmissing, keep only files absent on disk."""
    viewer = MetaViewer(*args, **kwargs)
    matched = viewer.get_meta_details(kwargs['name_regex'])
    if kwargs.get('showmissing'):
        # Each entry becomes a lazy filter over files missing from disk.
        matched = {name: filter(lambda f: not f.exists(), files) for name, files in matched.items()}
    print_formated(matched)


if __name__ == "__main__":
    from _main import main
    # lock=False: the viewer is read-only, so it skips the run lock.
    main(
        'metaviewer',
        _metaviewer,
        version=VERSION,
        additional_arguments_function=additional_arguments,
        lock=False,
    )
Exemplo n.º 14
0
        track_ids_to_delete.append(unneeded_track_id)

    log.info("""{api_host} -> Add:{add_count} Delete:{delete_count}""".format(
        api_host=kwargs['api_host'],
        add_count=len(tracks_to_add),
        delete_count=len(track_ids_to_delete),
    ))  # TODO: replace with formatstring
    #track_api(tracks_to_add, method='POST')
    track_api(track_ids_to_delete, method='DELETE')

    stats['db_end'] = track_api()['data']['tracks'].values()

    #assert stats['db_end'] == stats['meta_hash_matched_db_hash'] | stats['meta_imported']  # TODO! Reinstate this
    return stats


# Main -------------------------------------------------------------------------

def additional_arguments(parser):
    """Register the API host and summary-size options."""
    parser.add_argument(
        '--api_host',
        action='store',
        default='',
        help='',
    )
    parser.add_argument(
        '--stat_limit',
        type=int,
        default=100,
        help='Max number of metanames to display in summary before replacing them with a count',
    )


if __name__ == "__main__":
    from _main import main
    # Run the importer; stats maps category name -> collection of metanames.
    stats = main(
        'import_media', import_media, folder_type_to_derive_mtime='meta',
        additional_arguments_function=additional_arguments,
    )
    # Collapse oversized categories to a count to keep the summary readable.
    pprint({k: len(v) if len(v) > import_media.calling_kwargs['stat_limit'] else v for k, v in stats.items()})
Exemplo n.º 15
0
def main():
    """Entry point: delegate straight to _main.main."""
    import _main as target
    target.main()
Exemplo n.º 16
0
# Module-level logger and version marker passed to the CLI bootstrap.
log = logging.getLogger(__name__)

VERSION = '0.0.0'

# Main -------------------------------------------------------------------------


def cleanup_media(**kwargs):
    """Delete processed files whose hash is no longer referenced by any meta item.

    Implements the block's own TODO: with ``dryrun`` set in kwargs the files
    are listed instead of removed, and a count of affected files is logged
    either way (matching the behaviour of the sibling cleanup implementation).
    """
    meta_manager = MetaManagerExtended(**kwargs)
    meta_manager.load_all()

    # Every processed-file hash still referenced by a loaded meta item.
    all_known_file_hashs = {
        processed_file.hash
        for m in meta_manager.meta_items
        for processed_file in m.processed_files.values()
    }
    unlinked_files = (
        f for f in meta_manager.processed_files_manager.scan
        if f.file_no_ext and f.file_no_ext not in all_known_file_hashs)

    count = 0
    for unlinked_file in unlinked_files:
        if kwargs.get('dryrun'):
            print(unlinked_file.relative)
        else:
            os.remove(unlinked_file.absolute)
        count += 1
    log.info('Cleaned up - {} files'.format(count))


# Main -------------------------------------------------------------------------

if __name__ == "__main__":
    from _main import main
    # Shared CLI bootstrap for the cleanup command.
    main('cleanup_media', cleanup_media, version=VERSION)
Exemplo n.º 17
0
from _main import main  # uploaded as .mpy

import gc
gc.collect()  # reclaim memory left over from imports before running the app

# dupterm(None)  # disable output/input on WebREPL
# dupterm(None, 1)  # disable REPL (v1.9.4)

# Run the program, but hold on to any exception so the UART can be
# restored for the REPL before the error is re-raised and displayed.
exc = None
try:
    main()
except Exception as e:
    exc = e  # save exception that happened in my program using UART0

import machine
# Reconfigure UART0 to the REPL's standard settings.
# NOTE(review): presumably main() reconfigured UART0 - confirm.
machine.UART(0, 115200, bits=8, parity=None,
             stop=1)  # so that it fits REPL again

if exc is not None:
    raise exc  # show the exception on REPL

# dupterm(UART0)  # enable WebREPL
# dupterm(UART0, 1)  # enable REPL (v1.9.4)
Exemplo n.º 18
0
                if tag:
                    track.tags.append(tag)
                elif tag_string:
                    log.warning('null tag %s', tag_string)

        for duplicate_tag in (tag for tag in track.tags if track.tags.count(tag) > 1):
            log.warning('Unneeded duplicate tag found %s in %s', duplicate_tag, track.source_filename)
            track.tags.remove(duplicate_tag)


# Main -------------------------------------------------------------------------

def additional_arguments(parser):
    """Register the Pyramid config path and summary-size options."""
    parser.add_argument(
        '--config_uri',
        action='store',
        default='development.ini',
        help='',
    )
    parser.add_argument(
        '--stat_limit',
        type=int,
        default=100,
        help='Max number of metanames to display in summary before replacing them with a count',
    )


def _import_media(*args, **kwargs):
    """Initialise the DB session from the given Pyramid config, then delegate to import_media."""
    from pyramid.paster import get_appsettings
    init_DBSession(get_appsettings(kwargs['config_uri']))
    return import_media(*args, **kwargs)

if __name__ == "__main__":
    from _main import main
    # _import_media wraps import_media with Pyramid settings + DB session setup.
    stats = main(
        'import_media', _import_media, mtime_path='meta',
        additional_arguments_function=additional_arguments,
    )
    # Collapse oversized categories to a count to keep the summary readable.
    pprint({k: len(v) if len(v) > _import_media.calling_kwargs['stat_limit'] else v for k, v in stats.items()})
Exemplo n.º 19
0
# Llama - list-oriented programming language
# (C) Acapla Studios

import sys
import _main

if __name__ == '__main__':
    # Pass the raw command line (including argv[0]) to the interpreter entry point.
    _main.main(sys.argv)
Exemplo n.º 20
0
# Module-level logger and version marker passed to the CLI bootstrap.
log = logging.getLogger(__name__)


VERSION = '0.0.0'


# Main -------------------------------------------------------------------------


def cleanup_media(**kwargs):
    """Delete processed files whose hash is no longer referenced by any meta item.

    Implements the block's own TODO: with ``dryrun`` set in kwargs the files
    are listed instead of removed, and a count of affected files is logged
    either way (matching the behaviour of the sibling cleanup implementation).
    """
    meta_manager = MetaManagerExtended(**kwargs)
    meta_manager.load_all()

    # Every processed-file hash still referenced by a loaded meta item.
    all_known_file_hashs = {
        processed_file.hash
        for m in meta_manager.meta_items
        for processed_file in m.processed_files.values()
    }
    unlinked_files = (
        f for f in meta_manager.processed_files_manager.scan
        if f.file_no_ext and f.file_no_ext not in all_known_file_hashs)

    count = 0
    for unlinked_file in unlinked_files:
        if kwargs.get('dryrun'):
            print(unlinked_file.relative)
        else:
            os.remove(unlinked_file.absolute)
        count += 1
    log.info('Cleaned up - {} files'.format(count))


# Main -------------------------------------------------------------------------

if __name__ == "__main__":
    from _main import main
    # Shared CLI bootstrap for the cleanup command.
    main('cleanup_media', cleanup_media, version=VERSION)
Exemplo n.º 21
0
    args.attacker_list_semanticBackdoor = np.random.permutation(list(
        range(n)))[:m]

    m = args.n_attacker_labelFlipping
    args.attacker_list_labelFlipping = np.random.permutation(list(
        range(n)))[:m]

    m = args.n_attacker_labelFlippingDirectional
    args.attacker_list_labelFlippingDirectional = np.random.permutation(
        list(range(n)))[:m]

    m = args.n_attacker_omniscient
    args.attacker_list_omniscient = np.random.permutation(list(range(n)))[:m]

    if args.experiment_name == None:
        args.experiment_name = f"{args.loader_type}/{args.attacks}/{args.AR}"

    return args


if __name__ == "__main__":

    import _main

    # Parse the CLI configuration, echo it as a boxed table for the log,
    # then hand off to the _main entry point.
    args = parse_args()
    print("#" * 64)
    for i in vars(args):
        print(f"#{i:>40}: {str(getattr(args, i)):<20}#")
    print("#" * 64)
    _main.main(args)
Exemplo n.º 22
0
        for f in files:
            print_file(f)

# Main -------------------------------------------------------------------------


def additional_arguments(parser):
    """Register metaviewer CLI options.

    ``name_regex`` carried ``default=''`` but was declared as a required
    positional, so the default could never apply (argparse only honours a
    positional's default with ``nargs='?'``). Making it optional lets the
    intended empty pattern (match everything) take effect; explicit callers
    are unaffected.
    """
    parser.add_argument('name_regex', nargs='?', default='', help='regex for names')
    parser.add_argument('--hidesource', action='store_true')
    parser.add_argument('--hideprocessed', action='store_true')
    parser.add_argument('--showmissing', action='store_true')
    #parser.add_argument('--raw', action='store_true')
    #parser.add_argument('--pathstyle', choices=('relative', 'absolute'), default='relative')


def _metaviewer(*args, **kwargs):
    """Fetch meta details matching name_regex and print them; with showmissing, only files absent on disk."""
    viewer = MetaViewer(*args, **kwargs)
    details = viewer.get_meta_details(kwargs['name_regex'])
    if kwargs.get('showmissing'):
        # Each entry becomes a lazy filter over files missing from disk.
        details = {
            name: filter(lambda f: not f.exists(), files)
            for name, files in details.items()
        }
    print_formated(details)


if __name__ == "__main__":
    from _main import main
    # lock=False: the viewer is read-only, so it skips the run lock.
    main(
        'metaviewer', _metaviewer, version=VERSION,
        additional_arguments_function=additional_arguments,
        lock=False,
    )
Exemplo n.º 23
0
def main(*args):
    """Forward positional arguments to _main.main and return its result."""
    delegated = _main.main(*args)
    return delegated