Example #1
    def download_images(self, services, registry, tag=None, nopull=False):
        """Download images from a remote registry and save to kard

        Args:
          * services: the name of the images to download
          * registry: a DockerRegistry instance
          * tag: the tag of the version to download
          * nopull: if True, skip pulling the images before saving
        """
        tag = tag or self.kard.meta['tag']

        save_path = Path(self.kard.path) / 'images'
        write('Cleaning images destination {}'.format(save_path))
        save_path.mkdir(exist_ok=True)
        for child in save_path.iterdir():
            child.unlink()

        if not nopull:
            self.pull_images(services, registry, tag=tag)

        for service in services:
            image_path = save_path / "{}.tar".format(service)
            image_name = self.make_image_name(service, tag)
            write('Saving {} to {}'.format(image_name, image_path))
            sys.stdout.flush()

            with open(image_path, 'wb') as f:
                for chunk in self.docker.get_image(image_name):
                    f.write(chunk)

            write(' Done!\n')
        write('All images have been saved successfully!\n')
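A hedged sketch of the reverse operation: reloading one of the saved tars via docker-py's low-level APIClient (the tar path and service name are hypothetical, not part of the example above).

import docker

client = docker.APIClient()
# Reload an image tar produced by download_images(); the path is illustrative.
with open('/path/to/kard/images/backend.tar', 'rb') as f:
    client.load_image(f.read())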
Example #2
class TermiusApp(App):
    """Class for CLI application."""
    def __init__(self):
        """Construct new CLI application."""
        super(TermiusApp, self).__init__(
            description='Termius app',
            version=__version__,
            command_manager=CommandManager('termius.handlers'),
        )
        self.configure_signals()
        self.directory_path = Path(expanduser('~/.{}/'.format(self.NAME)))
        if not self.directory_path.is_dir():
            self.directory_path.mkdir(parents=True)

    def configure_logging(self):
        """Change logging level for request package."""
        super(TermiusApp, self).configure_logging()
        logging.getLogger('requests').setLevel(logging.WARNING)
        return

    # pylint: disable=no-self-use
    def configure_signals(self):
        """Bind subscribers to signals."""
        post_create_instance.connect(store_ssh_key, sender=SshKey)
        post_update_instance.connect(store_ssh_key, sender=SshKey)
        post_delete_instance.connect(delete_ssh_key, sender=SshKey)

        post_logout.connect(clean_data)
Example #3
def test_backup_site_to_tarfile_vanishing_files(site, tmp_path, monkeypatch):
    test_dir = Path(site.dir) / "xyz"
    test_file = test_dir / "test_file"
    test_dir.mkdir(parents=True, exist_ok=True)  # pylint: disable=no-member
    test_file.touch()  # pylint: disable=no-member

    orig_add = omdlib.backup.BackupTarFile.add

    def add(self,
            name,
            arcname=None,
            recursive=True,
            exclude=None,
            filter=None):  # pylint: disable=redefined-builtin
        # The add() was called for test_dir which then calls os.listdir() and
        # add() for all found entries. Remove the test_file here to simulate
        # a vanished file during this step.
        if arcname == "unit/xyz/test_file":
            test_file.unlink()  # pylint: disable=no-member
        orig_add(self, name, arcname, recursive, exclude, filter)

    monkeypatch.setattr(omdlib.backup.BackupTarFile, "add", add)

    tar_path = tmp_path / "backup.tar"
    with tar_path.open("wb") as backup_tar:
        omdlib.backup.backup_site_to_tarfile(site,
                                             backup_tar,
                                             mode="w:",
                                             options={},
                                             verbose=False)

    with tar_path.open("rb") as backup_tar:
        tar = tarfile.open(fileobj=backup_tar, mode="r:*")
        _sitename, _version = omdlib.backup.get_site_and_version_from_backup(
            tar)
Example #4
def get_new_path(path):
    """ Return a path to a file, creat its parent folder if it doesn't exist, creat new one if existing.

    If the folder/file already exists, this function will use `path`_`idx` as new name, and make
    corresponding folder if `path` is a folder.
    idx will starts from 1 and keeps +1 until find a valid name without occupation.

    If the folder and its parent folders don't exist, keeps making these series of folders.

    Args:
        path: The path of a file/folder.
    Returns:
        _ : The guaranteed new path of the folder/file.
    """
    path = Path(path)
    root = Path(*path.parts[:-1])

    if not root.exists():
        root.mkdir(parents=True, exist_ok=True)

    if not path.exists():
        new_path = path
        if new_path.suffix == '':
            new_path.mkdir()
    else:
        idx = 1
        while True:
            stem = path.stem + "_" + str(idx)
            new_path = root / (stem + path.suffix)
            if not new_path.exists():
                if new_path.suffix == '':
                    new_path.mkdir()
                break
            idx += 1
    return str(new_path)
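A quick usage sketch of the collision handling (paths are hypothetical).

run_dir = get_new_path('out/run')        # creates out/run         -> 'out/run'
next_dir = get_new_path('out/run')       # out/run exists          -> creates out/run_1
log_file = get_new_path('out/run.log')   # files are named but not created here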
Example #5
 def prepare_batch(self, sequential_items, *args, **kwargs):
     target_dir = Path(settings.HEV_E["general"]["downloads_dir"])
     if not target_dir.is_dir():
         target_dir.mkdir(parents=True)
     result = {
         "target_dir": str(target_dir),
     }
     pre_gen_dir = Path(
         settings.HEV_E["general"]["pre_generated_files_dir"])
     for type_name in DatasetType.__members__.keys():
         items = [
             i for i in sequential_items
             if i["identifier"].partition(":")[0] == type_name
         ]
         name_hash = prepare_collection_type_batch(items)
         file_name = "{}.gpkg".format(name_hash)
         pre_gen_path = pre_gen_dir / file_name
         cached_path = target_dir / file_name
         target = pre_gen_path if pre_gen_path.exists() else cached_path
         result[type_name] = {
             "name_hash": name_hash,
             "geopackage_target_path": str(target),
             "geopackage_exists": target.is_file()
         }
     return result
Example #6
    def create_config(self, remote_repo=None):

        if not self.lk_config_dir_path_object.exists():
            Path.mkdir(self.lk_config_dir_path_object, parents=True, exist_ok=True)

        if self.lk_config_file_path_object.exists():

            print('# Config file exists at: {config_file_path}'.format(config_file_path=self.lk_config_file_path_string))

            if not app_confirm('Do you want to override?'):
                raise click.Abort()

        if remote_repo is None:
            remote_repo_value = app_config.DEFAULT_COMMANDS_REPO
        else:
            remote_repo_value = remote_repo

        config_odict = odict([
            ('default_commands_repo', remote_repo_value)
        ])

        config_yaml_string = yaml.dump(config_odict)

        self.lk_config_file_path_object.write_text(config_yaml_string.decode('utf-8'))
Example #7
def _add_local_translation(lang, alias, texts):
    local_dir = Path(cmk.utils.paths.local_locale_dir) / lang / "LC_MESSAGES"
    local_dir.mkdir(parents=True)  # pylint: disable=no-member
    po_file = local_dir / "multisite.po"
    mo_file = local_dir / "multisite.mo"

    with (local_dir.parent / "alias").open("w", encoding="utf-8") as f:  # pylint: disable=no-member
        f.write(u"%s\n" % alias)

    with po_file.open(mode="w", encoding="utf-8") as f:  # pylint: disable=no-member
        f.write(u'''
msgid ""
msgstr ""
"Project-Id-Version: Locally modified Check_MK translation\\n"
"Report-Msgid-Bugs-To: [email protected]\\n"
"Language-Team: none\\n"
"Language: de\\n"
"MIME-Version: 1.0\\n"
"Content-Type: text/plain; charset=UTF-8\\n"
"Content-Transfer-Encoding: 8bit\\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\\n"
''')

        for key, val in texts.items():
            f.write(u"""
msgid "%s"
msgstr "%s"
""" % (key, val))

    subprocess.call(['msgfmt', str(po_file), '-o', str(mo_file)])
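A hedged usage sketch (the language, alias, and texts are illustrative values):

# Hypothetical call: install a German override for one UI string.
_add_local_translation('de', u'Deutsch', {'Host': u'Rechner'})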
Example #8
def datadir(original_datadir, tmpdir):
    result = Path(str(tmpdir.join(original_datadir.stem)))
    if original_datadir.is_dir():
        shutil.copytree(str(original_datadir), str(result))
    else:
        result.mkdir()
    return result
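In pytest this function is usually registered as a fixture; a minimal sketch of how it could be wired up and consumed (the fixture body and test are assumptions, not the pytest-datadir implementation):

import pytest
from pathlib import Path

@pytest.fixture
def original_datadir(request):
    # Hypothetical convention: a data directory named after the test module.
    return Path(request.module.__file__).with_suffix('')

def test_reads_config(datadir):
    # Files copied from the source data directory are available here.
    assert (datadir / 'config.yml').exists()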
Example #9
def run_lithium(lithArgs, logPrefix, targetTime):  # pylint: disable=invalid-name,missing-param-doc,missing-return-doc
    # pylint: disable=missing-return-type-doc,missing-type-doc
    """Run Lithium as a subprocess: reduce to the smallest file that has at least the same unhappiness level.

    Returns a tuple of (lithlogfn, LITH_*, details).
    """
    deletableLithTemp = None  # pylint: disable=invalid-name
    if targetTime:
        # FIXME: this could be based on whether bot has a remoteHost  # pylint: disable=fixme
        # loop is being used by bot
        deletableLithTemp = tempfile.mkdtemp(prefix="fuzzbot-lithium")  # pylint: disable=invalid-name
        lithArgs = ["--maxruntime=" + str(targetTime), "--tempdir=" + deletableLithTemp] + lithArgs
    else:
        # loop is being run standalone
        lithtmp = logPrefix.parent / (logPrefix.stem + "-lith-tmp")
        Path.mkdir(lithtmp)
        lithArgs = ["--tempdir=" + str(lithtmp)] + lithArgs
    lithlogfn = (logPrefix.parent / (logPrefix.stem + "-lith-out")).with_suffix(".txt")
    print("Preparing to run Lithium, log file %s" % lithlogfn)
    print(" ".join(quote(str(x)) for x in runlithiumpy + lithArgs))
    with io.open(str(lithlogfn), "w", encoding="utf-8", errors="replace") as f:
        subprocess.run(runlithiumpy + lithArgs, stderr=subprocess.STDOUT, stdout=f)
    print("Done running Lithium")
    if deletableLithTemp:
        shutil.rmtree(deletableLithTemp)
    r = readLithiumResult(lithlogfn)  # pylint: disable=invalid-name
    subprocess.run(["gzip", "-f", str(lithlogfn)], check=True)
    return r
Example #10
def configure_logger(name, run_type='training'):
    # Create log directory
    log_dir = Path(os.path.dirname(log_file))
    log_dir.mkdir(parents=True, exist_ok=True)

    # Configure logging formatter
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    # Configure file handler
    file_handler = logging.FileHandler(
        log_file.format(run_type, int(time.time())))
    file_handler.setLevel(get_logging_level())
    file_handler.setFormatter(formatter)

    # Configure console handler
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(get_logging_level())
    console_handler.setFormatter(formatter)

    # Configure logger
    logger = logging.getLogger(name)
    logger.setLevel(get_logging_level())
    logger.addHandler(file_handler)
    logger.addHandler(console_handler)

    return logger
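configure_logger reads log_file and get_logging_level from module scope; a minimal sketch of the assumed surrounding definitions (names and values are assumptions):

import logging
import os

# Template with two placeholders, filled with run_type and a timestamp
# by configure_logger() above.
log_file = 'logs/{}_{}.log'

def get_logging_level():
    return logging.DEBUG if os.environ.get('DEBUG') else logging.INFO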
Example #12
 def get_temp_dir(self):
     testcase_name = get_testcase_name(self.testcase_context)
     testcase_subdir = "{}_{}".format(self.__get_current_date(), testcase_name)
     temp_dir = Path("/tmp", testcase_subdir)
     temp_dir.mkdir()
     self.__registered_dirs.append(temp_dir)
     return temp_dir
Example #13
def reproduce_fs_tree(target_dir: pathlib2.Path, root: FileNode) -> None:
    """
    Given a tree, recreate the tree structure in the file system
    :param target_dir: directory the structure is going to be reproduced in
    :param root: tree root node
    :return: None
    """
    target_dir.mkdir(parents=True, exist_ok=True)
    if root and root.get_filename():
        target_dir /= root.get_filename()

    def traverse(node: FileNode):
        if node.get_root() is None or node.get_relative_path() is None:
            raise ValueError("To reproduce given tree in file system, every node much have relative path,"
                             "since relative path is calculated with root path, FileNode has to be initialized"
                             "with root path. both root and relative path must be available")
        path = target_dir / node.get_relative_path()
        if node.is_dir():
            path.mkdir(parents=True, exist_ok=True)
        elif node.is_file():
            path.touch(exist_ok=True)
        if node.get_children():
            for child_node in node.get_children():
                traverse(child_node)

    traverse(root)
Example #14
def dagian_run_with_configs(global_config, bundle_config, dag_output_path=None,
                            no_bundle=False):
    """Generate feature with configurations.

    global_config (Mapping): global configuration
        generator_class: string
        data_bundles_dir: string
        generator_kwargs: Mapping

    bundle_config (Mapping): bundle configuration
        name: string
        structure: Mapping
    """
    if not isinstance(global_config, Mapping):
        raise ValueError("global_config should be a Mapping object.")
    if not isinstance(bundle_config, Mapping):
        raise ValueError("bundle_config should be a Mapping object.")
    data_generator = get_data_generator_from_config(global_config)
    data_definitions = get_data_definitions_from_structure(bundle_config['structure'])
    data_generator.generate(data_definitions, dag_output_path)

    if not no_bundle:
        data_bundles_dir = Path(global_config['data_bundles_dir']).expanduser()
        data_bundles_dir.mkdir(parents=True, exist_ok=True)
        bundle_path = data_bundles_dir / (bundle_config['name'] + '.h5')
        data_generator.bundle(
            bundle_config['structure'], data_bundle_hdf_path=bundle_path,
            structure_config=bundle_config['structure_config'])
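A hedged configuration sketch matching the keys the docstring and body reference (all values are placeholders):

global_config = {
    'generator_class': 'my_pkg.FeatureGenerator',   # hypothetical dotted path
    'data_bundles_dir': '~/dagian_bundles',
    'generator_kwargs': {},
}
bundle_config = {
    'name': 'experiment_1',
    'structure': {'features': ['age', 'income']},   # shape depends on the project
    'structure_config': {},
}
dagian_run_with_configs(global_config, bundle_config)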
Example #15
class PickleDataHandler(DataHandler):

    def __init__(self, pickle_dir):
        self.pickle_dir = Path(pickle_dir)
        self.pickle_dir.mkdir(parents=True, exist_ok=True)

    def can_skip(self, data_definition):
        data_path = self.pickle_dir / (data_definition.to_json() + ".pkl")
        if data_path.exists():
            return True
        return False

    def get(self, data_definition):
        if isinstance(data_definition, DataDefinition):
            with (self.pickle_dir / (data_definition.to_json() + ".pkl")).open('rb') as fp:
                return cPickle.load(fp)
        data = {}
        for data_def in data_definition:
            with (self.pickle_dir / (data_def.to_json() + ".pkl")).open('rb') as fp:
                data[data_def] = cPickle.load(fp)
        return data

    def write_data(self, data_definition, data):
        pickle_path = self.pickle_dir / (data_definition.to_json() + ".pkl")
        with SimpleTimer("Writing generated data %s to pickle file" % data_definition,
                         end_in_new_line=False), \
                pickle_path.open('wb') as fp:
            cPickle.dump(data, fp, protocol=cPickle.HIGHEST_PROTOCOL)
Example #16
def create_logger(cfg, phase='train'):
    root_output_dir = Path(cfg.OUTPUT_DIR)
    # set up logger
    if not root_output_dir.exists():
        print('=> creating root output dir {}'.format(root_output_dir))
        root_output_dir.mkdir()

    model, _ = get_model_name(cfg)
    cfg_name = 'vdn_model'

    final_output_dir = root_output_dir

    print('=> creating {}'.format(final_output_dir))
    final_output_dir.mkdir(parents=True, exist_ok=True)

    time_str = time.strftime('%Y-%m-%d-%H-%M')
    log_file = '{}_{}_{}.log'.format(cfg_name, time_str, phase)
    final_log_file = final_output_dir / log_file
    head = '%(asctime)-15s %(message)s'

    logging.basicConfig(filename=str(final_log_file), format=head)
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    console = logging.StreamHandler()
    logging.getLogger('').addHandler(console)

    tensorboard_log_dir = Path(cfg.LOG_DIR) / (''.join(
        (cfg_name, '_', time_str)))
    print('=> creating tensor board log dir {}'.format(tensorboard_log_dir))
    tensorboard_log_dir.mkdir(parents=True, exist_ok=True)

    return logger, str(final_output_dir), str(tensorboard_log_dir)
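A usage sketch, assuming get_model_name is importable and cfg carries the attributes read above (both are assumptions here):

from types import SimpleNamespace

cfg = SimpleNamespace(OUTPUT_DIR='output', LOG_DIR='log')
logger, out_dir, tb_dir = create_logger(cfg, phase='train')
logger.info('logging to %s, tensorboard in %s', out_dir, tb_dir)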
Example #17
    def main(self):
        user_input = self.parse_args()
        title, search_for_anime = self.parse_title(user_input.title)

        if search_for_anime:
            anime = self.choose_anime(title)
        else:
            anime = self.client.get_anime_by('slug_name', title)

        if not anime:
            print('Error: It looks like the title you entered is not found.')
            exit(1)

        episode_range = user_input.range
        if not episode_range:
            episode_range = self.choose_range(anime)

        if not episode_range:
            first_episode, last_episode = anime.first_episode.number, anime.last_episode.number
        else:
            first_episode, last_episode = self.validate_range(episode_range, anime)

        sources = self.get_sources_from_range(first_episode, last_episode, anime)

        if not user_input.directory:
            path = self.get_path(anime)
        else:
            path = Path(user_input.directory)
            path.mkdir(parents=True, exist_ok=True)

        self.download_files(path, sources)
Example #18
def test_pre_16_cfg(monkeypatch):
    multisite_dir = Path(cmk.gui.watolib.utils.multisite_dir())
    multisite_dir.mkdir(parents=True, exist_ok=True)
    hosttags_mk = multisite_dir / "hosttags.mk"

    with hosttags_mk.open("w", encoding="utf-8") as f:  # pylint: disable=no-member
        f.write(u"""# Created by WATO
# encoding: utf-8

wato_host_tags += [
    ('criticality', u'Criticality', [
        ('prod', u'Productive system', ['bla']),
        ('critical', u'Business critical', []),
        ('test', u'Test system', []),
        ('offline', u'Do not monitor this host', [])]),
    ('networking', u'Networking Segment', [
        ('lan', u'Local network (low latency)', []),
        ('wan', u'WAN (high latency)', []),
        ('dmz', u'DMZ (low latency, secure access)', []),
    ]),
]

wato_aux_tags += [("bla", u"bläää")]
""")

    cfg = tags.TagConfig()
    cfg.parse_config(TagConfigFile().load_for_reading())

    yield cfg

    if hosttags_mk.exists():  # pylint: disable=no-member
        hosttags_mk.unlink()  # pylint: disable=no-member
Example #19
def new_page():
    from string import Template  # Use Python templates, not Mako templates

    slug = raw_input('Slug for page: ')
    title = raw_input('Title of page: ')
    template = raw_input(
        'Template to inherit from (default is example.html): ')

    new_dir = Path('site') / slug
    if new_dir.exists():
        print '\nDirectory %s already exists, aborting' % new_dir
        return
    new_dir.mkdir()

    html_file = new_dir / 'index.html'
    with html_file.open('w') as fp:
        fp.write(
            Template(NEW_PAGE_HTML_TEMPLATE).substitute(
                title=repr(title.strip()),
                template=template.strip() or 'example.html'))

    js_file = new_dir / 'app.es6'
    with js_file.open('w') as fp:
        class_name = ''.join(s.capitalize() for s in title.split(' '))
        fp.write(
            Template(NEW_PAGE_JS_TEMPLATE).substitute(title=title,
                                                      class_name=class_name))

    marker = '// This comment marks where new entry points will be added'
    new_entry = "'%s': './site/%s/app.es6'," % (slug, slug)
    code = open('webpack.config.js').read()
    with open('webpack.config.js', 'w') as fp:
        fp.write(code.replace(marker, new_entry + '\n    ' + marker))
Example #21
    def __init__(self, positive_set, negative_set, extension, out_path):
        """
        Initialize dataframes and weights used for training weak classifiers in a monolithic or cascade classifier

        NOTE: algorithm geared for training images with same dimensions and file type

        :param positive_set: path to directory with training positive data      (str)
        :param negative_set: path to directory with training negative data      (str)
        :param extension: extension of training files - homogeneous             (str)
        :param out_path: directory to save associated files                     (str)
        """
        # Check inputted paths for images of provided extension
        [positives, negatives] = [
            sorted(Path(data_set).glob("**/*." + extension))
            for data_set in [positive_set, negative_set]
        ]
        n_positives, n_negatives = len(positives), len(negatives)
        if n_positives == 0 or n_negatives == 0:
            raise Exception("Empty positive/negative data set: positives: " +
                            str(n_positives) + " negatives: " +
                            str(n_negatives))
        print("Positive set:", n_positives, "\nNegative set:", n_negatives)

        # Calculate initial weights
        [self.face_weights, self.non_face_weights
         ] = self.calculate_weights(n_positives, n_negatives)

        # Set paths for reading/saving dataframes
        outpath = Path(out_path)
        feature_path = outpath / "features_df.pkl"  # all possible features for a given training image (row = feature)
        training_path = outpath / "training_df.pkl"  # sorted feature sums for all training images (column = feature)
        type_path = outpath / "type_df.pkl"  # booleans - (face or nonface) corresponding to the sorted sums
        image_path = outpath / "image_df.pkl"  # image number corresponding to the sorted sums
        if not outpath.exists():
            outpath.mkdir()

        # Read saved data if it exists
        if all(p.exists()
               for p in [feature_path, training_path, type_path, image_path]):
            self.features = pd.read_pickle(feature_path)
            self.training_df = pd.read_pickle(training_path)
            self.type_df = pd.read_pickle(type_path)
            self.image_df = pd.read_pickle(image_path)
            print("Loaded saved training data")
        else:
            # Calculate integral images and possible features
            [intg_p,
             intg_n] = self.gather_integral_images(positives, negatives)
            print("Integral images calculated")
            training_data = self.calculate_possible_features(intg_p, intg_n)
            print("Haar-like feature sums calculated")
            self.features, self.training_df, self.type_df, self.image_df = training_data

            # Save possible features and corresponding sum dataframes to pickle
            self.features.to_pickle(str(feature_path))
            self.training_df.to_pickle(str(training_path))
            self.type_df.to_pickle(str(type_path))
            self.image_df.to_pickle(str(image_path))
Example #22
    def execute_babi_parser(self):
        self.logger.info('Execute BABI Parser.')
        babi_input_info = list()
        with open(self.babi_file_path, 'r') as babi_file:
            narrative = dict()
            question_count = 0
            for line in babi_file:
                number, sentence = self._get_number(line)
                if number == 1:
                    narrative = dict()
                    question_count = 0

                number = (number - question_count) - 1

                if self._is_question(sentence):
                    babi_input = self._create_babi_input(sentence)
                    babi_input.set_narrative(narrative.copy())
                    babi_input.set_question_time_point(number)
                    babi_input_info.append(babi_input)
                    question_count += 1
                else:
                    narrative[number] = sentence

        filename = self.babi_file_path.name
        output_path = Path(FilePaths.BABI_PARSER_OUTPUT_DIR, filename)

        with open(output_path, 'w') as output_file:
            output = str()
            for count, info in enumerate(babi_input_info):
                output += f'#{count}\n{info}\n'
            output_file.write(output)

        # Write up to 2000 training narrative text files to TestFiles/BABI_Tests/Train/

        if self.babi_file_path.name[3].isdigit():
            dir_name = self.babi_file_path.name[0:4]
        else:
            dir_name = self.babi_file_path.name[0:3]
        output_path = Path(FilePaths.TEST_FILE_DIR, 'BABI_Tests', 'Train',
                           dir_name.upper())

        if not output_path.is_dir():
            output_path.mkdir()

        for count, babi_info in enumerate(babi_input_info):
            output_file_path = output_path / f'{self.babi_file_path.stem}_{count}.txt'
            with open(output_file_path, 'w') as output_file:
                output_file.write(babi_info.get_narrative_str())
            if count >= 1999:
                break

        output_file_path = output_path / f'{self.babi_file_path.stem}_questions.txt'
        with open(output_file_path, 'w') as output_file:
            for count, babi_info in enumerate(babi_input_info):
                output_file.write(f'{count} : {babi_info.question} '
                                  f': {", ".join(babi_info.answers)} '
                                  f': {babi_info.question_time_point}\n')
                if count >= 1999:
                    break
Example #23
def main(args):
    sys.path.append(str(Path(__file__).parent))

    checkpoint_path = Path(args.checkpoint_dir)
    checkpoint_path.mkdir(exist_ok=True)

    logger = utils.setup_logger(__name__, os.path.join(args.checkpoint_dir, 'train.log'))

    utils.read_config_file(args.config)
    utils.config.update(args.__dict__)
    # logger.debug('Running with config %s', utils.config)

    # configure(os.path.join('runs', args.expname))

    word2vec = None

    if not args.infer:
        dataset_path = utils.config['choidataset']
        train_dataset = ChoiDataset(dataset_path, word2vec)
        dev_dataset = ChoiDataset(dataset_path, word2vec)
        test_dataset = ChoiDataset(dataset_path, word2vec)

        train_dl = DataLoader(train_dataset, batch_size=args.bs, collate_fn=collate_fn, shuffle=True,
                              num_workers=args.num_workers)
        dev_dl = DataLoader(dev_dataset, batch_size=args.test_bs, collate_fn=collate_fn, shuffle=False,
                            num_workers=args.num_workers)
        test_dl = DataLoader(test_dataset, batch_size=args.test_bs, collate_fn=collate_fn, shuffle=False,
                             num_workers=args.num_workers)

    assert bool(args.model) ^ bool(args.load_from)  # exactly one of them must be set

    if args.model:
        model = import_model(args.model)
    elif args.load_from:
        with open(args.load_from, 'rb') as f:
            model = torch.load(f)

    model.train()
    model = maybe_cuda(model)

    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
    if not args.infer:
        best_val_pk = 1.0
        for j in range(args.epochs):
            train(model, args, j, train_dl, logger, optimizer)
            with (checkpoint_path / 'model{:03d}.t7'.format(j)).open('wb') as f:
                torch.save(model, f)

            val_pk, threshold = validate(model, args, j, dev_dl, logger)
            if val_pk < best_val_pk:
                test_pk = test(model, args, j, test_dl, logger, threshold)
                logger.debug(
                    colored(
                        'Current best model from epoch {} with p_k {} and threshold {}'.format(j, test_pk, threshold),
                        'green'))
                best_val_pk = val_pk
                with (checkpoint_path / 'Meilleur_model.t7').open('wb') as f:
                    torch.save(model, f)
Example #24
    def export(self, name, file_path=None, dir_path=None):
        """
        Provides access to the persisted file corresponding to an entity.

        Can be called in three ways:

        .. code-block:: python

            # Returns a path to the persisted file.
            export(name)

            # Copies the persisted file to the specified file path.
            export(name, file_path=path)

            # Copies the persisted file to the specified directory.
            export(name, dir_path=path)

        The entity must be persisted and have only one instance. The dir_path
        and file_path options support paths on GCS, specified like:
        gs://mybucket/subdir/
        """

        result_group = self._deriver.derive(name)
        if len(result_group) != 1:
            raise ValueError(
                "Can only export an entity if it has a single value; "
                "entity %r has %d values" % (name, len(result_group)))

        result, = result_group

        if result.file_path is None:
            raise ValueError("Entity %r is not locally persisted" % name)
        src_file_path = result.file_path

        if dir_path is None and file_path is None:
            return src_file_path

        check_exactly_one_present(dir_path=dir_path, file_path=file_path)

        if dir_path is not None:
            dst_dir_path = Path(dir_path)
            filename = name + src_file_path.suffix
            dst_file_path = dst_dir_path / filename
        else:
            dst_file_path = Path(file_path)
            dst_dir_path = dst_file_path.parent

        if not dst_dir_path.exists() and 'gs:/' not in str(dst_dir_path):
            dst_dir_path.mkdir(parents=True)

        dst_file_path_str = str(dst_file_path)

        if dst_file_path_str.startswith('gs:/'):
            # The path object combines // into /, so we revert it here
            copy_to_gcs(str(src_file_path),
                        dst_file_path_str.replace('gs:/', 'gs://'))
        else:
            shutil.copyfile(str(src_file_path), dst_file_path_str)
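A hedged sketch of the three calling conventions the docstring describes ('flow' stands in for whatever object exposes export(); the entity name and paths are placeholders):

local_path = flow.export('total_sales')                          # path to the persisted file
flow.export('total_sales', dir_path='out/')                      # copy into a directory
flow.export('total_sales', file_path='gs://bucket/sales.pkl')    # copy to GCS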
Example #25
 def create_diff_folder():
     """
     :return: path of diff files
     :rtype: Path
     """
     diff_folder = Path(os.getcwd()).joinpath('./diffs/')
     diff_folder = diff_folder.joinpath(str(uuid.uuid4()).split('-')[-1])
     if not diff_folder.exists():
         diff_folder.mkdir(parents=True)
     return diff_folder
Example #26
def save_stat_page(name, pred_stat, pred_img, save_path):
    name = Path(name).stem
    save_path = Path(save_path)
    if not save_path.exists():
        save_path.mkdir(parents=True)
    stat_path = save_path / ('%s.json' % name)
    img_path = save_path / ('%s.png' % name)
    with stat_path.open('w') as f:
        json.dump(pred_stat, f)
    cv2.imwrite(str(img_path), pred_img)
    return
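A minimal usage sketch (metrics and image are placeholders):

import numpy as np

stats = {'precision': 0.91, 'recall': 0.87}              # hypothetical metrics
img = np.zeros((64, 64, 3), dtype=np.uint8)              # placeholder image
save_stat_page('page_001.jpg', stats, img, 'results/')   # writes page_001.json and page_001.png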
Example #27
 def getFullSavePath(self, serialize):
     if self.saveFolder is None:
         self.saveFolder = '/%s_%s_%s/' % (
             self.agentType, self.envType,
             time.strftime('%d-%m-%Y-%H-%M-%S'))
         self.savePath = self.savePath + self.saveFolder
     path = Path(self.savePath)
     if serialize:
         path.mkdir(parents=True, exist_ok=True)
     return self.getAbsPath(self.savePath)
Example #28
def datadir(original_datadir, tmpdir):
    # Method from: https://github.com/gabrielcnr/pytest-datadir
    # License: MIT
    import shutil
    result = Path(str(tmpdir.join(original_datadir.stem)))
    if original_datadir.is_dir():
        shutil.copytree(str(original_datadir), str(result))
    else:
        result.mkdir()
    return result
Example #30
def punic_cli(context, echo, verbose, timing, color):
    ### TODO: Clean this up!

    # Configure click
    context.token_normalize_func = lambda x: x if not x else x.lower()

    # Configure logging
    level = logging.DEBUG if verbose else logging.INFO

    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)

    formatter = HTMLFormatter()

    # create console handler and set level to debug
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(level)
    stream_handler.setFormatter(formatter)
    # add ch to logger
    logger.addHandler(stream_handler)

    logs_path = Path(
        '~/Library/Application Support/io.schwa.Punic/Logs').expanduser()
    if not logs_path.exists():
        logs_path.mkdir(parents=True)

    log_path = logs_path / "punic.log"
    needs_rollover = log_path.exists()

    file_handler = logging.handlers.RotatingFileHandler(str(log_path),
                                                        backupCount=4)
    if needs_rollover:
        file_handler.doRollover()
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(
        HTMLStripperFormatter(
            logging.Formatter(
                "%(asctime)s - %(name)s - %(levelname)s - %(message)s")))
    logger.addHandler(file_handler)

    for name in ['boto', 'requests.packages.urllib3']:
        named_logger = logging.getLogger(name)
        named_logger.setLevel(logging.WARNING)
        named_logger.propagate = True

    formatter.color = color
    logger.color = color
    runner.echo = echo

    # Set up punic
    punic = Punic()
    punic.config.log_timings = timing
    context.obj = punic
    punic.config.verbose = verbose
    punic.config.echo = verbose
Example #31
def build_data_repo_structure(args):
    root = Path(args.root)
    if not root.exists():
        root.mkdir()
    data_path = root / args.name
    if not data_path.exists():
        data_path.mkdir()
    temp = Path(os.getcwd(), "temp")
    if not temp.exists():
        temp.mkdir()
    return data_path
Example #32
def get_folder(path):
    """ Return a path to a folder, creating it if it doesn't exist 
    Args:
        path: The path of the new folder.
    Returns:
        _ : The guaranteed path of the folder/file.
    """
    logger.debug(f"Requested path: {path}")
    path = Path(path)
    path.mkdir(parents=True, exist_ok=True)
    return path
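A quick sketch of the idempotent behaviour (the path is hypothetical):

models_dir = get_folder('artifacts/models')   # created on the first call
same_dir = get_folder('artifacts/models')     # already exists, returned as-is
assert models_dir == same_dir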
Example #33
    def _report_image_plot_and_upload(
            self,
            title,  # type: str
            series,  # type: str
            iteration,  # type: int
            path=None,  # type: Optional[str]
            matrix=None,  # type: Optional[np.ndarray]
            max_image_history=None,  # type: Optional[int]
            delete_after_upload=False  # type: bool
    ):
        """
        Report an image, upload its contents, and present in plots section using plotly

        Image is uploaded to a preconfigured bucket (see :meth:`Logger.setup_upload`) with a key (filename)
        describing the task ID, title, series and iteration.

        :param title: Title (AKA metric)
        :type title: str
        :param series: Series (AKA variant)
        :type series: str
        :param iteration: Iteration number
        :type iteration: int
        :param path: A path to an image file. Required unless matrix is provided.
        :type path: str
        :param matrix: A 3D numpy.ndarray object containing image data (RGB). Required unless path is provided.
        :type matrix: np.ndarray
        :param max_image_history: maximum number of image to store per metric/variant combination \
        use negative value for unlimited. default is set in global configuration (default=5)
        :type max_image_history: int
        :param delete_after_upload: if True, the local copy will be deleted once the file has been uploaded
        :type delete_after_upload: boolean
        """

        # if task was not started, we have to start it
        self._start_task_if_needed()
        upload_uri = self.get_default_upload_destination()
        if not upload_uri:
            upload_uri = Path(get_cache_dir()) / 'debug_images'
            upload_uri.mkdir(parents=True, exist_ok=True)
            # Verify that we can upload to this destination
            upload_uri = str(upload_uri)
            storage = StorageHelper.get(upload_uri)
            upload_uri = storage.verify_upload(folder_uri=upload_uri)

        self._task.reporter.report_image_plot_and_upload(
            title=title,
            series=series,
            path=path,
            matrix=matrix,
            iter=iteration,
            upload_uri=upload_uri,
            max_image_history=max_image_history,
            delete_after_upload=delete_after_upload,
        )
Example #34
class TermiusClient(object):

    def __init__(self):
        self.app_directory = Path('~/.termius/').expanduser()
        self.command_mock = Mock(**{'app.directory_path': self.app_directory})
        self.prepare()

    def create_identity(self, label, is_visible):
        return self._create_instance(
            Identity, label=label, is_visible=is_visible
        )

    def create_host(self, address, label):
        return self._create_instance(Host, label=label, address=address)

    def create_pfrule(self, host_id, local_port, label):
        return self._create_instance(
            PFRule, label=label,
            pftype='L', local_port=local_port,
            remote_port=22, hostname='localhost'
        )

    def create_group(self, label):
        return self._create_instance(Group, label=label)

    def _create_instance(self, model, **kwargs):
        instance = model(**kwargs)
        with self.storage:
            return self.storage.save(instance)

    def prepare(self):
        self.clean()
        if not self.app_directory.is_dir():
            self.app_directory.mkdir()
        self.storage = ApplicationStorage(self.command_mock)

    def clean(self):
        if self.app_directory.is_dir():
            self._clean_dir(self.app_directory)

    def _clean_dir(self, dir_path):
        [self._clean_dir(i) for i in dir_path.iterdir() if i.is_dir()]
        [i.unlink() for i in dir_path.iterdir() if i.is_file()]
        dir_path.rmdir()
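A hedged side note on _clean_dir: the manual recursion above is equivalent to a standard-library call, sketched here with an illustrative path:

import shutil
from pathlib import Path

app_directory = Path('~/.termius/').expanduser()
if app_directory.is_dir():
    shutil.rmtree(str(app_directory))  # same effect as the recursive unlink/rmdir above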
Example #35
    def __init__(self, options):
        self.do = options['data_options']
        self.mo = options['model_options']
        self.oo = options['optimization_options']
        self.lo = options['log_options']

        data_path = self.do['data_path']
        task_num = self.do['task_number']
        lang = self.do.get('language', 'en')  # defaults to the small English set
        self.qa_train, self.qa_test \
            = read_dataset(data_path,
                           task_num, lang, options['data_options']['reader'],
                           {'threshold': 0,
                            'context_length': self.mo['context_length'],
                            'context_length_percentage': self.mo.get('context_length_percentage', 1),
                            'sentence_length': self.mo['sentence_length']})

        self.data_size = len(self.qa_train.stories)
        self.mo['context_length'] = self.qa_train.context_length
        #self.options['model_options']['context_length'] = self.qa_train.context_length

        tokens = self.qa_train.specialWords
        self.NULL = tokens['<NULL>']
        self.EOS = tokens['<EOS>']
        self.UNKNOWN = tokens['<UNKNOWN>']

        if self.oo['dump_params']:
            weight_dir = Path(self.oo['weight_path'])
            if not weight_dir.exists():
                weight_dir.mkdir()
        self.batch_size_train = self.oo['batch_size_train']
        self.batch_size_test = self.oo['batch_size_test']

        self.verbose = self.oo['verbose']
        self.log = self.logger_factory()
        self.lo['dump_epoch'] = self.oo['max_epoch'] \
                                if self.lo['dump_epoch'] < 0 \
                                else self.lo['dump_epoch']

        vocab_size = len(self.qa_train.index_to_word)
        options['model_options']['vocab_size'] = vocab_size
        model_name = self.mo['model_name']
        self.model = Models.model(model_name)(options)
        self.log("context length: %d" % self.mo['context_length'])
Example #36
def preprocess_options(options, disp=False):
    if disp:
        print "options:\n", json.dumps(options, indent=4, sort_keys=False)

    log_options = options['log_options']
    if log_options['dump_config']:
        path = Path(log_options['dump_path'])
        if not path.exists():
            path.mkdir()
        dumpname = log_options['dump_name']
        basename = os.path.splitext(dumpname)[0] + '.json'
        with open(str(path / basename), 'w') as f:
            json.dump(options, f, indent=4, sort_keys=False)

    data_readers = {'QAReader': QAReader,
                    'minibatch': MinibatchReader}

    options['data_options']['reader'] = \
        data_readers[options['data_options']['reader']]
Example #37
def new_page():
    from string import Template     # Use Python templates, not Mako templates

    slug = raw_input('Slug for page: ')
    title = raw_input('Title of page: ')
    template = raw_input('Template to inherit from (default is example.html): ')

    new_dir = Path('web') / slug
    if new_dir.exists():
        print '\nDirectory %s already exists, aborting' % new_dir
        return
    new_dir.mkdir()

    html_file = new_dir / 'index.html'
    with html_file.open('w') as fp:
        fp.write(Template(NEW_PAGE_HTML_TEMPLATE).substitute(
            title=repr(title.strip()), template=template.strip() or 'example.html'))

    js_file = new_dir / 'app.dart'
    with js_file.open('w') as fp:
        class_name = ''.join(s.capitalize() for s in title.split(' '))
        fp.write(Template(NEW_PAGE_CODE_TEMPLATE).substitute(title=title))
Example #38
def tophat2_index_transcriptome(bowtie_index_path,
                                gtf_path,
                                output_base_path,
                                log_path=None,
                                tmp_dir=None):
    """
    Builds a transcriptome index for a reference genome.

    Parameters
    ----------
    bowtie_index_path : pathlib.Path
        Path to the bowtie index.
    gtf_path : pathlib.Path
        Path to the reference gtf file.
    output_base_path : pathlib.Path
        Base output path for the built index.
    log_path : pathlib.Path
        Where to write the log output.
    tmp_dir : pathlib.Path
        Directory to use for temporary outputs.

    """

    if tmp_dir is None:
        tmp_dir = Path(tempfile.mkdtemp())
    else:
        tmp_dir.mkdir(parents=True, exist_ok=True)

    try:
        args = [
            'tophat2', '--GTF', str(gtf_path),
            '--transcriptome-index={}'.format(output_base_path), '--bowtie1',
            '--output-dir', str(tmp_dir), str(bowtie_index_path)
        ]

        run_command(args=args, log_path=log_path)
    finally:
        shutil.rmtree(str(tmp_dir))
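A hypothetical invocation (all paths are placeholders):

from pathlib import Path

tophat2_index_transcriptome(
    bowtie_index_path=Path('indexes/GRCh38'),
    gtf_path=Path('annotation/genes.gtf'),
    output_base_path=Path('indexes/GRCh38.transcriptome'),
    log_path=Path('logs/tophat2_index.log'))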
Example #39
def clone_with_patches(spec_path, base_dest, base_res, patches_res, pq_res):
    """
    Given a source, archive and optional patchqueue resource clone repos, apply
    the patches followed by the patchqueue
    """
    work_path = Path(tempfile.mkdtemp(prefix='clone-',
                                      dir=str(base_dest.parent)))
    source_path = Path(work_path, "SOURCES")
    source_path.mkdir(parents=True)

    try:
        archive_resource(base_res, source_path)
        patches_tarball = archive_resource(patches_res, source_path)
        unpack_patches(patches_tarball, work_path)
        base_repo = create_repo_from_spec(spec_path, work_path, base_dest)

        if pq_res:
            pq_dest = Path(str(base_dest)+'.pg')
            pq_repo = clone_resource(pq_res, pq_dest)
            apply_patchqueue(base_repo, pq_repo, pq_res.prefix)

    finally:
        shutil.rmtree(str(work_path), ignore_errors=True)
Example #40
class GitRepo(object):
    def __init__(self, path, remote_url=None, remote_name=None, branch_name='master'):
        self.path = Path(path)
        self.path_str = str(self.path)
        self.remote_url = remote_url
        self.branch_name = branch_name
        if not remote_name:
            remote_name = 'bedrock-dev' if settings.DEV else 'bedrock-prod'

        self.remote_name = remote_name

    def git(self, *args):
        """Run a git command against the current repo"""
        curdir = os.getcwd()
        try:
            os.chdir(self.path_str)
            output = check_output((GIT,) + args, stderr=STDOUT)
        finally:
            os.chdir(curdir)

        return output.strip()

    @property
    def full_branch_name(self):
        """Full branch name with remote (e.g. origin/master)"""
        return '{}/{}'.format(self.remote_name, self.branch_name)

    @property
    def current_hash(self):
        """The git revision ID (hash) of the current HEAD"""
        return self.git('rev-parse', 'HEAD')

    @property
    def remote_names(self):
        """Return a list of the remote names in the repo"""
        return self.git('remote').split()

    def has_remote(self):
        """Return True if the repo has a remote by the correct name"""
        return self.remote_name in self.remote_names

    def add_remote(self):
        """Add the remote to the git repo from the init args"""
        if not self.remote_url:
            raise RuntimeError('remote_url required to add a remote')

        self.git('remote', 'add', self.remote_name, self.remote_url)

    def diff(self, start_hash, end_hash):
        """Return a 2 tuple: (modified files, deleted files)"""
        diff_out = StringIO(self.git('diff', '--name-status', start_hash, end_hash))
        modified = set()
        removed = set()
        for line in diff_out:
            parts = line.split()
            # delete
            if parts[0] == 'D':
                removed.add(parts[1])
            # rename
            elif parts[0][0] == 'R':
                removed.add(parts[1])
                modified.add(parts[2])
            # everything else
            else:
                # some types (like copy) have two file entries
                for part in parts[1:]:
                    modified.add(part)

        return modified, removed

    def clone(self):
        """Clone the repo specified in the initial arguments"""
        if not self.remote_url:
            raise RuntimeError('remote_url required to clone')

        self.path.mkdir(parents=True, exist_ok=True)
        self.git('clone', '--origin', self.remote_name, '--depth', '1',
                 '--branch', self.branch_name, self.remote_url, '.')

    def pull(self):
        """Update the repo to the latest of the remote and branch

        Return the previous hash and the new hash."""
        if not self.has_remote():
            self.add_remote()

        old_hash = self.current_hash
        self.git('fetch', self.remote_name)
        self.git('checkout', '-f', self.full_branch_name)
        return old_hash, self.current_hash

    def update(self):
        """Updates a repo, cloning if necessary.

        :return a tuple of lists of modified and deleted files if updated, None if cloned
        """
        if self.path.is_dir():
            if not self.path.joinpath('.git').is_dir():
                rmtree(self.path_str, ignore_errors=True)
                self.clone()
            else:
                return self.diff(*self.pull())
        else:
            self.clone()

        return None, None
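A usage sketch, assuming a reachable remote (the URL, path, and remote name are illustrative):

repo = GitRepo('data/www', remote_name='origin',
               remote_url='https://github.com/mozilla/bedrock.git')
modified, removed = repo.update()   # (None, None) right after a fresh clone
if modified:
    print('%d files changed since the last update' % len(modified))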
Example #41
class Config(object):
    def __init__(self):
        self.xcode = None
        self.repo_overrides = dict()

        self.root_path = Path.cwd()  # type: Path

        self.library_directory = Path('~/Library/Application Support/io.schwa.Punic').expanduser()
        if not self.library_directory.exists():
            self.library_directory.mkdir(parents=True)
        self.repo_cache_directory = self.library_directory / 'repo_cache'
        if not self.repo_cache_directory.exists():
            self.repo_cache_directory.mkdir(parents=True)
        self.punic_path = self.root_path / 'Carthage'
        self.build_path = self.punic_path / 'Build'
        self.checkouts_path = self.punic_path / 'Checkouts'

        self.derived_data_path = self.library_directory / "DerivedData"

        self.platforms = Platform.all
        self.configuration = None

        self.fetch = False
        self.xcode = Xcode.default()

        self.toolchain = None
        self.dry_run = False
        self.use_submodules = False
        self.use_ssh = False

        self.skips = []

        self.verbose = False
        self.echo = False

        self.continuous_integration = 'CI' in os.environ
        if self.continuous_integration:
            logging.info("Running on continuous integration")

        # Read in defaults from punic.yaml (or punic.yml if that exists)
        punic_configuration_path = Path('punic.yaml')
        if not punic_configuration_path.exists():
            punic_configuration_path = Path('punic.yml')
        if punic_configuration_path.exists():
            self.read(punic_configuration_path)
        runner.cache_path = self.library_directory / "cache.shelf"

    def update(self, **kwargs):
        for key, value in sorted(kwargs.items()):
            if value:
                if hasattr(self, key):
                    setattr(self, key, value)

        # Special case for platforms
        platform = kwargs['platform'] if 'platform' in kwargs else None
        if platform:
            self.platforms = parse_platforms(platform)

        if self.verbose and os.environ.get('DUMP_CONFIG', False):
            self.dump()

    def dump(self):

        logging.info('# Environment ##' + '#' * 64)

        logging.info('CWD: {}'.format(os.getcwd()))

        key_width = max([len(k) for k in os.environ.keys()] + [len(k) for k in self.__dict__.keys()])

        for key, value in sorted(os.environ.items()):
            logging.info('{:{key_width}}: {}'.format(key, value, key_width = key_width + 1))

        logging.info('# Configuration ' + '#' * 64)

        for key, value in sorted(self.__dict__.items()):
            logging.info('{:{key_width}}: {}'.format(key, value, key_width = key_width + 1))
        logging.info('#' * 80)

    @property
    def xcode_version(self):
        return self.xcode.version if self.xcode else None

    @xcode_version.setter
    def xcode_version(self, value):
        xcode = Xcode.with_version(value)
        if value and not xcode:
            raise Exception('Could not find xcode version: {}'.format(value))
        if not xcode:
            xcode = Xcode.default()
        self.xcode = xcode

    def read(self, path):
        # type: (Path) -> None

        d = yaml.safe_load(path.open())
        if not d:
            return
        if 'defaults' in d:
            defaults = d['defaults']
            if 'configuration' in defaults:
                self.configuration = defaults['configuration']
            if 'platforms' in defaults:
                self.platforms = parse_platforms(defaults['platforms'])
            elif 'platform' in defaults:
                self.platforms = parse_platforms(defaults['platform'])
            if 'xcode-version' in defaults:
                self.xcode_version = six.text_type(defaults['xcode-version'])

            if 'use-ssh' in defaults:
                self.use_ssh = defaults['use-ssh']

        if 'repo-overrides' in d:
            self.repo_overrides = d['repo-overrides']

        if 'skips' in d:
            self.skips = d['skips'] or []
Example #42
class GitRepo(object):
    def __init__(self, path, remote_url=None, branch_name='master'):
        self.path = Path(path)
        self.path_str = str(self.path)
        self.remote_url = remote_url
        self.branch_name = branch_name
        db_latest_key = '%s:%s:%s' % (self.path_str, remote_url or '',
                                      branch_name)
        self.db_latest_key = sha256(db_latest_key).hexdigest()

    def git(self, *args):
        """Run a git command against the current repo"""
        curdir = os.getcwd()
        try:
            os.chdir(self.path_str)
            output = check_output((GIT,) + args, stderr=STDOUT)
        finally:
            os.chdir(curdir)

        return output.strip()

    @property
    def current_hash(self):
        """The git revision ID (hash) of the current HEAD or None if no repo"""
        try:
            return self.git('rev-parse', 'HEAD')
        except OSError:
            return None

    def diff(self, start_hash, end_hash):
        """Return a 2 tuple: (modified files, deleted files)"""
        diff_out = StringIO(self.git('diff', '--name-status', start_hash, end_hash))
        modified = set()
        removed = set()
        for line in diff_out:
            parts = line.split()
            # delete
            if parts[0] == 'D':
                removed.add(parts[1])
            # rename
            elif parts[0][0] == 'R':
                removed.add(parts[1])
                modified.add(parts[2])
            # everything else
            else:
                # some types (like copy) have two file entries
                for part in parts[1:]:
                    modified.add(part)

        return modified, removed

    def clone(self):
        """Clone the repo specified in the initial arguments"""
        if not self.remote_url:
            raise RuntimeError('remote_url required to clone')

        self.path.mkdir(parents=True, exist_ok=True)
        self.git('clone', '--depth', '1', '--branch',
                 self.branch_name, self.remote_url, '.')

    def pull(self):
        """Update the repo to the latest of the remote and branch

        Return the previous hash and the new hash."""
        old_hash = self.current_hash
        self.git('fetch', '-f', self.remote_url, self.branch_name)
        self.git('checkout', '-f', 'FETCH_HEAD')
        return old_hash, self.current_hash

    def update(self):
        """Updates a repo, cloning if necessary.

        :return a tuple of lists of modified and deleted files if updated, None if cloned
        """
        if self.path.is_dir():
            if not self.path.joinpath('.git').is_dir():
                rmtree(self.path_str, ignore_errors=True)
                self.clone()
            else:
                return self.pull()
        else:
            self.clone()

        return None, None

    def reset(self, new_head):
        self.git('reset', '--hard', new_head)

    def get_db_latest(self):
        try:
            return GitRepoState.objects.get(repo_id=self.db_latest_key).latest_ref
        except GitRepoState.DoesNotExist:
            return None

    def has_changes(self):
        return self.current_hash != self.get_db_latest()

    def set_db_latest(self, latest_ref=None):
        latest_ref = latest_ref or self.current_hash
        rs, created = GitRepoState.objects.get_or_create(repo_id=self.db_latest_key,
                                                         defaults={'latest_ref': latest_ref})
        if not created:
            rs.latest_ref = latest_ref
            rs.save()
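
A minimal usage sketch for the GitRepo class above; the path, remote URL and branch here are hypothetical, and it assumes GIT points at the git binary and the GitRepoState model is available:

def sync_repo():
    # Hypothetical example: clone on first run, pull afterwards, then act
    # only when git reports a new HEAD.
    repo = GitRepo('/tmp/demo-repo',
                   remote_url='https://example.com/demo.git',
                   branch_name='main')

    old_hash, new_hash = repo.update()
    if old_hash and new_hash and old_hash != new_hash:
        modified, removed = repo.diff(old_hash, new_hash)
        print('modified:', sorted(modified))
        print('removed:', sorted(removed))

    if repo.has_changes():    # HEAD differs from the ref stored in the DB
        repo.set_db_latest()  # record the new HEAD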
Example #43
0
class TermiusApp(App):
    """Class for CLI application."""

    def __init__(self):
        """Construct new CLI application."""
        super(TermiusApp, self).__init__(
            description='Termius - crossplatform SSH and Telnet client',
            version=__version__,
            command_manager=CommandManager('termius.handlers'),
        )

        self.configure_signals()
        self.directory_path = Path(expanduser('~/.{}/'.format(self.NAME)))
        if not self.directory_path.is_dir():
            self.directory_path.mkdir(parents=True)

        self.command_manager.add_command('help', HelpCommand)

    def configure_logging(self):
        """Change logging level for the requests package."""
        super(TermiusApp, self).configure_logging()
        logging.getLogger('requests').setLevel(logging.WARNING)

    # pylint: disable=no-self-use
    def configure_signals(self):
        """Bind subscribers to signals."""
        post_create_instance.connect(store_ssh_key, sender=SshKey)
        post_update_instance.connect(store_ssh_key, sender=SshKey)
        post_delete_instance.connect(delete_ssh_key, sender=SshKey)

        post_logout.connect(clean_data)

    def build_option_parser(self, description, version,
                            argparse_kwargs=None):
        """Return an argparse option parser for this application.

        Subclasses may override this method to extend
        the parser with more global options.

        :param description: full description of the application
        :paramtype description: str
        :param version: version number for the application
        :paramtype version: str
        :param argparse_kwargs: extra keyword arguments passed to the
                                ArgumentParser constructor
        :paramtype argparse_kwargs: dict
        """
        argparse_kwargs = argparse_kwargs or {}
        parser = argparse.ArgumentParser(
            description=description,
            add_help=False,
            **argparse_kwargs
        )
        parser.add_argument(
            '--version',
            action='version',
            version='%(prog)s {0}'.format(version),
            help='display version information and exit'
        )
        verbose_group = parser.add_mutually_exclusive_group()
        verbose_group.add_argument(
            '-v', '--verbose',
            action='count',
            dest='verbose_level',
            default=self.DEFAULT_VERBOSE_LEVEL,
            help='provide a detailed output',
        )
        verbose_group.add_argument(
            '-q', '--quiet',
            action='store_const',
            dest='verbose_level',
            const=0,
            help='display warnings and errors only',
        )
        parser.add_argument(
            '--log-file',
            action='store',
            default=None,
            help='record output into a designated file',
        )
        if self.deferred_help:
            parser.add_argument(
                '-h', '--help',
                dest='deferred_help',
                action='store_true',
                help="display help message",
            )
        else:
            parser.add_argument(
                '-h', '--help',
                action=HelpAction,
                nargs=0,
                default=self,  # tricky
                help="show the help message",
            )
        parser.add_argument(
            '--debug',
            default=False,
            action='store_true',
            help='enable debugging mode',
        )
        return parser
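
The cliff-based application above is normally wired to a console-script entry point; a minimal sketch, assuming TermiusApp is importable:

import sys

def main(argv=None):
    # Delegate argument parsing and command dispatch to cliff's App.run().
    app = TermiusApp()
    return app.run(argv if argv is not None else sys.argv[1:])

if __name__ == '__main__':
    sys.exit(main())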
Example #44
0
def punic_cli(context, echo, verbose, timing, color):
    ### TODO: Clean this up!

    # Configure click
    context.token_normalize_func = lambda x: x if not x else x.lower()

    # Configure logging
    level = logging.DEBUG if verbose else logging.INFO

    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)

    formatter = HTMLFormatter()

    # create a console handler at the requested verbosity level
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(level)
    stream_handler.setFormatter(formatter)
    # attach the console handler to the root logger
    logger.addHandler(stream_handler)

    # TODO: This needs to be a better location
    logs_path = Path('~/Library/Application Support/io.schwa.Punic/Logs').expanduser()
    if not logs_path.exists():
        logs_path.mkdir(parents=True)

    log_path = logs_path / "punic.log"
    needs_rollover = log_path.exists()

    file_handler = logging.handlers.RotatingFileHandler(str(log_path), backupCount=4)
    if needs_rollover:
        file_handler.doRollover()
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(HTMLStripperFormatter(logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")))
    logger.addHandler(file_handler)

    for name in ['boto', 'requests.packages.urllib3']:
        named_logger = logging.getLogger(name)
        named_logger.setLevel(logging.WARNING)
        named_logger.propagate = True

    runner.echo = echo

    # Set up punic
    punic = Punic()
    punic.config.log_timings = timing
    context.obj = punic
    punic.config.verbose = verbose
    punic.config.echo = verbose

    # Color: default on, except under continuous integration
    if color is None:
        color = not punic.config.continuous_integration

    punic.config.color = color
    formatter.color = color
    logger.color = color
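
The per-run log rotation used above (forcing a rollover at startup so each run writes a fresh file) is a reusable pattern on its own; a self-contained sketch with a hypothetical log location:

import logging
import logging.handlers
from pathlib import Path

def fresh_run_log(logs_dir, backups=4):
    """Return a handler that starts a new log file on every run,
    keeping the previous `backups` runs around."""
    logs_path = Path(logs_dir).expanduser()
    logs_path.mkdir(parents=True, exist_ok=True)

    log_path = logs_path / 'run.log'
    needs_rollover = log_path.exists()

    handler = logging.handlers.RotatingFileHandler(str(log_path),
                                                   backupCount=backups)
    if needs_rollover:
        handler.doRollover()  # rotate last run's file to run.log.1, etc.
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    return handler

logging.getLogger().addHandler(fresh_run_log('~/.demo/logs'))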
Example #45
0
class Config:
    @classmethod
    def load_from_file(cls, f, dir_, extra=()):
        self = cls()
        data = IncludeLoader.load(f)

        for p in extra:
            data.update(IncludeLoader.load(p))

        try:
            self.name     = data['name']
            self.theme    = dir_ / data.get('theme', 'theme')
            self.output   = Path(data.get('output', 'out'))

            self.content_root = dir_ / data.get('content-root', '.')

            self.ignore   = data.get('ignore', [])

            self.output.mkdir(exist_ok=True)
            logger.info('Outputting to %s', self.output.resolve())

            self.htmlless = data.get('pretty-html', False)

            if 'compiled-theme' in data:
                self.compiled_theme = dir_ / data['compiled-theme']
            else:
                self.compiled_theme = None

            self.resources = []
            for r in data.get('resources', []):
                try:
                    command = r['command']
                    source = Path(r['source'])
                    output = Path(r['output'])
                    suffix = r.get('ext', None)
                    recursive = r.get('recursive', False)
                    pattern = r.get('pattern', '*')
                except KeyError as e:
                    raise CMS7Error('resource missing required key {}'.format(e.args[0])) from e
                self.resources.append(Resource(self, command, dir_, source, output, suffix, recursive, pattern))

            self.module_id = {}

            self._modules = []
            for m in data['modules']:
                name = m.pop('name')
                _id = None
                if 'id' in m:
                    _id = m.pop('id')
                if name not in _MODULES:
                    raise CMS7Error('unknown module: {!r}'.format(name))
                logger.info('Loading module: %s', name)
                module = _MODULES[name](self, self.content_root, **m)
                if _id is not None:
                    self.module_id[_id] = module
                self._modules.append(module)

        except KeyError as e:
            raise CMS7Error('config missing required key {}'.format(e.args[0])) from e

        self._data = data

        return self

    def modules(self):
        yield from self._modules

    def __getitem__(self, k):
        return self._data[k]
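
A usage sketch for the loader above; it assumes the surrounding cms7 package (IncludeLoader, _MODULES, logger) is importable, and the file and directory names are hypothetical:

from pathlib import Path

site_dir = Path('site')  # hypothetical project directory
config = Config.load_from_file(site_dir / 'config.yml', site_dir)

for module in config.modules():
    print('loaded module:', module)

print(config['name'])  # __getitem__ exposes the raw config data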
Example #46
0
class Config(object):
    def __init__(self):
        self.defaults = {
            'configuration': None,
            'platforms': [],
        }
        self.xcode = None
        self.repo_overrides = dict()

        self.root_path = Path.cwd()  # type: Path

        self.library_directory = Path(os.path.expanduser('~/Library/io.schwa.Punic'))
        if not self.library_directory.exists():
            self.library_directory.mkdir(parents=True)
        self.repo_cache_directory = self.library_directory / 'repo_cache'
        if not self.repo_cache_directory.exists():
            self.repo_cache_directory.mkdir(parents=True)
        self.punic_path = self.root_path / 'Carthage'
        self.build_path = self.punic_path / 'Build'
        self.checkouts_path = self.punic_path / 'Checkouts'

        self.derived_data_path = self.library_directory / "DerivedData"

        runner.cache_path = self.library_directory / "cache.shelf"

        self.can_fetch = False
        self.xcode = Xcode.default()

        # Read in defaults from punic.yaml
        self.read(Path('punic.yaml'))

    @property
    def xcode_version(self):
        return self.xcode.version if self.xcode else None

    @xcode_version.setter
    def xcode_version(self, value):
        xcode = Xcode.with_version(value)
        if value and not xcode:
            raise Exception('Could not find xcode version: {}'.format(value))
        if not xcode:
            xcode = Xcode.default()
        self.xcode = xcode

    def read(self, path):
        # type: (Path) -> None

        if not path.exists():
            return

        d = pureyaml.load(path.open())
        if 'defaults' in d:
            defaults = d['defaults']
            if 'configuration' in defaults:
                self.configuration = defaults['configuration']
            if 'platforms' in defaults:
                self.platforms = parse_platforms(defaults['platforms'])
            elif 'platform' in defaults:
                self.platforms = parse_platforms(defaults['platform'])

        if 'repo-overrides' in d:
            self.repo_overrides = d['repo-overrides']

        if 'xcode-version' in d:
            xcode_version = d['xcode-version']
            self.xcode_version = xcode_version

    def dump(self):
        logger.info('Config:')
        logger.info('\tDefaults')
        for k, v in self.defaults.items():
            logger.info('\t\t{}: {}'.format(k, v))
        logger.info('\tOverrides: {}'.format(self.repo_overrides))

    def update(self, configuration=None, platform=None):
        # type: (str, str) -> None
        if configuration:
            self.configuration = configuration
        if platform:
            self.platforms = parse_platforms(platform)

    @property
    def configuration(self):
        return self.defaults['configuration']

    @configuration.setter
    def configuration(self, configuration):
        self.defaults['configuration'] = configuration

    @property
    def platforms(self):
        return self.defaults['platforms']

    @platforms.setter
    def platforms(self, platforms):
        self.defaults['platforms'] = platforms
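
A short sketch of how the defaults-backed properties above behave, assuming the punic package (Xcode, runner, parse_platforms, pureyaml) is importable:

config = Config()  # also reads punic.yaml from the working directory, if present

# `configuration` and `platforms` are thin property views over the
# `defaults` dict, so updates land in a single place:
config.update(configuration='Debug', platform='iOS')
assert config.defaults['configuration'] == 'Debug'

config.dump()  # logs the defaults and any repo overrides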