Example #1
def save_log(self, project, data):
    logpath = os.path.join(
        self.config.logdir,
        project.path.replace(self.config.active_path, '').strip('\\/'),
        project.id + '.log')
    if not os.path.isdir(os.path.dirname(logpath)):
        shell.mkdir(os.path.dirname(logpath))
    # The build handler passes the log as a str, so encode it before
    # writing to the binary file handle
    with open(logpath, 'wb') as f:
        f.write(data.encode('utf-8') if isinstance(data, str) else data)
Example #2
def save(self):
    if self.filename is not None:
        dirname = os.path.abspath(os.path.dirname(self.filename))
        if not os.path.isdir(dirname):
            shell.mkdir(dirname)
        if self.fmt == Config.JSON:
            with io.open(self.filename, 'w', encoding=self.encoding) as f:
                f.write(json.dumps(self.data, indent=2))
        elif self.fmt == Config.YAML:
            import yaml
            with io.open(self.filename, 'w', encoding=self.encoding) as f:
                yaml.safe_dump(self.data, f, default_flow_style=False)
        else:
            logger.error('Unsupported configuration format: %s', self.fmt)
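The same pattern — make sure the parent directory exists, then serialize — can be reproduced with the standard library alone. A minimal sketch under that assumption; the filename and payload are made up, and os.makedirs stands in for shell.mkdir:

import io
import json
import os

def save_json(filename, data, encoding='utf-8'):
    # Create the parent directory if it is missing (stand-in for shell.mkdir)
    dirname = os.path.abspath(os.path.dirname(filename))
    if not os.path.isdir(dirname):
        os.makedirs(dirname)
    # Write the data the same way the Config.JSON branch above does
    with io.open(filename, 'w', encoding=encoding) as f:
        f.write(json.dumps(data, indent=2))

save_json('config/example.json', {'debug': True, 'workers': 4})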
Example #3
def unzip(archive, destination, filenames=None):
    """Unzip the a complete zip archive into destination directory,
    or unzip a specific file(s) from the archive.

    Usage:
        >>> output = os.path.join(os.getcwd(), 'output')
        >>> # Archive can be an instance of a ZipFile class
        >>> archive = zipfile.ZipFile('test.zip', 'r')
        >>> # Or just a filename
        >>> archive = 'test.zip'
        >>> # Extracts all files
        >>> unzip(archive, output)
        >>> # Extract only one file
        >>> unzip(archive, output, 'my_file.txt')
        >>> # Extract a list of files
        >>> unzip(archive, output, ['my_file1.txt', 'my_file2.txt'])
        >>> unzip_file('test.zip', 'my_file.txt', output)

    :type  archive: :class:`zipfile.ZipFile` or :obj:`str`
    :param archive: Zipfile object to extract from or path to the zip archive.
    :type  destination: str
    :param destination: Path to the output directory
    :type  filenames: str, list or None
    :param filenames: Path(s) to the filename(s) inside the zip archive that
        you want to extract.
    """
    close = False
    try:
        if not isinstance(archive, zipfile.ZipFile):
            archive = zipfile.ZipFile(archive, 'r', allowZip64=True)
            close = True
        logger.info('Extracting: %s -> %s' % (archive.filename, destination))
        if isinstance(filenames, str):
            filenames = [filenames]
        if filenames is None:  # extract all
            filenames = archive.namelist()
        for filename in filenames:
            if filename.endswith('/'):  # it's a directory
                shell.mkdir(os.path.join(destination, filename))
            else:
                if not _extract_file(archive, destination, filename):
                    raise Exception()
        logger.info('Extracting zip archive "%s" succeeded' % archive.filename)
        return True
    except Exception:
        logger.exception('Error while unzipping archive %s' % archive.filename)
        return False
    finally:
        if close:
            archive.close()
Example #4
def unzip(archive, destination, filenames=None):
    """Unzip a zip archive into destination directory.

    It unzips either the whole archive or specific file(s) from the archive.

    Usage:
        >>> output = os.path.join(os.getcwd(), 'output')
        >>> # Archive can be an instance of a ZipFile class
        >>> archive = zipfile.ZipFile('test.zip', 'r')
        >>> # Or just a filename
        >>> archive = 'test.zip'
        >>> # Extracts all files
        >>> unzip(archive, output)
        >>> # Extract only one file
        >>> unzip(archive, output, 'my_file.txt')
        >>> # Extract a list of files
        >>> unzip(archive, output, ['my_file1.txt', 'my_file2.txt'])
        >>> unzip_file('test.zip', 'my_file.txt', output)

    Args:
        archive (zipfile.ZipFile or str): Zipfile object to extract from or
            path to the zip archive.
        destination (str): Path to the output directory.
        filenames (str or list of str or None): Path(s) to the filename(s)
            inside the zip archive that you want to extract.
    """
    close = False
    try:
        if not isinstance(archive, zipfile.ZipFile):
            archive = zipfile.ZipFile(archive, "r", allowZip64=True)
            close = True
        logger.info("Extracting: %s -> %s" % (archive.filename, destination))
        if isinstance(filenames, str):
            filenames = [filenames]
        if filenames is None:  # extract all
            filenames = archive.namelist()
        for filename in filenames:
            if filename.endswith("/"):  # it's a directory
                shell.mkdir(os.path.join(destination, filename))
            else:
                if not _extract_file(archive, destination, filename):
                    raise Exception()
        logger.info('Extracting zip archive "%s" succeeded' % archive.filename)
        return True
    except Exception:
        logger.exception("Error while unzipping archive %s" % archive.filename)
        return False
    finally:
        if close:
            archive.close()
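A self-contained way to try the function above, assuming the unzip defined here (with its tea.shell and logger dependencies) is importable; the archive and its members are invented for the demonstration:

import os
import tempfile
import zipfile

workdir = tempfile.mkdtemp()
archive_path = os.path.join(workdir, "test.zip")
output = os.path.join(workdir, "output")

# Build a tiny archive to extract from
with zipfile.ZipFile(archive_path, "w") as zf:
    zf.writestr("my_file.txt", "hello")
    zf.writestr("nested/my_file2.txt", "world")

unzip(archive_path, output)                  # extract everything
unzip(archive_path, output, "my_file.txt")   # extract a single member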
Example #5
    def save(self):
        if self.filename is not None:
            dirname = os.path.abspath(os.path.dirname(self.filename))
            if not os.path.isdir(dirname):
                shell.mkdir(dirname)
            if self.fmt == Config.JSON:
                with io.open(self.filename, "w", encoding=self.encoding) as f:
                    f.write(json.dumps(self.data, indent=2))
            elif self.fmt == Config.YAML:
                import yaml

                with io.open(self.filename, "w", encoding=self.encoding) as f:
                    yaml.safe_dump(self.data, f, default_flow_style=False)
            else:
                logger.error("Unsupported configuration format: %s", self.fmt)
Example #6
def setup(module, target='zip', output_path=None, data_dir=None):
    dist = os.path.abspath('dist')
    try:
        if target == 'zip':
            assert er(
                'setup.py', 'install', '--no-compile', '--install-lib',
                os.path.join(dist, 'lib'), '--install-scripts',
                os.path.join(dist),
                *(['--install-data',
                   os.path.join(dist, data_dir)]
                  if data_dir is not None else []))
            with shell.goto(dist) as ok:
                assert ok
                assert compress.mkzip('%s.zip' % module,
                                      glob.glob(os.path.join('lib', '*')))
                assert shell.remove('lib')
        elif target == 'exe':
            assert er(
                'setup.py', 'install', '--no-compile', '--install-lib',
                os.path.join(dist, 'lib', 'python'), '--install-scripts',
                os.path.join(dist, 'scripts'),
                *(['--install-data',
                   os.path.join(dist, data_dir)]
                  if data_dir is not None else []))
            with shell.goto(dist) as ok:
                assert ok

                modules = list(
                    filter(os.path.exists, ['lib', 'scripts'] +
                           ([data_dir] if data_dir is not None else [])))
                assert compress.seven_zip('%s.exe' % module,
                                          modules,
                                          self_extracting=True)
                # Cleanup
                for module in modules:
                    assert shell.remove(module)
        if output_path is not None:
            output_path = os.path.abspath(output_path)
            if output_path != dist:
                if not os.path.isdir(output_path):
                    assert shell.mkdir(output_path)
                for filename in shell.search(dist, '*'):
                    output = os.path.join(
                        output_path,
                        filename.replace(dist, '', 1).strip('\\/'))
                    assert shell.move(filename, output)
        return 0
    except AssertionError as e:
        print(e)
        return 1
    finally:
        # Cleanup
        if output_path != dist:
            shell.remove(dist)
        if os.path.isdir('build'):
            shell.remove('build')
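A possible invocation, assuming the er, shell and compress helpers that setup relies on are available as in the rest of this module; the module name and output directory are placeholders:

# Build a zip distribution of a hypothetical package and collect it in ./artifacts
rc = setup('my_module', target='zip', output_path='artifacts')
if rc != 0:
    print('packaging failed')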
Example #7
def get_key():
    from tea.system import get_appdata
    from tea.shell import mkdir
    dir_path = os.path.join(get_appdata(), 'Tea')
    key_path = os.path.join(dir_path, 'key.bin')
    if os.path.exists(dir_path) and os.path.exists(key_path):
        with open(key_path, 'rb') as f:
            cr_key = Array[Byte](map(ord, f.read()))
            key = ProtectedData.Unprotect(cr_key, None,
                                          DataProtectionScope.CurrentUser)
            return [int(k, 10) for k in key]
    else:
        mkdir(dir_path)
        key = _generate_key()
        arr_key = Array[Byte](key)
        cr_key = ProtectedData.Protect(arr_key, None,
                                       DataProtectionScope.CurrentUser)
        with open(key_path, 'wb') as f:
            f.write(cr_key)
        return key
Example #8
def _extract_file(archive, destination, filename):
    try:
        output_path = os.path.join(destination, filename)
        output_dir = os.path.dirname(output_path)
        if not os.path.isdir(output_dir):
            shell.mkdir(output_dir)
        # Cannot write big chunks of data to windows shares
        MAX_BYTES = 5242880  # 5MB
        # Use context managers so both the archive member and the output
        # file are closed even if a read or write fails
        with archive.open(filename) as reader, open(output_path, 'wb') as writer:
            while True:
                data = reader.read(MAX_BYTES)
                if not data:
                    break
                writer.write(data)
        return True
    except Exception:
        logger.exception('Error while unzipping filename %s from archive %s' %
                         (filename, archive.filename))
        return False
Example #9
    def get_key():
        from tea.system import get_appdata
        from tea.shell import mkdir

        dir_path = os.path.join(get_appdata(), "Tea")
        key_path = os.path.join(dir_path, "key.bin")
        if os.path.exists(dir_path) and os.path.exists(key_path):
            with open(key_path, "rb") as f:
                cr_key = Array[Byte](map(ord, f.read()))
                key = ProtectedData.Unprotect(
                    cr_key, None, DataProtectionScope.CurrentUser
                )
                return [int(k, 10) for k in key]
        else:
            mkdir(dir_path)
            key = _generate_key()
            arr_key = Array[Byte](key)
            cr_key = ProtectedData.Protect(
                arr_key, None, DataProtectionScope.CurrentUser
            )
            with open(key_path, "wb") as f:
                f.write(cr_key)
            return key
Example #10
def _extract_file(archive, destination, filename):
    try:
        output_path = os.path.join(destination, filename)
        output_dir = os.path.dirname(output_path)
        if not os.path.isdir(output_dir):
            shell.mkdir(output_dir)
        # Cannot write big chunks of data to windows shares
        MAX_BYTES = 5242880  # 5MB
        # Context managers make sure both handles are closed on error as well
        with archive.open(filename) as reader, open(output_path, "wb") as writer:
            while True:
                data = reader.read(MAX_BYTES)
                if not data:
                    break
                writer.write(data)
        return True
    except Exception:
        logger.exception(
            "Error while unzipping filename %s from archive %s"
            % (filename, archive.filename)
        )
        return False
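_extract_file streams each member in 5 MB chunks because very large single writes can fail on Windows shares. If plain zipfile access is acceptable, shutil.copyfileobj gives the same chunked copy; a sketch with an illustrative chunk size and archive path:

import os
import shutil
import zipfile

def extract_member(archive, destination, filename, chunk_size=5 * 1024 * 1024):
    """Copy one archive member to disk in fixed-size chunks."""
    output_path = os.path.join(destination, filename)
    output_dir = os.path.dirname(output_path)
    if output_dir and not os.path.isdir(output_dir):
        os.makedirs(output_dir)
    with archive.open(filename) as reader, open(output_path, "wb") as writer:
        shutil.copyfileobj(reader, writer, chunk_size)

# Usage, assuming "test.zip" exists:
#   with zipfile.ZipFile("test.zip") as zf:
#       extract_member(zf, "output", zf.namelist()[0])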
Example #11
    def handle(self, platform, configuration, build_number, no_deps, delete, *args, **kwargs):
        repos = []
        # Build dependency graph, or not
        if no_deps:
            for repo in self.config.repositories:
                b = utils.load_build(repo)
                repos.append(b)
        else:
            all_repos = {}
            for repo in self.config.all_repositories:
                # Remove explicitly skipped
                skipped = self.config.get('options.skipped_repositories') or []
                if repo.name not in skipped:
                    b = utils.load_build(repo)
                    all_repos[b.name] = b
            dg = DependencyGraph(all_repos.values())
            for repo in dg.get_in_order(map(lambda x: all_repos[x.name].id, self.config.repositories), None):
                repos.append(repo)

        config = utils.BuildConfig(self.config, platform, configuration, build_number)

        # if --delete
        if delete and os.path.exists(config.output_path):
            if not shell.remove(config.output_path):
                self.ui.error('Abort: "%s" could not be deleted' % config.output_path)
                return
        # create output path if not exist
        if not os.path.exists(config.output_path):
            shell.mkdir(config.output_path)

        # Build
        for repo in repos:
            self.ui.info(str(repo))
            for project in repo.projects:
                status, output, error = project.build(config)
                self.save_log(project, output + '\n\n\n' + error)
                self.ui.report('  %s' % project, status, {'output': output, 'error': error})
Example #12
def setup(module, target="zip", output_path=None, data_dir=None):
    dist = os.path.abspath("dist")
    try:
        if target == "zip":
            assert er(
                "setup.py",
                "install",
                "--no-compile",
                "--install-lib",
                os.path.join(dist, "lib"),
                "--install-scripts",
                os.path.join(dist),
                *(
                    ["--install-data", os.path.join(dist, data_dir)]
                    if data_dir is not None
                    else []
                ),
            )
            with shell.goto(dist) as ok:
                assert ok
                assert compress.mkzip(
                    "%s.zip" % module, glob.glob(os.path.join("lib", "*"))
                )
                assert shell.remove("lib")
        elif target == "exe":
            assert er(
                "setup.py",
                "install",
                "--no-compile",
                "--install-lib",
                os.path.join(dist, "lib", "python"),
                "--install-scripts",
                os.path.join(dist, "scripts"),
                *(
                    ["--install-data", os.path.join(dist, data_dir)]
                    if data_dir is not None
                    else []
                ),
            )
            with shell.goto(dist) as ok:
                assert ok

                modules = list(
                    filter(
                        os.path.exists,
                        ["lib", "scripts"]
                        + ([data_dir] if data_dir is not None else []),
                    )
                )
                assert compress.seven_zip(
                    "%s.exe" % module, modules, self_extracting=True
                )
                # Cleanup
                for module in modules:
                    assert shell.remove(module)
        if output_path is not None:
            output_path = os.path.abspath(output_path)
            if output_path != dist:
                if not os.path.isdir(output_path):
                    assert shell.mkdir(output_path)
                for filename in shell.search(dist, "*"):
                    output = os.path.join(
                        output_path, filename.replace(dist, "", 1).strip("\\/")
                    )
                    assert shell.move(filename, output)
        return 0
    except AssertionError as e:
        print(e)
        return 1
    finally:
        # Cleanup
        if output_path != dist:
            shell.remove(dist)
        if os.path.isdir("build"):
            shell.remove("build")
Example #13
def create(project=None, author=None, email=None):
    """Creates the project skeleton.

    Args:
        project (str): Project name or path to the project directory
        author (str): Author's full name
        email (str): Author's email address
    """
    if project is None:
        project_name = input('Project name: ')
        project_dir = os.path.abspath(os.path.join(os.getcwd(), project_name))
    else:
        project_name = os.path.basename(project)
        if project_name != project:
            project_dir = os.path.abspath(project)
        else:
            project_dir = os.path.abspath(
                os.path.join(os.getcwd(), project_name))

    if author is None:
        author = input('Author: ')

    if email is None:
        email = input('Email: ')

    # Create the project skeleton
    print(f'Creating project: {project_dir}')
    shell.mkdir(project_dir)
    shell.touch(os.path.join(project_dir, '.gitignore'), content=GIT_IGNORE)
    shell.touch(os.path.join(project_dir, 'requirements.txt'),
                content=REQUIREMENTS)

    # Create configuration
    config_dir = os.path.join(project_dir, 'config')
    shell.mkdir(config_dir)
    shell.touch(os.path.join(config_dir, f'{project_name}.yaml'),
                content=CONFIG.format(project_name=project_name))

    # Create data directory
    data_dir = os.path.join(project_dir, 'data')
    shell.mkdir(data_dir)
    shell.touch(os.path.join(data_dir, '.keep'))

    # Create models directory
    models_dir = os.path.join(project_dir, 'models')
    shell.mkdir(models_dir)
    shell.touch(os.path.join(models_dir, '.keep'))

    # Create notebooks directory
    notebooks_dir = os.path.join(project_dir, 'notebooks')
    shell.mkdir(notebooks_dir)
    shell.touch(os.path.join(notebooks_dir, '.keep'))

    # Create the app package
    signature = SIGNATURE.format(author=author,
                                 email=email,
                                 now=datetime.now())
    app_dir = os.path.join(project_dir, project_name)
    shell.mkdir(app_dir)
    shell.touch(os.path.join(app_dir, '__init__.py'), content=signature)
    # Create models subdirectory
    shell.mkdir(os.path.join(app_dir, 'models'))
    shell.touch(os.path.join(app_dir, 'models', '__init__.py'),
                content=signature)
    # Create commands subdirectory
    shell.mkdir(os.path.join(app_dir, 'commands'))
    shell.touch(os.path.join(app_dir, 'commands', '__init__.py'),
                content=signature)
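Roughly the same layout can be produced with pathlib alone, without the tea.shell helpers; a minimal sketch in which the file contents are left empty as stand-ins for the GIT_IGNORE, REQUIREMENTS, CONFIG and SIGNATURE templates used above:

from pathlib import Path

def create_skeleton(project_dir, project_name, signature=''):
    root = Path(project_dir)
    # Top-level support directories, each with a .keep marker
    for sub in ('config', 'data', 'models', 'notebooks'):
        (root / sub).mkdir(parents=True, exist_ok=True)
        (root / sub / '.keep').touch()
    (root / '.gitignore').touch()
    (root / 'requirements.txt').touch()
    # The application package with its models and commands subpackages
    app = root / project_name
    for pkg in (app, app / 'models', app / 'commands'):
        pkg.mkdir(parents=True, exist_ok=True)
        (pkg / '__init__.py').write_text(signature)

create_skeleton('my_project', 'my_project')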
Example #14
def configure_logging(
    filename=None,
    filemode="a",
    datefmt=FMT_DATE,
    fmt=FMT,
    stdout_fmt=FMT_STDOUT,
    level=logging.DEBUG,
    stdout_level=logging.WARNING,
    initial_file_message="",
    max_size=1048576,
    rotations_number=5,
    remove_handlers=True,
):
    """Configure logging module.

    Args:
        filename (str): Specifies a filename to log to.
        filemode (str): Specifies the mode to open the log file.
            Values: ``'a'``, ``'w'``. *Default:* ``a``.
        datefmt (str): Use the specified date/time format.
        fmt (str): Format string for the file handler.
        stdout_fmt (str): Format string for the stdout handler.
        level (int): Log level for the file handler. Log levels are the same
            as the log levels from the standard :mod:`logging` module.
            *Default:* ``logging.DEBUG``
        stdout_level (int): Log level for the stdout handler. Log levels are
            the same as the log levels from the standard :mod:`logging` module.
            *Default:* ``logging.WARNING``
        initial_file_message (str): First log entry written in file.
        max_size (int): Maximum size of the logfile. If the file grows
            beyond this size, it is rotated.
        rotations_number (int): Number of rotations to save.
        remove_handlers (bool): Remove all existing handlers.
    """
    logger = logging.getLogger()
    logger.level = logging.NOTSET
    # Remove all handlers
    if remove_handlers:
        while len(logger.handlers) > 0:
            hdlr = logger.handlers[0]
            hdlr.close()
            logger.removeHandler(hdlr)
    # Create stdout handler
    if stdout_level is not None:
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setLevel(stdout_level)
        stdout_formatter = logging.Formatter(stdout_fmt, datefmt)
        # stdoutFormatter.converter = time.gmtime
        stdout_handler.setFormatter(stdout_formatter)
        logger.addHandler(stdout_handler)
    # Create file handler if filename is provided
    if filename is not None:
        # Check if filename directory exists and creates it if it doesn't
        directory = os.path.abspath(os.path.dirname(filename))
        if not os.path.isdir(directory):
            shell.mkdir(directory)
        # Create file handler
        file_handler = RotatingFileHandler(
            filename, filemode, max_size, rotations_number
        )
        file_handler.setLevel(level)
        file_formatter = logging.Formatter(fmt, datefmt)
        file_formatter.converter = time.gmtime
        file_handler.setFormatter(file_formatter)
        logger.addHandler(file_handler)
        if initial_file_message:
            message = " %s " % initial_file_message
            file_handler.stream.write("\n" + message.center(100, "=") + "\n\n")
Example #15
def configure_logging(filename=None,
                      filemode='a',
                      datefmt=FMT_DATE,
                      fmt=FMT,
                      stdout_fmt=FMT_STDOUT,
                      level=logging.DEBUG,
                      stdout_level=logging.WARNING,
                      initial_file_message='',
                      max_size=1048576,
                      rotations_number=5,
                      remove_handlers=True):
    """Configure logging module.

    :param str filename: Specifies a filename to log to.
    :param str filemode: Specifies the mode to open the log file. Values:
                         ``'a'``, ``'w'``. *Default:* ``a``
    :param str datefmt: Use the specified date/time format.
    :param str fmt: Format string for the file handler.
    :param str stdout_fmt: Format string for the stdout handler.
    :param int level: Log level for the file handler. Log levels are the same
                      as the log levels from the standard :mod:`logging`
                      module. *Default:* ``logging.DEBUG``
    :param int stdout_level: Log level for the stdout handler. Log levels are
                             the same as the log levels from the standard
                             :mod:`logging` module. *Default:*
                             ``logging.WARNING``
    :param str initial_file_message: First log entry written in file.
    :param int max_size: Maximum size of the logfile. If the file grows
                         beyond this size, it is rotated.
    :param int rotations_number: Number of rotations to save.
    :param bool remove_handlers: Remove all existing handlers.
    :rtype: None
    """
    logger = logging.getLogger()
    logger.level = logging.NOTSET
    # Remove all handlers
    if remove_handlers:
        while len(logger.handlers) > 0:
            hdlr = logger.handlers[0]
            hdlr.close()
            logger.removeHandler(hdlr)
    # Create stdout handler
    if stdout_level is not None:
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setLevel(stdout_level)
        stdout_formatter = logging.Formatter(stdout_fmt, datefmt)
        # stdoutFormatter.converter = time.gmtime
        stdout_handler.setFormatter(stdout_formatter)
        logger.addHandler(stdout_handler)
    # Create file handler if filename is provided
    if filename is not None:
        # Check if filename directory exists and creates it if it doesn't
        directory = os.path.abspath(os.path.dirname(filename))
        if not os.path.isdir(directory):
            shell.mkdir(directory)
        # Create file handler
        file_handler = RotatingFileHandler(filename, filemode, max_size,
                                           rotations_number)
        file_handler.setLevel(level)
        file_formatter = logging.Formatter(fmt, datefmt)
        file_formatter.converter = time.gmtime
        file_handler.setFormatter(file_formatter)
        logger.addHandler(file_handler)
        if initial_file_message:
            message = ' %s ' % initial_file_message
            file_handler.stream.write('\n' + message.center(100, '=') + '\n\n')