Example #1
 def _check_sync_flag(self):
     sync_flag = os.path.join(self.config['stage_dir'], 'sync.flag')
     if os.path.exists(sync_flag):
         stat = os.stat(sync_flag)
         owner = pwd.getpwuid(stat.st_uid).pw_name
         utils.get_logger().error(
             "%s's sync.flag is blocking deployments", owner)
         raise IOError(errno.EPERM, 'Blocked by sync.flag', sync_flag)
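
A minimal, self-contained sketch of how this check behaves, using a stand-in class for the deployment object and a plain exception instead of `utils.get_logger()` (all names and paths here are illustrative, not part of the original module):

import errno
import os
import pwd
import tempfile

class StubDeploy(object):
    """Illustrative stand-in for the class the method above belongs to."""
    def __init__(self, stage_dir):
        self.config = {'stage_dir': stage_dir}

    def _check_sync_flag(self):
        sync_flag = os.path.join(self.config['stage_dir'], 'sync.flag')
        if os.path.exists(sync_flag):
            owner = pwd.getpwuid(os.stat(sync_flag).st_uid).pw_name
            raise IOError(errno.EPERM, 'Blocked by sync.flag', sync_flag)

stage = tempfile.mkdtemp()
StubDeploy(stage)._check_sync_flag()        # no flag file: returns quietly
open(os.path.join(stage, 'sync.flag'), 'w').close()
try:
    StubDeploy(stage)._check_sync_flag()    # flag present: raises IOError
except IOError as exc:
    print(exc)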
Example #2
 def __exit__(self, *args):
     # Release the lock and remove the lock file
     if self.lock_fd:
         fcntl.lockf(self.lock_fd, fcntl.LOCK_UN)
         os.close(self.lock_fd)
         if os.path.exists(self.filename):
             try:
                 os.unlink(self.filename)
             except OSError:
                 # Someone else deleted the lock. Freaky, but we already
                 # did our stuff so there's no point in halting execution
                 utils.get_logger().warning(
                     'Huh, lock file disappeared before deletion. ' +
                     'This is probably fine-ish :)')
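
This `__exit__` is only half of a file-lock context manager; the matching `__enter__` is not shown in the example. A minimal sketch of the full pattern, assuming `lockf`-based advisory locking (the class name and file mode are illustrative):

import fcntl
import logging
import os

class FileLock(object):
    """Illustrative advisory file lock; the release path mirrors the
    __exit__ shown above."""

    def __init__(self, filename):
        self.filename = filename
        self.lock_fd = None

    def __enter__(self):
        self.lock_fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT, 0o660)
        fcntl.lockf(self.lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        return self

    def __exit__(self, *args):
        if self.lock_fd:
            fcntl.lockf(self.lock_fd, fcntl.LOCK_UN)
            os.close(self.lock_fd)
            if os.path.exists(self.filename):
                try:
                    os.unlink(self.filename)
                except OSError:
                    logging.getLogger(__name__).warning(
                        'Lock file disappeared before deletion')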
Example #3
def refresh_cdb_json_files(in_dir, pool_size, verbose):
    """
    Update JSON files from their corresponding cdb files in parallel.

    :param in_dir: directory containing cdb files
    :param pool_size: number of "threads" to use
    :param verbose: output verbosely
    """
    logger = utils.get_logger()
    cdb_files = glob.glob(os.path.join(in_dir, '*.cdb'))
    pool = multiprocessing.Pool(pool_size)

    reporter = log.MuteReporter()
    updated = 0

    if verbose:
        reporter = log.ProgressReporter('cdb update')

    reporter.expect(len(cdb_files))
    reporter.start()

    for result in pool.imap_unordered(refresh_cdb_json_file, cdb_files):
        if result:
            updated += 1
        reporter.add_success()

    reporter.finish()
    logger.info('Updated %s JSON file(s) in %s', updated, in_dir)
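
A hypothetical call (the directory is illustrative):

refresh_cdb_json_files('/srv/mediawiki/cache/l10n', pool_size=4, verbose=True)

Note that `refresh_cdb_json_file`, the worker passed to `imap_unordered`, has to be defined at module level: `multiprocessing.Pool` pickles the callable by reference when dispatching it to worker processes.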
Example #4
def update_submodules(location, git_remote=None, use_upstream=False,
                      reference=None):
    """Update git submodules on target machines"""

    if not use_upstream and git_remote is None:
        raise ValueError("Must set git_remote if not using upstream")

    ensure_dir(location)

    logger = utils.get_logger()

    with utils.cd(location):
        logger.debug('Fetch submodules')
        if not use_upstream:
            logger.debug('Remapping submodule %s to %s', location, git_remote)
            remap_submodules(location, git_remote)
        else:
            logger.debug('Using upstream submodules')

        cmd = ['update', '--init', '--recursive']
        cmd = append_jobs_arg(cmd)

        if reference is not None and GIT_VERSION[0] > 1:
            logger.debug('Using --reference repository: %s', reference)
            ensure_dir(reference)
            cmd.append('--reference')
            cmd.append(reference)

        git.submodule(*cmd)
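
`GIT_VERSION` and `append_jobs_arg` come from the surrounding module and are not shown. A plausible sketch of the helper, assuming `GIT_VERSION` is a version tuple and that the flag being appended is `--jobs` (supported by `git submodule update` since git 2.9); the real implementation may differ:

import multiprocessing

GIT_VERSION = (2, 30)  # illustrative; the real module detects this at import

def append_jobs_arg(cmd):
    # Parallelize submodule fetches when git is new enough to support it
    if GIT_VERSION >= (2, 9):
        cmd.extend(['--jobs', str(multiprocessing.cpu_count())])
    return cmd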
Example #5
def info(directory):
    """Compute git version information for a given directory that is
    compatible with MediaWiki's GitInfo class.

    :param directory: Directory to scan for git information
    :returns: Dict of information about current repository state
    """
    git_dir = resolve_gitdir(directory)

    head_file = os.path.join(git_dir, 'HEAD')
    with open(head_file, 'r') as headfile:
        head = headfile.read().strip()
    if head.startswith('ref: '):
        head = head[5:]

    if head.startswith('refs/heads/'):
        branch = head[11:]
    elif head.startswith('refs/tags/'):
        branch = head[10:]
    else:
        branch = head

    head_sha1 = get_disclosable_head(directory, branch)
    if head_sha1:
        with sh.pushd(git_dir):
            commit_date = git.show('-s', '--format=%ct', head_sha1).strip()
    else:
        commit_date = ''

    # Requires git v1.7.5+
    try:
        with sh.pushd(git_dir):
            remote_url = git('ls-remote', '--get-url').strip()

    except ErrorReturnCode:
        remote_url = ''
        utils.get_logger().info("Unable to find remote URL for %s", git_dir)

    return {
        '@directory': directory,
        'head': head,
        'headSHA1': head_sha1,
        'headCommitDate': commit_date,
        'branch': branch,
        'remoteURL': remote_url,
    }
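
The returned dict is JSON-serializable, which is how MediaWiki's GitInfo expects to consume it. A hypothetical call (the path is illustrative):

import json

print(json.dumps(info('/srv/mediawiki/php-master'), indent=2))
# {"@directory": "/srv/mediawiki/php-master", "head": "refs/heads/master", ...}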
Example #6
def get_disclosable_head(repo_directory, remote_thing):
    """
    Get the SHA1 of the most recent commit that can be publicly disclosed.
    If a commit only exists locally, it is considered private. This function
    will try to get the tip of the remote tracking branch, and fall back to
    the common ancestor of HEAD and the remote version of the local branch
    we're ostensibly tracking.

    :param repo_directory: Directory to look into
    :param remote_thing: If you're not actively tracking a remote branch, you
                         need to provide something remote for this function to
                         look for a common ancestor with. Otherwise, this
                         function has no way of knowing what common tree
                         you could possibly care about. This could be a branch,
                         a tag, or a plain sha1
    :returns: str
    """
    with open(os.devnull, 'wb') as dev_null:
        try:
            # universal_newlines so check_output returns str, as documented
            return subprocess.check_output(
                ('/usr/bin/git', 'rev-list', '-1', '@{upstream}'),
                cwd=repo_directory,
                stderr=dev_null,
                universal_newlines=True).strip()
        except subprocess.CalledProcessError:
            try:
                remote = subprocess.check_output(
                    ('/usr/bin/git', 'remote'),
                    cwd=repo_directory,
                    stderr=dev_null,
                    universal_newlines=True).strip()

                # If the branch is not a SHA1, combine with remote name
                if not re.match('[a-f0-9]{40}', remote_thing):
                    remote_thing = '%s/%s' % (remote, remote_thing)
                # If the branch is a SHA1, count on remote HEAD being a
                # symbolic-ref for the actual remote
                else:
                    remote_thing = remote
                return subprocess.check_output(
                    ('/usr/bin/git', 'merge-base', 'HEAD', remote_thing),
                    cwd=repo_directory,
                    stderr=dev_null,
                    universal_newlines=True).strip()
            except subprocess.CalledProcessError:
                utils.get_logger().info(
                    'Unable to find remote tracking branch/tag for %s',
                    repo_directory)
                return ''
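
The first `rev-list` call relies on git's `@{upstream}` notation, which only resolves when the current branch tracks a remote branch; the fallback builds a remote ref by hand from `git remote` output. A hypothetical call (path and branch are illustrative):

head_sha1 = get_disclosable_head('/srv/deployment/myrepo', 'master')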
Example #7
def checkout(location, rev):
    """Checkout a git repo sha at a location
    """
    ensure_dir(location)

    logger = utils.get_logger()

    with sh.pushd(location):
        logger.debug('Checking out rev: %s at location: %s', rev, location)
        git.checkout('--force', '--quiet', rev)
Example #8
def sync_submodules(location):
    """Sync git submodules on target machines"""

    ensure_dir(location)

    logger = utils.get_logger()

    with sh.pushd(location):
        logger.debug('Syncing submodules')
        git.submodule('sync', '--recursive')
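
Taken together with `checkout` and `update_submodules` above, a plausible target-side update sequence looks like this (the path, rev, and remote URL are illustrative, and the exact ordering in the real deploy flow may differ):

repo = '/srv/deployment/myrepo'
checkout(repo, 'a1b2c3d')             # force-checkout the target rev
sync_submodules(repo)                 # re-sync submodule URLs from .gitmodules
update_submodules(repo, git_remote='http://deploy.example/myrepo/.git')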
Example #9
def remap_submodules(location, server):
    """Remap all submodules to deployment server

    This function supports remapping submodules available on the deployment
    server. Since the remote is a non-bare repo (as of git 1.7.8) all
    submodules should be available over http under the remote server
    checkout's git dir:
    [server]/[repo]/.git/modules/[submodule_path]

    :param location: String path to local git checkout containing a
                     `.gitmodules` file
    :param server: String path to remote, non-bare, repo gitdir
    """
    ensure_dir(location)

    logger = utils.get_logger()

    with sh.pushd(location):
        gitmodule = os.path.join(location, '.gitmodules')
        if not os.path.isfile(gitmodule):
            logger.warning(
                'Unable to remap submodules: no .gitmodules in %s', location)
            return

        logger.info('Updating .gitmodules: %s', os.path.dirname(gitmodule))

        # ensure we're working with a non-modified .gitmodules file
        git.checkout('.gitmodules')

        # get .gitmodule info
        modules = git.config('--list', '--file', '.gitmodules')

        submodules = {}
        for line in modules.split('\n'):
            if not line.startswith('submodule.'):
                continue

            module_conf = line.split('=')
            module_name = module_conf[0].strip()

            if module_name.endswith('.path'):
                name = module_name[len('submodule.'):-len('.path')]
                submodules[name] = module_conf[1].strip()

        with open(gitmodule, 'w') as module:
            for submodule_name, submodule_path in submodules.items():
                # Since we're using a non-bare http remote, map the submodule
                # to the submodule path under $GIT_DIR/modules subdirectory of
                # the superproject (git documentation: https://git.io/v4W9F).
                remote_path = '{}/modules/{}'.format(server, submodule_name)
                module.write('[submodule "{}"]\n'.format(submodule_name))
                module.write('\tpath = {}\n'.format(submodule_path))
                module.write('\turl = {}\n'.format(remote_path))

        sync_submodules(location)
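
For a submodule named `vendor` and a server gitdir of `http://deploy.example/myrepo/.git` (both illustrative), the rewritten stanza would read:

remap_submodules('/srv/deployment/myrepo', 'http://deploy.example/myrepo/.git')
# .gitmodules afterwards contains:
# [submodule "vendor"]
#     path = vendor
#     url = http://deploy.example/myrepo/.git/modules/vendor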
Example #10
def get_lock_excuse(lockfile):
    """
    Get an excuse for why we couldn't lock the file.

    Read the file and its owner, if we can. Fail gracefully with something
    if we can't read it (most likely permissions)

    :param lockfile: Lock file to look for information in
    """

    bad_user = '******'
    excuses = 'no excuse given'
    try:
        bad_user = utils.get_username(os.stat(lockfile).st_uid) or bad_user
        with open(lockfile, 'r') as lock_file:
            excuses = lock_file.read() or excuses
    except (IOError, OSError) as failure:
        # We couldn't read the lock file; at least log what failed
        utils.get_logger().warning(failure)
    return 'Failed to acquire lock "%s"; owner is "%s"; reason is "%s"' % (
        lockfile, bad_user, excuses)
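
A hypothetical call (the lock file path and message are illustrative):

excuse = get_lock_excuse('/var/lock/scap.lock')
# -> 'Failed to acquire lock "/var/lock/scap.lock"; owner is "jdoe";
#     reason is "deploying security patch"'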
Example #11
def update_deploy_head(deploy_info, location):
    """updates .git/DEPLOY_HEAD file

    :param deploy_info: current deploy info to write to file as YAML
    :param (optional) location: git directory location (default cwd)
    """
    logger = utils.get_logger()
    ensure_dir(location)

    with utils.cd(location):
        deploy_file = os.path.join(location, '.git', 'DEPLOY_HEAD')
        logger.debug('Creating %s', deploy_file)
        with open(deploy_file, 'w+') as deployfile:
            deployfile.write(yaml.dump(deploy_info, default_flow_style=False))
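
A hypothetical call, writing the given info as YAML to `<location>/.git/DEPLOY_HEAD` (the values are illustrative):

update_deploy_head({'commit': 'a1b2c3d', 'user': 'jdoe'},
                   '/srv/deployment/myrepo')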
Example #12
def logstash_canary_checks(canaries,
                           service,
                           threshold,
                           logstash,
                           delay,
                           cores=2):
    """
    Run the logstash canary checks on test application servers.

    :param canaries: list, canaries to check
    :param service: string, name of the service to check
    :param threshold: float, average log multiple at which to fail
    :param logstash: string, logstash server
    :param delay: float, time between deploy and now
    :param cores: number of processor cores to use
    """
    logger = utils.get_logger()

    canary_checks = []

    # Build Check command list
    for canary in canaries:
        check_name = 'Logstash Error rate for {}'.format(canary)

        # Split canary name at first "." since domain isn't in logs
        canary = canary.split('.')[0]

        cmd = [
            '/usr/local/bin/logstash_checker.py', '--service-name', service,
            '--host', canary, '--fail-threshold', threshold, '--delay', delay,
            '--logstash-host', logstash
        ]

        cmd = ' '.join(map(str, cmd))
        canary_checks.append(
            checks.Check(check_name,
                         'logstash-canary',
                         command=cmd,
                         timeout=120.0))

    success, done = checks.execute(canary_checks, logger, concurrency=cores)
    failed = [job.check.name for job in done if job.isfailure()]

    return (len(done) - len(failed), len(failed))
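
With illustrative arguments, each canary host yields one shell command for `checks.execute` to run:

succeeded, failed = logstash_canary_checks(
    canaries=['mw1001.eqiad.wmnet'],
    service='mediawiki',
    threshold=10.0,
    logstash='logstash1001:9200',
    delay=120.0)
# command built for mw1001.eqiad.wmnet:
# /usr/local/bin/logstash_checker.py --service-name mediawiki --host mw1001
#     --fail-threshold 10.0 --delay 120.0 --logstash-host logstash1001:9200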
Example #13
def endpoint_canary_checks(canaries, url, spec_path='/spec.yaml', cores=2):
    """
    Run service-checker-swagger canary checks on test application servers.

    :param canaries: list, canaries to check
    :param url: url to pass to service-checker-swagger
    :param spec_path: spec path to pass to service-checker-swagger
    :param cores: number of processor cores to use
    """
    logger = utils.get_logger()

    canary_checks = []

    # Build Check command list
    for canary in canaries:
        check_name = 'Check endpoints for {}'.format(canary)

        # service-checker-swagger \
        #   deployment-mediawiki-07.deployment-prep.eqiad.wmflabs \
        #   http://en.wikipedia.beta.wmflabs.org \
        #   -s "/spec.yaml"
        cmd = [
            '/usr/bin/service-checker-swagger',
            canary,
            url,
            '-s',
            spec_path
        ]

        cmd = ' '.join(map(str, cmd))
        canary_checks.append(
            checks.Check(
                check_name, 'endpoint-canary', command=cmd, timeout=120.0))

    success, done = checks.execute(canary_checks, logger, concurrency=cores)
    failed = [job.check.name for job in done if job.isfailure()]

    return (len(done) - len(failed), len(failed))
Example #14
 def stage(self):
     utils.get_logger().info(
         'Check %s is empty and will not be run',
         self.name
     )
     return None
Example #15
 def __init__(self, host):
     super(JSONOutputHandler, self).__init__(host)
     self._logger = utils.get_logger().getChild('target').getChild(host)
     self._partial = ''
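
`Logger.getChild` composes dotted logger names, so per-host output hangs off the parent logger's hierarchy and inherits its handlers. Assuming the logger returned by `utils.get_logger()` is named 'scap' (an assumption; the actual name depends on the module):

import logging

log = logging.getLogger('scap').getChild('target').getChild('mw1001')
print(log.name)  # scap.target.mw1001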
Example #16
def refresh_cdb_json_file(file_path):
    """
    Rebuild json file from cdb file.

    #. Check md5 file saved in upstream against md5 of cdb file
    #. Read cdb file to dict
    #. Write dict to named temporary file
    #. Change permissions on named temporary file
    #. Overwrite upstream json file
    #. Write upstream md5 file
    """
    cdb_dir = os.path.dirname(file_path)
    file_name = os.path.basename(file_path)
    upstream_dir = os.path.join(cdb_dir, 'upstream')
    upstream_md5 = os.path.join(upstream_dir, '{}.MD5'.format(file_name))
    upstream_json = os.path.join(upstream_dir, '{}.json'.format(file_name))

    logger = utils.get_logger()
    logger.debug('Processing: %s', file_name)

    cdb_md5 = utils.md5_file(file_path)
    try:
        with open(upstream_md5, 'r') as f:
            json_md5 = f.read()

        # If the cdb file matches the generated md5,
        # no changes are needed to the json
        if json_md5 == cdb_md5:
            return True
    except IOError:
        pass

    tmp_json = tempfile.NamedTemporaryFile(delete=False)
    with open(file_path, 'r') as fp:
        reader = cdblib.Reader(fp.read())

    out = collections.OrderedDict()
    for k, v in reader.items():
        out[k] = v

    json_data = json.dumps(out, indent=0, separators=(',', ':'))

    # Make python json.dumps match php's json_encode
    # Remove first newline
    json_data = json_data.replace('\n', '', 1)

    # Escape slashes
    json_data = json_data.replace('/', r'\/')

    # Remove final newline
    json_data = ''.join(json_data.rsplit('\n', 1))

    tmp_json.write(json_data)
    tmp_json.close()
    os.chmod(tmp_json.name, 0o644)
    shutil.move(tmp_json.name, upstream_json)
    logger.debug('Updated: %s', upstream_json)

    with open(upstream_md5, 'w') as md5:
        md5.write(cdb_md5)

    return True
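
The `.MD5` sidecar acts as a change guard: when the stored digest still matches the cdb file's digest, the JSON is already current and the rebuild is skipped. `indent=0` makes `json.dumps` emit one entry per line, and the string surgery that follows (dropping newlines, escaping slashes) brings the output in line with PHP's `json_encode`. Note this snippet is Python 2-era code: under Python 3 the cdb file would need to be opened in binary mode and the tempfile written with a consistent str/bytes type. A hypothetical call (the path is illustrative):

refresh_cdb_json_file('/srv/mediawiki/cache/l10n/l10n_cache-en.cdb')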