Example #1
def main():
    parser = argparse.ArgumentParser(description=(
        'Script to dump a HAProxy map between container IPs and task IDs.'), )
    parser.add_argument(
        'map_file',
        nargs='?',
        default='/var/run/synapse/maps/ip_to_service.map',
        help='Where to write the output map file',
    )
    args = parser.parse_args()

    prev_ip_to_task_id = get_prev_file_contents(args.map_file)

    new_lines = []
    ip_addrs = []
    service_ips_and_ids = extract_taskid_and_ip(get_docker_client())

    for ip_addr, task_id in service_ips_and_ids:
        ip_addrs.append(ip_addr)
        update_haproxy_mapping(ip_addr, task_id, prev_ip_to_task_id,
                               args.map_file)
        new_lines.append(f'{ip_addr} {task_id}')

    remove_stopped_container_entries(prev_ip_to_task_id.keys(), ip_addrs,
                                     args.map_file)

    # Replace the file contents with the new map
    with atomic_file_write(args.map_file) as fp:
        fp.write('\n'.join(new_lines))
Example #2
def test_atomic_file_write_itest():
    tempdir = tempfile.mkdtemp()
    target_file_name = os.path.join(tempdir, 'test_atomic_file_write_itest.txt')

    try:
        old_umask = os.umask(0o022)
        with open(target_file_name, 'w') as f_before:
            f_before.write('old content')

        with utils.atomic_file_write(target_file_name) as f_new:
            f_new.write('new content')

            with open(target_file_name) as f_existing:
                # While in the middle of an atomic_file_write, the existing
                # file should still contain the old content, and should not
                # be truncated, etc.
                assert f_existing.read() == 'old content'

        with open(target_file_name) as f_done:
            # once we're done, the content should be in place.
            assert f_done.read() == 'new content'

        file_stat = os.stat(target_file_name)
        assert stat.S_ISREG(file_stat.st_mode)
        assert stat.S_IMODE(file_stat.st_mode) == 0o644

    finally:
        os.umask(old_umask)
        shutil.rmtree(tempdir)
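The final permission assertion follows from umask arithmetic: a 0o022 umask clears the group and other write bits from the default 0o666 file mode, leaving 0o644. A quick interactive check:

>>> oct(0o666 & ~0o022)
'0o644'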
Example #3
def main():
    args = parse_args()
    soa_dir = os.path.abspath(args.soa_dir)
    service = args.service
    if args.verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.WARNING)
    try:
        with open(os.path.join(soa_dir, service, TARGET_FILE), 'r') as f:
            old_deployments_dict = json.load(f)
            old_mappings = get_deploy_group_mappings_from_deployments_dict(
                old_deployments_dict)
    except (IOError, ValueError):
        old_mappings = {}
    mappings = get_deploy_group_mappings(
        soa_dir=soa_dir,
        service=service,
        old_mappings=old_mappings,
    )

    deployments_dict = get_deployments_dict_from_deploy_group_mappings(
        mappings)

    with atomic_file_write(os.path.join(soa_dir, service, TARGET_FILE)) as f:
        json.dump(deployments_dict, f)
Example #4
def write_json_file(filename):
    configuration = generate_configuration()
    with atomic_file_write(filename) as fp:
        json.dump(obj=configuration,
                  fp=fp,
                  indent=2,
                  sort_keys=True,
                  separators=(",", ": "))
Example #5
def write_rfc1700_file(filename):
    strings = [
        "# This file is generated by generate_services_file",
        "# DO NOT EDIT BY HAND",
    ]
    for service in sorted(os.listdir(DEFAULT_SOA_DIR)):
        strings.extend(get_service_lines_for_service(service))
    with atomic_file_write(filename) as fp:
        fp.write("\n".join(strings))
        fp.write("\n")
Example #6
def test_atomic_file_write():
    with mock.patch("tempfile.NamedTemporaryFile", autospec=True) as ntf_patch:
        file_patch = ntf_patch().__enter__()
        file_patch.name = "/hurp/.durp-AAA"
        ntf_patch.reset_mock()

        with mock.patch("os.rename", autospec=True) as rename_patch:
            with mock.patch("os.chmod", autospec=True) as chmod_patch:
                with utils.atomic_file_write("/hurp/durp"):
                    ntf_patch.assert_called_once_with(dir="/hurp", prefix=".durp-", delete=False)
                chmod_patch.assert_called_once_with("/hurp/.durp-AAA", mock.ANY)

            rename_patch.assert_called_once_with("/hurp/.durp-AAA", "/hurp/durp")
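Taken together, these mocks pin down the mechanics of atomic_file_write: stage the output in a hidden NamedTemporaryFile in the target's directory, then chmod it and rename it over the destination. A minimal Python 3 sketch consistent with those assertions (mode='w' is added here so callers can write str, and _get_umask is an illustrative helper, not part of the original code):

import contextlib
import os
import tempfile


def _get_umask():
    # Illustrative helper: read the process umask without leaving it changed.
    current = os.umask(0)
    os.umask(current)
    return current


@contextlib.contextmanager
def atomic_file_write(target_path):
    dirname = os.path.dirname(target_path)
    basename = os.path.basename(target_path)

    # Stage the new contents in a hidden temp file in the same directory,
    # so the final rename stays on one filesystem and is therefore atomic.
    with tempfile.NamedTemporaryFile(
        mode='w',
        dir=dirname,
        prefix='.%s-' % basename,
        delete=False,
    ) as fp:
        temp_path = fp.name
        yield fp

    # NamedTemporaryFile creates files as 0600; widen to the usual
    # umask-derived mode (0644 under a 022 umask) before the swap.
    os.chmod(temp_path, 0o666 & ~_get_umask())
    os.rename(temp_path, target_path)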
Example #7
def main():
    if len(sys.argv) != 2:
        print("Usage: %s <output_path>" % sys.argv[0], file=sys.stderr)
        sys.exit(1)

    output_path = sys.argv[1]
    configuration = generate_configuration()

    with atomic_file_write(output_path) as fp:
        yaml.dump(configuration,
                  fp,
                  indent=2,
                  explicit_start=True,
                  default_flow_style=False)
Example #8
def generate_deployments_for_service(service: str, soa_dir: str) -> None:
    try:
        with open(os.path.join(soa_dir, service, TARGET_FILE), "r") as oldf:
            old_deployments_dict = json.load(oldf)
    except (IOError, ValueError):
        old_deployments_dict = {}
    mappings, v2_mappings = get_deploy_group_mappings(soa_dir=soa_dir, service=service)

    deployments_dict = get_deployments_dict_from_deploy_group_mappings(
        mappings, v2_mappings
    )
    if deployments_dict != old_deployments_dict:
        with atomic_file_write(os.path.join(soa_dir, service, TARGET_FILE)) as newf:
            json.dump(deployments_dict, newf)
Example #9
def main():
    if len(sys.argv) != 2:
        print("Usage: %s <output_path>" % sys.argv[0], file=sys.stderr)
        sys.exit(1)

    output_path = sys.argv[1]
    configuration = generate_configuration()

    with atomic_file_write(output_path) as fp:
        yaml.dump(configuration,
                  fp,
                  indent=2,
                  explicit_start=True,
                  default_flow_style=False)
Example #10
def main(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument("output_path")
    args = parser.parse_args(argv)

    configuration = generate_configuration()

    with atomic_file_write(args.output_path) as fp:
        fp.write(
            "# This file is automatically generated by paasta_tools.\n"
            "# It was automatically generated at {now} on {host}.\n".format(
                host=socket.getfqdn(), now=datetime.now().isoformat()
            )
        )
        yaml.dump(configuration, fp, indent=2, explicit_start=True, default_flow_style=False, allow_unicode=False)
Example #11
def write_yaml_file(filename):
    configuration = generate_configuration()
    with atomic_file_write(filename) as fp:
        fp.write(
            '# This file is automatically generated by paasta_tools.\n'
            '# It was automatically generated at {now} on {host}.\n'.format(
                host=socket.getfqdn(),
                now=datetime.now().isoformat(),
            ), )
        yaml.dump(
            configuration,
            fp,
            indent=2,
            explicit_start=True,
            default_flow_style=False,
            allow_unicode=False,
        )
Example #12
def generate_deployments_for_service(service, soa_dir):
    try:
        with open(os.path.join(soa_dir, service, TARGET_FILE), 'r') as f:
            old_deployments_dict = json.load(f)
            old_mappings = get_deploy_group_mappings_from_deployments_dict(old_deployments_dict)
    except (IOError, ValueError):
        old_mappings = {}
    mappings, v2_mappings = get_deploy_group_mappings(
        soa_dir=soa_dir,
        service=service,
        old_mappings=old_mappings,
    )

    deployments_dict = get_deployments_dict_from_deploy_group_mappings(mappings, v2_mappings)

    with atomic_file_write(os.path.join(soa_dir, service, TARGET_FILE)) as f:
        json.dump(deployments_dict, f)
Example #13
def generate_deployments_for_service(service, soa_dir):
    try:
        with open(os.path.join(soa_dir, service, TARGET_FILE), 'r') as f:
            old_deployments_dict = json.load(f)
            old_mappings = get_deploy_group_mappings_from_deployments_dict(old_deployments_dict)
    except (IOError, ValueError):
        old_mappings = {}
        old_deployments_dict = {}
    mappings, v2_mappings = get_deploy_group_mappings(
        soa_dir=soa_dir,
        service=service,
        old_mappings=old_mappings,
    )

    deployments_dict = get_deployments_dict_from_deploy_group_mappings(mappings, v2_mappings)
    if deployments_dict != old_deployments_dict:
        with atomic_file_write(os.path.join(soa_dir, service, TARGET_FILE)) as f:
            json.dump(deployments_dict, f)
Example #14
File: test_utils.py Project: ese/paasta
def test_atomic_file_write():
    with mock.patch('tempfile.NamedTemporaryFile', autospec=True) as ntf_patch:
        file_patch = ntf_patch().__enter__()
        file_patch.name = '/hurp/.durp-AAA'
        ntf_patch.reset_mock()

        with mock.patch('os.rename', autospec=True) as rename_patch:
            with mock.patch('os.chmod', autospec=True) as chmod_patch:
                with utils.atomic_file_write('/hurp/durp'):
                    ntf_patch.assert_called_once_with(
                        dir='/hurp',
                        prefix='.durp-',
                        delete=False,
                    )
                chmod_patch.assert_called_once_with('/hurp/.durp-AAA',
                                                    mock.ANY)

            rename_patch.assert_called_once_with('/hurp/.durp-AAA',
                                                 '/hurp/durp')
Example #15
def write_yaml_file(filename):
    previous_config = maybe_load_previous_config(filename, yaml.safe_load)
    configuration = generate_configuration()

    if previous_config and previous_config == configuration:
        return

    with atomic_file_write(filename) as fp:
        fp.write(
            "# This file is automatically generated by paasta_tools.\n"
            "# It was automatically generated at {now} on {host}.\n".format(
                host=socket.getfqdn(), now=datetime.now().isoformat()))
        yaml.safe_dump(
            configuration,
            fp,
            indent=2,
            explicit_start=True,
            default_flow_style=False,
            allow_unicode=False,
        )
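This variant skips the write entirely when the configuration has not changed. maybe_load_previous_config is not shown in these examples; a plausible sketch, assuming it simply returns None whenever the previous file is missing or unparsable:

def maybe_load_previous_config(filename, config_loader):
    # Return the previously written config, or None if it can't be read or parsed.
    try:
        with open(filename, 'r') as fp:
            return config_loader(fp)
    except Exception:
        return None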
Example #16
def main():
    args = parse_args()
    soa_dir = os.path.abspath(args.soa_dir)
    service = args.service
    if args.verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.WARNING)
    try:
        with open(os.path.join(soa_dir, service, TARGET_FILE), 'r') as f:
            old_deployments_dict = json.load(f)
            old_mappings = get_branch_mappings_from_deployments_dict(old_deployments_dict)
    except (IOError, ValueError):
        old_mappings = {}
    mappings = get_branch_mappings(soa_dir, service, old_mappings)

    deployments_dict = get_deployments_dict_from_branch_mappings(mappings)

    with atomic_file_write(os.path.join(soa_dir, service, TARGET_FILE)) as f:
        json.dump(deployments_dict, f)
Example #17
def test_atomic_file_write():
    with mock.patch('tempfile.NamedTemporaryFile', autospec=True) as ntf_patch:
        file_patch = ntf_patch().__enter__()
        file_patch.name = '/hurp/.durp-AAA'
        ntf_patch.reset_mock()

        with mock.patch('os.rename', autospec=True) as rename_patch:
            with mock.patch('os.chmod', autospec=True) as chmod_patch:
                with utils.atomic_file_write('/hurp/durp'):
                    ntf_patch.assert_called_once_with(
                        dir='/hurp',
                        prefix='.durp-',
                        delete=False,
                    )
                chmod_patch.assert_called_once_with('/hurp/.durp-AAA', mock.ANY)

            rename_patch.assert_called_once_with(
                '/hurp/.durp-AAA',
                '/hurp/durp'
            )
Example #18
def main(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('output_path')
    args = parser.parse_args(argv)

    configuration = generate_configuration()

    with atomic_file_write(args.output_path) as fp:
        fp.write(
            '# This file is automatically generated by paasta_tools.\n'
            '# It was automatically generated at {now} on {host}.\n'.format(
                host=socket.getfqdn(),
                now=datetime.now().isoformat(),
            ), )
        yaml.safe_dump(
            configuration,
            fp,
            indent=2,
            explicit_start=True,
            default_flow_style=False,
            allow_unicode=False,
        )
Example #19
def write_rfc1700_file(filename):
    strings = []
    for service in sorted(os.listdir(DEFAULT_SOA_DIR)):
        strings.extend(get_service_lines_for_service(service))
    with atomic_file_write(filename) as fp:
        fp.write("\n".join(strings))
Example #20
def main() -> None:
    parser = argparse.ArgumentParser(
        description=(
            'Script to dump a HAProxy map between container IPs and task IDs.'
        ),
    )
    parser.add_argument(
        '--update-haproxy',
        '-U',
        action='store_true',
        help='Whether to update haproxy for map updates',
    )
    parser.add_argument(
        '--haproxy-timeout',
        '-T',
        type=int,
        default=1,
        help='Timeout for haproxy socket connections',
    )
    parser.add_argument(
        'map_file',
        nargs='?',
        default='/var/run/synapse/maps/ip_to_service.map',
        help='Where to write the output map file',
    )
    args = parser.parse_args()

    if args.update_haproxy:
        prev_ip_to_task_id = get_prev_file_contents(args.map_file)

    new_lines = []
    ip_addrs = []
    service_ips_and_ids = extract_taskid_and_ip(get_docker_client())

    for ip_addr, task_id in service_ips_and_ids:
        ip_addrs.append(ip_addr)
        if args.update_haproxy:
            update_haproxy_mapping(
                ip_addr,
                task_id,
                prev_ip_to_task_id,
                args.map_file,
                args.haproxy_timeout,
            )
        new_lines.append('{ip_addr} {task_id}'.format(
            ip_addr=ip_addr,
            task_id=task_id,
        ))

    if args.update_haproxy:
        remove_stopped_container_entries(
            prev_ip_to_task_id.keys(),
            ip_addrs,
            args.map_file,
            args.haproxy_timeout,
        )

    # Replace the file contents with the new map
    with atomic_file_write(args.map_file) as fp:
        fp.write('\n'.join(new_lines))