Example #1
def newDaemon(self, config_file):
    """
    Returns a list of daemon instances.
    """
    try:
        init_config(config_file)
    except Exception as e1:
        self.log.error("While reading configuration %s got: %r. Aborting!" % (config_file, e1))
        sys.exit(-1)
Example #2
def main():
    args = cli()
    if args.verbose:
        logger.setLevel(logging.INFO)
    config = utils.init_config(args.C)
    if config.has_option('data', 'nunoc_cosmos'):
        data_dir = config.get('data', 'nunoc_cosmos')
        insert_hosts(utils.load_json(data_dir))
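
Example #2 (and most of the snippets below) treats the object returned by utils.init_config as a ConfigParser: it calls config.get(section, option), config.has_option(...) and config.getboolean(...). A minimal sketch of such an init_config, assuming it simply wraps Python's standard configparser module:

# Minimal sketch only; the real utils.init_config may add logging,
# defaults, or error handling beyond this.
import configparser
import sys

def init_config(path):
    config = configparser.ConfigParser()
    if not config.read(path):  # read() returns the list of files it parsed
        print('Could not read configuration file {0}'.format(path), file=sys.stderr)
        sys.exit(1)
    return config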
Example #3
def run_restore(config_file):
    """
    Function to start the consumer from another script.
    """
    config = utils.init_config(config_file)
    noclook_data = config.get('data', 'noclook')
    if noclook_data:
        nodes = utils.load_json(noclook_data, starts_with="node")
        relationships = utils.load_json(noclook_data,
                                        starts_with="relationship")
        consume_noclook(nodes, relationships)
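
Example #3 (and #5 below) also passes a starts_with argument to utils.load_json, which suggests the helper scans a data directory and parses every JSON file whose name begins with a given prefix. A sketch under that assumption:

# Sketch only; the real utils.load_json may recurse into subdirectories
# or order files differently.
import json
import os

def load_json(directory, starts_with=''):
    items = []
    for name in sorted(os.listdir(directory)):
        if name.startswith(starts_with) and name.endswith('.json'):
            with open(os.path.join(directory, name)) as f:
                items.append(json.load(f))
    return items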
Example #4
def get_lastest_tag(repo_name, today):
    logger.debug('Init config')
    config = init_config(repo_name)

    environment = config['environments'][env_name]
    env_key = environment['env_key'][repo_name]

    tag_re_patten = config['tag_re_patten']
    logger.debug('Searched tag pattern is {}'.format(tag_re_patten))

    git_path = git_folder_path + repo_name
    logger.debug('Git path is {}'.format(git_path))

    logger.debug('Init repo')
    repo = git.Repo.init(path=git_path)
    repo.git.fetch()

    logger.debug('Fetch remote tags')
    all_tag = repo.git.ls_remote('--tags')
    tags = get_tags(all_tag)

    logger.debug('Start to find latest tag')
    tag_re_patten = tag_re_patten.format(env_key, today)
    tag_name = find_latest_tag(tag_re_patten, tags)

    if tag_name is None:
        logger.warning(
            "Can't find a matching tag for {}. There may be no tag today, or the tag pattern may be wrong."
            .format(repo_name))
        return None

    logger.info('The latest tag of {} is {}'.format(repo_name, tag_name))
    return tag_name
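
Unlike the ConfigParser-based snippets, Example #4 (and #10 and #11 below) indexes the configuration like a plain mapping, e.g. config['environments'] and config['tag_re_patten'], so this project's init_config presumably parses a structured file such as JSON or YAML into a dict. A sketch under that assumption, with a hypothetical file layout:

# Sketch only; the config/<name>.json layout is an assumption.
import json

def init_config(name):
    with open('config/{0}.json'.format(name)) as f:
        return json.load(f)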
Example #5
def run_consume(config_file):
    """
    Function to start the consumer from another script.
    """
    config = utils.init_config(config_file)
    # juniper_conf
    juniper_conf_data = config.get('data', 'juniper_conf')
    remove_expired_juniper_conf = config.getboolean('delete_data',
                                                    'juniper_conf')
    juniper_conf_data_age = config.get('data_age', 'juniper_conf')
    # nmap services
    nmap_services_py_data = config.get('data', 'nmap_services_py')
    # nagios checkmk
    nagios_checkmk_data = config.get('data', 'nagios_checkmk')
    # cfengine report
    cfengine_data = config.get('data', 'cfengine_report')
    # noclook
    noclook_data = config.get('data', 'noclook')
    # Consume data
    if juniper_conf_data:
        data = utils.load_json(juniper_conf_data)
        switches = False
        noclook_juniper_consumer.consume_juniper_conf(data, switches)
    if nmap_services_py_data:
        data = utils.load_json(nmap_services_py_data)
        noclook_nmap_consumer.insert_nmap(data)
    if nagios_checkmk_data:
        data = utils.load_json(nagios_checkmk_data)
        noclook_checkmk_consumer.insert(data)
    if cfengine_data:
        data = utils.load_json(cfengine_data)
        noclook_cfengine_consumer.insert(data)
    if config.has_option('data', 'nunoc_cosmos'):
        data = utils.load_json(config.get('data', 'nunoc_cosmos'))
        noclook_nunoc_consumer.insert_hosts(data)
    if noclook_data:
        nodes = utils.load_json(noclook_data, starts_with="node")
        relationships = utils.load_json(noclook_data,
                                        starts_with="relationship")
        consume_noclook(nodes, relationships)
    # Clean up expired data
    if remove_expired_juniper_conf:
        noclook_juniper_consumer.remove_juniper_conf(juniper_conf_data_age)
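
For orientation, here is a hypothetical config.ini covering the sections and options run_consume() reads; every path and value below is an assumption, not the project's real configuration:

[data]
juniper_conf = /var/opt/data/juniper_conf
nmap_services_py = /var/opt/data/nmap_services_py
nagios_checkmk = /var/opt/data/nagios_checkmk
cfengine_report = /var/opt/data/cfengine_report
noclook = /var/opt/data/noclook
# nunoc_cosmos is optional; run_consume() checks for it with has_option()
# nunoc_cosmos = /var/opt/data/nunoc_cosmos

[delete_data]
juniper_conf = true

[data_age]
juniper_conf = 30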
Example #6
def main():
    # User friendly usage output
    parser = argparse.ArgumentParser()
    parser.add_argument('-C', nargs='?', help='Path to the configuration file.')
    parser.add_argument('-X', action='store_true', default=False, help='Mark host services as public if found.')
    parser.add_argument('--verbose', '-V', action='store_true', default=False)
    args = parser.parse_args()
    # Load the configuration file
    if not args.C:
        logger.error('Please provide a configuration file with -C.')
        sys.exit(1)
    else:
        if args.verbose:
            logger.setLevel(logging.INFO)
        config = utils.init_config(args.C)
        nmap_services_data = config.get('data', 'nmap_services_py')
        if nmap_services_data:
            insert_nmap(utils.load_json(nmap_services_data), args.X)
    return 0
Example #7
def main():
    # User friendly usage output
    parser = argparse.ArgumentParser()
    parser.add_argument('-C',
                        nargs='?',
                        help='Path to the configuration file.')
    parser.add_argument('--verbose', '-V', action='store_true', default=False)
    args = parser.parse_args()
    # Load the configuration file
    if not args.C:
        print('Please provide a configuration file with -C.')
        sys.exit(1)
    else:
        if args.verbose:
            logger.setLevel(logging.INFO)
        config = utils.init_config(args.C)
        cfengine_data = config.get('data', 'cfengine_report')
        if cfengine_data:
            insert(utils.load_json(cfengine_data))
    return 0
Example #8
def main():
    args = cli()

    if args.verbose:
        logger.setLevel(logging.INFO)

    if args.C:
        config = utils.init_config(args.C)
        snap_data_path = config.get('data', 'snap_metadata')
    elif args.data:
        snap_data_path = args.data
    else:
        logger.error(
            "No snap data specified; supply either a config file (-C) or a data argument")
        return

    starts_with = 'manifest.json' if args.raw else ''

    if snap_data_path:
        insert_snap(utils.load_json(snap_data_path, starts_with))
Example #9
def main():
    # User friendly usage output
    parser = argparse.ArgumentParser()
    parser.add_argument('-C',
                        nargs='?',
                        help='Path to the configuration file.')
    parser.add_argument('--verbose', '-V', action='store_true', default=False)
    parser.add_argument('--switches',
                        '-S',
                        action='store_true',
                        default=False,
                        help='Insert as switches rather than routers')
    parser.add_argument(
        '--data',
        '-d',
        required=False,
        help='Directory to load data from. Overrides the config file.')
    args = parser.parse_args()
    # Load the configuration file
    if not args.C and not args.data:
        print(
            'Please provide a configuration file with -C or --data for a data directory.'
        )
        sys.exit(1)
    elif not args.data:
        config = utils.init_config(args.C)
    else:
        config = None

    data = args.data or config.get('data', 'juniper_conf')

    if args.verbose:
        logger.setLevel(logging.INFO)
    if data:
        consume_juniper_conf(utils.load_json(data), args.switches)
    if config and config.has_option('delete_data',
                                    'juniper_conf') and config.getboolean(
                                        'delete_data', 'juniper_conf'):
        remove_juniper_conf(config.get('data_age', 'juniper_conf'))
    return 0
Example #10
import argparse
import os

import pandas as pd

from base_data import base_query
from preprocess import preprocess_df
from nyc_taxi_prediction import NycTaxiPredictionRFService, NycTaxiPredictionTorchService
from utils import init_config

if __name__ == '__main__':
    parser = argparse.ArgumentParser()

    parser.add_argument("--dev_env",
                        help="Development Env [local], [dev], [prod]",
                        type=str,
                        default="local")

    flag = parser.parse_args()

    config = init_config(flag.dev_env)
    print(f"Config : {config}")
    model_dir = f"{config['save_folder']}/models/"

    print('load data')
    base_df = pd.read_gbq(query=base_query,
                          dialect='standard',
                          project_id=config['project'],
                          auth_local_webserver=True)
    train_df, y_train_raw, test_df, y_test_raw = preprocess_df(base_df)

    x_train = train_df.copy()
    x_test = test_df.copy()

    if not os.path.isdir(model_dir):
        os.mkdir(model_dir)
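
Example #10 reads config['project'] and config['save_folder'] from the mapping returned by init_config, so the per-environment configuration presumably looks roughly like this (keys taken from the code, values made up):

# Hypothetical shape of the mapping init_config('local') might return.
config = {
    'project': 'my-gcp-project',  # GCP project id passed to pd.read_gbq
    'save_folder': './outputs',   # models are saved under save_folder/models/
}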
Example #11
def main(repo_name):
    logger.debug('init config')
    config = init_config(repo_name)

    environment = config['environments'][env_name]
    env_key = environment['env_key'][repo_name]

    today = datetime.date.today().strftime('%Y%m%d')
    logger.debug('today is ' + today)

    git_path = git_folder_path + repo_name
    logger.info('git path is ' + git_path)

    logger.debug('init repo')
    repo = git.Repo.init(path=git_path)

    # Prune is needed when a branch was deleted and then recreated with the same name.
    repo.git.fetch('--prune')

    logger.debug('get remote tags')
    all_tag = repo.git.ls_remote('--tags')
    tags = get_tags(all_tag)

    logger.debug('find latest tag')
    tag_re_patten = config['tag_re_patten']
    tag_re_patten = tag_re_patten.format(env_key, today)
    tag_name = find_latest_tag(tag_re_patten, tags)
    if tag_name is None:
        raise TagNotFoundException()
    logger.info('latest tag is ' + tag_name)

    logger.debug('find latest branch')
    branches = repo.git.branch('-r').split('\n')
    merged_branch_name_re_patten = config[
        'merged_branch_name_re_patten'].format(env_key, today)
    latest_branch = find_latest_branch(merged_branch_name_re_patten, branches)
    branch_index = get_branch_index(latest_branch)

    new_branch_name = config['new_branch_name_patten'].format(
        env_key, today, branch_index)
    logger.info('new branch is ' + new_branch_name)

    logger.debug('check whether the branch already exists')
    is_exists = some(branches, lambda b: new_branch_name in b)
    if is_exists:
        raise BranchIsExistException(new_branch_name)
    logger.debug('branch does not exist yet')

    logger.debug('create branch')
    repo.git.checkout(tag_name, '-b', new_branch_name)

    logger.debug('push branch')
    repo.git.push('origin', new_branch_name)

    logger.debug('checkout back to dev')
    repo.git.checkout(source_branch)

    logger.debug('get branch diff commits')
    all_log = repo.git.log(
        'origin/{}..origin/{}'.format(config['uat_branch'], new_branch_name),
        '--oneline', '--no-merges')

    logger.debug('build pull request description')
    pr_desc = build_pr_desc(all_log)

    logger.debug('create request service')
    request_service = RequestService(config['host'], config['headers'], auth)

    logger.debug('get reviewers')
    uat_branch = config['uat_branch']
    default_reviewers_api = config['default_reviewers_api'].format(repo_name)
    reviewers = get_reviewers(request_service, default_reviewers_api,
                              uat_branch, new_branch_name)

    logger.debug('build pull request object')
    pr_obj = build_pr_obj(new_branch_name, uat_branch, pr_desc, reviewers)

    logger.debug('post to create pull request')
    pull_requests_api = config['pull_requests_api'].format(repo_name)
    rs = post_pr(request_service, pull_requests_api, pr_obj)

    if rs.status_code != 201:
        logger.error('{} {} create pull request failed.'.format(
            repo_name, new_branch_name))
        status_code = rs.status_code
        result = json.loads(rs.text)
        message = result['errors'][0]['message']
        raise BitbucketException(status_code, message, new_branch_name)

    logger.info('create pull request succeeded.')
    logger.info('finish')
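
The tag_re_patten value is a template with two placeholders that are filled with env_key and today's date before matching. A purely hypothetical illustration (the real pattern comes from the config file):

# Hypothetical value; the real one is config['tag_re_patten'].
tag_re_patten = r'release-{}-{}-(\d+)'
pattern = tag_re_patten.format('uat', '20240101')
# pattern == r'release-uat-20240101-(\d+)'; find_latest_tag() matches the
# fetched tag names against it and returns the latest match.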
Example #12
from __future__ import with_statement
from fabric.api import env, parallel, roles

import datetime
import fab.linux as fl
import fab.xen as fx
import getopt
import os
import sys
import utils
import utils.linux as ul

# CONFIG file settings ########################################################
__SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
__CONFIG_FILE = __SCRIPT_PATH + "/config.ini"
__CONFIG = utils.init_config(__CONFIG_FILE)

__SERVER_BKP_PATH = __CONFIG.get("server", "backup_path")
__LOG = __CONFIG.get("server", "log_file")

__XEN_BKP_PATH = __CONFIG.get("xen", "backup_mount_point")
env.roledefs['xen'] = __CONFIG.get("xen", "hosts").split(',')
###############################################################################

__VERSION = "20170310-2000"
__DATE_TIME = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')


def __make_list_of_vms(f, logger=None):
    vms = {}
    if f == "":
Example #13
def run():
    from optparse import OptionParser
    import datetime
    import glob
    import os
    import sys
    import time

    import audio
    import html
    import notify
    import utils
    import recorder
    import remote
    import schedule

    option_parser = OptionParser()
    option_parser.add_option(
        "-p",
        "--print",
        dest="print_schedule",
        action="store_true",
        default=False,
        help="Print information about the date but don't do anything",
    )

    option_parser.add_option("-w", "--wav", dest="wavs", action="store_true", default=False, help="Construct WAVs")

    option_parser.add_option("-e", "--encode", dest="encode", action="store_true", default=False, help="Encode MP3s")

    option_parser.add_option(
        "-i", "--index", dest="index", action="store_true", default=False, help="Generate index pages"
    )

    option_parser.add_option(
        "-u", "--upload", dest="upload", action="store_true", default=False, help="Upload data to web server"
    )

    option_parser.add_option(
        "-c", "--config", dest="config_file", help="Specify alternative config file", metavar="FILE"
    )

    option_parser.add_option(
        "-f",
        "--filter",
        dest="filter",
        action="append",
        help="Filter schedule to items containing at least one of the given show/presenter",
        metavar="NAME",
    )

    options, args = option_parser.parse_args()

    task_options = ["print_schedule", "wavs", "encode", "index", "upload"]
    num_task_options_supplied = len([None for option_name in task_options if getattr(options, option_name, False)])
    # No specific do-something options were given.  Do everything.
    if num_task_options_supplied == 0:
        for option_name in task_options:
            setattr(options, option_name, True)

    config_files = utils.default_config_files()
    if options.config_file is not None:
        config_files.append(options.config_file)
    config = utils.init_config(config_files)

    date_string = "yesterday"
    if len(args) == 1:
        date_string = args[0]
    date = utils.interpret_date_string(date_string)

    start_time = time.time()

    recorder.init_module()
    bounds_and_files = recorder.get_bounds_and_files_for_date(date)
    schedule_list = schedule.get_schedule(date, filter_items=options.filter)

    if options.wavs or options.print_schedule:
        if options.filter:
            print "Schedule (filtered):"
        else:
            print "Schedule:"
        schedule.print_schedule(schedule_list)
        print
        print "Recordings:"
        recorder.print_bounds_and_files(bounds_and_files)

    wav_files = None
    if options.wavs:
        wav_files = audio.make_wav_files(bounds_and_files, schedule_list)

    mp3_files = None
    if options.encode:
        # Always rebuild WAVs list in case any are hanging around from before.
        if True:  # wav_files is None:
            if config.has_option("main", "wavs"):
                wavs_dir = config.get("main", "wavs")
            else:
                wavs_dir = os.getcwd()
            wav_files = glob.glob(os.path.join(wavs_dir, "*.wav"))
        # XXX Delete working WAVs?  Only if MP3 was created for it.
        mp3_files = [audio.encode_file(path) for path in wav_files]
        if True:  # XXX look for no-delete option later
            print "Deleting local copies of WAVs..."
            for (wav, mp3) in zip(wav_files, mp3_files):
                if mp3 is not None and os.path.isfile(wav):
                    os.unlink(wav)
                    print "   ", wav
            print "done."
            print

    ftp_conn = None
    remote_audio_files = []
    if options.upload or options.index:
        if config.has_option("ftp", "keep_days"):
            keep_days = config.getint("ftp", "keep_days")
        else:
            keep_days = 7
        earliest_keep_date = date - datetime.timedelta(days=keep_days - 1)
        ftp_conn = remote.connect()
        remote_audio_files = ftp_conn.get_list_of_audio_files()

        # First make an index with no old files:
        # XXX For now this ignores per-file limits, so will remove everything
        # over N days old from the index temporarily.  If a file has a higher
        # number of days defined, it will be restored to the index later when
        # it's not deleted -- but for a file with a lower number of days
        # defined, it'll disappear later than it should.
        audio_files_for_first_index = [
            fname
            for (fname, details) in [
                (fname, schedule.schedule_from_audio_file_name(fname)) for fname in remote_audio_files
            ]
            if details is not None and details["date"] >= earliest_keep_date
        ]

        index_fname = html.make_index_file(date, audio_files_for_first_index)
        if options.upload:
            ftp_conn.storlines("STOR index.html", open(index_fname, "r"))

    if options.upload:
        ftp_conn.remove_old_audio(date, keep_days)

        # XXX Here we should delete local copies of MP3s that are more than N
        # days old, in case the upload has failed for more than N days.
        pass

        # Always build the list again as we can pick up files we missed before.
        if True:  # mp3_files is None:
            if config.has_option("main", "mp3s"):
                mp3s_dir = config.get("main", "mp3s")
            else:
                mp3s_dir = os.getcwd()
            mp3_files = glob.glob(os.path.join(mp3s_dir, "*.mp3"))

        # Default to nothing uploaded so the deletion loop below is safe
        # even if the upload raises.
        uploaded = []
        try:
            uploaded = remote.upload_audio(ftp_conn, mp3_files)
        except Exception:
            import traceback

            print "Exception uploading files"
            traceback.print_exc(file=sys.stdout)
            print "Continuing..."

        # Reconnect (or grab the cached connection) in case there were failures
        # during the upload.  A better structure would see us making this
        # completely transparent across all remote calls, but for now we focus
        # on the big upload.
        ftp_conn = remote.connect()

        if True:  # XXX look for no-delete option later
            print "Deleting local copies of MP3s..."
            for mp3_path in mp3_files:
                if os.path.split(mp3_path)[1] in uploaded and os.path.isfile(mp3_path):
                    print "   ", mp3_path
                    os.unlink(mp3_path)
            print "done."
            print

        notify.notify_all(mp3_files)

    if options.index:
        # Second index file: whatever's on the server.
        remote_audio_files = ftp_conn.get_list_of_audio_files()

        index_fname = html.make_index_file(date, remote_audio_files)
        if options.upload:
            ftp_conn.storlines("STOR index.html", open(index_fname, "r"))
            # XXX Now also sync up anything that's in the www directory
            # (resource files such as JS, CSS, images, jPlayer...).
            pass

    if ftp_conn is not None:
        ftp_conn.quit()

    end_time = time.time()
    if not options.print_schedule:
        duration = end_time - start_time
        print "Took %2.2dm %2.2ds" % divmod(duration, 60)

    return 0
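
Example #13 passes a list of candidate files to utils.init_config rather than a single path. ConfigParser.read() accepts such a list, silently skips files that do not exist, and lets later files override earlier ones, so a minimal sketch of a list-aware init_config could be:

# Sketch only, assuming init_config wraps ConfigParser; example #13 itself
# is Python 2, where the module is named ConfigParser instead.
import configparser

def init_config(config_files):
    config = configparser.ConfigParser()
    config.read(config_files)  # later files win on conflicting options
    return config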