示例#1
0
 def __init__(self, afile):
     """Load downscaler settings from the DEFAULT section of *afile*.

     Values are kept as the raw strings ConfigParser returns, except
     the interval, which is converted to an int.
     """
     self.afile = afile
     self.config = read_config(self.afile)
     defaults = self.config.defaults()
     self.threshold = defaults['threshold']
     self.downscaler_interval = int(defaults['downscaler_interval'])
     self.policy_in_place = defaults['policy_in_place']
示例#2
0
 def __init__(self, file):
     """Read cloud launch settings from the config's DEFAULT section."""
     self.file = file
     self.config = read_config(self.file)
     defaults = self.config.defaults()
     # All four values are plain strings; copy them onto the instance.
     for key in ('cloud', 'instance_type', 'image_id', 'script_path'):
         setattr(self, key, defaults[key])
示例#3
0
 def __init__(self, file):
     """Load SSH key-pair settings from the config's DEFAULT section.

     All values (including initial_monitor_time_limit) are kept as raw
     strings; callers must convert if they need a number.
     """
     self.file = file
     # Read via the stored attribute for consistency with the other
     # config wrapper classes (which all use self.file here).
     self.config = read_config(self.file)
     default_dict = self.config.defaults()
     self.key_name = default_dict['key_name']
     self.pub_path = default_dict['pub_path']
     self.priv_path = default_dict['priv_path']
     self.initial_monitor_time_limit = default_dict['initial_monitor_time_limit']
示例#4
0
 def __init__(self, file):
     """Pull user/submission/log path settings from the DEFAULT section."""
     self.file = file
     self.config = read_config(self.file)
     section = self.config.defaults()
     self.user = section['user']
     self.submit_local = section['submit_local']
     self.directory = section['directory']
     self.submit_remote = section['submit_remote']
     self.log_remote = section['log_remote']
示例#5
0
def main():
    """Entry point: configure logging, start the Automaton thread, and
    keep the main thread responsive to SIGINT until the thread exits."""
    (options, args) = parse_options()
    configure_logging(options.debug)
    config = read_config(options.config_file)
    signal.signal(signal.SIGINT, clean_exit)
    automaton = Automaton(config)
    automaton.start()
    # Wake every second to make sure signals are handled by the main
    # thread -- a quirk of Python threading: signals are only delivered
    # to the main thread, and an unbounded join() would block them.
    # NOTE: Thread.isAlive() was removed in Python 3.9; is_alive() is
    # the portable spelling (available since Python 2.6).
    while automaton.is_alive():
        automaton.join(timeout=1.0)
示例#6
0
def main():
    """Entry point: configure logging, start the Automaton thread, and
    keep the main thread responsive to SIGINT until the thread exits."""
    (options, args) = parse_options()
    configure_logging(options.debug)
    config = read_config(options.config_file)
    signal.signal(signal.SIGINT, clean_exit)
    automaton = Automaton(config)
    automaton.start()
    # Wake every second to make sure signals are handled by the main
    # thread -- a quirk of Python threading: signals are only delivered
    # to the main thread, and an unbounded join() would block them.
    # NOTE: Thread.isAlive() was removed in Python 3.9; is_alive() is
    # the portable spelling (available since Python 2.6).
    while automaton.is_alive():
        automaton.join(timeout=1.0)
示例#7
0
    def __init__(self, file):
        """Load endpoint/credential settings from the DEFAULT section."""
        self.file = file
        self.config = read_config(self.file)
        section = self.config.defaults()
        self.url = section['url']
        self.port = section['port']
        self.access_id = section['access_id']
        self.secret_key = section['secret_key']
        self.launch_name = section['launch_name']

        # The config supplies only a prefix; the full domain name is
        # assigned later from the prefix and a timestamp.
        self.domain_name_prefix = section['domain_name']
        self.domain_name = None
示例#8
0
File: phorque.py  Project: cu-csc/phorque
def main():
    """Entry point: start the Phorque daemon thread and wait for it,
    keeping the main thread free to handle SIGINT."""
    (options, args) = parse_options()
    configure_logging(options.debug)
    config = read_config(options.config_file)
    signal.signal(signal.SIGINT, clean_exit)
    phorque = Phorque(config)
    LOG.info("Starting Phorque thread")
    # Daemon flag lets the interpreter exit if the main thread dies.
    phorque.daemon = True
    phorque.start()
    # wake every second to make sure signals can be handled by the main thread
    # NOTE: Thread.isAlive() was removed in Python 3.9; is_alive() is
    # the portable spelling (available since Python 2.6).
    while phorque.is_alive():
        phorque.join(timeout=1.0)
示例#9
0
    def __init__(self, file):
        """Parse per-cloud worker-group settings.

        Each config section becomes one dict in self.worker_groups
        holding that section's key/value pairs plus a 'cloud' entry
        naming the section (section values override 'cloud' if a
        section literally defines a 'cloud' key, as in the original).
        """
        self.file = file
        self.config = read_config(self.file)
        cloud_names = self.config.sections()

        self.worker_groups = []
        for cloud in cloud_names:
            # Seed with the section name, then merge the (key, value)
            # pairs. Renamed from 'dict', which shadowed the builtin.
            group = {'cloud': cloud}
            group.update(self.config.items(cloud))
            self.worker_groups.append(group)
示例#10
0
 def __init__(self, file):
     """Load the config file and expose its section names as self.list."""
     self.file = file
     cfg = read_config(self.file)
     self.config = cfg
     # NOTE: the attribute name 'list' mirrors the original interface,
     # although it shadows the builtin for readers.
     self.list = cfg.sections()
示例#11
0
 def get_desired_data(self):
     """Return {'hotel': n, 'sierra': n} with each cluster's desired
     worker count read from the config file."""
     config_data = read_config(self.config_file)
     # Build the result directly from the two known sections.
     return {name: config_data.getint(name, "desired")
             for name in ("hotel", "sierra")}
示例#12
0
 def __init__(self, input_dir):
     """Load worker config and parsed Condor job data from *input_dir*.

     Expects workers.conf and sleep.log to exist inside the directory.
     """
     self.input_dir = input_dir
     workers_conf = os.path.join(self.input_dir, "workers.conf")
     sleep_log = os.path.join(self.input_dir, "sleep.log")
     self.config_data = read_config(workers_conf)
     self.cpobj = log_parser.CondorParser(sleep_log)
     self.cpobj.parse_file()
     self.condor_data = self.cpobj.condor_jobs_db
示例#13
0
import os
import sys
import web
import json
import time
import hashlib
from lib import payload
from lib.util import MysqlHandler, read_config
from lib.logger import Logger

# Module-level logger: configured from conf/server.conf and writing to
# logs/<this-script's-basename>.log.
log = Logger(config_file='conf/server.conf',
             logfile=os.path.join('logs', os.path.basename(os.path.realpath(sys.argv[0])).replace(".py", '.log'))
)

# Shared handles created at import time.
# NOTE(review): read_config() is called with no arguments here, unlike
# other call sites -- presumably it has a default path; confirm.
mysql_handler = MysqlHandler()
opts = read_config()


def test(data):
    """Best-effort manual smoke test: send *data* to the job endpoint.

    Errors are printed and swallowed on purpose -- this is a throwaway
    test helper, not production code.
    """
    sreq = payload.SREQ("tcp://%(job_ip)s:%(job_port)s" % opts)
    try:
        sreq.send(data, timeout=2)
    except Exception as e:
        # 'except X as e' works on Python 2.6+ and Python 3; the old
        # 'except X, e' form is a syntax error on Python 3.
        print(e)

if __name__ == '__main__':
    # data = {
    #     "job_id": "201408271734511409132091",
    #     "job_type": 0,
    #     "release_type": "jetty",
示例#14
0
File: config.py  Project: dmdu/automaton
 def __init__(self, options):
     """Parse the three config files named in *options* into wrappers."""
     self.options = options
     global_cfg = read_config(options.global_file)
     clouds_cfg = read_config(options.clouds_file)
     bench_cfg = read_config(options.benchmarking_file)
     self.globals = GlobalConfig(global_cfg)
     self.clouds = CloudsConfig(clouds_cfg)
     self.benchmarking = BenchmarkingConfig(bench_cfg)
示例#15
0
of staged deployment.  We will implement the execution workflow here
when we have more time.

"""

import shutil

from lib import util
from deployment import common
from deployment.executor import Executor

# config file path
config_file = "../etc/global.conf"

# Parse the config once and reuse it below; the original re-read the
# file for every single value (assumes util.read_config just parses
# the file -- TODO confirm it has no other side effects).
config = util.read_config(config_file)

# clone the repo locally
git_repo_location = config.get("DEFAULT", "git_repo_location")
my_local_folder = util.clone_git_repo(git_repo_location)

# we fill a dict with our stages
git_repo_home = config.get("DEFAULT", "git_repo_home")
stages = common.get_stages("client", my_local_folder, git_repo_home)

# remove the directory since it is not needed anymore
shutil.rmtree(my_local_folder, ignore_errors=True)

# clone the repo to the vm
ssh_priv_key = config.get("DEFAULT", "ssh_priv_key")
cmd = "git clone %s %s" % (git_repo_location, git_repo_home)
remote_clone_result = util.RemoteCommand("vm-148-120.uc.futuregrid.org",
                                         ssh_priv_key, cmd).execute()
示例#16
0
 def __init__(self, options):
     """Parse the global, clouds and benchmarking config files."""
     self.options = options
     global_cfg = read_config(options.global_file)
     clouds_cfg = read_config(options.clouds_file)
     bench_cfg = read_config(options.benchmarking_file)
     self.globals = GlobalConfig(global_cfg)
     self.clouds = CloudsConfig(clouds_cfg)
     self.benchmarking = BenchmarkingConfig(bench_cfg)
示例#17
0
"""
This entire file is just an example to demonstrate functionality of staged deployment.
We will implement the execution workflow here when we have more time.
"""

import shutil

from lib import util
from deployment import common
from deployment.executor import Executor

# config file path
config_file = "../etc/global.conf"

# Read the config a single time instead of re-parsing the file for
# every value, and name the repeated values once (assumes
# util.read_config just parses the file -- TODO confirm no other
# side effects).
config = util.read_config(config_file)
git_repo_location = config.get("DEFAULT", "git_repo_location")
git_repo_home = config.get("DEFAULT", "git_repo_home")

# clone the repo locally
my_local_folder = util.clone_git_repo(git_repo_location)

# we fill a dict with our stages
stages = common.get_stages("client", my_local_folder, git_repo_home)


# remove the directory since it is not needed anymore
shutil.rmtree(my_local_folder, ignore_errors=True)

# clone the repo to the vm
remote_clone_result = util.RemoteCommand(
    "vm-148-120.uc.futuregrid.org",
    config.get("DEFAULT", "ssh_priv_key"),
    "git clone %s %s" % (git_repo_location, git_repo_home)).execute()

# initiate executor class with the stages
示例#18
0
This entire file is just an example to demonstrate functionality of staged deployment.
We will implement the execution workflow here when we have more time.
"""

import shutil

from lib import util
from deployment import common
from deployment.executor import Executor

# config file path
config_file = "../etc/global.conf"

# clone the repo locally
my_local_folder = util.clone_git_repo(
    util.read_config(config_file).get("DEFAULT", "git_repo_location"))

# we fill a dict with our stages
stages = common.get_stages(
    "client", my_local_folder,
    util.read_config(config_file).get("DEFAULT", "git_repo_home"))

# remove the directory since it is not needed anymore
shutil.rmtree(my_local_folder, ignore_errors=True)

# clone the repo to the vm
remote_clone_result = util.RemoteCommand("vm-148-120.uc.futuregrid.org",\
    util.read_config(config_file).get("DEFAULT", "ssh_priv_key"),
    "git clone %s %s" % (util.read_config(config_file).get("DEFAULT","git_repo_location") ,
                         util.read_config(config_file).get("DEFAULT","git_repo_home"))).execute()