Example #1
File: flof.py Project: floli/flof
def main():
    oparser = add_options()
    (options, args) = oparser.parse_args()

    if not args:
        oparser.print_help()
        sys.exit()
    if not os.path.isfile(args[0]):
        print "Configuration file %s not existing." % args[0]
        sys.exit(-1)

    config_file = norm_path(args[0])
    config = configuration.parse_merge(config_file)
    loglevel = config.getroot().get("loglevel", 10)
    common.setup_logging("~/.flof/flof.log", loglevel)

    register_bundled_workers()

    if options.only:
        WorkerRegistry.workers = filter(lambda a: a in options.only.split(","),
                                        WorkerRegistry.workers)

    if options.do_not:
        WorkerRegistry.workers = filter(
            lambda a: a not in options.do_not.split(","),
            WorkerRegistry.workers)

    os.chdir(os.path.dirname(norm_path(args[0])))
    context = ContextManager({"config_file": config_file})
    context.overrides = parse_context(options.context)
    RootWorker(config, context).run()
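
All of these snippets revolve around the project's norm_path() helper, whose implementation is not shown here. The call sites (a single path in Example #1, "~/.flof/defaults.xml" in parse_merge, several fragments in _logfilename) suggest it joins its arguments, expands "~" and returns an absolute, normalized path. A minimal sketch under that assumption, not the actual flof implementation:

import os

def norm_path(*parts):
    """ Join the given path fragments, expand a leading "~" and return an absolute, normalized path. """
    return os.path.abspath(os.path.expanduser(os.path.join(*parts)))
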
Example #2
    def output_file(self):
        """ Return the location of the spider output file, relative to its config file. """

        spider_conf = norm_path(self.config["configuration"])

        # Extract the output filename from the spider configuration,
        # assumes that the line after the keyword OUTFILENAME contains it.
        with open(spider_conf) as f:
            for line in f:
                if "OUTFILENAME" in line:
                    break

            return norm_path(os.path.dirname(spider_conf), f.next().strip())
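
The loop relies on the convention stated in the comment: the spider configuration names its output file on the line directly after the OUTFILENAME keyword, and that name is resolved relative to the configuration's directory. A hypothetical excerpt of such a file (the file name is made up) that the method would parse:

OUTFILENAME
spider_mesh.out
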
Example #3
 def create_mesh(self):
     """ Create the mesh either from a fluent file or by copying from another OF case. """
     if self.config.find("./mesh/fluent") is not None:
         tag = self.config.find("./mesh/fluent")
         mesh = norm_path(tag.attrib["mesh"])
         cmd = "fluentMeshToFoam -case %s %s %s" % (self.case, tag.get("arguments", ""), mesh)
         self.logger.info("Creating mesh from fluent mesh file %s", mesh)
         self.start_process(cmd)
     elif self.config.find("./mesh/copy") is not None:
         tag = self.config.find("./mesh/copy")
         src_case = norm_path(tag.get("source", self.config.attrib["template"]))
         time = tag.get("time", "constant")
         src_path = join(src_case, time, "polyMesh")
         self.logger.info("Copy mesh from %s", src_path)
         shutil.copytree(src_path, join(self.case, "constant/polyMesh"))
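
The worker picks its behaviour from the case XML: a ./mesh/fluent element with a mesh attribute triggers fluentMeshToFoam, while a ./mesh/copy element copies polyMesh from another case. A hypothetical configuration fragment matching the lookups above (element and attribute names are taken from the code; the paths and values are illustrative):

<mesh>
    <!-- either convert a Fluent mesh file ... -->
    <fluent mesh="meshes/pipe.msh" arguments="-scale 0.001"/>
    <!-- ... or copy the polyMesh of another case at a given time -->
    <copy source="../baseCase" time="constant"/>
</mesh>
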
Example #4
    def __init__(self, case_config=None, cmd_config=None, global_config="~/.flof", defaults=None):

        ConfigParser.SafeConfigParser.__init__(self, defaults=defaults)

        self.readfp(default_config)
        default_config.seek(0)  # Rewind default_config after usage

        if global_config:
            self.read(norm_path(global_config))
        if case_config:
            case_config = norm_path(case_config)
            self.read(case_config)
            self.case_config = case_config
        if cmd_config:
            self.merge_config(cmd_config)
Example #5
 def __init__(self, filename):
     self.path = norm_path(filename)
     try:
         parsed = self._grammar.parseFile(self.path, parseAll=True)
         self._data = self._as_dict(parsed)
     except IOError:
         self._data = {}
Example #6
 def _logfilename(self):
     """ Returns a path for the case log file. Creates it, if not already existing. """
     if not common.getboolean(self.config.get("log", True)) or self.case is None:
         return None
                                  
     logfile = norm_path(self.case, "log/", self.config.tag)
     try:
         os.makedirs(os.path.dirname(logfile))
     except OSError:
         pass  # Log directory already exists
     return logfile
Example #7
    def run(self):
        spider_conf = norm_path(self.config["configuration"])

        if not self.config["overwrite"]:
            if os.path.isfile(self.output_file):
                self.logger.info("Spider output file %s exists, not recreating mesh.", self.output_file)
                return

        cmd = "spider " + spider_conf

        ret_code = self.start_subproc(cmd, cwd=os.path.dirname(spider_conf))
Example #8
def parse_merge(source):
    conf = ET.parse(source)

    try:
        defaults = ET.parse(norm_path("~/.flof/defaults.xml"))
    except IOError:
        pass  # No defaults file present
    else:
        default_attr = defaults.getroot().attrib
        default_attr.update(conf.getroot().attrib)
        conf.getroot().attrib = default_attr

    return conf
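
The else branch makes the root attributes of ~/.flof/defaults.xml act as fallbacks: they are copied first and then overwritten by the case configuration's own attributes, so the case file always wins. A self-contained sketch of that update pattern (the solver attribute is made up; loglevel is the attribute read in Example #1):

import xml.etree.ElementTree as ET

defaults = ET.fromstring('<case loglevel="10" solver="simpleFoam"/>')
case = ET.fromstring('<case loglevel="30"/>')

merged = defaults.attrib.copy()
merged.update(case.attrib)   # the case configuration overrides the defaults
case.attrib = merged
# loglevel is now "30" (from the case), solver stays "simpleFoam" (from the defaults)
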
Example #9
    def upload(self, local_path, remote_path, remote_name=None):
        local_path = norm_path(local_path)
        file_name = os.path.split(local_path)[-1]
        if remote_name is None: remote_name = file_name
        fid = self.create(remote_path, remote_name)

        # load local file and divide into blocks
        with open(local_path, 'rb') as f:
            data = f.read()
        block_datas = self.divide_file(data)
        block_list = []

        print("uploading...")
        for block_data in block_datas:
            # request block in namenode
            block_meta_dict = self.request_new_block(fid)
            block_info = block_meta_dict["block"]
            datanodes = block_meta_dict["datanodes"]
            block_info["length"] = len(block_data)
            last_checksum = None

            # upload blocks to datanodes
            for node in datanodes:
                success = False
                err = None
                for t in range(5):
                    try:
                        checksum = self.send_full_block_to_datanode(
                            block_info, block_data, (node, self.config["datanode_port"]))
                        success = True
                    except Exception as e:
                        err = e
                        time.sleep(2)
                        success = False
                    if success: break
                if not success:
                    logging.fatal("something went wrong while uploading the file: {}".format(err))
                    exit(1)
                if checksum is None:
                    return None
                if last_checksum is not None and last_checksum != checksum:
                    logging.error("checksum error: {} != {}".format(last_checksum, checksum))
                    return None
                last_checksum = checksum

            # update block info in namenode
            self.update_block_info(fid, block_info)

        return fid
Example #10
 def __init__(self, configuration, context):
     # The directory of the case. There is no guarantee it actually exists, e.g. when it is created by the CaseBuilder.
     self.context = context
     self.config = configuration.getroot()
     self.do_string_interpolation(recurse=self._do_recursive_string_interpolation)
     if "name" in context:
         self.case = norm_path(context["name"])
     else:
         self.case = None
 
     # Setup logging: Add an additional handler for worker based logfiles
     self.logger = logging.getLogger(self.__class__.__module__ + "." + self.__class__.__name__)
     if self._logfilename():
         handler = logging.FileHandler(self._logfilename())
         handler.setFormatter(logging.Formatter("%(asctime)s - %(name)s:%(levelname)s - %(message)s"))
         self.logger.addHandler(handler)
Example #11
 def _copy_rec(self, rel_dir, dir_node):
     """ Recursive copy according to the <files> entry. """
     src_dir = norm_path(self.config.attrib["template"], rel_dir)
     target = join(self.case, rel_dir)
     for f in os.listdir(src_dir):
         if os.path.isfile(join(src_dir, f)):
             for tag in dir_node.findall("./file"):
                 if re.match(tag.attrib["name"], f):
                     try:
                         os.makedirs(target)
                     except OSError:
                         pass # Directory already exists
                     self.logger.debug("Copy file from %s to %s" % (join(src_dir, f), join(target, f)))
                     shutil.copy(join(src_dir, f), join(target, f))
         elif os.path.isdir(join(src_dir, f)):
             for tag in dir_node.findall("./directory"):
                 if re.match(tag.attrib["name"], f):
                     self._copy_rec( os.path.join(rel_dir, f), tag)
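
The copy is driven by a <files> description of the template: the name attributes of <file> and <directory> entries are regular expressions matched with re.match, and nested <directory> elements are handled by the recursive call. A hypothetical fragment this method could walk (the entries themselves are made up):

<files>
    <file name="Allrun"/>
    <directory name="system">
        <file name=".*Dict"/>
    </directory>
</files>
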
Example #12
    def copy_0_time(self):
        """ Copy the boundary conditions from another case. """
        template = norm_path(self.config.attrib["template"])
        node = self.config.find("./zeroTime")
        timestep = node.attrib.get("time", "0")
        if timestep == "latestTime":
            casedirs = []
            for d in os.listdir(template):
                if not os.path.isdir(join(template, d)):
                    continue
                try:
                    casedirs.append((float(d), d))
                except ValueError:
                    pass
            timestep = max(casedirs)[1]  # directory name of the latest time step

        src = join(template, timestep)
        self.logger.info("Copy 0 timestep from %s", src)
        os.mkdir(join(self.case, "0"))
        for f in os.listdir(src):
            shutil.copy( join(src, f), join(self.case, "0") )
Example #13
def put(case_config, prio):
    case_config = norm_path(case_config)
    print "Put case with config %s, priority %s in the queue." % (case_config, prio)
    jid = proxy.enqueue(prio, case_config)
    print "Queued job with ID", jid