Example #1
def _setup_flowsheet_turbine_node(dat, nkey, user_name):
    """ From s3 download all simulation files into AspenSinterComsumer cache directory '{working_directory\test\{simulation_guid}'.  If
    Simulation does not exist create one.  If Simulation does exist just s3 download all simulation files into the above cache directory.

    The new simulation_guid is created for all file updates to TurbineWS, so this is sidestepping that process.

    TODO: Provide a simulation_id via S3 ( eg.  {simulation_name}/Id )

    """
    assert len(dat.flowsheet.nodes[nkey].turbApp) == 2, \
        'DAT Flowsheet nodes turbApp is %s' %dat.flowsheet.nodes[nkey].turbApp

    node = dat.flowsheet.nodes[nkey]
    turb_app = node.turbApp[0]
    model_name = node.modelName
    assert turb_app is not None
    turb_app = turb_app.lower()
    assert turb_app in ['acm', 'aspenplus'], 'unknown turbine application "%s"' %turb_app

    """ Search S3 Bucket for node simulation
    """
    s3 = boto3.client('s3', region_name=AWS_REGION)
    bucket_name = FOQUSAWSConfig.get_instance().get_simulation_bucket_name()
    prefix = '%s/%s/' %(user_name,model_name)
    l = s3.list_objects(Bucket=bucket_name, Prefix=prefix)
    assert 'Contents' in l, 'Node %s failure: S3 Bucket %s is missing simulation files for "%s"' %(nkey, bucket_name, prefix)
    key_sinter_filename = None
    key_model_filename = None
    s3_key_list = [i['Key'] for i in l['Contents']]
    _log.debug('Node model %s staged-input files %s' %(model_name, s3_key_list))
    for k in s3_key_list:
        if k.endswith('/%s_sinter.json' %turb_app):
            key_sinter_filename = k
        elif turb_app == 'acm' and k.endswith('.acmf'):
            assert key_model_filename is None, 'detected multiple model files'
            key_model_filename = k
        elif turb_app == 'aspenplus' and k.endswith('.bkp'):
            assert key_model_filename is None, 'detected multiple model files'
            key_model_filename = k

    assert key_sinter_filename is not None, 'Flowsheet node=%s simulation=%s sinter configuration not in %s' %(nkey, model_name, str(s3_key_list))
    assert key_model_filename is not None, 'Flowsheet node=%s simulation=%s model file not in %s' %(nkey, model_name, str(s3_key_list))

    """ search TurbineLite WS for node simulation
    """
    print(turbine_simulation_script.__file__)
    turbine_cfg = node.gr.turbConfig.getFile()
    _log.debug('CWD: %s', os.path.abspath(os.path.curdir))
    turbine_cfg = os.path.abspath(turbine_cfg)
    _log.debug('Turbine Configuration File: %s', turbine_cfg)
    sim_list = turbine_simulation_script.main_list([turbine_cfg], func=None)
    print('Simulation List %s' %sim_list)
    sim_d = [i for i in sim_list if i['Name'] == model_name]
    cache_sim_guid = None
    assert len(sim_d) < 2, 'Expecting 0 or 1 entries for simulation %s' %model_name
    if len(sim_d) == 0:
        _log.debug('No simulation="%s" in TurbineLite' %model_name)
        sim_d = None
        cache_sim_guid = str(uuid.uuid4())
    else:
        _log.debug('Found simulation="%s" in TurbineLite' %model_name)
        sim_d = sim_d[0]
        assert 'Id' in sim_d, 'Missing keys in Simulation %s' %sim_d
        cache_sim_guid = sim_d['Id']

    """ upload all staged-inputs to TurbineLite if new or updated in
    s3://{bucketname}/{username}/{simulation}
    """
    entry_list = [i for i in l['Contents'] if i['Key'] != prefix and i['Key'].startswith(prefix)]
    update_required = False
    #target_dir = os.path.join(CURRENT_JOB_DIR, model_name)
    target_dir = os.path.join(WORKING_DIRECTORY, 'test', cache_sim_guid)
    os.makedirs(target_dir, exist_ok=True)
    sinter_local_filename = None
    for entry in entry_list:
        _log.debug("s3 staged input: %s", entry)
        key = entry['Key']
        etag = entry.get('ETag', "").strip('"')
        # Upload to TurbineLite
        # if ends with json or acmf
        si_metadata = []
        target_file_path = None
        assert key.startswith(prefix)
        if key == key_sinter_filename:
            #assert key_sinter_filename == '/'.join(prefix, key_sinter_filename.split('/')[-1]), \
            #    'sinter configuration "%s" must be in model base directory: "%s"' %(key_model_filename,prefix)
            target_file_path = os.path.join(target_dir, "sinter_configuration.txt")
            sinter_local_filename = target_file_path
            if sim_d: si_metadata = [i for i in sim_d["StagedInputs"] if i['Name'] == 'configuration']
            s3.download_file(bucket_name, key, target_file_path)
        elif key == key_model_filename:
            #assert key_model_filename == '/'.join(prefix, key_model_filename.split('/')[-1]), \
            #    'sinter configuration "%s" must be in model base directory: "%s"' %(key_model_filename,prefix)
            target_file_path = os.path.join(target_dir, key.split('/')[-1])
            if sim_d: si_metadata = [i for i in sim_d["StagedInputs"] if i['Name'] == 'aspenfile']
            s3.download_file(bucket_name, key, target_file_path)
        else:
            args = [ i for i in key[len(prefix):].split('/') if i ]
            args.insert(0, target_dir)
            target_file_path = os.path.join(*args)
            if sim_d: si_metadata = [i for i in sim_d["StagedInputs"] if i['Name'] == key.split('/')[-1]]
            s3.download_file(bucket_name, key, target_file_path)

        _log.debug('model="%s" key="%s" staged-in file="%s"' %(model_name, key, target_file_path))
        assert len(si_metadata) < 2, 'Turbine Error:  Duplicate entries for "%s", "%s"' %(model_name, key)
        """NOTE: Multipart uploads have different ETags ( end with -2  or something )
        Thus the has comparison will fail.  For now ignore it, but fixing this check is performance optimization.

        if len(si_metadata) == 1:
            file_hash = si_metadata[0]['MD5Sum']
            if file_hash.lower() != etag.lower():
                _log.debug('Updated detected(hash "%s" != "%s"):  s3.getObject "%s"' %(etag,file_hash,key))
                s3.download_file(bucket_name, key, target_file_path)
                update_required = True
            else:
                _log.debug('md5 matches for staged-in file "%s"' %key)
        else:
            _log.debug('Add to Turbine Simulation(%s) s3.getObject: "%s"' %(model_name, key))
            s3.download_file(bucket_name, key, target_file_path)
            update_required = True
        """


    assert sinter_local_filename is not None, 'missing sinter configuration file'

    if sim_d is None:
        _log.debug('Adding Simulation "%s" "%s"' %(model_name,cache_sim_guid))
        node.gr.turbConfig.uploadSimulation(model_name, sinter_local_filename, guid=cache_sim_guid, update=False)
    """
Example #2
 def test_GET_List(self):
     l = tss.main_list([self.config_name], func=None)
     self.log.debug('simulation names %s' %l)
     self.assertTrue(type(l) is list, 'return type should be list')
Example #3
 def test_GET_List(self):
     simulation_list = tss.main_list([self.config_name], func=None)
     simulation_names = [i['Name'] for i in simulation_list]
     self.log.debug('simulation names %s' %simulation_names)
     self.assertTrue(set([]).issubset(simulation_names),
                     '%s not superset' %simulation_names)
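
Note that set([]).issubset(simulation_names) is vacuously true (the empty set is a subset of everything), so the assertion above can never fail. A sketch of a stronger check, assuming the test fixture stages at least one known simulation; 'BFB_v11' is a hypothetical placeholder name:

 def test_GET_List_contains_expected(self):
     simulation_list = tss.main_list([self.config_name], func=None)
     simulation_names = [i['Name'] for i in simulation_list]
     expected = {'BFB_v11'}  # hypothetical; use a name staged by the fixture
     self.assertTrue(expected.issubset(simulation_names),
                     '%s does not contain %s' % (simulation_names, expected))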
Example #4
    def setup_foqus(self, db, job_desc):
        """
        Move job to state setup
        Pull FOQUS nodes' simulation files from AWS S3
        ACM simulations are stored in TurbineLite
        """
        sfile, rfile, vfile, ofile = getfilenames(job_desc['Id'])

        guid = job_desc['Id']
        jid = None
        simId = job_desc['Simulation']

        # Run the job
        db.add_message("consumer={0}, starting job {1}"\
            .format(db.consumer_id, jid), guid)

        _log.debug("setup foqus")
        db.job_change_status(guid, "setup")

        configContent = db.get_configuration_file(simId)

        logging.getLogger("foqus." + __name__)\
            .info("Job {0} is submitted".format(jid))

        #db.jobConsumerID(guid, consumer_uuid)
        #db.job_prepare(guid, jid, configContent)

        # Load the session file
        dat = Session(useCurrentWorkingDir=True)
        dat.load(sfile, stopConsumers=True)
        dat.loadFlowsheetValues(vfile)

        # dat.flowsheet.nodes.
        s3 = boto3.client('s3', region_name='us-east-1')
        bucket_name = 'foqus-simulations'
        flowsheet_name = job_desc['Simulation']
        username = '******'
        prefix = '%s/%s' %(username,flowsheet_name)
        l = s3.list_objects(Bucket=bucket_name, Prefix=prefix)
        assert 'Contents' in l, "S3 Simulation:  No keys match %s" %prefix
        _log.debug("Process Flowsheet nodes")
        for nkey in dat.flowsheet.nodes:
            if dat.flowsheet.nodes[nkey].turbApp is None:
                continue
            assert len(dat.flowsheet.nodes[nkey].turbApp) == 2, \
                'DAT Flowsheet nodes turbApp is %s' %dat.flowsheet.nodes[nkey].turbApp

            node = dat.flowsheet.nodes[nkey]
            turb_app = node.turbApp[0]
            model_name = node.modelName
            #sinter_filename = 'anonymous/%s/%s/%s.json' %(job_desc['Simulation'],nkey, model_name)
            sinter_filename = '/'.join((username, flowsheet_name, nkey, '%s.json' %model_name))

            s3_key_list = [i['Key'] for i in l['Contents']]
            assert sinter_filename in s3_key_list, 'missing sinter configuration "%s" not in %s' %(sinter_filename, str(s3_key_list))
            simulation_name = job_desc.get('Simulation')
            #sim_list = node.gr.turbConfig.getSimulationList()
            sim_list = turbine_simulation_script.main_list([node.gr.turbConfig.getFile()])

            _log.info("Node Turbine Simulation Requested: (%s, %s)", turb_app, simulation_name)

            if turb_app == 'ACM':
                model_filename = 'anonymous/%s/%s/%s.acmf' %(simulation_name,nkey, model_name)
                assert model_filename in s3_key_list, 'missing model file "%s"' %model_filename
            else:
                _log.info("Turbine Application Not Implemented: '%s'", turb_app)
                raise NotImplementedError('Flowsheet Node model type: "%s"' %str(dat.flowsheet.nodes[nkey].turbApp))

            sim_d = [i for i in sim_list if i['Name'] == model_name]
            assert len(sim_d) < 2, 'Expecting 0 or 1 entries for simulation %s' %simulation_name
            if len(sim_d) == 0:
                sim_d = None
            else:
                sim_d = sim_d[0]

            prefix = 'anonymous/%s/%s/' %(job_desc['Simulation'],nkey)
            entry_list = [i for i in l['Contents']
                          if i['Key'] != prefix and i['Key'].startswith(prefix)]
            sinter_local_filename = None
            update_required = False
            for entry in entry_list:
                _log.debug("ENTRY: %s", entry)
                key = entry['Key']
                etag = entry.get('ETag', "").strip('"')
                file_name = key.split('/')[-1]
                file_path = os.path.join(CURRENT_JOB_DIR, file_name)
                # Upload to TurbineLite
                # if ends with json or acmf
                si_metadata = []
                if key.endswith('.json'):
                    _log.debug('CONFIGURATION FILE')
                    sinter_local_filename = file_path
                    if sim_d:
                        si_metadata = [i for i in sim_d["StagedInputs"]
                                       if i['Name'] == 'configuration']
                elif key.endswith('.acmf'):
                    _log.debug('ACMF FILE')
                    if sim_d:
                        si_metadata = [i for i in sim_d["StagedInputs"]
                                       if i['Name'] == 'aspenfile']
                else:
                    raise NotImplementedError('Not allowing File "%s" to be staged in' %key)

                assert len(si_metadata) < 2, 'Turbine Error:  Too many entries for "%s", "%s"' %(simulation_name, file_name)

                # NOTE: Multipart uploads have different ETags ( end with -2 or something ),
                #     so the hash comparison will fail.
                #     For now ignore it; fixing this check is a performance optimization.
                #
                if len(si_metadata) == 1:
                    file_hash = si_metadata[0]['MD5Sum']
                    if file_hash.lower() != etag.lower():
                        _log.debug("Compare %s:  %s != %s" %(file_name, etag, file_hash))
                        _log.debug('s3 download(%s): %s' %(CURRENT_JOB_DIR, key))
                        s3.download_file(bucket_name, key, file_path)
                        update_required = True
                    else:
                        _log.debug("MATCH")
                        s3.download_file(bucket_name, key, file_path)
                else:
                    _log.debug("Add to Turbine Simulation(%s) File: %s" %(simulation_name, file_name))
                    s3.download_file(bucket_name, key, file_path)
                    update_required = True

            assert sinter_local_filename is not None, 'missing sinter configuration file'

            if model_name not in [i['Name'] for i in sim_list]:
                _log.debug('Adding Simulation "%s"' %model_name)
                node.gr.turbConfig.uploadSimulation(model_name, sinter_local_filename, update=False)
            elif update_required:
                # NOTE: Requires the configuration file on update, so must download_file it above...
                _log.debug('Updating Simulation "%s"' %model_name)
                node.gr.turbConfig.uploadSimulation(model_name, sinter_local_filename, update=True)
            else:
                _log.debug('No Update Required for Simulation "%s"' %model_name)

        return dat
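
One caveat with the listing calls above: s3.list_objects returns at most 1000 keys per request, so a prefix with many staged inputs could silently lose entries. A sketch of collecting the complete listing with a boto3 paginator, under the same bucket/prefix assumptions as the code above:

def list_all_keys(s3, bucket_name, prefix):
    """Collect every object under prefix, following S3 pagination."""
    paginator = s3.get_paginator('list_objects_v2')
    contents = []
    for page in paginator.paginate(Bucket=bucket_name, Prefix=prefix):
        contents.extend(page.get('Contents', []))
    return contents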
Example #5
def run_foqus(job_desc):
    """
    job_desc: {"Initialize":false,
        "Input":{"BFBRGN.Cr":1,"BFBRGN.Dt":9.041,"BFBRGN.Lb":8.886,
            "BFBRGNTop.Cr":1,"BFBRGNTop.Dt":9.195,"BFBRGNTop.Lb":7.1926,
            "BFBadsB.Cr":1,"BFBadsB.Dt":11.897,"BFBadsB.Lb":2.085,
            "BFBadsB.dx":0.0127,"BFBadsM.Cr":1,"BFBadsM.Dt":15,"BFBadsM.Lb":1.972,
            "BFBadsM.dx":0.06695,"BFBadsT.Cr":1,"BFBadsT.Dt":15,"BFBadsT.Lb":2.203,
            "BFBadsT.dx":0.062397,"GHXfg.A_exch":16358,"GHXfg.GasIn.P":1.01325,
            "GHXfg.GasIn.T":54,"Kd":100,"MinStepSize":0.001,"RunMode":"Steady State",
            "Script":"Run_Steady","Snapshot":"","TimeSeries":[0],"TimeUnits":"Hours",
            "dp":0.00015,"fg_flow":100377,"homotopy":0,"printlevel":0},
        "Reset":false,
        "Simulation":"BFB_v11_FBS_01_26_2018",
        "Visible":false,
        "Id":"8a3033b4-6de2-409c-8552-904889929704"}
    """
    exit_code = 0
    sfile, rfile, vfile, ofile = getfilenames(job_desc['Id'])
    # Session file to run
    load_gui = False
    dat = Session(useCurrentWorkingDir=True)
    # Make ctrl-c and SIGINT do nothing but interrupt the loop
    #signal.signal(signal.SIGINT, signal_handler)

    #Register consumer TurbineLite DB
    db = TurbineLiteDB()
    #db.dbFile = os.path.join(dat.foqusSettings.turbLiteHome,
    #                "Data/TurbineCompactDatabase.sdf")
    #logging.getLogger("foqus." + __name__).info(
    #    "TurbineLite Database:\n   {0}".format(db.dbFile))
    #add 'foqus' app to TurbineLite DB if not already there
    #db.add_new_application('foqus')
    #register the consumer in the database
    db.consumer_register()
    #print("consumer_uuid: {0}".format(consumer_uuid))
    #write the time to the turbineLite db about every minute
    kat = _KeepAliveTimer(db, freq=60)
    kat.start()

    guid = job_desc['Id']
    jid = None
    simId = job_desc['Simulation']

    # Run the job
    db.add_message("consumer={0}, starting job {1}"\
        .format(db.consumer_id, jid), guid)

    db.job_change_status(guid, "setup")

    configContent = db.get_configuration_file(simId)

    logging.getLogger("foqus." + __name__)\
        .info("Job {0} is submitted".format(jid))

    #db.jobConsumerID(guid, consumer_uuid)
    db.job_prepare(guid, jid, configContent)

    # Load the session file
    dat.load(sfile, stopConsumers=True)
    dat.loadFlowsheetValues(vfile)
    '''
    "flowsheet"
        "nodes": {
          "BFB": {
            "browser_conf": null,
            "modelName": "BFB_v11_FBS_01_26_2018",
            "pythonCode": "#run steady state init\nself.options[\"Script\"].value = \"Run_Steady\"\nself.runModel()\nif self.calcError != -1:\n    raise(Exception(\"Steady state homotopy failed\"))\n#Run optimization\nself.options[\"Script\"].value = \"Init_Opt\"\nself.runModel()\nif self.calcError != -1:\n    raise(Exception(\"Optimization failed\"))\n# Update the x and f dicts from the node output\n# f gets copied to the node outputs so need this \n# for now x doesn't get copied back\nx, f = self.getValues()",
            "calcError": 0,
            "turbSession": "9c9dff4f-48b9-482a-99be-1bfe879350f5",
            "dmf_sim_ids": null,
            "scriptMode": "total",
            "turbApp": [
              "ACM",
              "aspenfile"
            ],
            "synced": true,
            "modelType": 2,
            "y": 0,
            "x": -200,
            "z": 0,
            "options": {...}
    '''
    # dat.flowsheet.nodes.
    s3 = boto3.client('s3', region_name='us-east-1')
    bucket_name = 'foqus-simulations'
    l = s3.list_objects(Bucket=bucket_name,
                        Prefix='anonymous/%s' % job_desc['Simulation'])
    if 'Contents' not in l:
        _log.info("S3 Simulation:  No keys match anonymous/%s",
                  job_desc['Simulation'])
        return

    _log.debug("FLOWSHEET NODES")
    for nkey in dat.flowsheet.nodes:
        if dat.flowsheet.nodes[nkey].turbApp is None:
            continue
        assert len(dat.flowsheet.nodes[nkey].turbApp) == 2, \
            'DAT Flowsheet nodes turbApp is %s' %dat.flowsheet.nodes[nkey].turbApp
        node = dat.flowsheet.nodes[nkey]
        model_name = node.modelName
        sinter_filename = 'anonymous/%s/%s/%s.json' % (job_desc['Simulation'],
                                                       nkey, model_name)
        s3_key_list = [i['Key'] for i in l['Contents']]
        assert sinter_filename in s3_key_list, 'missing sinter configuration "%s" not in %s' % (
            sinter_filename, str(s3_key_list))
        simulation_name = job_desc.get('Simulation')
        #sim_list = node.gr.turbConfig.getSimulationList()
        sim_list = turbine_simulation_script.main_list(
            [node.gr.turbConfig.getFile()])

        print("===" * 20)
        print(sim_list)
        print("===" * 20)
        sim_d = [i for i in sim_list if i['Name'] == model_name]
        assert len(sim_d) < 2, \
            'Expecting 0 or 1 entries for simulation %s' % simulation_name
        if len(sim_d) == 0:
            sim_d = None
        else:
            sim_d = sim_d[0]

        if dat.flowsheet.nodes[nkey].turbApp[0] == 'ACM':
            model_filename = 'anonymous/%s/%s/%s.acmf' % (simulation_name,
                                                          nkey, model_name)
            assert model_filename in s3_key_list, 'missing model file "%s"' % model_filename
        else:
            raise NotImplementedError('Flowsheet Node model type: "%s"' %
                                      str(dat.flowsheet.nodes[nkey].turbApp))

        prefix = 'anonymous/%s/%s/' % (job_desc['Simulation'], nkey)
        entry_list = [i for i in l['Contents']
                      if i['Key'] != prefix and i['Key'].startswith(prefix)]
        sinter_local_filename = None
        update_required = False
        for entry in entry_list:
            _log.debug("ENTRY: %s", entry)
            key = entry['Key']
            etag = entry.get('ETag', "").strip('"')
            file_name = key.split('/')[-1]
            file_path = os.path.join(workingDirectory, file_name)
            # Upload to TurbineLite
            # if ends with json or acmf
            si_metadata = []
            if key.endswith('.json'):
                _log.debug('CONFIGURATION FILE')
                sinter_local_filename = file_path
                if sim_d:
                    si_metadata = [i for i in sim_d["StagedInputs"]
                                   if i['Name'] == 'configuration']
            elif key.endswith('.acmf'):
                _log.debug('ACMF FILE')
                if sim_d:
                    si_metadata = [i for i in sim_d["StagedInputs"]
                                   if i['Name'] == 'aspenfile']
            else:
                raise NotImplementedError('Not allowing File "%s" to be staged in' % key)

            assert len(si_metadata) < 2, \
                'Turbine Error:  Too many entries for "%s", "%s"' % (
                    simulation_name, file_name)

            # NOTE: Multipart uploads have different ETags ( end with -2 or something ),
            #     so the hash comparison will fail.
            #     For now ignore it; fixing this check is a performance optimization.
            #
            if len(si_metadata) == 1:
                file_hash = si_metadata[0]['MD5Sum']
                if file_hash.lower() != etag.lower():
                    _log.debug("Compare %s:  %s != %s" %
                               (file_name, etag, file_hash))
                    _log.debug('s3 download(%s): %s' % (workingDirectory, key))
                    s3.download_file(bucket_name, key, file_path)
                    update_required = True
                else:
                    _log.debug("MATCH")
                    s3.download_file(bucket_name, key, file_path)
            else:
                _log.debug("Add to Turbine Simulation(%s) File: %s" %
                           (simulation_name, file_name))
                s3.download_file(bucket_name, key, file_path)
                update_required = True

        assert sinter_local_filename is not None, 'missing sinter configuration file'

        if model_name not in [i['Name'] for i in sim_list]:
            _log.debug('Adding Simulation "%s"' % model_name)
            node.gr.turbConfig.uploadSimulation(model_name,
                                                sinter_local_filename,
                                                update=False)
        elif update_required:
            # NOTE: Requires the configuration file on update, so must download_file it above...
            _log.debug('Updating Simulation "%s"' % model_name)
            node.gr.turbConfig.uploadSimulation(model_name,
                                                sinter_local_filename,
                                                update=True)
        else:
            _log.debug('No Update Required for Simulation "%s"' % model_name)

    db.job_change_status(guid, "running")
    gt = dat.flowsheet.runAsThread()
    terminate = False
    while gt.is_alive():
        gt.join(10)
        status = db.consumer_status()
        if status == 'terminate':
            terminate = True
            db.job_change_status(guid, "error")
            gt.terminate()
            break
    if terminate:
        return
    if gt.res[0]:
        dat.flowsheet.loadValues(gt.res[0])
    else:
        dat.flowsheet.errorStat = 19
    dat.saveFlowsheetValues(ofile)
    db.job_save_output(guid, workingDirectory)
    dat.save(filename=rfile,
             updateCurrentFile=False,
             changeLogMsg="Saved Turbine Run",
             bkp=False,
             indent=0)
    if dat.flowsheet.errorStat == 0:
        db.job_change_status(guid, "success")
        db.add_message(
            "consumer={0}, job {1} finished, success"\
                .format(db.consumer_id, jid), guid)
    else:
        db.job_change_status(guid, "error")
        db.add_message(
            "consumer={0}, job {1} finished, error"\
                .format(db.consumer_id, jid), guid)
    logging.getLogger("foqus." + __name__)\
        .info("Job {0} finished"\
        .format(jid))

    #stop all Turbine consumers
    dat.flowsheet.turbConfig.stopAllConsumers()
    dat.flowsheet.turbConfig.closeTurbineLiteDB()
    sys.exit(exit_code)
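
The monitoring loop above is a reusable pattern: run the flowsheet on a worker thread and poll an external status flag between join(timeout) calls. A self-contained sketch of the same pattern, with should_terminate standing in for db.consumer_status() == 'terminate' (the original additionally calls gt.terminate() and marks the job as errored):

import threading

def run_with_poll(work, should_terminate, poll_seconds=10):
    """Run work() on a worker thread, polling a termination flag between joins."""
    t = threading.Thread(target=work)
    t.start()
    while t.is_alive():
        t.join(poll_seconds)   # wake periodically instead of blocking forever
        if should_terminate():
            return False       # caller handles cleanup / status updates
    return True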
Example #6
 def test_GET_List(self):
     l = tss.main_list([self.config_name], func=None)
     self.log.debug('simulation names %s' %
                    [(f['Name'], f['Id']) for f in l])
     self.assertTrue(type(l) is list, 'return type should be list')