Example #1
0
def check_ocaml():
    """Verify the OCaml toolchain needed for the build.

    Checks for ocaml >= 3.12 (or any 4.x), for ocamlfind, and for sexplib
    at version >= 7.0.  Each missing piece raises its own error type so the
    caller can report precisely what is wrong.  Returns the sh-wrapped
    ocamlbuild command on success.
    """
    try:
        # Test for ocaml 3.12.*
        status('Testing for OCaml 3.12.* or greater')
        from sh import ocaml, ocamlbuild
        ver = ocaml('-version')
        print ver
        assert '3.12' in ver or '4.' in ver
    except:
        # any failure here (missing binary, bad version) maps to one error
        raise OCamlVersionError()
    
    try:
        status('Testing for ocamlfind')
        from sh import ocamlfind
        print '..OK!'
    except:
        raise OCamlFindlibError()
    
    def assert_lib_version(lib, ver_req):
        # Raise OCamlLibMissingError unless ocamlfind reports lib >= ver_req.
        def lib_version(lib):
            # first 'version ...' line of `ocamlfind query -l <lib>`
            status('Testing for %s' % lib)
            info = ocamlfind('query', '-l', lib)
            return [l.split()[-1] for l in info.splitlines() if l.startswith('version')][0]
        try:
            ver = lib_version(lib)
            # compare only major.minor as a float, e.g. '7.0.5' -> 7.0
            ver = float( '.'.join( ver.split('.')[0:2] ) )
            assert ver >= ver_req
        except:
            raise OCamlLibMissingError('%s >= %.1f' % (lib, ver_req))
    
    assert_lib_version('sexplib', 7.0)
    # We don't actually use batteries right now
    #assert_lib_version('batteries', 1.4)
    return ocamlbuild
Example #2
0
def create_m3u_playlist(playlists, walkman_dir, walkman_prefix):
    """Write one .m3u file per playlist into walkman_dir.

    Each entry is the playlist's songs rewritten with walkman_prefix so the
    paths resolve on the device.
    """
    util.status('Creating m3u playlists on Walkman')
    progress = tqdm(playlists)
    for playlist in progress:
        name = os.path.basename(playlist)
        progress.set_description('{}'.format(name))
        songs_in_playlists = util.get_files_in_playlist(playlist)
        songs_with_prefix = util.format_playlist_with_prefix(
            songs_in_playlists, walkman_prefix)
        # BUG FIX: joining the full playlist path onto walkman_dir discarded
        # walkman_dir entirely for absolute paths; use only the basename
        # (as the progress description above already does).
        with open(os.path.join(walkman_dir, '{}.m3u'.format(name)),
                  'w') as f:
            f.write('\n'.join(songs_with_prefix))
            f.write('\n')
Example #3
0
def sync_playlist(playlists, walkman_dir, remove_unmatched=False):
    """Mirror the songs of *playlists* from the iTunes library to the Walkman.

    Compares the playlist contents against what is already on the device and
    copies/removes files accordingly.  When remove_unmatched is True, device
    files with no local counterpart are deleted.
    """
    # Collect the local side of the comparison.
    util.status('Getting songs in playlists from iTunes')
    local_songs = util.get_files_in_playlist(playlists)
    src_root, local_songs_rel = util.split_filepath(
        local_songs, setting.ITUNES_MEDIA_ROOT)

    # Collect the device side.
    util.status('Getting songs on Walkman')
    device_songs = util.scan_directory(walkman_dir, setting.MUSIC_FILE_EXT)

    # Diff the two lists (third element of the result is unused here).
    util.status('Comparing song lists')
    updates, removals, _ = util.compare_filelists(local_songs_rel,
                                                  device_songs,
                                                  root_src=src_root,
                                                  root_dst=walkman_dir)

    # Apply the diff.
    util.status('Syncing songs from local to Walkman')
    util.sync_filelists(updates,
                        removals,
                        src_dir=src_root,
                        dst_dir=walkman_dir,
                        remove_unmatched=remove_unmatched)
Example #4
0
def sync_lyrics(playlists, lyrics_dir, walkman_dir, remove_unmatched=False):
    """Sync lyrics files for *playlists* from lyrics_dir onto the Walkman.

    Compares locally available lyrics against those already on the device
    and copies/removes files so the device matches.  When remove_unmatched
    is True, device lyrics with no local counterpart are deleted.
    """
    # Get lyrics list
    util.status('Getting list of lyrics files locally')
    lyrics_on_local = util.get_lyrics_files_in_playlist(playlists)
    _, lyrics_on_local_rel = util.split_filepath(lyrics_on_local)

    # BUG FIX: status message read "list ofj lyrics".
    util.status('Getting list of lyrics files on Walkman')
    lyrics_on_walkman = util.scan_directory(walkman_dir,
                                            setting.LYRICS_FILE_EXT)

    # Update lyrics
    util.status('Comparing lyrics lists')
    to_be_updated, to_be_removed, _ = util.compare_filelists(
        lyrics_on_local_rel,
        lyrics_on_walkman,
        root_src=lyrics_dir,
        root_dst=walkman_dir)

    util.status('Syncing lyrics files from local to Walkman')
    util.sync_filelists(to_be_updated,
                        to_be_removed,
                        src_dir=lyrics_dir,
                        dst_dir=walkman_dir,
                        remove_unmatched=remove_unmatched)
Example #5
0
def check_llvm():
    """Return True iff a local LLVM 3.2 build with OCaml bindings is usable.

    Probes ./llvm/Release+Asserts for the OCaml llvm.cma library and runs
    its llvm-config to confirm the version; any failure yields False.
    """
    try:
        llvm_path = './llvm/Release+Asserts'
        assert isfile(llvm_path + '/lib/ocaml/llvm.cma')  # is llvm.cma there?
        llvm_config = Command(llvm_path + '/bin/llvm-config')
        ver = llvm_config('--version')  # try calling llvm-config
        print ver
        assert '3.2' in ver
        return True
    except:
        # deliberate best-effort probe: any exception means "not available"
        return False


# Test for ocaml 3.12.*
status('Testing for OCaml 3.12.* or greater')
from pbs import ocaml, ocamlbuild
ver = ocaml('-version')
print ver
assert '3.12' in ver or '4.' in ver
print '...OK!'

platform = sys.platform.lower()
if 'linux' in platform:
    try:
        status('Testing for g++')
        from pbs import which
        assert which('g++')
        print '...OK!'

        status('Testing for package libc6-dev-i386')
Example #6
0
def FragilityStamper(compiled_fc, MJ_fc):
    """Add 'MJ_' fields to compiled_fc and fill them from the MacJac result.

    For every calc field, an 'MJ_'-prefixed twin is created on compiled_fc
    (if not already present) and populated from MJ_fc, joined on GLOBALID.
    """
    status("STARTING PROCESS TO ADD MACJAC FRAGILITY RESULT")

    # map each original calc field to its MacJac counterpart
    mj_fields = {field: "MJ_" + field for field in config.calc_fields}

    status("Adding and populating 'MJ_' fields")
    # add MJ fields and populate values from MacJac fragility result
    ID_field = "GLOBALID"
    existing_fields = getField_Names(compiled_fc)
    for src_name, mj_name in mj_fields.items():
        if mj_name in existing_fields:
            status(mj_name + " already exists - no field added")
        else:
            status(" - adding " + mj_name)
            # Liq_Prob is textual; every other calc field is numeric
            new_type = "TEXT" if src_name == "Liq_Prob" else "DOUBLE"
            arcpy.AddField_management(compiled_fc, mj_name, new_type)

        status("-- calculating " + mj_name)
        CopyFieldFromFeature(MJ_fc, ID_field, src_name, compiled_fc, ID_field,
                             mj_name)

    status("PROCESS COMPLETE")
Example #7
0
def process(buf):
    """Handle one incoming MQTT message on a MicroPython node.

    Parameters:
        buf: a (topic_bytes, payload_bytes) pair as delivered by the MQTT
            client callback.

    The topic is decoded and split on '/'.  '/time/...' messages go to
    cmd.cmdtime; topics under '/<cfg.mqtt_root>/' are commands addressed
    either to ALL nodes or to this node's uid and are dispatched on the
    third topic segment.  Imports are done lazily and deleted after use to
    keep RAM pressure low on the microcontroller.
    """
    from cfg import ESPConfig
    cfg = ESPConfig()

    (src, dst, cmd, args, msg, atexit) = ('', '', '', '', '', '')

    try:
        topic = str(buf[0], 'utf8').split('/')[1:]
        msg = str(buf[1], 'utf8')
    except Exception as e:
        print('Error processing cmd from "%s": %s' % (buf, e))
        return

    # we can get the time and keep it accurate
    #   (and see how it drifts)
    # and we can also notice others clocks innacuracies

    if topic[0] == 'time':
        try:
            from cmd.cmdtime import cmdtime
            cmdtime(topic, msg)
            # BUG FIX: 'del cmdtime' used to sit in a finally block, which
            # raised NameError whenever the import itself had failed.
            del cmdtime
        except Exception as e:
            from sys import print_exception as prexc
            prexc(e)
            del prexc
    elif topic[0] == cfg.mqtt_root:
        if topic[1].upper() == 'ALL':
            dst = 'all'  # broadcast to every node
            #print('/%s/ALL: %s - %s' % (cfg.mqtt_root, topic[2:], msg))
        elif topic[1] == cfg.uid:
            dst = 'direct'  # addressed to this node only
            #print('/%s/%s: %s - %s' % (cfg.mqtt_root, cfg.uid, topic[2:], msg))
        else:
            # someone else's direct message; ignore
            return
        cmd = topic[2].lower()

        if len(topic) > 3:
            args = topic[3:]

        if cmd == 'status':
            from util import status
            status(2)
            del status
        elif cmd == 'peers':
            print('Asked for a peerlist')
            from mqtt import pub
            pub('/sys/%s/peers' % cfg.uid, cfg.peers)
            del pub
        elif cmd == 'ping':
            print('Got PING')
            # NOTE(review): src is never assigned before this check, so it
            # is always '' — confirm whether a sender field was intended.
            if src == cfg.hostname: return
            from mqtt import peerpong
            peerpong()
            del peerpong
        elif cmd == 'pong':
            from json import loads
            from utime import localtime
            rep = loads(msg)
            if rep['hostname'] in list(cfg.peers.keys()):
                # known peer: refresh liveness and signal-strength history
                print('Got PONG')
                cfg.peers[rep['hostname']]['lastseen'] = localtime()
                cfg.peers[rep['hostname']]['rssi'].append(rep['rssi'])
            elif rep['hostname'] != cfg.hostname:
                # first sighting: announce the new peer, then record it
                print('Adding new peer: %s' % rep['hostname'])
                from mqtt import pub
                pub('/sys/%s/peer/%s' % (cfg.uid, rep['uid']),
                    [rep['hostname'], rep['version']])
                rssi = rep['rssi']
                del rep['rssi']
                rep.update({
                    'rssi': [rssi],
                    'firstseen': localtime(),
                    'lastseen': localtime()
                })
                cfg.peers[rep['hostname']] = rep
            del loads
        elif cmd == 'update':
            print('Updating: ', end='')
            try:
                # NOTE(review): args is a list (topic[3:]) when present, so
                # the string comparisons against 'all'/'reset' below can only
                # ever match via msg — confirm the intended addressing.
                if not msg and args != 'all':
                    print('derp, bad args')
                    return

                if (msg == 'all' or args == 'all'):
                    args = cfg.__all__
                    atexit = 'reset'
                elif args == 'reset':
                    atexit = 'reset'
                else:
                    args = msg.split(' ')

                print(' %s\n' % args)

                from net import wget
                for f in args:
                    try:
                        print('%-6s>>  %10s\r' % ('.....', f), end='')
                        wget(f)
                    except Exception as e:
                        print('WGET %s: %s' % (f, e))
                # BUG FIX: wget used to be deleted in a per-iteration
                # finally, so every file after the first failed with
                # NameError; free the name once the loop is done instead.
                del wget
            except Exception as e:
                print('UPDATE: %s' % e)
        elif cmd == 'reset':
            print('Got RESET')
            atexit = 'reset'
        elif cmd == 'cfg':
            print('Asked for config')
            from mqtt import pub
            # BUG FIX: this used to pass a generator of one-item dicts;
            # publish a single {attribute: value} mapping instead.
            # (eval only sees attribute names from dir(cfg), not input data)
            pub('/sys/%s/config' % cfg.uid,
                {x: eval('cfg.%s' % x) for x in dir(cfg)})

        elif cmd == 'wifiscan':
            print('Performing Wireless Scan')
            from mqtt import pub
            from cmd.wifiscan import wifiscan
            pub('/sys/%s/wifiscan' % cfg.uid, wifiscan(0))
            del wifiscan, pub

    if atexit == 'reset':
        print('Resetting per MQTT request')
        from utime import sleep
        from machine import reset
        sleep(1)
        # BUG FIX: was machine.reset(), but only the bare name 'reset' is
        # imported here, so the old call raised NameError.
        reset()
Example #8
0
def check_llvm():
    """Return True iff a local LLVM 3.1 build with OCaml bindings is usable.

    Probes ./llvm/Release+Asserts for the OCaml llvm.cma library and runs
    its llvm-config to confirm the version; any failure yields False.
    """
    try:
        llvm_path = "./llvm/Release+Asserts"
        assert isfile(llvm_path + "/lib/ocaml/llvm.cma")  # is llvm.cma there?
        llvm_config = Command(llvm_path + "/bin/llvm-config")
        ver = llvm_config("--version")  # try calling llvm-config
        print ver
        assert "3.1" in ver
        return True
    except:
        # deliberate best-effort probe: any exception means "not available"
        return False


# Test for ocaml 3.12.*
status("Testing for OCaml 3.12.*")
from pbs import ocaml, ocamlbuild

ver = ocaml("-version")
print ver
# hard requirement: abort the whole script unless a 3.12.x ocaml is found
assert "3.12" in ver
print "...OK!"

# Submodule update/init
# TODO: make --recursive optional
status("Checking out submodules")
git("submodule", "update", "--init", "--recursive")

# TODO: always run make -C llvm, just to make sure it's up to date. Does configure cache its settings when a reconfigure is forced?
# TODO: make install in subdir, with docs
#        requires graphviz, doxygen; target ocamlbuild to alt dir?; make clean?
Example #9
0
def check_llvm():
    """Return True iff a local LLVM 3.2 build with OCaml bindings is usable.

    Probes ./llvm/Release+Asserts for the OCaml llvm.cma library and runs
    its llvm-config to confirm the version; any failure yields False.
    """
    try:
        llvm_path = "./llvm/Release+Asserts"
        assert isfile(llvm_path + "/lib/ocaml/llvm.cma")  # is llvm.cma there?
        llvm_config = Command(llvm_path + "/bin/llvm-config")
        ver = llvm_config("--version")  # try calling llvm-config
        print ver
        assert "3.2" in ver
        return True
    except:
        # deliberate best-effort probe: any exception means "not available"
        return False


# Test for ocaml 3.12.*
status("Testing for OCaml 3.12.* or greater")
from pbs import ocaml, ocamlbuild

ver = ocaml("-version")
print ver
assert "3.12" in ver or "4." in ver
print "...OK!"

platform = sys.platform.lower()
if "linux" in platform:
    try:
        status("Testing for g++")
        from pbs import which

        assert which("g++")
        print "...OK!"
Example #10
0
            ver = lib_version(lib)
            ver = float( '.'.join( ver.split('.')[0:2] ) )
            assert ver >= ver_req
        except:
            raise OCamlLibMissingError('%s >= %.1f' % (lib, ver_req))
    
    assert_lib_version('sexplib', 7.0)
    # We don't actually use batteries right now
    #assert_lib_version('batteries', 1.4)
    return ocamlbuild

ocamlbuild = None
platform = sys.platform.lower()
if 'linux' in platform:
    try:
        status('Testing for g++')
        from sh import which
        assert which('g++')
        print '...OK!'

        status('Testing for package libc6-dev-i386')
        assert isfile('/usr/include/x86_64-linux-gnu/gnu/stubs-32.h')
        print '...OK!'
        
        try:
            ocamlbuild = check_ocaml()
            print '...OK!'
        except OCamlVersionError as e:
            print 'e'
    
        status('Testing for clang')
def FragilityMacJac():
    """Build the MacJac-backbone fragility feature class.

    Subsets the collection system to MacJac backbone segments (matched by
    COMPKEY or GLOBALID), copies them to a datestamped output feature
    class, patches known-bad MATERIAL values, extracts min/max rates from
    the MacJac combo data, converts PGD values from feet to inches,
    combines lateral-spread and settlement PGDs, and calculates the final
    K/decision fields.
    """

    # MATERIAL VALUE PATCH
    # creates a lookup dictionary from the Nulls spreadsheet
    # use to fill the MATERIAL field for the records that match the key val Compkeys
    # use "if compkey = x and origval = y then set = to newval - this serves as a check that you're not overwriting valid values
    patch_dict = util.createMaterialPatch_dict(config.materialPatch_xls)


    # CORE -------------------------------------------------------------------------

    status("STARTING FRAGILITY EXTRACTION")

    status("Getting list of MacJac compkeys/globalids")
    # get list of COMPKEYs (if COMPKEY != None), GLOBALIDs (if GLOABALID != 0)
    # (GLOBALID is only used as a fallback key when COMPKEY is Null)
    compkeylist = []
    globallist = []
    with arcpy.da.SearchCursor(config.MacJac_combo, ["COMPKEY", "GLOBALID"]) as cursor:
        for row in cursor:
            if row[0] != None:
                compkeylist.append(row[0])
            else:
                if row[1] != 0:
                    globallist.append(row[1])

    # removing facilities like inlets and laterals
    status("Subsetting to sewer/ storm pipes")
    pipes = arcpy.MakeFeatureLayer_management(config.collection_lines, "pipes", "LAYER_GROUP in ( 'SEWER PIPES' , 'STORM PIPES' )")
    print str(arcpy.GetCount_management(pipes)) + " pipes"

    # subset collection lines to segments in MacJac backbone
    # NOTE(review): this second layer reuses the name "pipes" and is built
    # from config.collection_lines, not from the sewer/storm subset above —
    # confirm the first subset is meant to be informational only.
    status("Subsetting collection system to MacJac")
    compkey_piece = str(tuple(compkeylist))
    globalid_piece = str(tuple(globallist))
    MacJac_pipes = arcpy.MakeFeatureLayer_management(config.collection_lines, "pipes", "COMPKEY in {0} or GLOBALID in {1}".format(compkey_piece, globalid_piece))
    print str(arcpy.GetCount_management(MacJac_pipes)) + " pipes"

    # save copy of pipes to output
    datestamp = datetime.datetime.today().strftime('%Y%m%d')
    outfile = "fragility_MJA_backbone_" + datestamp
    full_outfile = os.path.join(config.resiliency_gdb, "fragility_MJA_backbone_" + datestamp)
    status("Copying pipes to output - called " + outfile)
    fragility_pipes = arcpy.CopyFeatures_management(MacJac_pipes, full_outfile) # THIS IS A CITY-WIDE COPY

    # add all necessary fields
    util.addFields(fragility_pipes)

    # DATA PATCHES -----------------------------------------------------------------------------
    # fix for materials that are weird - only affects a few pipes
    status("Adjusting a few erroneous pipe values")
    with arcpy.da.UpdateCursor(fragility_pipes, ["COMPKEY","MATERIAL"]) as cursor:
        for row in cursor:
            if row[0] == 132037:
                row[1] = "PVC"
            elif row[0] == 490799:
                row[1] = "CIPP"
            cursor.updateRow(row)

    # patch backbone Null values using patch_dict
    status("Patching missing Materials in backbone segments")
    util.patch_Materials(fragility_pipes, patch_dict)


    # CONDITION AND EXTRACT DATA --------------------------------------------------------------------

    status("Calculating values from MacJac data")
    # run using COMPKEY
    util.calcRate(config.MacJac_combo, "COMPKEY", "MJA_PGV_min", "MJA_PGV_max", fragility_pipes, "COMPKEY", "PGV", config.MJ_rate)
    util.calcRate(config.MacJac_combo, "COMPKEY", "MJA_latspr_min", "MJA_latspr_max", fragility_pipes, "COMPKEY", "PGD_LS", config.MJ_rate)
    util.calcRate(config.MacJac_combo, "COMPKEY", "MJA_liq_min", "MJA_liq_max", fragility_pipes, "COMPKEY", "PGD_Set", config.MJ_rate)
    util.calcRate(config.MacJac_combo, "COMPKEY", "MJA_landslide_min", "MJA_landslide_max", fragility_pipes, "COMPKEY", "PGD_Landslide", config.MJ_rate)
    # run again using GLOBALID
    # NOTE(review): these GLOBALID passes still pass "COMPKEY" as the target
    # key field — confirm this is intentional and not a copy-paste slip.
    util.calcRate(config.MacJac_combo, "GLOBALID", "MJA_PGV_min", "MJA_PGV_max", fragility_pipes, "COMPKEY", "PGV", config.MJ_rate)
    util.calcRate(config.MacJac_combo, "GLOBALID", "MJA_latspr_min", "MJA_latspr_max", fragility_pipes, "COMPKEY", "PGD_LS", config.MJ_rate)
    util.calcRate(config.MacJac_combo, "GLOBALID", "MJA_liq_min", "MJA_liq_max", fragility_pipes, "COMPKEY", "PGD_Set", config.MJ_rate)
    util.calcRate(config.MacJac_combo, "GLOBALID", "MJA_landslide_min", "MJA_landslide_max", fragility_pipes, "COMPKEY", "PGD_Landslide", config.MJ_rate)

    # convert PGD field values from feet to inches
    # (assumes the extracted PGD values are in feet — TODO confirm)
    status("Converting PGD values from feet to inches")
    convertfields = ("PGD_LS", "PGD_Set", "PGD_Landslide")
    for field in convertfields:
        with arcpy.da.UpdateCursor(fragility_pipes, [field]) as cursor:
            for row in cursor:
                if row[0] is not None:
                    row[0] = row[0]*12
                cursor.updateRow(row)

    # calculate aggregate PGD (LS + Set)
    # combined as the root-sum-of-squares of the two components
    status("Calculating PGD_Liq_Tot")
    with arcpy.da.UpdateCursor(fragility_pipes, ["PGD_Liq_Tot", "PGD_LS", "PGD_Set"]) as cursor:
            for row in cursor:
                if row[1] is not None and row[2] is not None:
                    row[0] = pow((pow(row[1],2) + pow(row[2],2)),0.5)
                elif row[1] is None and row[2] is not None:
                    row[0] = row[2]
                elif row[1] is not None and row[2] is None:
                    row[0] = row[1]
                cursor.updateRow(row)


    # calculate K values using materials and dictionaries
    util.calcValues(fragility_pipes)


    status("Updating Decision field")
    util.updateDecisionField(fragility_pipes, "PGD_Liq_Tot", "RR_Don_FIN", "PGD_Set")


    # -------------------------------------------------------------------------------------------------------------------

    status("FRAGILITY EXTRACTION COMPLETE")
    print "Output saved to: " + full_outfile
Example #12
0
def photo():
  """Process the photo library end to end.

  Lowercases all hi-res filenames, generates any missing lores/thumbnail
  JPEGs with ImageMagick, writes the photo-browser index files (excluding
  photos tagged 'delete' or 'private'), and finally syncs everything to S3.
  """
  # Rather cruddy force lowercase
  for root, dirnames, filenames in os.walk(PHOTO_HIRES):
    for filename in filenames:
      path = os.path.join(root, filename)
      if path != path.lower():
        if os.path.exists(path.lower()) and os.stat(path.lower()).st_size > 0:
          # a lowercase twin already exists; only proceed if it is identical
          assert open(path.lower(), 'rb').read() == open(path, 'rb').read(), 'match %s=>%s' % (path, path.lower())
        print 'Rename', path, path.lower()
        os.rename(path, path.lower())

  # collect every hi-res file, remembering which ones are jpgs
  hires_list = []
  jpg = set()
  for root, dirnames, filenames in os.walk(PHOTO_HIRES):
    for filename in filenames:
      hires = os.path.join(root, filename)
      assert hires.startswith(PHOTO_HIRES)
      assert hires.lower() == hires
      hires_list.append(hires)

      if hires.endswith('.jpg'):
        jpg.add(hires)

  for hires in hires_list:
    # derived output paths mirror the hi-res tree layout
    thumb = hires.replace(PHOTO_HIRES, PHOTO_THUMB)
    lores = hires.replace(PHOTO_HIRES, PHOTO_LORES)
    assert thumb != hires
    assert lores != hires

    for output in [thumb, lores]:
      if not os.path.isdir(os.path.dirname(output)):
        os.makedirs(os.path.dirname(output))

    if hires.endswith('.jpg'):
      status('Processing: %s' % hires)
      for path, conversion in [
        (lores, '-quality 75 -resize "2560x1600"'),
        (thumb, '-quality 60 -resize "320x213" -gravity "center" -background "black" -extent "320x213"'),
      ]:
        if os.path.exists(path) and os.stat(path).st_size > 0:
          print path, 'already exists. Skipping.'
        else:
          local('convert %s %s %s' % (conversion, hires, path))
    elif hires.endswith('.arw') and hires.replace('.arw', '.jpg') in jpg:
      # raw file whose jpg sibling is already handled above
      print 'Skipping raw:', hires
    else:
      print 'Skipping:', hires

  status('Write photo browser index files')

  # photos tagged delete/private never appear in any index
  exclude = set()
  exclude_tags = ('delete', 'private')
  for tag in exclude_tags:
    exclude.update(load_tags(tag))

  photo_index(
    include=set(),
    exclude=exclude,
  )

  for filename in os.listdir(TAGS):
    if filename not in exclude_tags and not filename.startswith('.'):
      photo_index(
        browse_base=filename,
        include=load_tags(filename),
        exclude=exclude,
      )
  photo_index(
    browse_base='all',
    exclude=exclude,
  )

  s3_sync()
Example #13
0
def s3_sync():
  """Push thumbnails, browser pages, lores and hires photos up to S3."""
  status('Sync files to S3')
  # (local tree, path suffix, destination bucket); the lores/hires trees
  # only sync their RESTRICT subtree, the others sync everything.
  targets = [
    (PHOTO_THUMB, '', BUCKET_THUMB),
    (PHOTO_BROWSE, '', BUCKET_BROWSE),
    (PHOTO_LORES, RESTRICT, BUCKET_LORES),
    (PHOTO_HIRES, RESTRICT, BUCKET_HIRES),
  ]
  for src, suffix, bucket in targets:
    local('s3cmd --acl-public sync %s/%s s3://%s/%s' % (src, suffix, bucket, suffix))
Example #14
0
def Fragility():
    """Build the city-wide (whole backbone) fragility feature class.

    Subsets the collection system to sewer/storm pipes, copies them to a
    datestamped output, patches known-bad MATERIAL values, extracts PGV
    from the DOGAMI raster and PGD/liquefaction values from overlay
    vectors, converts PGD from feet to inches, combines lateral-spread and
    settlement PGDs, and calculates the final K/decision fields.

    Returns the output fragility_pipes feature class.
    """

    status("STARTING FRAGILITY EXTRACTION")

    # subset collection lines to pipes only
    status("Subsetting collection system to pipes only")
    pipes = arcpy.MakeFeatureLayer_management(config.collection_lines, "pipes", "LAYER_GROUP in ( 'SEWER PIPES' , 'STORM PIPES' )")
    print str(arcpy.GetCount_management(pipes)) + " pipes"

    # save copy of pipes to output
    datestamp = datetime.datetime.today().strftime('%Y%m%d')
    outfile = "fragility_WB_" + datestamp
    full_outfile = os.path.join(config.resiliency_gdb, outfile)
    status("Copying pipes to output - called " + outfile)
    fragility_pipes = arcpy.CopyFeatures_management(pipes, full_outfile) # THIS IS A CITY-WIDE VERSION

    # MATERIAL VALUE PATCH
    # creates a lookup dictionary from the Nulls spreadsheet
    # use to fill the MATERIAL field for the records that match the key val Compkeys
    # use "if compkey = x and origval = y then set = to newval - this serves as a check that you're not overwriting valid values
    patch_dict = util.createMaterialPatch_dict(config.materialPatch_xls)

    # add all necessary fields
    util.addFields(fragility_pipes)

    # DATA PATCHES -----------------------------------------------------------------------------
    # fix for materials that are weird - only affects a few pipes
    status("Adjusting a few erroneous pipe values")
    with arcpy.da.UpdateCursor(fragility_pipes, ["COMPKEY","MATERIAL"]) as cursor:
        for row in cursor:
            if row[0] == 132037:
                row[1] = "PVC"
            elif row[0] ==490799:
                row[1] = "CIPP"
            cursor.updateRow(row)

    # patch backbone Null values using patch_dict
    status("Patching missing Materials in backbone segments")
    util.patch_Materials(fragility_pipes, patch_dict)


    # CONDITION AND EXTRACT DATA --------------------------------------------------------------------

    # get PGV value from raster
    # convert pipes to points
    status("Converting pipes to points")
    pipe_points = arcpy.FeatureToPoint_management(pipes,"in_memory\pipe_points")
    # extract raster values to points
    status("Extracting DOGAMI PGV raster values to points")
    arcpy.CheckOutExtension("Spatial")
    PGV_values = arcpy.sa.ExtractValuesToPoints(pipe_points, config.DOG_PGV, "in_memory\PGV_values", "NONE", "VALUE_ONLY")
    # assign value to fragility_pipes
    status("Assigning PGV values to fragility_pipes")
    CopyFieldFromFeature(PGV_values, "COMPKEY", "RASTERVALU", fragility_pipes, "COMPKEY", "PGV")

    # get other values from vectors
    status("Extracting Liq_Prob values") # this one is not aggregated as it is a text value
    targetFC = fragility_pipes
    targetField = "Liq_Prob"
    ID = "COMPKEY"
    overlapFC = config.PWB_Liq
    overlapField = "LiqExpl"
    # intersect pipes with the liquefaction polygons, then map COMPKEY ->
    # LiqExpl (last intersecting value wins when a pipe crosses several)
    result = arcpy.Intersect_analysis([targetFC,overlapFC],"in_memory\sect_result","NO_FID","","LINE")
    values={}
    with arcpy.da.SearchCursor(result,[ID,overlapField]) as cursor:
        for row in cursor:
            if row[0] != None:
                values[row[0]] = row[1]

    with arcpy.da.UpdateCursor(targetFC,[ID, targetField]) as cursor:
        for row in cursor:
            if row[0] in values:
                if values[row[0]] != None:
                    row[1] = values[row[0]]
            cursor.updateRow(row)

    # these are aggregated (MAX value taken)
    status("Extracting PGD_LS values")
    calcField_fromOverlap(fragility_pipes, "PGD_LS", "COMPKEY", config.PWB_LS, "LATERALSPREAD_80pct")
    status("Extracting PGD_Set values")
    calcField_fromOverlap(fragility_pipes, "PGD_Set", "COMPKEY", config.PWB_GS, "Ground_Settlement_80pct")
    status("Extracting PGD_Landslide values")
    calcField_fromOverlap(fragility_pipes, "PGD_Landslide", "COMPKEY", config.PWB_LD, "DEF_FEET_80pct")

    # convert PGD field values from feet to inches
    # (assumes the overlay deformation values are in feet — TODO confirm)
    status("Converting PGD values from feet to inches")
    convertfields = ("PGD_LS", "PGD_Set", "PGD_Landslide")
    for field in convertfields:
        with arcpy.da.UpdateCursor(fragility_pipes, [field]) as cursor:
            for row in cursor:
                if row[0] is not None:
                    row[0] = row[0]*12
                cursor.updateRow(row)

    status("Re-setting lowest range Landslide values to 0")
    with arcpy.da.UpdateCursor(fragility_pipes, ["PGD_Landslide"]) as cursor:
            for row in cursor:
                if row[0] == config.PGD_Landslide_val:
                    row[0] = 0
                cursor.updateRow(row)

    # calculate aggregate PGD (LS + Set) - nothing can stop my pythagorean style
    status("Calculating PGD_Liq_Tot")
    with arcpy.da.UpdateCursor(fragility_pipes, ["PGD_Liq_Tot", "PGD_LS", "PGD_Set"]) as cursor:
            for row in cursor:
                if row[1] is not None and row[2] is not None:
                    row[0] = pow((pow(row[1],2) + pow(row[2],2)),0.5)
                elif row[1] is None and row[2] is not None:
                    row[0] = row[2]
                elif row[1] is not None and row[2] is None:
                    row[0] = row[1]
                cursor.updateRow(row)


    # calculate K values using materials and dictionaries
    util.calcValues(fragility_pipes)


    status("Updating Decision field")
    util.updateDecisionField(fragility_pipes, "PGD_Liq_Tot", "RR_Don_FIN", "PGD_Set")


    status("FRAGILITY EXTRACTION COMPLETE")
    print "Output saved to: " + full_outfile

    return fragility_pipes
Example #15
0
 def lib_version(lib):
     """Return the version string `ocamlfind query -l` reports for *lib*.

     Raises IndexError when no 'version' line appears in the output.
     """
     status('Testing for %s' % lib)
     info = ocamlfind('query', '-l', lib)
     versions = [line.split()[-1]
                 for line in info.splitlines()
                 if line.startswith('version')]
     return versions[0]
Example #16
0
def create_local_lyrics(playlists, lyrics_dir, lyrics_source_dir):
    """Populate the local lyrics directory for the tracks in *playlists*.

    Copies/arranges lyrics from lyrics_source_dir into lyrics_dir using the
    playlist track listing.
    """
    util.status('Creating local lyrics directory')
    util.struct_lyrics_dir(util.get_tracks_in_playlist(playlists),
                           src_dir=lyrics_source_dir,
                           dst_dir=lyrics_dir)
Example #17
0
def check_llvm():
    """Return True iff a local LLVM 3.1 build with OCaml bindings is usable.

    Probes ./llvm/Release+Asserts for the OCaml llvm.cma library and runs
    its llvm-config to confirm the version; any failure yields False.
    """
    try:
        llvm_path = './llvm/Release+Asserts'
        assert isfile(llvm_path + '/lib/ocaml/llvm.cma')  # is llvm.cma there?
        llvm_config = Command(llvm_path + '/bin/llvm-config')
        ver = llvm_config('--version')  # try calling llvm-config
        print ver
        assert '3.1' in ver
        return True
    except:
        # deliberate best-effort probe: any exception means "not available"
        return False


# Test for ocaml 3.12.*
status('Testing for OCaml 3.12.*')
from pbs import ocaml, ocamlbuild
ver = ocaml('-version')
print ver
# hard requirement: abort the whole script unless a 3.12.x ocaml is found
assert '3.12' in ver
print '...OK!'

# Submodule update/init
# TODO: make --recursive optional
status('Checking out submodules')
git('submodule', 'update', '--init', '--recursive')

# TODO: always run make -C llvm, just to make sure it's up to date. Does configure cache its settings when a reconfigure is forced?
# TODO: make install in subdir, with docs
#        requires graphviz, doxygen; target ocamlbuild to alt dir?; make clean?
# Build llvm
Example #18
0
  def handle_read(self):
    """Read one command from the control socket and dispatch it.

    Expected wire format: '<mode> <target> <command> [args...]' where mode
    is 'router' or 'domain'.  Replies are appended to self.senddata for the
    write side to flush.

    NOTE(review): args[1] is read after only a len == 0 check, so a
    one-token command raises IndexError — confirm clients always send at
    least mode and target.
    """
    data = self.recv(1024)
    if data == '':
      # peer closed the connection
      self.handle_close()
      return
    log.l.LogIt('RTC006', 'D', 'cmd i: %s', (data))
    args = string.split(data)
    if len(args) == 0:
      return;
    self.mode = args[0]
    self.target = args[1]
    args = args[2:]
    log.l.LogIt('RTC007', '1', '%s', (str(args)))
    if len(args) == 0:
      return

    #-------------------- close a connection on the target
    if args[0] == 'close':
      self.senddata.append(util.close(self.mode == 'router', self.target, args[1:]))

    #-------------------- router-only data query
    elif args[0] == 'data':
      if self.mode == 'domain':
        self.senddata.append('command: '+args[0]+': address only to router')
      if self.mode == 'router':
        self.senddata.append(util.data(args[1:]))

    elif args[0] == 'dump':
      if self.mode == 'domain':
        self.senddata.append('command: '+args[0]+': address only to router')
      if self.mode == 'router':
        self.senddata.append(util.dump())

    #-------------------- router-only event query
    elif args[0] == 'event':
      if self.mode == 'domain':
        self.senddata.append('command: '+args[0]+': address only to router')
      if self.mode == 'router':
        self.senddata.append(util.event(args[1:]))

    #-------------------- fall back to the secondary link
    elif args[0] == 'fb' or args[0] == 'fallback' or args[0] == 'secondary':
      if len(args) > 1 and args[1] == 'auto':
        auto = 1
      else:
        auto = 0
      if self.mode == 'domain':
        cfg.domain[self.target]['fallback'] = 1
        self.senddata.append(util.switch_secondary(self.target, auto))
      if self.mode == 'router':
        # router mode applies the switch to every domain this router owns
        for i in cfg.domain.keys():
          if cfg.domain[i]['apr'] == cfg.name:
            cfg.domain[i]['fallback'] = 1
            self.senddata.append(i+' '+util.switch_secondary(i, auto))

    #-------------------- fall forward to the primary link
    elif args[0] == 'ff' or args[0] == 'fallforward' or args[0] == 'primary':
      if len(args) > 1 and args[1] == 'auto':
        auto = 1
      else:
        auto = 0
      if self.mode == 'domain':
        cfg.domain[self.target]['fallback'] = 0
        self.senddata.append(util.switch_primary(self.target, auto))
      if self.mode == 'router':
        for i in cfg.domain.keys():
          if cfg.domain[i]['apr'] == cfg.name:
            cfg.domain[i]['fallback'] = 0
            self.senddata.append(i+' '+util.switch_primary(i, auto))

    #-------------------- router-only PVC query
    elif args[0] == 'pvc':
      if self.mode == 'domain':
        self.senddata.append('command: '+args[0]+': address only to router')
      if self.mode == 'router':
        self.senddata.append(util.pvc(args[1:]))

    #-------------------- refresh cached status
    elif args[0] == 'refresh':
      evt_hdlr.refresh()
      self.senddata.append('status refreshed')

    #-------------------- set a configuration value
    elif args[0] == 'set':
      self.senddata = util.set(self.mode == 'router', self.target, args[1:])

    #-------------------- router-only SNA query
    elif args[0] == 'sna':
      if self.mode == 'domain':
        self.senddata.append('command: '+args[0]+': address only to router')
      if self.mode == 'router':
        self.senddata.append(util.sna(args[1:]))

    #-------------------- status of one domain, or of all owned domains
    elif args[0] == 'status':
      if self.mode == 'domain':
        self.senddata.append(util.status(self.target))
      if self.mode == 'router':
        for i in cfg.domain.keys():
          if cfg.domain[i]['apr'] == cfg.name:
            self.senddata.append(i+' '+util.status(i))
      if len(self.senddata) == 0:
        self.senddata.append('not active')

    #-------------------- orderly shutdown (router only)
    elif args[0] == 'stop':
      if self.mode == 'domain':
        self.senddata.append('command: '+args[0]+': address only to router')
      if self.mode == 'router':
        log.l.LogIt('RTC008', 'I', 'command termination', ())
        cfg.stopping = 1
        msg = '%s terminating' % self.target
        self.senddata.append(msg)
        for i in cfg.domain.keys():
          util.closeall(i)

    #-------------------- set or query the trace level (router only)
    elif args[0] == 'trace':
      if self.mode == 'domain':
        self.senddata.append('command: '+args[0]+': address only to router')
      if self.mode == 'router':
        if len(args) > 1:
          log.l.SetTraceLevel(int(args[1]))
          self.senddata.append('trace level %s' % (args[1]))
          log.l.LogIt('RTC009', 'I', 'command trace %s', (args[1]))
        else:
          level = log.l.GetTraceLevel()
          self.senddata.append('trace level %d' % (level))
          log.l.LogIt('RTC010', 'I', 'command get trace: %d', (level))

    #-------------------- report software version(s)
    elif args[0] == 'version':
      msg = ver.getVersion()
      if cfg.snasrv_version != '':
        msg = msg + ' snasrv: ' + cfg.snasrv_version
      self.senddata.append(msg)

    #-------------------- unknown command
    else:
      self.senddata.append('command: '+args[0]+': not implemented')
def FragilityFinalValues(compiled_fc):
    """Add and populate 'Final_<field>' columns on compiled_fc.

    For every calc field except the last two in config.calc_fields, a
    "Final_" field is created (TEXT for Liq_Prob, DOUBLE otherwise) and
    filled with the highest-priority non-null source value:

        MJ_wLandslide_ > MJ_ > wLandslide_ > base   (patched fields)
        MJ_ > base                                  (all other fields)

    Finally the 'Decision' field is refreshed from the Final_ results.

    Args:
        compiled_fc: path to the compiled feature class to update.
    """
    status("STARTING PROCESS TO ADD FINAL FRAGILITY RESULT")

    # Map each original field to its "Final_" counterpart.
    fieldlist_FinalVal = {}
    for field in config.calc_fields[:-2]:
        fieldlist_FinalVal[field] = "Final_" + field

    status("Adding 'Final_' fields")
    field_names = getField_Names(compiled_fc)
    for name in fieldlist_FinalVal.keys():
        final_name = fieldlist_FinalVal[name]
        if final_name not in field_names:
            status(" - adding " + final_name)
            # Liq_Prob holds text labels; every other field is numeric.
            field_type = "TEXT" if name == "Liq_Prob" else "DOUBLE"
            arcpy.AddField_management(compiled_fc, final_name, field_type)
        else:
            status(" - " + final_name + " already exists - no field added")

    # Build, per base field, the ordered cursor field list with the
    # Final_ (output) field last.  Only the two patched fields carry the
    # full wLandslide_/MJ_wLandslide_ variants.
    prefixes = ("wLandslide_", "MJ_wLandslide_", "MJ_", "Final_")
    fulllist = []
    for field in config.calc_fields[:-2]:
        if field in ("RR_Don_FIN", "RR_Don_breaknum"):
            group = [field] + [item + field for item in prefixes]
        else:
            group = [field] + [item + field for item in prefixes[-2:]]
        fulllist.append(group)

    # priority order: MJ_wLandslide_, MJ_, wLandslide_, WB
    # OR : MJ_, WB (if no depth/landslide patch was done for that field
    # ie not RR_Don_FIN or RR_Don_breaknum)
    status("Populating 'Final_' fields using field prioritization")
    for target_fields in fulllist:
        with arcpy.da.UpdateCursor(compiled_fc, target_fields) as cursor:
            # BUG FIX: the original wrapped these loops in an extra
            # "for row in cursor:" that consumed the first record before
            # the inner loop began, so the first row of every group was
            # never populated.  The field-count check is loop-invariant,
            # so decide the branch once on target_fields instead.
            if len(target_fields) == 5:
                status(" - Populating values for " + target_fields[4])
                for row in cursor:
                    # row = [base, wLandslide_, MJ_wLandslide_, MJ_, Final_]
                    if row[2] is not None:
                        row[4] = row[2]
                    elif row[3] is not None:
                        row[4] = row[3]
                    elif row[1] is not None:
                        row[4] = row[1]
                    else:
                        row[4] = row[0]
                    cursor.updateRow(row)
            elif len(target_fields) == 3:
                status(" - Populating values for " + target_fields[2])
                for row in cursor:
                    # row = [base, MJ_, Final_]
                    row[2] = row[1] if row[1] is not None else row[0]
                    cursor.updateRow(row)

    status("Populating 'Decision' field")
    updateDecisionField(compiled_fc, "Final_PGD_Liq_Tot", "Final_RR_Don_FIN",
                        "Final_PGD_Set")

    status("PROCESS COMPLETE")