Example #1
def setup_emme_project_folders():
    #tod_dict = json.load(open(os.path.join('inputs', 'skim_params', 'time_of_day.json')))

    tod_dict = text_to_dictionary('time_of_day')
    tod_list = list(set(tod_dict.values()))

    if os.path.exists(os.path.join('projects')):
        print 'Delete Project Folder'
        shutil.rmtree('projects')

    # Create master project, associate with all tod emmebanks
    project = app.create_project('projects', master_project)
    desktop = app.start_dedicated(False, "cth", project)
    data_explorer = desktop.data_explorer()   
    for tod in tod_list:
        database = data_explorer.add_database('Banks/' + tod + '/emmebank')
    #open the last database added so that there is an active one
    database.open()
    desktop.project.save()
    desktop.close()

    # Create time of day projects, associate with emmebank
    tod_list.append('TruckModel') 
    tod_list.append('Supplementals')
    emme_toolbox_path = os.path.join(os.environ['EMMEPATH'], 'toolboxes')
    for tod in tod_list:
        project = app.create_project('projects', tod)
        desktop = app.start_dedicated(False, "cth", project)
        data_explorer = desktop.data_explorer()
        database = data_explorer.add_database('Banks/' + tod + '/emmebank')
        database.open()
        desktop.project.save()
        desktop.close()
        shcopy(emme_toolbox_path + '/standard.mtbx', os.path.join('projects', tod))
Example #2
def backup(src, tag=''):
    if not os.path.exists(src):
        raise SystemExit("File not found: " + src)
    bkp = src + tag + '.EDIT.' + datetime.datetime.now().isoformat()
    global backups
    shcopy(src, bkp)
    backups[src] = bkp
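backup() records each copy in the module-level backups dict; a brief usage sketch under that assumption (the restore step is my illustration, not part of the snippet):

from shutil import copy as shcopy

backups = {}  # module-level registry assumed by backup() above

backup('config.ini', tag='.pre-change')      # e.g. config.ini.pre-change.EDIT.2024-05-01T12:34:56
# ... edit config.ini ...
shcopy(backups['config.ini'], 'config.ini')  # roll back from the timestamped copy

Note that datetime.isoformat() puts colons into the backup name, which Windows file systems do not accept.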
Example #3
def save_result(event=None):
    """Save the current result."""
    track = frame.get_current_track()
    if not track:
        return wx.Bell()
    if exists(track):
        dlg = wx.FileDialog(frame,
                            defaultDir=os.path.expanduser('~'),
                            wildcard='*%s' % application.track_extension,
                            defaultFile=format_title(track).replace(
                                '\\', ',').replace('/', ','),
                            style=wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT)
        if dlg.ShowModal() == wx.ID_OK:
            new_path = dlg.GetPath()
        else:
            new_path = None
        dlg.Destroy()
        if new_path:
            try:
                if not new_path.endswith(application.track_extension):
                    new_path += application.track_extension
                shcopy(library.get_path(track), new_path)
            except Exception as e:
                wx.MessageBox(str(e), 'Error')
    else:
        return wx.MessageBox(
            'That track is not downloaded. Please check your library settings and try playing the track again.',
            'File Not Downloaded')
Example #4
def backup(src, tag=''):
    if not os.path.exists(src):
        raise SystemExit("File not found: " + src)
    bkp = src + tag + '.EDIT.' + datetime.datetime.now().isoformat()
    global backups
    shcopy(src, bkp)
    backups[src] = bkp
Example #5
 def _setup_start_files(self) -> None:
     for file in _PYTHON_START_FILES:
         shcopy(os.path.join(self.__source, file), self.__target)
     shcopy(
         os.path.join(self.__source, _PYTHON_TEMPLATE_CONF_FILE),
         os.path.join(self.__target, _PYTHON_EDITOR_DIR,
                      _PYTHON_EDITOR_CONF_FILE))
Example #6
def import_integrated_inputs():
    """
    Convert Urbansim input file into separate files:
    - parcels_urbansim.txt
    - hh_and_persons.h5
    """

    print "Importing land use files from urbansim"

    # Copy soundcast inputs and separate input files
    h5_inputs_dir = os.path.join(urbansim_outputs_dir, model_year,
                                 'soundcast_inputs.h5')
    shcopy(h5_inputs_dir, r'inputs/scenario/landuse/hh_and_persons.h5')

    h5_inputs = h5py.File('inputs/scenario/landuse/hh_and_persons.h5')

    # Export parcels file as a txt file input
    parcels = pd.DataFrame()
    for col in h5_inputs['parcels'].keys():
        parcels[col] = h5_inputs['parcels'][col][:]

    parcels.to_csv(r'inputs/scenario/landuse/parcels_urbansim.txt',
                   sep=' ',
                   index=False)

    # Delete parcels group
    del h5_inputs['parcels']
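A quick read-back of the exported file (a sketch I added, not part of the script) confirms the space-delimited layout:

import pandas as pd

# Re-read the space-delimited parcel file written above and spot-check its shape.
parcels_check = pd.read_csv('inputs/scenario/landuse/parcels_urbansim.txt', sep=' ')
print(parcels_check.shape)
print(parcels_check.columns[:5].tolist())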
Example #7
def rename_network_outs(iter):
    for summary_name in network_summary_files:
        csv_output = os.path.join(os.getcwd(), 'outputs',
                                  summary_name + '.csv')
        if os.path.isfile(csv_output):
            shcopy(
                csv_output,
                os.path.join(os.getcwd(), 'outputs',
                             summary_name + str(iter) + '.csv'))
            os.remove(csv_output)
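The copy-then-remove pair above is effectively a rename; a shorter equivalent (a sketch, assuming the same module-level network_summary_files list) could use shutil.move:

import os
import shutil

def rename_network_outs_moved(iter):
    # Same effect as the copy-and-remove above, one call per summary file.
    for summary_name in network_summary_files:
        csv_output = os.path.join(os.getcwd(), 'outputs', summary_name + '.csv')
        if os.path.isfile(csv_output):
            shutil.move(csv_output,
                        os.path.join(os.getcwd(), 'outputs',
                                     summary_name + str(iter) + '.csv'))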
Example #8
def copy(src, dst):
    """
    copy src to dst
    takes string or Path object as input
    returns Path(dst) on success
    raises FileNotFoundError if src does not exist
    """
    srcPath = pathify(src).resolve()
    dstPath = pathify(dst)
    shcopy(str(srcPath), str(dstPath))
    return dstPath
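pathify is not included in this snippet; a plausible stand-in plus a usage line (my assumptions, not the project's helper):

from pathlib import Path
from shutil import copy as shcopy

def pathify(p):
    # Hypothetical stand-in: accept a str or Path and return a Path.
    return p if isinstance(p, Path) else Path(p)

# With the copy() wrapper above: copies settings.ini into an existing backup/
# directory and returns Path('backup/settings.ini'); a missing source ends up
# raising FileNotFoundError from shutil.copy.
dst = copy('settings.ini', 'backup/settings.ini')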
Example #9
    def __copy_file(self):

        for base in self.__base__:
            for os_path in self.__apps__:
                __path = self.__home__ + base + os_path

                if pathexists(__path):
                    for i in range(0, 3):
                        file_path = __path + self.__paths__[i] + self.__files__[i]
                        shcopy(self.__files__[i], file_path)
                        print(file_path)
Example #10
    def __copy_file(self):

        for base in self.__base__:
            for os_path in self.__apps__:
                __path = self.__home__ + base + os_path

                if pathexists(__path):
                    for i in range(0, 3):
                        file_path = __path + self.__paths__[
                            i] + self.__files__[i]
                        shcopy(self.__files__[i], file_path)
                        print(file_path)
Example #11
def copy_if_newer(src, dst):
    """
    copy src to dst if src is newer 
    takes string or Path object as input
    returns Path(dst) on success
    returns Path(src) if not newer
    raises FileNotFoundError if src does not exist
    """
    srcPath = pathify(src).resolve()
    dstPath = pathify(dst)
    if dstPath.exists() and not (dstPath.stat().st_mtime <
                                 srcPath.stat().st_mtime):
        return srcPath
    else:
        shcopy(str(srcPath), str(dstPath))
        return dstPath
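A usage sketch (assuming copy_if_newer and pathify from above): the second call skips the copy because the destination is no longer older than the source.

# First call copies and returns Path('deploy/config.yaml');
# second call is a no-op and returns the resolved source path.
print(copy_if_newer('config.yaml', 'deploy/config.yaml'))
print(copy_if_newer('config.yaml', 'deploy/config.yaml'))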
Example #12
def copy_accessibility_files():
    if run_integrated:
        import_integrated_inputs()
    else:
        if not os.path.exists('inputs/scenario/landuse'):
            os.makedirs('inputs/scenario/landuse')

        print 'Copying UrbanSim parcel file'
        try:
            shcopy(scenario_inputs + '/landuse/parcels_urbansim.txt',
                   'inputs/scenario/landuse')
        except:
            print 'error copying urbansim parcel file at ' + scenario_inputs + '/landuse/parcels_urbansim.txt'
            sys.exit(1)

        print 'Copying Transit stop file'
        try:
            shcopy(scenario_inputs + '/networks/transit/transit_stops.csv',
                   'inputs/scenario/networks/transit')
        except:
            print 'error copying transit stops file at ' + scenario_inputs + '/networks/transit/transit_stops.csv'
            sys.exit(1)

        print 'Copying Military parcel file'
        try:
            shcopy(scenario_inputs + '/landuse/parcels_military.csv',
                   'inputs/scenario/landuse')
        except:
            print 'error copying military parcel file at ' + scenario_inputs + '/landuse/parcels_military.csv'
            sys.exit(1)

        print 'Copying JBLM file'
        try:
            shcopy(scenario_inputs + '/landuse/distribute_jblm_jobs.csv',
                   'inputs/scenario/landuse')
        except:
            print 'error copying JBLM file at ' + scenario_inputs + '/landuse/distribute_jblm_jobs.csv'
            sys.exit(1)

        print 'Copying Hourly and Daily Parking Files'
        if base_year != model_year:
            try:
                shcopy(scenario_inputs + '/landuse/parking_costs.csv',
                       'inputs/scenario/landuse')
            except:
                print 'error copying parking file at' + scenario_inputs + '/landuse/parking_costs.csv'
                sys.exit(1)
Example #13
def copy_static_files(link=True, absolute=True):
    # again, use the current working directory here
    LOCAL_STATIC_DIR = join(getcwd(), "static")
    if not isdir(LOCAL_STATIC_DIR):
        mkdir(LOCAL_STATIC_DIR)
    for staticfile in [
            "d3.min.js", "jquery.min.js", "hedotools.min.js",
            "hedotools.shift.css"
    ]:
        local_file = join(LOCAL_STATIC_DIR, staticfile)
        if not isfile(local_file):
            dist_file = join(dirname(__file__), staticfile)
            if link:
                subprocess.call("ln -s {0} {1}".format(dist_file, local_file),
                                shell=True)
            else:
                shcopy(dist_file, local_file)
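A shell-free variant of the link branch (my sketch under an assumed helper name, not the library's code) calls os.symlink instead of spawning "ln -s"; note that creating symlinks on Windows may require elevated privileges.

import os
from shutil import copy as shcopy

def place_static_file(dist_file, local_file, link=True):
    # Same choice as above: link when requested, otherwise copy.
    if link:
        os.symlink(dist_file, local_file)  # raises FileExistsError if local_file exists
    else:
        shcopy(dist_file, local_file)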
Example #14
def copy_accessibility_files():
    if run_integrated:
        import_integrated_inputs()
    else:
        if not os.path.exists('inputs/scenario/landuse'):
            os.makedirs('inputs/scenario/landuse')
        
        print 'Copying UrbanSim parcel file'
        try:
            shcopy(scenario_inputs+'/landuse/parcels_urbansim.txt','inputs/scenario/landuse')
        except:
            print 'error copying urbansim parcel file at ' + scenario_inputs + '/landuse/parcels_urbansim.txt'
            sys.exit(1)
          
        
        print 'Copying Transit stop file'
        try:      
            shcopy(scenario_inputs+'/networks/transit/transit_stops.csv','inputs/scenario/networks/transit')
        except:
            print 'error copying transit stops file at ' + scenario_inputs + '/networks/transit/transit_stops.csv'
            sys.exit(1)

        
        print 'Copying Military parcel file'
        try:
            shcopy(scenario_inputs+'/landuse/parcels_military.csv','inputs/scenario/landuse')
        except:
            print 'error copying military parcel file at ' + scenario_inputs+'/landuse/parcels_military.csv'
            sys.exit(1)

        
        print 'Copying JBLM file'
        try:
            shcopy(scenario_inputs+'/landuse/distribute_jblm_jobs.csv','inputs/scenario/landuse')
        except:
            print 'error copying JBLM file at ' + scenario_inputs+'/landuse/distribute_jblm_jobs.csv'
            sys.exit(1)

        
        print 'Copying Hourly and Daily Parking Files'
        if base_year != model_year: 
            try:
                shcopy(scenario_inputs+'/landuse/parking_costs.csv','inputs/scenario/landuse')
            except:
                print 'error copying parking file at' + scenario_inputs+'/landuse/parking_costs.csv'
                sys.exit(1)
Example #15
def save_result(event = None):
 """Save the current result."""
 track = frame.get_current_track()
 if not track:
  return wx.Bell()
 if exists(track):
  dlg = wx.FileDialog(frame, defaultDir = os.path.expanduser('~'), wildcard = '*%s' % application.track_extension, defaultFile = format_title(track).replace('\\', ',').replace('/', ','), style = wx.FD_SAVE|wx.FD_OVERWRITE_PROMPT)
  if dlg.ShowModal() == wx.ID_OK:
   new_path = dlg.GetPath()
  else:
   new_path = None
  dlg.Destroy()
  if new_path:
   try:
    if not new_path.endswith(application.track_extension):
     new_path += application.track_extension
    shcopy(library.get_path(track), new_path)
   except Exception as e:
    wx.MessageBox(str(e), 'Error')
 else:
  return wx.MessageBox('That track is not downloaded. Please check your library settings and try playing the track again.', 'File Not Downloaded')
Example #16
def copy_to_genre_directories(cf):
    """
    Copy files from directory where they're not divided by genre
    to their respective genre directories, based on config.
    @param cf: a module with config values, at L{config.py}
    """
    for gr, fn2nbr in cf.filenames.items():
        assert gr in cf.genres or gr in cf.all_genre_keys
        for fn, _ in fn2nbr.items():
            infn = os.path.join(cf.bdir, fn)
            # is genre
            if gr in cf.genres:
                ffn = os.path.join(os.path.join(cf.bdir_by_genre, gr), fn)
            # is subgenre
            elif gr in cf.all_genre_keys:
                pargr = [ke for ke in cf.genres if gr in cf.genres[ke]]
                assert pargr
                assert len(pargr) == 1
                pargrdir = os.path.join(cf.bdir_by_genre, pargr[0])
                subgendir = os.path.join(pargrdir, gr)
                ffn = os.path.join(subgendir, fn)
            shcopy(infn, ffn)
Example #17
def setup_emme_project_folders():
    emme_toolbox_path = os.path.join(os.environ['EMMEPATH'], 'toolboxes')
    #tod_dict = json.load(open(os.path.join('inputs', 'skim_params', 'time_of_day.json')))

    tod_dict = text_to_dictionary('time_of_day')
    tod_list = list(set(tod_dict.values()))

    if os.path.exists(os.path.join('projects')):
        print 'Delete Project Folder'
        shutil.rmtree('projects')

    # Create master project, associate with all tod emmebanks
    project = app.create_project('projects', master_project)
    desktop = app.start_dedicated(False, "cth", project)
    data_explorer = desktop.data_explorer()
    for tod in tod_list:
        database = data_explorer.add_database('Banks/' + tod + '/emmebank')
    #open the last database added so that there is an active one
    database.open()
    desktop.project.save()
    desktop.close()
    shcopy(emme_toolbox_path + '/standard.mtbx',
           os.path.join('projects', master_project))

    # Create time of day projects, associate with emmebank
    tod_list.append('TruckModel')
    tod_list.append('Supplementals')

    for tod in tod_list:
        project = app.create_project('projects', tod)
        desktop = app.start_dedicated(False, "cth", project)
        data_explorer = desktop.data_explorer()
        database = data_explorer.add_database('Banks/' + tod + '/emmebank')
        database.open()
        desktop.project.save()
        desktop.close()
        shcopy(emme_toolbox_path + '/standard.mtbx',
               os.path.join('projects', tod))
Example #18
def run_truck_supplemental(iteration):
    ### RUN Truck Model ################################################################
    if run_truck_model:
        returncode = subprocess.call([sys.executable,'scripts/trucks/truck_model.py'])
        if returncode != 0:
            sys.exit(1)

    ### RUN Supplemental Trips
    ##########################################################
    ### Adds external, special generator, and group quarters trips to DaySim
    if run_supplemental_trips:
        # Only run generation script once - does not change with feedback
        if iteration == 0:
            returncode = subprocess.call([sys.executable,'scripts/supplemental/generation.py'])
            if returncode != 0:
                sys.exit(1)

        #run distribution
        returncode = subprocess.call([sys.executable,'scripts/supplemental/distribution.py'])
        if returncode != 0:
            sys.exit(1)

        #copy supplemental output
        shcopy('outputs/supplemental/supplemental_summary.csv', 'outputs/supplemental_summary_' + str(iteration) + '.csv')
Example #19
def import_integrated_inputs():
    """
    Convert Urbansim input file into separate files:
    - parcels_urbansim.txt
    - hh_and_persons.h5
    """

    print "Importing land use files from urbansim"

    # Copy soundcast inputs and separate input files
    h5_inputs_dir = os.path.join(urbansim_outputs_dir,model_year,'soundcast_inputs.h5')
    shcopy(h5_inputs_dir,r'inputs/scenario/landuse/hh_and_persons.h5')

    h5_inputs = h5py.File('inputs/scenario/landuse/hh_and_persons.h5')

    # Export parcels file as a txt file input
    parcels = pd.DataFrame()
    for col in h5_inputs['parcels'].keys():
        parcels[col] = h5_inputs['parcels'][col][:]
        
    parcels.to_csv(r'inputs/scenario/landuse/parcels_urbansim.txt', sep=' ', index=False)

    # Delete parcels group
    del h5_inputs['parcels']
Example #20
 def __copy_file(self, path):
     color_path = path + self.__path_color
     codestyle_path = path + self.__path_codestyle
     keymap_path = path + self.__path_keymap
     if pathexists(color_path):
         shcopy(self.__file_color, color_path + self.__file_color)
     if pathexists(codestyle_path):
         shcopy(self.__file_codestyle, codestyle_path + self.__file_codestyle)
     if pathexists(keymap_path):
         shcopy(self.__file_keymap, keymap_path + self.__file_keymap)
Example #21
""" This scripts generates figure 5 of the paper """
import subprocess
from os import chdir
from shutil import copy as shcopy

chdir("../MP_SPDZ_online")

cmd = "python3 ../run_benchmark.py ../Fig_5/a ../Fig_5/b ../Fig_5/c ../Fig_5/d -o --input-dir ../data/real_doa90/"

print("Running benchmarks, this will take a few hours...")

try:
    subprocess.run(cmd.split(), capture_output=True)
except:
    print("Something went wrong.")

print("Plotting results...")

cmd = "python3 ../plot_benchmark.py ../Fig_5/a ../Fig_5/b ../Fig_5/c ../Fig_5/d"

subprocess.run(cmd.split())

chdir("../Fig_5")

for bench in ["a", "b", "c", "d"]:
    shcopy(f"{bench}/bench_outputs/plot_{bench}.pdf", f"{bench}.pdf")
    print(f"Figure {bench}.pdf saved successfully.")
Example #22
 def _setup_start_files(self) -> None:
     for file in _VANILLAJS_EDITOR_FILES:
         shcopy(os.path.join(self.__source, file), self.__target)
     for file in _VANILLAJS_START_FILES:
         shcopy(os.path.join(self.__source, file), self.__target)
Example #23
def copy_large_inputs():
    print 'Copying large inputs...' 
    shcopy(base_inputs+'/etc/daysim_outputs_seed_trips.h5','Inputs')
    dir_util.copy_tree(base_inputs+'/networks','Inputs/networks')
    dir_util.copy_tree(base_inputs+'/trucks','Inputs/trucks')
    dir_util.copy_tree(base_inputs+'/tolls','Inputs/tolls')
    dir_util.copy_tree(base_inputs+'/Fares','Inputs/Fares')
    dir_util.copy_tree(base_inputs+'/bikes','Inputs/bikes')
    dir_util.copy_tree(base_inputs+'/supplemental/distribution','inputs/supplemental/distribution')
    dir_util.copy_tree(base_inputs+'/supplemental/generation','inputs/supplemental/generation')
    dir_util.copy_tree(base_inputs+'/supplemental/trips','outputs/supplemental')
    dir_util.copy_tree(base_inputs+'/corridors','Inputs/corridors')
    shcopy(base_inputs+'/landuse/hh_and_persons.h5','Inputs')
    shcopy(base_inputs+'/etc/survey.h5','scripts/summarize')
    shcopy(base_inputs+'/4k/auto.h5','Inputs/4k')
    shcopy(base_inputs+'/4k/transit.h5','Inputs/4k')
    # node to node short distance files:
    shcopy(base_inputs+'/short_distance_files/node_index_2014.txt', 'Inputs')
    shcopy(base_inputs+'/short_distance_files/node_to_node_distance_2014.h5', 'Inputs')
    shcopy(base_inputs+'/short_distance_files/parcel_nodes_2014.txt', 'Inputs')
Example #24
def copy_accessibility_files():
    if not os.path.exists('inputs/accessibility'):
        os.makedirs('inputs/accessibility')

    print 'Copying UrbanSim parcel file'
    try:
        shcopy(scenario_inputs + '/landuse/parcels_urbansim.txt',
               'inputs/accessibility')
    except:
        print 'error copying urbansim parcel file at ' + scenario_inputs + '/landuse/parcels_urbansim.txt'
        sys.exit(1)

    print 'Copying Transit stop file'
    try:
        shcopy(
            scenario_inputs + '/landuse/transit_stops_' + scenario_name +
            '.csv', 'inputs/accessibility')
    except:
        print 'error copying transit stops file at ' + scenario_inputs + '/landuse/transit_stops_' + scenario_name + '.csv'
        sys.exit(1)

    print 'Copying Military parcel file'
    try:
        shcopy(scenario_inputs + '/landuse/parcels_military.csv',
               'inputs/accessibility')
    except:
        print 'error copying military parcel file at ' + scenario_inputs + '/landuse/parcels_military.csv'
        sys.exit(1)

    print 'Copying JBLM file'
    try:
        shcopy(scenario_inputs + '/landuse/distribute_jblm_jobs.csv',
               'Inputs/accessibility')
    except:
        print 'error copying JBLM file at ' + scenario_inputs + '/landuse/distribute_jblm_jobs.csv'
        sys.exit(1)

    print 'Copying Hourly and Daily Parking Files'
    if base_year != model_year:
        try:
            shcopy(scenario_inputs + '/landuse/hourly_parking_costs.csv',
                   'Inputs/accessibility')
            shcopy(scenario_inputs + '/landuse/daily_parking_costs.csv',
                   'Inputs/accessibility')
        except:
            print 'error copying parking file at' + scenario_inputs + '/landuse/' + ' either hourly or daily parking costs'
            sys.exit(1)
Example #25
    def __init__(self, wrdll, datasource, vhffreq, curtabnum, starttime,
                 istriggered, firstpointtime, fftwindow, minfftratio,
                 minsiglev, triggerfftratio, triggersiglev, tcoeff, zcoeff,
                 flims, slash, tempdir, *args, **kwargs):
        super(ThreadProcessor, self).__init__()

        #prevents Run() method from starting before init is finished (value must be changed to 100 at end of __init__)
        self.startthread = 0

        # UI inputs
        self.curtabnum = curtabnum
        self.starttime = starttime
        self.istriggered = istriggered
        self.firstpointtime = firstpointtime

        self.keepgoing = True  # signal connections
        self.waittoterminate = False  #whether to pause on termination of run loop for kill process to complete
        self.signals = ThreadProcessorSignals()

        #FFT thresholds
        self.fftwindow = fftwindow
        self.minfftratio = minfftratio
        self.minsiglev = minsiglev
        self.triggerfftratio = triggerfftratio
        self.triggersiglev = triggersiglev

        #conversion coefficients + parameters
        self.tcoeff = tcoeff
        self.zcoeff = zcoeff
        self.flims = flims

        #output file names
        self.txtfilename = tempdir + slash + "sigdata_" + str(
            self.curtabnum) + '.txt'
        self.txtfile = open(self.txtfilename, 'w')
        self.wavfilename = tempdir + slash + "tempwav_" + str(
            self.curtabnum) + '.WAV'

        # To prevent ARES from consuming all of the computer's resources, this limits
        # the size of WAV files used by the signal processor to a number of PCM
        # datapoints corresponding to 1 hour of audio @ fs=64 kHz, which would
        # produce a WAV file of ~0.5 GB for 16-bit PCM data.
        self.maxsavedframes = 2.5E8
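        # Sanity check on the numbers above, assuming 16-bit mono PCM at 64 kHz:
        # 64,000 frames/s * 3600 s = 2.304e8 frames, so 2.5e8 frames is a bit over
        # one hour, and at 2 bytes per frame that is about 5e8 bytes, i.e. ~0.5 GB.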
        self.isrecordingaudio = True  #initialized to True for all cases (RF, test, and audio) but only matters in the callback function assigned for RF receivers

        # identifying whether tab is audio, test, or other format
        self.isfromaudio = False
        self.isfromtest = False

        if datasource[:5] == 'Audio':
            self.chselect = int(datasource[5:10])
            self.audiofile = datasource[10:]
            self.isfromaudio = True

            #checking file length- wont process files with more frames than max size
            try:  #exception if unable to read audio file if it doesn't exist or isn't WAV formatted
                file_info = wave.open(self.audiofile)
            except:
                self.startthread = 11
                return

            if file_info.getnframes() > self.maxsavedframes:
                self.startthread = 9
                return

            self.f_s, snd = wavfile.read(self.audiofile)  #reading file

            #if multiple channels, sum them together
            sndshape = np.shape(snd)  #array size (tuple)
            ndims = len(sndshape)  #number of dimensions
            if ndims == 1:  #if one channel, use that
                self.audiostream = snd
            elif ndims == 2:  #if two channels, pick selected channel, otherwise sum
                if self.chselect >= 1:
                    self.audiostream = snd[:, self.chselect - 1]
                else:
                    self.audiostream = np.sum(snd, axis=1)

            else:  #if more than 2D- not a valid file
                self.audiostream = [0] * 10000
                self.startthread = 11

        elif datasource == 'Test':  #test run- use included audio file
            self.audiofile = 'testdata/MZ000006.WAV'
            self.isfromtest = True

            try:  #exception if unable to read audio file if it doesn't exist or isn't WAV formatted
                self.f_s, snd = wavfile.read(self.audiofile)
            except:
                self.startthread = 11
                return

            self.audiostream = snd[:, 0]

        #if thread is to be connected to a WiNRADIO
        if not self.isfromaudio and not self.isfromtest:

            #initializing variables to check if WiNRADIO remains connected
            self.disconnectcount = 0
            self.numcontacts = 0
            self.lastcontacts = 0
            self.nframes = 0

            # initialize audio stream data variables
            self.f_s = 64000  # default value
            self.audiostream = [
                0
            ] * 2 * self.f_s  #initializes the buffer with 2 seconds of zeros

            # saves WiNRADIO DLL/API library
            self.wrdll = wrdll

            # initialize winradio
            self.serial = datasource  # translate winradio identifier
            self.serialnum_2WR = c_char_p(self.serial.encode('utf-8'))

            #setup WAV file to write (if audio or test, source file is copied instead)
            self.wavfile = wave.open(self.wavfilename, 'wb')
            wave.Wave_write.setnchannels(self.wavfile, 1)
            wave.Wave_write.setsampwidth(self.wavfile, 2)
            wave.Wave_write.setframerate(self.wavfile, self.f_s)
            wave.Wave_write.writeframes(self.wavfile,
                                        bytearray(self.audiostream))

            #opening current WiNRADIO/establishing contact
            self.hradio = self.wrdll.Open(self.serialnum_2WR)
            if self.hradio == 0:
                self.startthread = 1
                return

            try:
                # power on- kill if failed
                if wrdll.SetPower(self.hradio, True) == 0:
                    self.startthread = 2
                    return

                # initialize demodulator- kill if failed
                if wrdll.InitializeDemodulator(self.hradio) == 0:
                    self.startthread = 3
                    return

                # change frequency- kill if failed
                self.vhffreq_2WR = c_ulong(int(vhffreq * 1E6))
                if self.wrdll.SetFrequency(self.hradio, self.vhffreq_2WR) == 0:
                    self.startthread = 4
                    return

                # set volume- warn if failed
                if self.wrdll.SetVolume(self.hradio, 31) == 0:
                    self.startthread = 5
                    return

            except Exception:  #if any WiNRADIO comms/initialization attempts failed, terminate thread
                trace_error()
                self.startthread = 6
                return
        else:
            shcopy(self.audiofile, self.wavfilename)  # copying audio file if datasource = Test or Audio

        self.startthread = 100
Example #26
def final_validate(model, dataloader, criterion, device, dataset, outd,
                   log_file=None, name_set=""):
    """
    Perform a validation over the validation set. Assumes a batch size of 1.
    (images do not have the same size, so we can't stack them in one tensor).
    Validation samples may be large to fit all in the GPU at once.

    :param outd: str, output directory of this dataset.
    :param name_set: str, name to indicate which set is being processed. e.g.:
    trainset, validset, testset.
    """
    visualisor = VisualisePP(floating=4, height_tag=60)
    outd_data = join(outd, "prediction")
    if not os.path.exists(outd_data):
        os.makedirs(outd_data)

    # Deal with overloaded quota of files on servers: use the node disc.
    FOLDER = ""
    if "CC_CLUSTER" in os.environ.keys():
        FOLDER = join(os.environ["SLURM_TMPDIR"], "prediction")
        if not os.path.exists(FOLDER):
            os.makedirs(FOLDER)
    model.eval()
    metrics = Metrics().to(device)

    length = len(dataloader)
    num_classes = len(list(dataset.name_classes.keys()))
    # acc, mae, soi_y, soi_py, loss
    tracker = np.zeros((length, 5 + num_classes), dtype=np.float32)

    t0 = dt.datetime.now()

    with torch.no_grad():
        for i, (data, mask, label) in tqdm.tqdm(
                enumerate(dataloader), ncols=80, total=length):

            reset_seed(int(os.environ["MYSEED"]))

            msg = "Expected a batch size of 1. Found `{}`  .... " \
                  "[NOT OK]".format(data.size()[0])
            assert data.size()[0] == 1, msg

            data = data.to(device)
            labels = label.to(device)

            # In validation, we do not need reproducibility since everything
            # is expected to be deterministic. Plus,
            # we use only one gpu since the batch size is 1.
            scores, _ = model(x=data, seed=None)
            loss = criterion(scores, labels)
            batch_metrics = metrics(
                scores=scores, labels=labels, tr_loss=criterion,
                avg=False).cpu().numpy()

            tracker[i, 4] = loss.item()
            tracker[i, :4] = batch_metrics
            tracker[i, 5:] = softmax(scores.cpu().detach().numpy())

            basef = basename(dataset.get_path_input_img(i))
            img_out = visualisor(
                input_img=dataset.get_original_input_img(i),
                stats=[tracker[i, :]],
                label=dataset.get_original_input_label_int(i),
                name_classes=dataset.name_classes,
                loss_name=[criterion.literal],
                name_file=basef
            )
            fdout = FOLDER if FOLDER else outd_data
            img_out.save(
                join(fdout, "{}.jpeg".format(basef.split('.')[0])), "JPEG")

    # overlap distributions.
    # vis_over_dis = VisualiseOverlDist()
    # vis_over_dis(tracker[:, 5:], dataset.name_classes, outd)

    # compress, then delete files to prevent overloading the disc quota of
    # number of files.
    source = FOLDER if FOLDER else outd_data
    ex = 'zip'
    try:
        cmd_compress = 'zip -rjq {}.zip {}'.format(source, source)
        print("Run: `{}`".format(cmd_compress))
        # os.system(cmd_compress)
        subprocess.run(cmd_compress, shell=True, check=True)
    except subprocess.CalledProcessError:
        cmd_compress = 'tar -zcf {}.tar.gz -C {} .'.format(source, source)
        print("Run: `{}`".format(cmd_compress))
        # os.system(cmd_compress)
        subprocess.run(cmd_compress, shell=True, check=True)
        ex = 'tar.gz'

    cmd_del = 'rm -r {}'.format(outd_data)
    print("Run: `{}`".format(cmd_del))
    os.system(cmd_del)
    if FOLDER:
        shcopy("{}.{}".format(FOLDER, ex), outd)

    tmp = tracker.mean(axis=0)

    t_lb = 0.
    if hasattr(criterion.lossCT, "t_lb"):
        t_lb = criterion.lossCT.t_lb.item()  # assume gpu.

    to_write = "EVAL.FINAL {}: ACC: {:.4f}, MAE: {:.4f}, SOI_Y: {:.4f}, " \
               "SOI_PY: {:.4f}, Loss: {:.4f}, t:{:.4f}, time:{}".format(
                name_set, tmp[0], tmp[1], tmp[2], tmp[3], tmp[4], t_lb,
                dt.datetime.now() - t0)
    to_write = "{} \n{} \n{}".format(10 * "=", to_write, 10 * "=")
    print(to_write)
    if log_file:
        log(log_file, to_write)

    # store the stats in pickle.
    with open(join(outd, 'tracker-{}.pkl'.format(name_set)), 'wb') as fout:
        pkl.dump(tracker, fout, protocol=pkl.HIGHEST_PROTOCOL)
Example #27
def copy_large_inputs():
    print 'Copying large inputs...' 
    shcopy(base_inputs+'/etc/daysim_outputs_seed_trips.h5','Inputs')
    dir_util.copy_tree(base_inputs+'/networks','Inputs/networks')
    dir_util.copy_tree(base_inputs+'/trucks','Inputs/trucks')
    dir_util.copy_tree(base_inputs+'/tolls','Inputs/tolls')
    dir_util.copy_tree(base_inputs+'/Fares','Inputs/Fares')
    dir_util.copy_tree(base_inputs+'/bikes','Inputs/bikes')
    dir_util.copy_tree(base_inputs+'/supplemental/distribution','inputs/supplemental/distribution')
    dir_util.copy_tree(base_inputs+'/supplemental/generation','inputs/supplemental/generation')
    dir_util.copy_tree(base_inputs+'/supplemental/parameters','inputs/supplemental/parameters')
    dir_util.copy_tree(base_inputs+'/supplemental/input','inputs/supplemental/input')
    dir_util.copy_tree(base_inputs+'/supplemental/trips','outputs/supplemental')
    dir_util.copy_tree(base_inputs+'/corridors','Inputs/corridors')
    shcopy(base_inputs+'/landuse/hh_and_persons.h5','Inputs')
    shcopy(base_inputs+'/etc/survey.h5','scripts/summarize')
    shcopy(base_inputs+'/etc/survey.h5','scripts/summarize/inputs/calibration')
    shcopy(base_inputs+'/4k/auto.h5','Inputs/4k')
    shcopy(base_inputs+'/4k/transit.h5','Inputs/4k')
    # node to node short distance files:
    shcopy(base_inputs+'/short_distance_files/node_index_2014.txt', 'Inputs')
    shcopy(base_inputs+'/short_distance_files/node_to_node_distance_2014.h5', 'Inputs')
    shcopy(base_inputs+'/short_distance_files/parcel_nodes_2014.txt', 'Inputs')
Example #28
 def _setup_start_files(self) -> None:
     for file in _TYPESCRIPT_EDITOR_FILES:
         shcopy(os.path.join(self.__source, file), self.__target)
     shcopy(os.path.join(self.__source, _COMMON_IGNORE_FILE), self.__target)
     shcopy(os.path.join(self.__source, _TYPESCRIPT_TEMPLATE_MAIN_FILE),
            os.path.join(self.__target, _TYPESCRIPT_SOURCE_DIR))
Example #29
def session(**kwargs):
    """
    Create the files for a new game session.

    Finds the plot and session log files for the last session, copies the plot,
    and creates a new empty session log.

    Args:
        prefs (Settings): Settings object to use. Uses internal settings by
            default.

    Returns:
        Result object. Openable will contain the current and previous session
        log and plot planning files.
    """
    prefs = kwargs.get('prefs', settings.InternalSettings())
    plot_path = prefs.get('paths.plot')
    session_path = prefs.get('paths.session')

    if not (path.exists(plot_path) and path.exists(session_path)):
        return Result(False, errmsg="Cannot access paths '{}' and/or '{}'".format(plot_path, session_path), errcode=4)

    plot_re = re.compile(r'(?i)^plot (\d+)$')
    session_re = re.compile(r'(?i)^session (\d+)$')

    # find latest plot file and its number
    plot_files = [f.name for f in scandir(plot_path) if f.is_file() and plot_re.match(path.splitext(f.name)[0])]
    try:
        latest_plot = max(plot_files, key=lambda plot_files: re.split(r"\s", plot_files)[1])
        (latest_plot_name, latest_plot_ext) = path.splitext(latest_plot)
        plot_match = plot_re.match(latest_plot_name)
        plot_number = int(plot_match.group(1))
    except ValueError:
        plot_number = 0

    # find latest session log and its number
    session_files = [f.name for f in scandir(session_path) if f.is_file() and session_re.match(path.splitext(f.name)[0])]
    try:
        latest_session = max(session_files, key=lambda session_files: re.split(r"\s", session_files)[1])
        (latest_session_name, latest_session_ext) = path.splitext(latest_session)
        session_match = session_re.match(latest_session_name)
        session_number = int(session_match.group(1))
    except ValueError:
        session_number = 0

    new_number = min(plot_number, session_number) + 1

    openable = []
    if session_number:
        if session_number < new_number:
            # create new session log
            old_session_path = path.join(session_path, latest_session)
            new_session_path = path.join(session_path, ("session %i" % new_number) + latest_session_ext)
            shcopy(prefs.get('templates.session'), new_session_path)
        else:
            # present existing session files, since we don't have to create one
            old_session_path = path.join(session_path, ("session %i" % (session_number - 1)) + latest_session_ext)
            new_session_path = path.join(session_path, latest_session)
        openable.extend((new_session_path, old_session_path))
    else:
        # no old session
        new_session_path = path.join(session_path, ("session %i.md" % new_number))
        shcopy(prefs.get('templates.session'), new_session_path)
        openable.append(new_session_path)

    if plot_number:
        if plot_number < new_number:
            # copy old plot
            old_plot_path = path.join(plot_path, latest_plot)
            new_plot_path = path.join(plot_path, ("plot %i" % new_number) + latest_plot_ext)
            shcopy(old_plot_path, new_plot_path)
        else:
            # present existing plot files, since we don't have to create one
            old_plot_path = path.join(plot_path, ("plot %i" % (plot_number - 1)) + latest_plot_ext)
            new_plot_path = path.join(plot_path, latest_plot)
        openable.extend((new_plot_path, old_plot_path))
    else:
        # no old plot to copy, so use a blank
        new_plot_path = path.join(plot_path, ("plot %i.md" % new_number))
        with open(new_plot_path, 'w') as new_plot:
            new_plot.write(' ')
        openable.append(new_plot_path)

    return Result(True, openable=openable)
Example #30
def session(**kwargs):
    """
    Create the files for a new game session.

    Finds the plot and session log files for the last session, copies the plot,
    and creates a new empty session log. If the latest plot file is ahead of
    the latest session, a new plot file will *not* be created. Likewise if the
    latest session file is ahead, a new session file will *not* be created.

    Args:
        prefs (Settings): Settings object to use. Uses internal settings by
            default.

    Returns:
        Result object. Openable will contain the current and previous session
        log and plot planning files.
    """
    prefs = kwargs.get('prefs', settings.InternalSettings())
    plot_path = prefs.get('paths.required.plot')
    session_path = prefs.get('paths.required.session')

    if not (path.exists(plot_path) and path.exists(session_path)):
        return result.FSError(
            errmsg="Cannot access paths '{}' and/or '{}'".format(
                plot_path, session_path))

    latest_plot = latest_file_info(plot_path, PLOT_REGEX)
    latest_session = latest_file_info(session_path, SESSION_REGEX)

    new_number = min(latest_plot['number'], latest_session['number']) + 1

    openable = []
    if latest_session['exists']:
        if latest_session['number'] < new_number:
            # create new session log
            old_session_path = latest_session['path']
            new_session_path = path.join(
                session_path,
                "session {num}{ext}".format(num=new_number,
                                            ext=latest_session['ext']))
            shcopy(prefs.get('story.session_template'), new_session_path)
        else:
            # present existing session files, since we don't have to create one
            old_session_path = path.join(
                session_path,
                "session {num}{ext}".format(num=latest_session['number'] - 1,
                                            ext=latest_session['ext']))
            new_session_path = latest_session['path']
        openable.extend((new_session_path, old_session_path))
    else:
        # no existing session, so just copy the template
        template_path = prefs.get('story.session_template')
        new_session_path = path.join(
            session_path,
            "session {num}{ext}".format(num=new_number,
                                        ext=path.splitext(template_path)[1]))
        shcopy(template_path, new_session_path)
        openable.append(new_session_path)

    if latest_plot['exists']:
        if latest_plot['number'] < new_number:
            # copy old plot
            old_plot_path = latest_plot['path']
            new_plot_path = path.join(
                plot_path, "plot {num}{ext}".format(num=new_number,
                                                    ext=latest_plot['ext']))
            shcopy(old_plot_path, new_plot_path)
        else:
            # present existing plot files, since we don't have to create one
            old_plot_path = path.join(
                plot_path,
                "plot {num}{ext}".format(num=latest_plot['number'] - 1,
                                         ext=latest_plot['ext']))
            new_plot_path = latest_plot['path']
        openable.extend((new_plot_path, old_plot_path))
    else:
        # no old plot to copy, so create a blank
        new_plot_path = path.join(
            plot_path,
            "plot {num}{ext}".format(num=new_number,
                                     ext=prefs.get('story.plot_ext')))
        with open(new_plot_path, 'w') as new_plot:
            new_plot.write(' ')
        openable.append(new_plot_path)

    return result.Success(openable=openable)
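latest_file_info, PLOT_REGEX, and SESSION_REGEX are defined elsewhere in this project; judging from the calls above and from the older version in Example #29, the helper plausibly looks something like this (an assumption, not the project's code):

import re
from os import path, scandir

PLOT_REGEX = re.compile(r'(?i)^plot (\d+)$')  # assumed to mirror plot_re in Example #29

def latest_file_info(search_path, regex):
    # Hypothetical helper: describe the highest-numbered file matching regex.
    candidates = []
    for entry in scandir(search_path):
        name, ext = path.splitext(entry.name)
        match = regex.match(name)
        if entry.is_file() and match:
            candidates.append((int(match.group(1)), entry.path, ext))
    if not candidates:
        return {'exists': False, 'number': 0, 'path': None, 'ext': ''}
    number, file_path, ext = max(candidates)
    return {'exists': True, 'number': number, 'path': file_path, 'ext': ext}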
Example #31
        emmebank = _eb.create(bank_path, emmebank_dimensions_dict)
        emmebank.title = config['emmebank_title']
        emmebank.unit_of_length = 'mi'
        emmebank.coord_unit_length = 0.0001894
        scenario = emmebank.create_scenario(999)
        # project
        project = app.create_project(emme_folder, 'emme_networks')
        desktop = app.start_dedicated(False, "SEC", project)
        data_explorer = desktop.data_explorer()
        database = data_explorer.add_database(bank_path)
        #open the database added so that there is an active one
        database.open()
        desktop.project.save()
        desktop.close()
        emme_toolbox_path = os.path.join(os.environ['EMMEPATH'], 'toolboxes')
        shcopy(emme_toolbox_path + '/standard.mtbx',
               emme_folder + '\\emme_networks')
        my_project = EmmeProject(emme_folder + '\\emme_networks' +
                                 '\\emme_networks.emp')
        os.path.join(emme_folder + 'emme_networks')

        build_file_folder = os.path.join(config['output_dir'],
                                         config['emme_folder_name'],
                                         'build_files')
        if os.path.exists(build_file_folder):
            shutil.rmtree(build_file_folder)
        os.makedirs(build_file_folder)
        os.makedirs(os.path.join(build_file_folder, 'roadway'))
        os.makedirs(os.path.join(build_file_folder, 'transit'))
        os.makedirs(os.path.join(build_file_folder, 'turns'))
        os.makedirs(os.path.join(build_file_folder, 'shape'))
        os.makedirs(os.path.join(build_file_folder, 'extra_attributes'))
Example #32
 def write(self, path: GenPath, ctx: GenContext):
     path.parent.mkdir()
     shcopy(str(self.source), str(path.absolute()))
Example #33
def copy_accessibility_files():
    if not os.path.exists('inputs/accessibility'):
        os.makedirs('inputs/accessibility')
    
    print 'Copying UrbanSim parcel file'
    try:
        if os.path.isfile(base_inputs+'/landuse/parcels_urbansim.txt'):
            shcopy(base_inputs+'/landuse/parcels_urbansim.txt','inputs/accessibility')
        # the file may need to be reformatted- like this coming right out of urbansim
        elif os.path.isfile(base_inputs+'/landuse/parcels.dat'):
            print 'the file is ' + base_inputs +'/landuse/parcels.dat'
            print "Parcels file is being reformatted to Daysim format"
            parcels = pd.DataFrame.from_csv(base_inputs+'/landuse/parcels.dat',sep=" " )
            print 'Read in unformatted parcels file'
            for col in parcels.columns:
                print col
                new_col = [x.upper() for x in col]
                new_col = ''.join(new_col)
                parcels=parcels.rename(columns = {col:new_col})
                print new_col
            parcels.to_csv(base_inputs+'/landuse/parcels_urbansim.txt', sep = " ")
            shcopy(base_inputs+'/landuse/parcels_urbansim.txt','inputs/accessibility')

    except Exception as ex:
        template = "An exception of type {0} occured. Arguments:\n{1!r}"
        message = template.format(type(ex).__name__, ex.args)
        print message
        sys.exit(1)


    print 'Copying Military parcel file'
    try:
        shcopy(base_inputs+'/landuse/parcels_military.csv','inputs/accessibility')
    except:
        print 'error copying military parcel file at ' + base_inputs+'/landuse/parcels_military.csv'
        sys.exit(1)

    try:
        shcopy(base_inputs+'/landuse/distribute_jblm_jobs.csv','Inputs/accessibility')
    except:
        print 'error copying JBLM file at ' + base_inputs+'/landuse/distribute_jblm_jobs.csv'
        sys.exit(1)


    print 'Copying Hourly and Daily Parking Files'
    if run_update_parking: 
        try:
            shcopy(base_inputs+'/landuse/hourly_parking_costs.csv','Inputs/accessibility')
            shcopy(base_inputs+'/landuse/daily_parking_costs.csv','Inputs/accessibility')
        except:
            print 'error copying parking file at' + base_inputs+'/landuse/' + ' either hourly or daily parking costs'
            sys.exit(1)
Example #34
 def __create_json(self) -> None:
     shcopy(os.path.join(self.__source, _TYPESCRIPT_PROJECT_JSON),
            self.__target)
Example #35
def main():
## SET UP INPUTS ##########################################################

    if run_accessibility_calcs:
        accessibility_calcs()

    if run_accessibility_summary:
        subprocess.call([sys.executable, 'scripts/summarize/standard/parcel_summary.py'])

    if not os.path.exists('outputs'):
        os.makedirs('outputs')

    if run_copy_daysim_code:
        copy_daysim_code()

    if run_setup_emme_bank_folders:
        setup_emme_bank_folders()

    if run_setup_emme_project_folders:
        setup_emme_project_folders()

    if run_copy_large_inputs:
        copy_large_inputs()

    if  run_convert_hhinc_2000_2010:
        subprocess.call([sys.executable, 'scripts/utils/convert_hhinc_2000_2010.py'])

### IMPORT NETWORKS
### ###############################################################
    if run_import_networks:
        time_copy = datetime.datetime.now()
        logger.info("Start of network importer")
        returncode = subprocess.call([sys.executable,
        'scripts/network/network_importer.py', base_inputs])
        logger.info("End of network importer")
        time_network = datetime.datetime.now()
        if returncode != 0:
           sys.exit(1)

### BUILD OR COPY SKIMS ###############################################################
    if run_skims_and_paths_seed_trips:
        build_seed_skims(10)
        returncode = subprocess.call([sys.executable,'scripts/bikes/bike_model.py'])
        if returncode != 0:
            sys.exit(1)
    elif run_skims_and_paths_free_flow:
        build_free_flow_skims(10)
        returncode = subprocess.call([sys.executable,'scripts/bikes/bike_model.py'])
        if returncode != 0:
            sys.exit(1)
    # either you build seed skims or you copy them, or neither, but it wouldn't make sense to do both
    elif run_copy_seed_skims:
        copy_seed_skims()
    # Check all inputs have been created or copied
    check_inputs()
    
### RUN DAYSIM AND ASSIGNMENT TO CONVERGENCE-- MAIN LOOP
### ##########################################
    
    if(run_daysim or run_skims_and_paths or run_skims_and_paths_seed_trips):
        
        for iteration in range(len(pop_sample)):
            print "We're on iteration %d" % (iteration)
            logger.info(("We're on iteration %d\r\n" % (iteration)))
            time_start = datetime.datetime.now()
            logger.info("starting run %s" % str((time_start)))

            # Copy shadow pricing? Need to know what the sample size of the previous iteration was:
            if not should_build_shadow_price:
                print 'here'
                if iteration == 0 or pop_sample[iteration-1] > 2:
                    print 'here'
                    try:
                        if not os.path.exists('working'):
                            os.makedirs('working')
                        shcopy(base_inputs+'/shadow_pricing/shadow_prices.txt','working/shadow_prices.txt')
                        print "copying shadow prices"
                    except:
                        print ' error copying shadow pricing file from shadow_pricing at ' + base_inputs+'/shadow_pricing/shadow_prices.txt'
                        sys.exit(1)
                # Set up your Daysim Configuration
                modify_config([("$SHADOW_PRICE" ,"true"),("$SAMPLE",pop_sample[iteration]),("$RUN_ALL", "true")])

            else:
                # We are building shadow prices from scratch, only use shadow pricing if pop sample is 2 or less
                if pop_sample[iteration-1] > 2:
                    modify_config([("$SHADOW_PRICE" ,"false"),("$SAMPLE",pop_sample[iteration]),("$RUN_ALL", "true")])
                else:
                    modify_config([("$SHADOW_PRICE" ,"true"),("$SAMPLE",pop_sample[iteration]),("$RUN_ALL", "true")])
            ## Run Skimming and/or Daysim

            daysim_assignment(iteration)
           
            converge=check_convergence(iteration, pop_sample[iteration])
            if converge == 'stop':
                print "System converged!"
                break
            print 'The system is not yet converged. Daysim and Assignment will be re-run.'

# IF BUILDING SHADOW PRICES, UPDATING WORK AND SCHOOL SHADOW PRICES USING CONVERGED SKIMS FROM CURRENT RUN, THEN DAYSIM + ASSIGNMENT ############################
    if should_build_shadow_price:
        build_shadow_only()
        modify_config([("$SHADOW_PRICE" ,"true"),("$SAMPLE","1"), ("$RUN_ALL", "true")])
        # This function needs an iteration parameter. Value of 1 is fine.
        daysim_assignment(1)

### SUMMARIZE
### ##################################################################
    run_all_summaries()

#### ALL DONE
#### ##################################################################
    clean_up()
    print '###### OH HAPPY DAY!  ALL DONE. GO GET A ' + random.choice(good_thing)
Example #36
def copy_shadow_price_file():
    print 'Copying shadow price file.'
    if not os.path.exists('working'):
        os.makedirs('working')
    shcopy(base_inputs + '/shadow_prices/shadow_prices.txt', 'working')
Example #37
def main():

## SET UP INPUTS ##########################################################
    if run_accessibility_calcs:
        accessibility_calcs()

    if run_accessibility_summary:
        subprocess.call([sys.executable, 'scripts/summarize/standard/parcel_summary.py'])

    if not os.path.exists('outputs'):
        os.makedirs('outputs')

    if run_copy_seed_supplemental_trips:
        copy_seed_supplemental_trips()

    if run_copy_daysim_code:
        copy_daysim_code()

    if run_setup_emme_bank_folders:
        setup_emme_bank_folders()

    if run_setup_emme_project_folders:
        setup_emme_project_folders()

### IMPORT NETWORKS ###############################################################
    if run_import_networks:
        time_copy = datetime.datetime.now()
        logger.info("Start of network importer")
        returncode = subprocess.call([sys.executable,
        'scripts/network/network_importer.py', base_inputs])
        logger.info("End of network importer")
        time_network = datetime.datetime.now()
        if returncode != 0:
           sys.exit(1)

### BUILD OR COPY SKIMS ###############################################################
    if run_skims_and_paths_seed_trips:
        build_seed_skims(10)
        returncode = subprocess.call([sys.executable,'scripts/bikes/bike_model.py'])
        if returncode != 0:
            sys.exit(1)

    # either you build seed skims or you copy them, or neither, but it wouldn't make sense to do both
    elif run_copy_seed_skims:
        copy_seed_skims()

    # Check all inputs have been created or copied
    check_inputs()


### RUN DAYSIM AND ASSIGNMENT TO CONVERGENCE-- MAIN LOOP ##########################################
    
    if(run_daysim or run_skims_and_paths or run_skims_and_paths_seed_trips):
        #run daysim popsampler
        if run_daysim_popsampler:
            daysim_popsampler(sampling_option)
       
        for iteration in range(len(pop_sample)):
            print "We're on iteration %d" % (iteration)
            logger.info(("We're on iteration %d\r\n" % (iteration)))
            time_start = datetime.datetime.now()
            logger.info("starting run %s" % str((time_start)))

            # Copy shadow pricing?
            if not should_build_shadow_price:
                if iteration == 0 or pop_sample[iteration-1] > 2:
                    try:                                
                        if not os.path.exists('working'):
                            os.makedirs('working')
                        shcopy(base_inputs+'/shadow_pricing/shadow_prices.txt','working/shadow_prices.txt')
                        print "copying shadow prices" 
                    except:
                        print ' error copying shadow pricing file from shadow_pricing at ' + base_inputs+'/shadow_pricing/shadow_prices.txt'
                        sys.exit(1)

                # Set up your Daysim Configuration
                modify_config([("$SHADOW_PRICE" ,"true"),("$SAMPLE",pop_sample[iteration]),("$RUN_ALL", "true")])

            else:
                # IF BUILDING SHADOW PRICES, UPDATING WORK AND SCHOOL SHADOW PRICES
                # 3 daysim iterations
                build_shadow_only(iteration)             

                # run daysim and assignment
                if pop_sample[iteration-1] > 2:
                    modify_config([("$SHADOW_PRICE" ,"false"),("$SAMPLE",pop_sample[iteration]),("$RUN_ALL", "true")])
                else:
                    modify_config([("$SHADOW_PRICE" ,"true"),("$SAMPLE",pop_sample[iteration]),("$RUN_ALL", "true")])
            
            ## Run Skimming and/or Daysim
            daysim_assignment(iteration)
           
            converge=check_convergence(iteration, pop_sample[iteration])
            if converge == 'stop':
                print "System converged!"
                break

            print 'The system is not yet converged. Daysim and Assignment will be re-run.'

### SUMMARIZE
### ##################################################################
    run_all_summaries()

#### ALL DONE
#### ##################################################################
    clean_up()

    print '###### OH HAPPY DAY!  ALL DONE. GO GET A ' + random.choice(good_thing)
Example #38
 def copyMainFile(self):
     '''
     Copy mainfile as a base for output file
     '''
     self.outName = self.mainfile[0:-3] + 'u.nc'
     shcopy(self.mainfile, self.outName)
Example #39
 def __create_json(self) -> None:
     shcopy(os.path.join(self.__source, _VANILLAJS_PROJECT_JSON),
            self.__target)
Example #40
def copy_accessibility_files():
    if not os.path.exists('inputs/accessibility'):
        os.makedirs('inputs/accessibility')
    
    print 'Copying UrbanSim parcel file'
    try:
        if os.path.isfile(os.path.join(parcels_file_folder,parcels_file_name)):
            shcopy(os.path.join(parcels_file_folder,parcels_file_name),'inputs/accessibility')
        # the file may need to be reformatted- like this coming right out of urbansim
        elif os.path.isfile(os.path.join(parcels_file_folder,'parcels.dat')):
            print 'the file is ' + os.path.join(parcels_file_folder,'parcels.dat')
            print "Parcels file is being reformatted to Daysim format"
            parcels = pd.DataFrame.from_csv(os.path.join(parcels_file_folder,'parcels.dat'),sep=" " )
            print 'Read in unformatted parcels file'
            for col in parcels.columns:
                print col
                new_col = [x.upper() for x in col]
                new_col = ''.join(new_col)
                parcels=parcels.rename(columns = {col:new_col})
                print new_col
            parcels.to_csv(os.path.join(parcels_file_folder,parcels_file_name), sep = " ")
            shcopy(os.path.join(parcels_file_folder,parcels_file_name),'inputs/accessibility')

    except Exception as ex:
        template = "An exception of type {0} occured. Arguments:\n{1!r}"
        message = template.format(type(ex).__name__, ex.args)
        print message
        sys.exit(1)


    print 'Copying Military parcel file'
    try:
        shcopy(base_inputs+'/landuse/parcels_military.csv','inputs/accessibility')
    except:
        print 'error copying military parcel file at ' + base_inputs+'/landuse/parcels_military.csv'
        sys.exit(1)

    try:
        shcopy(base_inputs+'/landuse/distribute_jblm_jobs.csv','Inputs/accessibility')
    except:
        print 'error copying JBLM file at ' + base_inputs+'/landuse/distribute_jblm_jobs.csv'
        sys.exit(1)


    print 'Copying Hourly and Daily Parking Files'
    if run_update_parking: 
        try:
            shcopy(base_inputs+'/landuse/hourly_parking_costs.csv','Inputs/accessibility')
            shcopy(base_inputs+'/landuse/daily_parking_costs.csv','Inputs/accessibility')
        except:
            print 'error copying parking file at' + base_inputs+'/landuse/' + ' either hourly or daily parking costs'
            sys.exit(1)
Example #41
def copy_large_inputs():
    print 'Copying large inputs...'
    if run_skims_and_paths_seed_trips:
        shcopy(scenario_inputs + '/etc/daysim_outputs_seed_trips.h5', 'Inputs')
    dir_util.copy_tree(scenario_inputs + '/networks', 'Inputs/networks')
    dir_util.copy_tree(scenario_inputs + '/trucks', 'Inputs/trucks')
    dir_util.copy_tree(scenario_inputs + '/supplemental',
                       'inputs/supplemental')
    if run_supplemental_generation:
        shcopy(scenario_inputs + '/tazdata/tazdata.in', 'inputs/trucks')
        #shcopy(scenario_inputs+'/tazdata/tazdata.in','inputs/suplemental/generation/landuse')
    dir_util.copy_tree(scenario_inputs + '/tolls', 'Inputs/tolls')
    dir_util.copy_tree(scenario_inputs + '/Fares', 'Inputs/Fares')
    dir_util.copy_tree(scenario_inputs + '/bikes', 'Inputs/bikes')
    dir_util.copy_tree(base_inputs + '/observed', 'Inputs/observed')
    dir_util.copy_tree(base_inputs + '/corridors', 'inputs/corridors')
    dir_util.copy_tree(scenario_inputs + '/parking', 'inputs/parking')
    shcopy(scenario_inputs + '/landuse/hh_and_persons.h5', 'Inputs')
    shcopy(base_inputs + '/etc/survey.h5',
           'scripts/summarize/inputs/calibration')
    # node to node short distance files:
    shcopy(base_inputs + '/short_distance_files/node_index_2014.txt', 'Inputs')
    shcopy(base_inputs + '/short_distance_files/node_to_node_distance_2014.h5',
           'Inputs')
    shcopy(base_inputs + '/short_distance_files/parcel_nodes_2014.txt',
           'Inputs')
Example #42
def rename_network_outs(iter):
    for summary_name in network_summary_files:
        csv_output = os.path.join(os.getcwd(), 'outputs',summary_name+'.csv')
        if os.path.isfile(csv_output):
            shcopy(csv_output, os.path.join(os.getcwd(), 'outputs',summary_name+str(iter)+'.csv'))
            os.remove(csv_output)
Example #43
def copy_shadow_price_file():
    print 'Copying shadow price file.' 
    if not os.path.exists('working'):
        os.makedirs('working')
    shcopy(base_inputs+'/shadow_prices/shadow_prices.txt','working')