Example #1
def process_location_wise_dataframe(folder_path_where_location_data_is_saved):

    for file_name in os.listdir(folder_path_where_location_data_is_saved):

        if file_name == '.DS_Store':
            continue
        else:
            location_name = file_name[:-4]  # drop the 4-character file extension
            location_file_path = r'{}/{}'.format(
                folder_path_where_location_data_is_saved, file_name)
            location_df = pd.read_csv(location_file_path, index_col=0)
            location_df = location_df.sort_values(['day', 'timestamp_new'],
                                                  ascending=[True, True])
            for day in range(1, 51):
                print(day)
                location_time_list = list(
                    location_df['timestamp'][location_df['day'] == day])
                for time in config.timestamp_range:
                    if time in location_time_list:
                        continue
                    else:
                        row = {
                            'geohash6': location_name,
                            'day': day,
                            'timestamp': time,
                            'demand': 0,
                            'timestamp_new': config.timestamp(time)
                        }
                        # DataFrame.append was removed in pandas 2.0; build a
                        # one-row frame and concatenate it instead
                        location_df = pd.concat(
                            [location_df, pd.DataFrame([row])],
                            ignore_index=True)
            print(location_df.shape)
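            # only the first location file is processed before breaking out of the loop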
            break
Example #2
def merge_raw_feed_json_files_to_daily_feed_json():
    timestamp_now = config.timestamp()
    timestamp_minus_24hrs = timestamp_now - 24 * 60 * 60
    raw_feed_json_file_list = os.listdir(config.JSON_RAW_FEED_DIRECTORY_PATH)
    raw_feed_connectivity_check_list = []
    raw_feed_modem_status_list = []
    for file in raw_feed_json_file_list:
        file_timestamp = int(file[file.rfind("-") + 1:file.find(".")])
        if file_timestamp < timestamp_minus_24hrs:
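            # the filename length distinguishes the two feed types:
            # 34 chars -> connectivity check, 28 chars -> modem status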
            if len(file) == 34:
                raw_feed_connectivity_check_list.append(
                    config.JSON_RAW_FEED_DIRECTORY_PATH + "/" + file)
            elif len(file) == 28:
                raw_feed_modem_status_list.append(
                    config.JSON_RAW_FEED_DIRECTORY_PATH + "/" + file)

    connectivity_check_daily_feed_filename = "connectivity-check-daily-feed-" + str(
        timestamp_minus_24hrs) + ".json"
    merge_files(raw_feed_connectivity_check_list,
                connectivity_check_daily_feed_filename,
                config.JSON_DAILY_FEED_DIRECTORY_PATH + "/pending")

    modem_status_daily_feed_filename = "modem_status-daily-feed-" + str(
        timestamp_minus_24hrs) + ".json"
    merge_files(raw_feed_modem_status_list, modem_status_daily_feed_filename,
                config.JSON_DAILY_FEED_DIRECTORY_PATH + "/pending")
Example #3
    def do_update(hdu_list, key, force):
        suffix = "_ONOFF"

        if not SILENT:
            print "Updating key:", key + suffix, "in", hdu_list.filename()

            if not force and key + suffix in hdu_list[skip].header:
                print key + suffix + " keyword found in filament header..."
                if "y" not in raw_input("Overwrite? ([no]/yes):  "):
                    return hdu_list, key

        if key in config.source_data:
            correlation_data = config.source_data[key]
        else:
            raise KeyError("No source_data for key: " + key + " in config.source_data")

        # print key, key+suffix, correlation_data.shape
        N = len(hdu_list)
        for i, hdu in enumerate(hdu_list[skip:]):
            # cast before dividing so integer division doesn't pin the progress at 0
            rht.update_progress(float(i) / N, message=key + suffix + ": " + str(i))
            hdr = hdu.header
            if (
                not force
                and (key + suffix + "_AVG" in hdr)
                and (key + suffix + "_MED" in hdr)
                and (key + suffix + "_TOT" in hdr)
                and (key + suffix in hdr)
            ):
                continue

            ONMASK, OFFMASK, LL, UR = config.Cloud.on_and_off_masks_from_HDU(
                hdu, transpose=True, shape=correlation_data.shape
            )
            # mask_slice = np.s_[hdr['MIN_Y']:hdr['MAX_Y']+1, hdr['MIN_X']:hdr['MAX_X']+1]
            mask_slice = np.s_[LL[1] : UR[1] + 1, LL[0] : UR[0] + 1]

            inset = correlation_data[mask_slice]
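            # pixel coordinates of the ON and OFF mask regions within the inset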
            on_nonzero = np.nonzero(ONMASK)
            off_nonzero = np.nonzero(OFFMASK)

            on_avg = np.nanmean(inset[on_nonzero])
            off_avg = np.nanmean(inset[off_nonzero])
            # scipy.stats.nanmedian was removed from SciPy; np.nanmedian is the drop-in replacement
            on_med = np.nanmedian(inset[on_nonzero])
            off_med = np.nanmedian(inset[off_nonzero])

            hdr[key + suffix + "_AVG"] = float(on_avg - off_avg)
            hdr[key + suffix + "_MED"] = float(on_med - off_med)
            hdr[key + suffix + "_TOT"] = hdr[key + suffix + "_AVG"] * hdr["LITPIX"]
            hdr[key + suffix] = config.timestamp()

        hdu_list.flush()
        rht.update_progress(1.0)
        return
Example #4
def process_timestamp(df):
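    # parse every string in the 'timestamp' column with config.timestamp,
    # attach the result as 'timestamp_new', and save the frame as training_new.csv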
    string_timestamp_list = df.loc[:, 'timestamp']
    datetime_timestamp_list = list()
    for i in range(0, len(string_timestamp_list)):
        if i % 100 == 0:
            print(i)
        datetime_timestamp_list.append(
            config.timestamp(string_timestamp_list[i]))
    datetime_timestamp_series = pd.Series(datetime_timestamp_list)
    df['timestamp_new'] = datetime_timestamp_series
    # storing the new dataframe
    df.to_csv(r'{}/training_new.csv'.format(os.getcwd()), index=False)
    return True
Example #5
def c_out(signum, frame):
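    # signal handler: stop the filler and threadmanager loops, join them,
    # and report how many threads are still alive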
#    print signum, frame
    print "\n\n\nstopping",timestamp(),"\n\n"
    filler.stopLoop()
    tm.stopLoop()
    filler.join()
    tm.join()
    myexit.exitme()
    print "\n\n########## stopped ##########"
    myThreads = threading.enumerate()
    print str(tm._threadCounter(myThreads))
    print "##########"
#    print str(myThreads)
    print "\n\nit may take some time to completely exit all threads"
Example #6
    def run_pbs(self):
        import pbs_utils
        import paramiko

        tarball_name = '%s_%s' % (self.__class__.__name__, config.timestamp())

        pbs_body = """
#!/bin/sh

#PBS -N %s
#PBS -M [email protected]
#PBS -q psi
#PBS -e localhost:/dev/null
#PBS -o localhost:/dev/null

cd $PBS_O_WORKDIR
exec &> pbs.log
. %s/bin/activate
./run.py --pbs False
""" % (tarball_name, config.pbs_virtualenv)

        with open('pbs.sh', 'w') as pbs_f:
            print >> pbs_f, pbs_body

        tarball_dest = os.path.join(config.root, '..',
                                    '%s.tar.gz' % tarball_name)
        pbs_utils.make_tarball(
            config.root,
            tarball_dest,
            ignore=[config.experiments_dir_name, config.latest_link_file_name])

        ssh = paramiko.SSHClient()
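        # optionally trust unknown host keys, then connect to the PBS host and open SFTP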
        if config.pbs_ssh_promiscuous:
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(config.pbs_host, username=config.pbs_user)
        sftp = ssh.open_sftp()

        remote_tarball_dest = os.path.join(config.pbs_work_dir,
                                           '%s.tar.gz' % tarball_name)
        remote_root = os.path.join(config.pbs_work_dir, tarball_name)
        sftp.put(tarball_dest, remote_tarball_dest)

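        # unpack the tarball on the remote host and submit the generated pbs.sh with qsub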
        commands = [
            'cd %s' % config.pbs_work_dir,
            'tar xzf %s' % remote_tarball_dest,
            'cd %s' % remote_root, 'qsub pbs.sh'
        ]
        ssh.exec_command('; '.join(commands))
Example #7
  def run_pbs(self):
    import pbs_utils
    import paramiko

    tarball_name = '%s_%s' % (self.__class__.__name__, config.timestamp())

    pbs_body = """
#!/bin/sh

#PBS -N %s
#PBS -M [email protected]
#PBS -q psi
#PBS -e localhost:/dev/null
#PBS -o localhost:/dev/null

cd $PBS_O_WORKDIR
exec &> pbs.log
. %s/bin/activate
./run.py --pbs False
""" % (tarball_name, config.pbs_virtualenv)

    with open('pbs.sh', 'w') as pbs_f:
      print >>pbs_f, pbs_body

    tarball_dest = os.path.join(config.root, '..', '%s.tar.gz' % tarball_name)
    pbs_utils.make_tarball(config.root, 
                           tarball_dest, ignore=[config.experiments_dir_name,
                                                 config.latest_link_file_name])

    ssh = paramiko.SSHClient()
    if config.pbs_ssh_promiscuous:
      ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(config.pbs_host, username=config.pbs_user)
    sftp = ssh.open_sftp()

    remote_tarball_dest = os.path.join(config.pbs_work_dir, '%s.tar.gz' % tarball_name)
    remote_root = os.path.join(config.pbs_work_dir, tarball_name)
    sftp.put(tarball_dest, remote_tarball_dest)

    commands = ['cd %s' % config.pbs_work_dir,
                'tar xzf %s' % remote_tarball_dest,
                'cd %s' % remote_root,
                'qsub pbs.sh']
    ssh.exec_command('; '.join(commands))
Example #8
    def do_update(hdu_list, key, correlation_data, force):
        if not SILENT:
            print "Updating key:", key, "in", hdu_list.filename()

            if not force and (key == "" or key in hdu_list[skip].header):
                print key + " keyword found in filament header..."
                if "y" not in raw_input("Overwrite? ([no]/yes):  "):
                    return hdu_list, key

        # correlation_data can be None if and only if the functions you will call expect Cloud objects
        # else, it must correspond to a source_data and applicable_methods entry
        if correlation_data is None:
            if key in config.source_data:
                correlation_data = config.source_data[key]
            else:
                raise KeyError("No source_data for key: " + key + " in config.source_data")

        if key not in config.applicable_methods:
            raise KeyError("No applicable_methods for key: " + key + " in config.applicable_methods")

        # for i, hdu in enumerate(hdu_list[skip:]):
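        # write the derived statistics into the header of every HDU past the skip offset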
        for hdu in hdu_list[skip:]:
            hdr = hdu.header
            if correlation_data is not None:
                # Assumes correlation_data can be indexed into using ndarray notation [LowerLeft to UpperRight+1]
                # Assumes config.Cloud.nonzero_data_from_HDU will return the pixel coords offset properly for the above masked region
                # Assumes func can take this weird ndarray view as input and return a scalar value
                for suffix in config.applicable_methods[key]:
                    func = config.methods[suffix]
                    hdr[key + suffix] = func(
                        correlation_data[hdr["MIN_Y"] : hdr["MAX_Y"] + 1, hdr["MIN_X"] : hdr["MAX_X"] + 1][
                            config.Cloud.nonzero_data_from_HDU(hdu, transpose=True)
                        ]
                    )
                hdr[key] = config.timestamp()
            else:
                # Assumes all func require a config.Cloud object to work
                tempCloud = config.Cloud(hdu)
                for suffix in config.applicable_methods[key]:
                    func = config.Cloud.functions[suffix][0]
                    hdr[key + suffix] = func(tempCloud)

        hdu_list.flush()
Example #9
    myconfig.read(myfile)
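    # pull the thread-management and MySQL settings out of the parsed config file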
    threads = ConfigSectionMap(myconfig, 'Threadmanagement')
    mysql = ConfigSectionMap(myconfig, 'MySQL')
#    paths=ConfigSectionMap(myconfig, 'paths')

#    threads['timestamp']
    THREAD_LIMIT = int(stripQuotes(threads['threadlimit']))
    m_intervall = float(stripQuotes(threads['monitor_intervall']))
    tm_intervall = float(stripQuotes(threads['threadmanager_intervall']))
    filler_intervall = float(stripQuotes(threads['queuefiller_intervall']))
    tm_threshold = int(stripQuotes(threads['queuelength_threshold']))

    db = PySQLPool.getNewConnection(username=stripQuotes(mysql['db_username']),
                                    password=stripQuotes(mysql['db_password']),
                                    host=stripQuotes(mysql['db_host']),
                                    db=stripQuotes(mysql['db_name']))
    # connection pool for the Mojang session server (note: urllib3 pools normally
    # expect a bare hostname here rather than a full URL)
    http_pool = HTTPSConnectionPool("https://sessionserver.mojang.com/session/minecraft/profile",
                                    port=443, maxsize=1000000,
                                    timeout=urllib3.Timeout(connect=2, read=3))
    myexit = exittest()
    print "starting ...", timestamp()
    print str(threads)
    paths="dead Variable"

    filler = filler.filler(queue=workerqueue, db=db, intervall=filler_intervall,
                           name='filler')
    tm = threadmanager.threadmanager(queue=workerqueue, db=db, paths=paths,
                                     http_pool=http_pool, threshold=tm_threshold,
                                     intervall=tm_intervall, name='threadmanager',
                                     limit=THREAD_LIMIT)

    time.sleep(0.2)
    print "init Queue Filler"
    filler.start()
    print "starting threadmanager"
    tm.start()
    print "starting monitors"
    # Thread.start() returns None, so build the monitor thread first and keep the handle
    m = Thread(target=infomon, name='infomon',
               kwargs={'threads': tm, 'intervall': m_intervall, 'myexit': myexit})
    m.start()

#    "Interrupt signalhandling"