Example #1
def run(host, port, config_filename):
    """Runs the server.

    @param host The host for the server
    @param port The port for the server
    @param config_filename The configuration file to load before starting
    """

    import config
    config.load_config_file(config_filename)

    # Make the modules imported below available at module scope.
    global localization, detectionserver, fingerprint

    import localization
    import detectionserver
    import fingerprint

    import db
    import pageserver

    db.init()
    config.app.run(host=host, port=int(port), debug=True)
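A minimal sketch of how this entry point might be invoked from the command line; the script name, argument order, and defaults below are assumptions for illustration, not part of the original project:

import sys

if __name__ == "__main__":
    # Hypothetical invocation: python server.py <host> <port> <config_file>
    host = sys.argv[1] if len(sys.argv) > 1 else "127.0.0.1"
    port = sys.argv[2] if len(sys.argv) > 2 else "8080"
    config_file = sys.argv[3] if len(sys.argv) > 3 else "config.json"
    run(host, port, config_file)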
Example #2
    def init_preferences_widgets(self):
        '''Initialize the preferences dialog.'''

        self.file_chooser_dialog = gtk.FileChooserDialog("Choose a file", None,
                                   gtk.FILE_CHOOSER_ACTION_OPEN,
                                   (gtk.STOCK_OPEN, gtk.RESPONSE_OK,
                                   gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL))
        self.folder_chooser_dialog = gtk.FileChooserDialog("Choose a folder", None,
                                   gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER,
                                   (gtk.STOCK_OPEN, gtk.RESPONSE_OK,
                                   gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL))
        load_config_file()
        self.update_player_widgets()
        self.update_preferences_widgets()
Example #3
def run(host, port, config_file):
    """Load the config file, import the server modules, and start the app."""
    import config
    config.load_config_file(config_file)

    global localize, detectionserver, fingerprint

    import localize
    import remserver
    import fingerprint

    import db
    import detectionserver
    import pageserver

    db.init()
    config.app.run(host=host, port=int(port), debug=True)
Example #4
def check_data(obsid, beg=None, dur=None, base_dir=None):

    if base_dir is None:
        comp_config = config.load_config_file()
        base_dir = comp_config['base_data_dir']
    comb_dir = "{0}{1}/combined".format(base_dir, obsid)

    if beg is not None and not isinstance(beg, int):
        beg = int(beg)
    if dur is not None and not isinstance(dur, int):
        dur = int(dur)

    #Check to see if the files are combined properly
    if beg is not None and dur is not None:
        logger.info("Checking recombined files beginning at {0} and ending at {1}. Duration: {2} seconds"\
                    .format(beg, (beg+dur), dur))
        error = checks.check_recombine(obsid,
                                       startsec=beg,
                                       n_secs=dur,
                                       directory=comb_dir)
    else:
        logger.warning(
            "No start time information supplied. Comparing files with full obs"
        )
        error = checks.check_recombine(obsid, directory=comb_dir)

    if error:
        logger.error("Recombined files check has failed. Cannot continue")
        sys.exit(1)
    else:
        logger.info("Recombined check passed")

    return
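A hedged usage sketch; the observation ID and timing values below are made up for illustration:

# Hypothetical: check a 600 s span of recombined files.
check_data("1221832280", beg=1221832280, dur=600)

# Without timing information, the whole observation is compared instead.
check_data("1221832280")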
Example #5
    def KeyPressEvent(self):
        cmd = Command.lookup(self._event_data['keycode'], self._event_data['modifiers'])

        if not cmd:
            return

        x = cmd.get_global_command()

        if x == 'quit':
            for tiler in state.iter_tilers():
                tiler.cmd_untile()

            self._stop = True
        elif x == 'debug':
            state.print_hierarchy(*state.get_active_wsid_and_mid())
        elif x == 'refresh_workarea':
            state.update_property('_NET_WORKAREA')
        elif x == 'reload_configuration':
            config.load_config_file()
            state.update_NET_DESKTOP_GEOMETRY(True)
            state.apply_config()
        else:
            Tile.dispatch(state.get_active_monitor(), cmd)
Example #6
def lambda_handler(event, context):
    dic = config.load_config_file(config_bucket, config_key_name)
    rss_config = RSSConfig.of(dic['functions']['collect_rss_entries'])
    global logger
    logger = log.get_logger(dic['globals']['log_level'])

    new_entry = 0
    for rss_config_item in rss_config.get_items():
        try:
            res = feedparser.parse(rss_config_item.get_url())
            new_entry += handle_entries(res.entries, rss_config_item, topic,
                                        rss_config.get_format())
        except Exception as e:
            logger.error(e)
            continue
    return {'new_entry': new_entry}
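For context, a sketch of the configuration shape this handler appears to expect; the key names come from the snippet, while the values and the exact fields consumed by RSSConfig.of are assumptions:

# Hypothetical dict as returned by config.load_config_file(bucket, key):
example_config = {
    "globals": {"log_level": "INFO"},
    "functions": {
        "collect_rss_entries": {
            "format": "plain",  # assumed field read via rss_config.get_format()
            "items": [{"url": "https://example.com/feed.xml"}],  # assumed
        },
    },
}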
Example #7
def main():

    config.check_config_file()
    app_config = config.load_config_file()
    pushbullet_api_key = app_config.get('api_key')

    regex_to_test = getattr(args, 'regex', app_config.get('regex'))
    if regex_to_test:
        regex_to_test = re.compile(regex_to_test)
    site_url = getattr(args, 'url', app_config.get('site_to_parse'))
    mode = getattr(args, 'mode', None) or app_config.get('match_mode')
    match_if_present = mode != 'NOT_FOUND'

    if not pushbullet_api_key or not regex_to_test or not site_url:
        print "Some config values or arguments not given. Exiting..."
        exit(-1)

    while True:
        print "\n", datetime.now()
        response = get_response_from_site(site_url)
        site_content = BeautifulSoup(response.text)
        page_title = site_content.find('title').text
        matching_content = site_content.find_all(text=regex_to_test)
        matching_content += site_content.find_all('a', href=regex_to_test)

        if match_if_present:
            if matching_content:
                print REGEX_FOUND_MESSAGE
                send_notification(pushbullet_api_key, page_title, site_url)
                break
            print REGEX_NOT_FOUND_MESSAGE

        else:
            if not matching_content:
                print REGEX_NOT_FOUND_MESSAGE
                send_notification(pushbullet_api_key, page_title, site_url)
                break
            print REGEX_FOUND_MESSAGE

        sleep(10)

    return
Example #8
def lambda_handler(event, context):
    dic = config.load_config_file(config_bucket, config_key_name)
    website_config = WebsiteConfig.of(
        dic['functions']['detect_website_changes'])
    global logger
    logger = log.get_logger(dic['globals']['log_level'])

    changed = 0
    error = 0
    for item in website_config.get_items():
        try:
            website_changed = check_if_website_updated(
                item, website_config.get_format())
            if website_changed:
                changed += 1
        except Exception:
            print(traceback.format_exc())
            error += 1
    return {'changed': changed, 'error': error}
Example #9
    def creat_model(self, config_file="ner.config"):
        from config import load_config_file
        task_dict, hyper_parameters_dict, extra_parameters_dict = load_config_file(
            configFile=config_file)
        u""" Read the preprocessed word2id file (each character is assigned an id) """
        word2id = read_dictionary(
            os.path.join('embeddings', extra_parameters_dict["word2id"]))

        ## paths setting
        """ 处理对模型结果等文件的保存名字及路径, 以及logger的保存位置 """
        ckpt_file = tf.train.latest_checkpoint(self.model_path)
        self.model = ZHNer(hyper_parameters_dict,
                           tag2label,
                           word2id,
                           config=config)
        self.saver = tf.train.import_meta_graph(ckpt_file + ".meta")
        self.sess = tf.Session(config=config)
        self.saver.restore(self.sess, ckpt_file)
        print("--------- INFO:checkpoint还原模型完成!,checkpoint = {}".format(
            self.model_path))
Example #10
def submit_dspsr(run_params):
    """Submit a SLURM job that runs dspsr folding and then rmfit."""

    launch_line = "stokes_fold.py -m f -d {0} -p {1} -b {2} -s {3} -L {4} --mwa_search {5}\
                    --vcs_tools {6}"\
                    .format(run_params.pointing_dir, run_params.pulsar, run_params.nbins,\
                    run_params.subint, run_params.loglvl, run_params.mwa_search,\
                    run_params.vcs_tools)
    if run_params.stop:
        launch_line += " -S"

    commands = []
    commands.append("psrcat -e {0} > {1}/{0}.eph".format(
        run_params.pulsar, run_params.pointing_dir))
    commands.append("echo 'Running DSPSR folding...\n'")
    commands.append("dspsr -cont -U 4000 -A -L {0} -E {3}/{1}.eph -K -b {2} -O {3}/{1}_subint_{0} {3}/*.fits"\
                    .format(run_params.subint, run_params.pulsar, run_params.nbins, run_params.pointing_dir))
    commands.append("echo 'Attempting to find rotation measure.\nOutputting result to {0}/{1}_rmfit.txt\n'"\
                    .format(run_params.pointing_dir, run_params.pulsar))
    commands.append("rmfit {0}/{1}_subint_{2}.ar -t > {0}/{1}_rmfit.txt"\
                    .format(run_params.pointing_dir, run_params.pulsar, run_params.subint))

    #rerun the script
    commands.append(launch_line)

    name = "dspsr_RM_{0}_{1}".format(run_params.pulsar, run_params.obsid)
    comp_config = config.load_config_file()
    batch_dir = "{0}{1}/batch/".format(comp_config['base_product_dir'],
                                       run_params.obsid)
    submit_slurm(name, commands,\
                batch_dir=batch_dir,\
                slurm_kwargs={"time": "08:00:00"},\
                module_list=["mwa_search/{0}".format(run_params.mwa_search),\
                            "dspsr/master", "psrchive/master"],\
                submit=True, vcstools_version=run_params.vcs_tools)

    logger.info("Job submitted for dspsr using\n\
                pointing directory:         {0}\n\
                pulsar:                     {1}"\
                .format(run_params.pointing_dir, run_params.pulsar))
Example #11
def submit_RM_correct(run_params):
    """Correct for the rotation measure, then submit the plotting job."""


    launch_line = "stokes_fold.py -m p -d {0} -p {1} -b {2} -s {3} -L {4} --mwa_search {5}\
                    --vcs_tools {6}"\
                    .format(run_params.pointing_dir, run_params.pulsar, run_params.nbins,\
                    run_params.subint, run_params.loglvl, run_params.mwa_search,\
                    run_params.vcs_tools)#, run_params.stop)

    if run_params.stop:
        launch_line += " -S"

    commands = []
    #correct for RM
    commands.append("echo 'Correcting for input rotation measure\n'")
    commands.append("pam -e ar2 -R {0} {1}/{2}_subint_{3}.ar"\
    .format(run_params.RM, run_params.pointing_dir, run_params.pulsar, run_params.subint))
    #Turn the archive into a readable ascii file
    commands.append("echo 'Wiritng result to text file\n'")
    commands.append("pdv -FTt {0}/{1}_subint_{2}.ar2 > {0}/{1}_archive.txt".\
    format(run_params.pointing_dir, run_params.pulsar, run_params.subint))

    #launch plotting
    commands.append(launch_line)

    name = "RMcor_plt_{0}_{1}".format(run_params.pulsar, run_params.obsid)
    comp_config = config.load_config_file()
    batch_dir = "{0}{1}/batch/".format(comp_config['base_product_dir'],
                                       run_params.obsid)
    submit_slurm(name, commands,\
                batch_dir=batch_dir,\
                slurm_kwargs={"time": "02:00:00"},\
                module_list=["mwa_search/{0}".format(run_params.mwa_search),
                            "psrchive/master"],\
                submit=True, vcstools_version=run_params.vcs_tools)
Example #12
##################################################
#    ____                ________  ________      #
#   / __ \___  ___ ___  / ___/ _ \/ __/ __/      #
#  / /_/ / _ \/ -_) _ \/ (_ / ___/\ \_\ \        #
#  \____/ .__/\__/_//_/\___/_/  /___/___/        #
#      /_/           by NotSoOld, 2017 (c)       #
#                                                #
#         route|process|gather stats             #
#                                                #
# OpenGPSS Interpreter.py - starts all action!   #
#                                                #
##################################################



from modules import interpreter, errors
import os
import config

config.load_config_file()
interpreter.print_logo()
print 'name of file with system to simulate:'
f = raw_input()
filepath = os.path.dirname(os.path.abspath(__file__))+'/'+f+'.ogps'
if not os.path.isfile(filepath):
	errors.print_error(1, '', [filepath])
interpreter.start_interpreter(filepath)
Example #13
def submit_prepfold(run_params, nbins=100, finish=False, firstrun=False):

    if not isinstance(nbins, int):
        nbins = int(float(nbins))

    comp_config = config.load_config_file()

    #Check to see if there is a 100 bin fold already
    bin_lim = bin_sampling_limit(run_params.pulsar)
    if len(glob.glob("*_100_bins**{0}*bestprof".format(
            run_params.pulsar))) > 0 and bin_lim > 100 and nbins != 100:
        #add a prepfold command for 100 bins
        logger.info("Folding on 100 bins for pointing {0}".format(
            run_params.pointing))
        commands = []
        commands = add_to_prepfold(commands,
                                   run_params.pointing_dir,
                                   run_params.pulsar,
                                   run_params.obsid,
                                   nbins=100)

        name = "binfinder_prepfold_only_{0}_100".format(run_params.pulsar)
        batch_dir = "{0}{1}/batch/".format(comp_config['base_product_dir'],
                                           run_params.obsid)
        submit_slurm(name, commands,\
                    batch_dir=batch_dir,\
                    slurm_kwargs={"time": "2:00:00"},\
                    module_list=['mwa_search/{0}'.format(run_params.mwa_search),\
                                'presto/no-python'],\
                    submit=True, vcstools_version="{0}".format(run_params.vcs_tools))
        logger.info("Prepfold job successfully submitted: {0}".format(name))


    launch_line = "binfinder.py -d {0} -t {1} -O {2} -o {3} -L {4} --prevbins {5} --vcs_tools {6}\
                    --mwa_search {7} -p {8}"\
                    .format(run_params.pointing_dir, run_params.threshold, run_params.cal_id,\
                    run_params.obsid, run_params.loglvl, nbins, run_params.vcs_tools,\
                    run_params.mwa_search, run_params.pulsar)

    if run_params.stop:
        launch_line += " -S"

    logger.info("Submitting job for {0} bins".format(nbins))
    #create slurm job:
    commands = []
    if firstrun:
        commands = add_to_prepfold(commands,
                                   run_params.pointing_dir,
                                   run_params.pulsar,
                                   run_params.obsid,
                                   nbins=100)
    commands = add_prepfold_to_commands(commands,
                                        run_params.pointing_dir,
                                        run_params.pulsar,
                                        run_params.obsid,
                                        nbins=nbins)

    if not finish:
        #Rerun this script
        commands.append(
            'echo "Running script again. Passing prevbins = {0}"'.format(
                nbins))
        launch_line += " -m f"
    else:
        #Run again only once and without prepfold
        commands.append(
            'echo "Running script again without folding. Passing prevbins = {0}"'
            .format(nbins))
        launch_line += " -m e"

    commands.append(launch_line)

    comp_config = config.load_config_file()
    name = "binfinder_{0}_{1}".format(run_params.pulsar, nbins)
    batch_dir = "{0}{1}/batch/".format(comp_config['base_product_dir'],
                                       run_params.obsid)
    submit_slurm(name, commands,\
                batch_dir=batch_dir,\
                slurm_kwargs={"time": "2:00:00"},\
                module_list=['mwa_search/{0}'.format(run_params.mwa_search),\
                            'presto/no-python'],\
                submit=True, vcstools_version="{0}".format(run_params.vcs_tools))
    logger.info("Job successfully submitted: {0}".format(name))
Example #14
def submit_multifold(run_params, nbins=100):

    job_ids = []
    comp_config = config.load_config_file()

    #Check beam coverage for the pulsar
    start, end = pulsar_beam_coverage(run_params.obsid, run_params.pulsar)
    logger.info(
        "start and end of pulsar beam coverage for on-disk files:{0}, {1}".
        format(start, end))
    if start >= 1. or end < 0.:
        logger.error(
            "pulsar is not in beam for any of the on-disk files. Ending...")
        sys.exit(1)

    for i, pointing in enumerate(run_params.pointing_dir):
        logger.info("submitting pointing:{0}".format(pointing))
        #os.chdir(pointing)
        #create slurm job:
        commands = []
        commands = add_prepfold_to_commands(commands, pointing, run_params.pulsar, run_params.obsid,\
                    start=start, end=end, nbins=nbins)

        name = "multifold_binfind_{0}_{1}".format(run_params.pulsar, i)
        batch_dir = "{0}{1}/batch/".format(comp_config['base_product_dir'],
                                           run_params.obsid)
        myid = submit_slurm(name, commands,\
                    batch_dir=batch_dir,\
                    slurm_kwargs={"time": "2:00:00"},\
                    module_list=['mwa_search/{0}'.format(run_params.mwa_search),\
                                'presto/no-python'],\
                    submit=True, vcstools_version="{0}".format(run_params.vcs_tools))

        job_ids.append(myid)

    #Now submit the check script
    if run_params.stop:
        stop = "-S"
    else:
        stop = ""

    p = ""
    for pointing in run_params.pointing_dir:
        p += " " + pointing

    commands = []
    commands.append("binfinder.py -m b -d {0} -O {1} -p {2} -o {3} -L {4} {5} --vcs_tools {6}\
                    --mwa_search {7} --force_initial -p {8}"\
                    .format(p, run_params.cal_id, run_params.pulsar, run_params.obsid, run_params.loglvl,\
                    stop, run_params.vcs_tools, run_params.mwa_search, run_params.pulsar))

    name = "best_pointing_{0}".format(run_params.pulsar)
    batch_dir = "{0}{1}/batch/".format(comp_config['base_product_dir'],
                                       run_params.obsid)
    myid = submit_slurm(name, commands,\
            batch_dir=batch_dir,\
            slurm_kwargs={"time": "00:30:00"},\
            module_list=['mwa_search/{0}'.format(run_params.mwa_search),\
                        "presto/no-python"],\
            submit=True, depend=job_ids, depend_type="afterany",\
            vcstools_version="master")
Example #15
def find_fwhm_and_plot(obsid, pointing):
    pointing_list = []
    sn = []
    comp_config = config.load_config_file()
    for d in glob.glob("{0}/{1}/pointings/*".format(comp_config['base_product_dir'],
                                obsid)):
        bestprof_file = glob.glob("{0}/{1}*_PSR_2330-2005.pfd.bestprof".format(d, obsid))
        if len(bestprof_file) == 1:
            with open(bestprof_file[0]) as bestfile:
                for i in bestfile.readlines():
                    if i.startswith("# Prob(Noise)"):
                        pointing_list.append(d.split("/")[-1])
                        sn.append(float(i.split("(")[-1][1:-7]))

    #find max for a FWHM test
    #max_index = sn.index(max(sn))
    ra_hex = pointing.split("_")[0]
    dec_hex = pointing.split("_")[1]

    print(ra_hex, dec_hex)
    coord = SkyCoord(ra_hex,dec_hex,unit=(u.hourangle,u.deg))
    dec_centre = coord.dec.degree
    ra_centre = coord.ra.degree

    ras = []; decs = []
    ra_line = []; ra_sn_line = []
    dec_line = []; dec_sn_line = []
    print(sn)
    for i in range(len(sn)):
        rah, dech = pointing_list[i].split("_")
        coord = SkyCoord(rah,dech,unit=(u.hourangle,u.deg))
        ras.append(coord.ra.degree)
        decs.append(coord.dec.degree)
        if decs[i] == dec_centre:
            ra_line.append(ras[i])
            ra_sn_line.append(sn[i])
        if ras[i] == ra_centre:
            dec_line.append(decs[i])
            dec_sn_line.append(sn[i])

    max_ra_i = np.argmax(ra_sn_line)
    max_dec_i = np.argmax(dec_sn_line)
    max_coord = SkyCoord(ra_line[max_ra_i], dec_line[max_dec_i],unit=(u.deg,u.deg))
    ra_max_hex = max_coord.ra.to_string(unit=u.hour, sep=':')
    dec_max_hex = max_coord.dec.to_string(unit=u.degree, sep=':')

    print("sn max coord: {0}_{1}".format(ra_max_hex, dec_max_hex))



    #sort and calc FWHM
    ra_sn_line = [x for _,x in sorted(zip(ra_line,ra_sn_line))]
    ra_line = sorted(ra_line)
    print(ra_sn_line,ra_line)

    dec_sn_line = [x for _,x in sorted(zip(dec_line,dec_sn_line))]
    dec_line = sorted(dec_line)
    print(dec_sn_line,dec_line )

    ra_sn_line = np.array(ra_sn_line) ; dec_sn_line = np.array(dec_sn_line)

    spline = UnivariateSpline(ra_line, ra_sn_line-np.max(ra_sn_line)/2., s=0)
    print(spline.roots())
    if len(spline.roots()) == 2:
        r1, r2 = spline.roots()
        ra_FWHM = abs(r1-r2)
        print("raw ra FHWM: " + str(ra_FWHM))
        cor_ra_FWHM = float(ra_FWHM)*math.cos(np.radians(dec_centre))
        print("corrected ra FWHM: {0}".format(cor_ra_FWHM))
    else:
        print("No detectable ra FWHM (too many roots)")

    spline = UnivariateSpline(dec_line, dec_sn_line-np.max(dec_sn_line)/2., s=0)
    if len(spline.roots()) == 2:
        r1, r2 = spline.roots()
        dec_FWHM = abs(r1-r2)
        print("raw dec FHWM: " + str(dec_FWHM))
        cor_dec_FWHM = float(dec_FWHM)*math.cos(np.radians(dec_centre) + np.radians(26.7))**2
        print("corrected dec FWHM: {0}".format(cor_dec_FWHM))
    else:
        print("No detectable dec FWHM (too many roots)")



    # Find the minimum separation between any pair of pointings;
    # this is later used to scale the marker size of the scatter plot.
    diff = 10**20
    n = len(dec_line)
    for i in range(n-1):
        for j in range(i+1, n):
            if abs(dec_line[i]-dec_line[j]) < diff:
                diff = abs(dec_line[i] - dec_line[j])
    n = len(ra_line)
    for i in range(n-1):
        for j in range(i+1, n):
            if abs(ra_line[i]-ra_line[j]) < diff:
                diff = abs(ra_line[i] - ra_line[j])
    # NOTE: the computed separation is then overridden with a fixed value
    diff = 0.01
    print("Diff: {}".format(diff))
    
    ras = np.array(ras); decs = np.array(decs)
    fig = plt.figure(figsize=(7, 7))
    ax = fig.add_subplot(111)

    plt.grid(True)
    
    #sort by sn
    ras = [x for _,x in sorted(zip(sn,ras))]
    decs = [x for _,x in sorted(zip(sn,decs))]
    sn = sorted(sn)

    cm = plt.cm.get_cmap('plasma',20)
    ax.grid(color='r', linestyle='-', linewidth=2)
    sp = plt.scatter(ras, decs, c=sn, s=(1000*diff)**2, cmap = cm)
    #plt.gray()
    plt.colorbar(sp)
    plt.savefig("{0}_position_heatmap.png".format(obsid))
    plt.show()
Example #16
              (epoch, num_epochs, epoch_train_loss, epoch_train_accuracy,
               epoch_end - epoch_start))

    # outer for loop ends

    return


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('exp_name', nargs='?', default='PeterUnetInception2')
    args = parser.parse_args()

    exp_name = args.exp_name

    cfg = config.load_config_file(exp_name)
    # train_data_loader, train_tile_borders = get_small_loader(
    train_data_loader, train_tile_borders = get_train_loader(
        cfg['train']['batch_size'], cfg['train']['paddings'],
        cfg['train']['tile_size'], cfg['train']['hflip'],
        cfg['train']['shift'], cfg['train']['color'], cfg['train']['rotate'],
        cfg['train']['scale'], cfg['train']['fancy_pca'],
        cfg['train']['edge_enh'])

    # val_data_loader, val_tile_borders = get_small_loader(
    val_data_loader, val_tile_borders = get_val_loader(
        cfg['test']['batch_size'], cfg['test']['paddings'],
        cfg['test']['tile_size'], False, False, False, False, False, False,
        False)

    trainer(exp_name,
Example #17
import argparse
import importlib
import sys

import numpy

from transformation import *
from geometry import *
from timing import Timing
import config
from context import Context
from drawingutils import *

parser = argparse.ArgumentParser()
parser.add_argument("configuration_filepath",
                    metavar="CONFIG_FILE",
                    nargs='?',
                    help="JSON configuration file to load")
options = parser.parse_args()
config = config.load_config_file(options.configuration_filepath)

compor_cena = None
processar_teclado = None

try:
    student_module = importlib.import_module(config.module_name)
    compor_cena = getattr(student_module, config.callback_name)
    processar_teclado = getattr(student_module, "processar_teclado", None)
except ImportError:
    print("*** Atencao: Arquivo %s.py nao foi encontrado." %
          config.module_name,
          file=sys.stderr)
except AttributeError:
    print("*** Atencao: Arquivo %s.py nao possui funcao '%s'." %
          (config.module_name, config.callback_name),
Example #18
def api_get_disk_info(disk='/'):

    if not disk.startswith('/'):
        disk = '/' + disk

    ctx = api.get_disk_info(disk=disk)

    return Response(response=json_dumps(ctx), mimetype="application/json")


@app.route('/api/process/', defaults={'process_filter': PROCESSES})
@app.route('/api/process/<process_filter>')
def api_get_filtered_processes(process_filter=None):

    if not process_filter:
        process_filter = PROCESSES if PROCESSES else ''

    process_filter = process_filter.split(',')

    ctx = api.get_filtered_processes(process_filter)

    return Response(response=json_dumps(ctx), mimetype="application/json")


if __name__ == '__main__':
    config.check_config_file()
    app_config = config.load_config_file()
    PROCESSES = app_config.get('processes')
    DISKS = app_config.get('disks', '').split(',')
    app.run(threaded=True, host='0.0.0.0', debug=True)
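A sketch of the configuration this block appears to read; only the 'processes' and 'disks' keys are visible in the snippet, so the values here are illustrative assumptions:

# Hypothetical dict as returned by config.load_config_file():
example_app_config = {
    "processes": "nginx,gunicorn",  # comma-separated process filter
    "disks": "/,/home",             # comma-separated mount points
}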
Example #19
        '--num_threads',
        type=int,
        help=
        'number of parallel threads running in PPO (0=automatic number of cpus)',
        default=0)
    parser.add_argument(
        '-t',
        '--thread',
        action="store_true",
        help='do not use: technical parameter for parallel run')

    args = parser.parse_args()
    if args.gpus:
        args.gpus = [int(s) for s in args.gpus.split(',')]
        torch.cuda.set_device(args.gpus[0])
    config = load_config_file(args.algorithm)

    experiment = Config(config[args.env][str(args.config)],
                        "{0}_{1}".format(args.env, str(args.config)))
    update_config(args, experiment)

    if args.load != '':
        env_class = set_env_class(args.algorithm, args.env, experiment)
        env_class.test(experiment, args.load)
    else:
        if args.thread:
            experiment.trials = 1

        if args.parallel:
            if args.num_processes == 0:
                num_cpus = psutil.cpu_count(logical=True)
Example #20
import magicbot

from rev import CANSparkMax, MotorType
from ctre import WPI_TalonSRX

import navx

from components.limelight import Limelight
from components.navx import NavX
from components.neo import Neo
from components.shooter import Shooter

from config import load_config_file
from dash import put_tuple
from oi import DriverController, SysopController

config_data = load_config_file()

subsystems_enabled = {
    "limelight": "limelight" in config_data['enabled_devices'],
    "navx": "navx" in config_data['enabled_devices'],
    "shooter": "shooter" in config_data['enabled_devices'],
    "neo": "neo" in config_data['enabled_devices'],
    "camera": "camera" in config_data['enabled_devices']
}

class Robot(magicbot.MagicRobot):
    if subsystems_enabled['limelight']:
        limelight_component: Limelight
    
    if subsystems_enabled['navx']:
        navx_component: NavX
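The class-level gating above keys off a device list in the config file; a sketch of the assumed shape (the device names are taken from the snippet itself):

# Hypothetical dict as returned by load_config_file():
example_config_data = {
    "enabled_devices": ["limelight", "navx", "shooter", "neo", "camera"],
}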
Example #21
    parser.add_argument("--consume", dest='consume_videos', help="override the ini conf to consume videos", action="store_true")
    parser.add_argument("--no-consume", dest='consume_videos', help="override the ini conf to consume videos", action="store_false")
    parser.set_defaults(consume_videos=None)
    parser.add_argument("--speed", type=int, metavar='6', help="override the ini conf for the speed of the simulation")
    parser.add_argument("--out", metavar='/path/to/stats', help="override the ini conf for the output folder")
    parser.add_argument("--proxy", metavar='FIFOProxy', help="override the ini conf for the proxy to use")
    parser.add_argument("--parallel", help="use true parallelism when comparing", action="store_true")
    parser.set_defaults(parallel=False)
    parser.add_argument("--compare-to", dest='proxy2', metavar='LRUProxy', help="compare the first proxy to this one")
    args = parser.parse_args()

    if args.verbosity:
        print("verbosity turned on")

    if args.config:
        if not config.load_config_file(args.config):
            sys.exit()
    else:
        if not config.load_config_file():
            sys.exit()

    if args.skip_inactivity is not None:
        config.set_skip_activity(args.skip_inactivity)

    if args.consume_videos is not None:
        config.set_consume_videos(args.consume_videos)

    if args.trace:
        config.set_trace_file(args.trace)
    if args.db:
        config.set_db_file(args.db)
Example #22
def submit_slurm(name,
                 commands,
                 tmpl=SLURM_TMPL,
                 slurm_kwargs=None,
                 module_list=[],
                 vcstools_version="master",
                 batch_dir="batch/",
                 depend=None,
                 depend_type='afterok',
                 submit=True,
                 outfile=None,
                 queue="cpuq",
                 export="NONE",
                 gpu_res=None,
                 mem=1024,
                 cpu_threads=1,
                 temp_mem=None,
                 nice=0,
                 shebag='#!/bin/bash -l'):
    """
    Making this function to cleanly submit SLURM jobs using a simple template.

    Parameters
    ----------
    name : str
        The base name that is used to create the "`name`.batch" and "`name`.out" files.

    commands : list of strs
        The actual bash script commands you want to run.
        Expects a list where each element is a single line of the bash script.

    tmpl : str
        A template header string with format place holders: export, outfile,
        cluster, header and script.
        This is used to create the final string to be written to the job script.
        For this function, it is required to be SLURM compliant.
        Default: `SLURM_TMPL`

    slurm_kwargs : dict [optional]
        A dictionary of SLURM keyword, value pairs to fill in whatever is not
        in the template supplied to `tmpl`.
        Default: `{}` (empty dictionary, i.e. no additional header parameters)

    module_list : list of str [optional]
        A list of module names (including versions if applicable) that will
        be included in the header for the batch
        scripts. e.g. ["vcstools/master", "mwa-voltage/master", "presto/master"] would append
            module load vcstools/master
            module load mwa-voltage/master
            module load presto/master
        to the header of the batch script. This can also invoke "module use ..." commands.
        NOTE: /group/mwa/software/modulefiles is used and vcstools/master is loaded by default.

    vcstools_version :  str
        The version of vcstools to load. Default: master.

    batch_dir : str [optional]
        The LOCAL directory where you want to write the batch scripts
        (i.e. it will write to `$PWD/batch_dir`).
        Default: "batch/"

    depend : list or None [optional]
        A list of the SLURM job IDs that your would like this job to depend on.
        If `None` then it is assumed there is no dependency on any other job.
        Default: `None`

    depend_type : str [optional]
        The type of SLURM dependency required. For example, if you want the
        job to run after the parent jobs have terminated, use 'afterany'.
        Default: "afterok"

    submit : boolean [optional]
        Whether to write and submit the job scripts (`True`) or only write the scripts (`False`).
        Default: `True`

    outfile : str [optional]
        The output file name if "`name`.out" is not desirable.
        Default: `None` (i.e. "`batch_dir`/`name`.out")

    queue : str [optional]
        The type of queue you require (cpuq, gpuq or copyq) then the script will
        choose the correct partitions and clusters for the job to run on
        Default: "cpuq"

    export : str [optional]
        Switch that lets SLURM use your login environment on the compute
        nodes ("ALL") or not ("NONE").
        Default: "None"

    gpu_res : int [optional]
        Number of GPUs that the SLURM job will reserve.
        Default: "None"

    mem : int [optional]
        The MB of RAM required for your slurm job.
        Default: 1024

    cpu_threads : int [optional]
        The number of cpu threads required for your slurm job.
        Default: 1

    temp_mem : int [optional]
        GB of temporary SSD storage to request (only applied on Ozstar hosts).
        Default: None

    nice : int [optional]
        The nice value passed through to the job template.
        Default: 0

    shebag : str [optional]
        The shebang line written at the top of the job script.
        Default: "#!/bin/bash -l"


    Returns
    -------
    jobid : int
        The unique SLURM job ID associated with the submitted job.
    """
    if slurm_kwargs is None:
        slurm_kwargs = {}

    #Work out which partition and cluster to use based on the supercomputer
    #(in config file) and queue required
    comp_config = config.load_config_file()
    if queue == 'cpuq':
        cluster = comp_config['cpuq_cluster']
        partition = comp_config['cpuq_partition']
    elif queue == 'gpuq':
        cluster = comp_config['gpuq_cluster']
        partition = comp_config['gpuq_partition']
        if gpu_res is None:
            # No gpus reserved so change it to a default of 1
            gpu_res = 1
    elif queue == 'copyq':
        cluster = comp_config['copyq_cluster']
        partition = comp_config['copyq_partition']
    elif queue == 'zcpuq':
        # Download and checks should be done on Zeus's cpuq. This will only work
        # on Galaxy as the Ozstar workflow is different
        cluster = comp_config['zcpuq_cluster']
        partition = comp_config['zcpuq_partition']
    else:
        logger.error("No queue found, please use cpuq, gpuq or copyq")

    header = []

    if not batch_dir.endswith("/"):
        batch_dir += "/"

    # define file names (both the batch job file and the output file)
    jobfile = batch_dir + name + ".batch"
    if not outfile:
        outfile = batch_dir + name + ".out"

    # create the header from supplied arguments
    for k, v in slurm_kwargs.items():
        if len(k) > 1:
            k = "--" + k + "="
        else:
            k = "-" + k + " "

        header.append("#SBATCH {0}{1}".format(k, v))

    # check if there are dependencies, and if so include that in the header
    if depend is not None:
        #assumes depend is a list but if not will make an educated guess of how to reformat it
        if isinstance(depend, int):
            #assume it's been given a single job id
            header.append("#SBATCH --dependency={0}:{1}".format(
                depend_type, depend))
        if isinstance(depend, str):
            if ":" in depend:
                #assume it has been given an already formatted string
                if depend.startswith(":"):
                    depend = depend[1:]
            #or a single jobid
            header.append("#SBATCH --dependency={0}:{1}".format(
                depend_type, depend))
        if isinstance(depend, list):
            depend_str = ""
            for job_id in depend:
                depend_str += ":" + str(job_id)
            header.append("#SBATCH --dependency={0}{1}".format(
                depend_type, depend_str))

    # add a gpu res to header
    if gpu_res is not None:
        header.append('#SBATCH --gres=gpu:{0}'.format(gpu_res))

    # add temp SSD memory to combat I/O issues. Only available on Ozstar
    hostname = socket.gethostname()
    if temp_mem is not None and \
        (hostname.startswith('john') or hostname.startswith('farnarkle')):
        header.append("#SBATCH --tmp={0}GB".format(temp_mem))

    # now join the header into one string
    header = "\n".join(header)

    # construct the module loads
    modules = []
    switches = []
    for m in module_list:
        if m == "vcstools":
            # don't do anything as vcstools is loaded automatically
            continue
        if "module switch" in m:
            # if a module switch command is included rather than just a module name, then add it to a separate list
            switches.append(m)
        elif "module" in m:
            modules.append("{0}\n".format(m))
        else:
            modules.append("module load {0}\n".format(m))

    # join the module loads and switches into a single string
    switches = "\n".join(switches)
    modules = "\n".join(modules)

    # join the commands into a single string
    commands = "\n".join(commands)

    # comp_config (the computer-dependent config) was already loaded above

    # some little hacks to make jobs work on the shanghai server
    if hostname.startswith('x86') or hostname.startswith('arm'):
        if vcstools_version == 'master':
            vcstools_version = 'cpu-master'
        if export == "NONE":
            export = "ALL"
        if shebag == "#!/bin/bash -l":
            shebag = "#!/bin/bash"

    # format the template script
    tmpl = tmpl.format(shebag=shebag,
                       script=commands,
                       outfile=outfile,
                       header=header,
                       switches=switches,
                       modules=modules,
                       version=vcstools_version,
                       cluster=cluster,
                       partition=partition,
                       export=export,
                       account=comp_config['group_account'],
                       module_dir=comp_config['module_dir'],
                       threads=cpu_threads,
                       mem=mem,
                       nice=nice)

    # write the formatted template to the job file for submission
    with open(jobfile, "w") as fh:
        fh.write(tmpl)

    # submit the jobs
    batch_submit_line = "sbatch {0}".format(jobfile)
    jobid = None
    if submit:
        submit_cmd = subprocess.Popen(batch_submit_line,
                                      shell=True,
                                      stdout=subprocess.PIPE)
        for line in submit_cmd.stdout:
            if b"Submitted" in line:
                jobid = str(line.split(b" ")[3].decode())
        if jobid is None:
            logger.debug(batch_submit_line)
            logger.debug(submit_cmd.stdout)
            return
        else:
            return jobid
    else:
        return
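A minimal usage sketch based on the parameters documented above; the job names, commands, and module version are illustrative assumptions:

# Hypothetical: submit a short CPU job, then a second job that only
# starts once the first finishes successfully (depend_type="afterok").
first_id = submit_slurm("hello_slurm",
                        ['echo "hello from the cpuq"'],
                        slurm_kwargs={"time": "00:05:00"},
                        module_list=["mwa_search/master"],
                        submit=True)

submit_slurm("after_hello",
             ['echo "runs after hello_slurm succeeds"'],
             slurm_kwargs={"time": "00:05:00"},
             depend=[first_id],
             depend_type="afterok",
             submit=True)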
Example #23
def add_prepfold_to_commands(commands,
                             pointing,
                             pulsar,
                             obsid,
                             use_mask=True,
                             start=None,
                             end=None,
                             nbins=100,
                             ntimechunk=120,
                             dmstep=1,
                             period_search_n=1):
    """Append the prepfold commands for `pulsar` to `commands` and return the list."""

    #find the beginning and end of the pulsar's beam coverage for this obs
    if start is None or end is None:
        start, end = pulsar_beam_coverage(obsid, pulsar)
        logger.info(
            "start and end of pulsar beam coverage for on-disk files:{0}, {1}".
            format(start, end))
        if start >= 1. or end < 0.:
            logger.error(
                "pulsar is not in beam for any of the on-disk files. Ending..."
            )
            sys.exit(1)

    comp_config = config.load_config_file()
    #Figure out whether or not to input a mask
    if use_mask:
        check_mask = glob.glob("{0}{1}/incoh/*.mask".format(
            comp_config['base_product_dir'], obsid))
        if check_mask:
            mask = "-mask " + check_mask[0]
        else:
            mask = ""
    else:
        mask = ""

    #make the prepfold command
    constants = "-pstep 1 -pdstep 2 -ndmfact 1 -noxwin -nosearch -runavg -noclip -nsub 256 1*fits "
    variables = "-o {0}_{1}_bins ".format(obsid, nbins)
    variables += mask
    variables += "-n {0} ".format(nbins)
    variables += "-start {0} -end {1} ".format(start, end)
    variables += "-dmstep {0} ".format(dmstep)
    variables += "-npart {0} ".format(ntimechunk)
    variables += "-npfact {0} ".format(period_search_n)

    #load presto module here because it uses python 2
    commands.append('cd {0}'.format(pointing))
    commands.append('echo "Folding on known pulsar {0}"'.format(pulsar))
    commands.append('psrcat -e {0} > {0}.eph'.format(pulsar))
    commands.append("sed -i '/UNITS           TCB/d' {0}.eph".format(pulsar))
    commands.append("prepfold -timing {0}.eph {1} {2}"\
                    .format(pulsar, variables, constants))
    commands.append('errorcode=$?')
    commands.append('pulsar={}'.format(pulsar[1:]))
    pulsar_bash_string = '${pulsar}'

    #Some old ephems don't have the correct ra and dec formating and
    #causes an error with -timing but not -psr
    commands.append('if [ "$errorcode" != "0" ]; then')
    commands.append('   echo "Folding using the -psr option"')
    commands.append('   prepfold -psr {0} {1} {2}'\
                    .format(pulsar, variables, constants))
    commands.append('   pulsar={}'.format(pulsar))
    commands.append('fi')
    commands.append('rm {0}.eph'.format(pulsar))

    return commands
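A hedged usage sketch; the pointing directory, pulsar name, and observation ID below are placeholders:

# Hypothetical: build the bash command list for a 100-bin fold.
# start/end are passed explicitly to skip the beam-coverage lookup.
commands = add_prepfold_to_commands([],
                                    "/path/to/pointing_dir",
                                    "J0437-4715",
                                    "1221832280",
                                    start=0.0,
                                    end=1.0,
                                    nbins=100)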
Example #24
def create_app(config_name):
    global api, jwt
    app = Flask(__name__)
    app.config.from_object(Config())
    load_config_file(app.config, config_name)

    with app.app_context():
        db.init_app(app)
        db.app = app
        jwt.init_app(app)

    api = Api(app)
    current_app = app

    logging.basicConfig()
    handler = RotatingFileHandler('idlak-server.log',
                                  maxBytes=100000,
                                  backupCount=1)
    handler.setLevel(app.config['LOGGING'])
    app.logger.addHandler(handler)
    app.logger.setLevel(app.config['LOGGING'])

    if not app.config['AUTHORIZATION']:
        app.logger.warning('AUTHORIZATION is turned off, this should only ' +
                           'be used during debugging!!! Turn it back on in ' +
                           'the config file!')

    with app.app_context():
        from app import models, endpoints, reqlogging  # noqa
        from app.models.user import User  # noqa

        # if database is not created
        if not os.path.isfile(app.config['DATABASE_NAME'] + '.db'):
            db.create_all()

        # check if there are any users, if there are none, create an admin
        if len(User.query.all()) == 0:
            admin_user = User.new_user_full('admin', 'admin', True)
            app.logger.info(
                "An initial admin user has been created: {}".format(
                    admin_user))

    # url endpoints
    from app.endpoints.auth import Auth, Auth_Expire
    api.add_resource(Auth, '/auth')
    api.add_resource(Auth_Expire, '/auth/expire')
    from app.endpoints.language import Languages, Accents
    api.add_resource(Languages, '/languages')
    api.add_resource(Accents, '/languages/<lang_iso>/accents')
    from app.endpoints.speech import Speech
    api.add_resource(Speech, '/speech')
    from app.endpoints.user import Users, Users_Password, Users_Delete, Toggle_Admin
    api.add_resource(Users, '/users')
    api.add_resource(Users_Password, '/users/<user_id>/password')
    api.add_resource(Users_Delete, '/users/<user_id>')
    api.add_resource(Toggle_Admin, '/users/<user_id>/admin')
    from app.endpoints.voice import Voices, VoiceDetails
    api.add_resource(Voices, '/voices')
    api.add_resource(VoiceDetails, '/voices/<voice_id>')

    return app
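A usage sketch for this factory; the configuration name 'default' and the host/port values are placeholders:

# Hypothetical: build the app with a named config and serve it locally.
app = create_app('default')
app.run(host='0.0.0.0', port=5000)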
Example #25
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    logger.propagate = False

    #option parsing
    if not args.obsid:
        print("Please input observation id by setting -o or --obsid. Exiting")
        quit()

    if not args.cal_obs:
        print(
            "Please input calibration observation id by setting -O or --cal_obs. Exiting"
        )
        quit()

    comp_config = config.load_config_file()
    if not args.DI_dir:
        args.DI_dir = "{0}/{1}/cal/{2}/rts/".format(
            comp_config['base_product_dir'], args.obsid, args.cal_obs)
        print("No DI_dir given so assuming {0} is the directory".format(
            args.DI_dir))

    if args.begin and args.end:
        beg = args.begin
        end = args.end
    elif args.all:
        beg, end = obs_max_min(args.obsid)
    else:
        beg, end = find_beg_end(args.obsid, base_path=comp_config['base_product_dir'])

    #Perform data checks
Example #26
import logging
import os
import subprocess
from logging.handlers import RotatingFileHandler
from flask import Flask
from config import Config,load_config_file
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate, init as db_init, migrate as db_migrate, upgrade as db_upgrade, show
from flask_restful import Api, request
from flask_jwt_simple import JWTManager
from datetime import datetime

app = Flask(__name__)
app.config.from_object(Config())
load_config_file(app.config)
api = Api(app)

db = SQLAlchemy(app)
migrate = Migrate(app, db)

jwt = JWTManager(app)

logging.basicConfig()
handler = RotatingFileHandler('idlak-server.log', maxBytes=100000, backupCount=1)
handler.setLevel(logging.DEBUG)
app.logger.addHandler(handler)
app.logger.setLevel(logging.DEBUG)

from app import models, endpoints, reqlogging
from app.models.user import User
Example #27
def submit_to_db(run_params, ideal_bins):

    logger.info("submitting profile to database: {0}".format(
        run_params.bestprof))
    #Add path to filenames for submit script
    cwd = os.getcwd()
    ppps = cwd + "/" + glob.glob("*{0}_bins*{1}*.pfd.ps".format(
        ideal_bins, run_params.pulsar[1:]))[0]
    bestprof_name = cwd + "/" + glob.glob("*{0}_bins*{1}*.pfd.bestprof".format(
        ideal_bins, run_params.pulsar[1:]))[0]
    png_output = cwd + "/" + glob.glob("*{0}_bins*{1}*.png".format(
        ideal_bins, run_params.pulsar[1:]))[0]
    pfd = cwd + "/" + glob.glob("*{0}_bins*{1}*.pfd".format(
        ideal_bins, run_params.pulsar[1:]))[0]

    #do the same for 100 bin profiles
    ppps_100 = cwd + "/" + glob.glob("*_100_bins*{0}*.pfd.ps".format(
        run_params.pulsar[1:]))[0]
    bestprof_name_100 = cwd + "/" + glob.glob(
        "*_100_bins*{0}*.pfd.bestprof".format(run_params.pulsar[1:]))[0]
    png_output_100 = cwd + "/" + glob.glob("*_100_bins*{0}*.png".format(
        run_params.pulsar[1:]))[0]
    pfd_100 = cwd + "/" + glob.glob("*_100_bins*{0}*.pfd".format(
        run_params.pulsar[1:]))[0]

    products = [ppps, bestprof_name, png_output, pfd,\
            ppps_100, bestprof_name_100, png_output_100, pfd_100]

    #move all of these data products to a suitable directory
    data_dir = "/group/mwaops/vcs/{0}/data_products/{1}".format(
        run_params.obsid, run_params.pulsar)
    for product in products:
        data_process_pipeline.copy_data(product, data_dir)

    commands = []
    commands.append('submit_to_database.py -o {0} --cal_id {1} -p {2} --bestprof {3} --ppps {4}'\
    .format(run_params.obsid, run_params.cal_id, run_params.pulsar, bestprof_name, ppps))
    commands.append(
        'echo "submitted profile to database: {0}"'.format(bestprof_name))

    if not run_params.stop:
        #Run stokes fold
        commands.append("data_process_pipeline.py -d {0} -O {1} -p {2} -o {3} -b {4} -L {5}\
                        --mwa_search {6} --vcs_tools {7} -m s"\
                        .format(run_params.pointing_dir, run_params.cal_id, run_params.pulsar,\
                        run_params.obsid, run_params.best_bins, run_params.loglvl, run_params.mwa_search,\
                        run_params.vcs_tools))

    #commands.append('echo "Searching for pulsar using the pipeline to test the pipelines effectivness"')
    #commands.append('mwa_search_pipeline.py -o {0} -a --search --pulsar {1} -O {2}\
    #                --code_comment "Known pulsar auto test"'.format(run_params.obsid, run_params.pulsar,\
    #                run_params.cal_id))

    name = "Submit_{0}_{1}".format(run_params.pulsar, run_params.obsid)
    comp_config = config.load_config_file()
    batch_dir = "{0}{1}/batch/".format(comp_config['base_product_dir'],
                                       run_params.obsid)

    submit_slurm(name, commands,\
                 batch_dir=batch_dir,\
                 slurm_kwargs={"time": "00:05:00"},\
                 module_list=['mwa_search/{0}'.format(run_params.mwa_search)],\
                 submit=True, vcstools_version="{0}".format(run_params.vcs_tools))