Example #1
def upload_results(dir):

    try:
        db = database.Database('default', autocommit=False)

        if not os.path.exists(dir) or not os.listdir(dir):
            errormsg = 'ERROR: Results directory, %s, does not exist or is empty' % dir
            raise upload.UploadNonFatalError(errormsg)

        fitsfiles = glob.glob(os.path.join(dir, "*.fits"))
        data = datafile.autogen_dataobj(fitsfiles)
        version_number = JobUploader.get_version_number(dir)

        hdr = header.get_header(fitsfiles)
        print "\tHeader parsed."

        cands, tempdir = candidates.get_candidates(version_number, dir)
        print "\tPeriodicity candidates parsed."
        sp_cands = sp_candidates.get_spcandidates(version_number, dir)
        print "\tSingle pulse candidates parsed."

        for c in (cands + sp_cands):
            hdr.add_dependent(c)
        diags = diagnostics.get_diagnostics(data.obs_name,
                                            data.beam_id,
                                            data.obstype,
                                            version_number,
                                            dir)
        print "\tDiagnostics parsed."

        header_id = hdr.upload(db)
        for d in diags:
            d.upload(db)
        print "\tEverything uploaded and checked successfully. header_id=%d" % \
                    header_id

    except (upload.UploadNonFatalError):
        exceptionmsgs = traceback.format_exception(*sys.exc_info())
        errormsg  = "Error while checking results!\n"
        errormsg += "\tResults Dir: %s\n\n" % dir
        errormsg += "".join(exceptionmsgs)

        sys.stderr.write("Error while checking results!\n")
        sys.stderr.write("Database transaction will not be committed.\n")
        sys.stderr.write("\t%s" % exceptionmsgs[-1])

        # Rolling back changes. 
        db.rollback()

    except (database.DatabaseConnectionError, CornellFTP.CornellFTPTimeout,\
               upload.UploadDeadlockError, database.DatabaseDeadlockError), e:
        # Connection error while uploading. We will try again later.
        sys.stderr.write(str(e))
        sys.stderr.write("\tRolling back DB transaction and will re-try later.\n")
        
        # Rolling back changes. 
        db.rollback()
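Several of these examples repeat the same connect / parse / upload / roll-back-on-error shape. A minimal sketch of factoring that into a context manager, assuming the `database.Database` object also exposes a `commit()` method (none of the snippets here show one), might look like:

from contextlib import contextmanager

@contextmanager
def uploading_transaction(dbname='default'):
    # Assumption: Database(...) has rollback() as used above and a matching commit().
    db = database.Database(dbname, autocommit=False)
    try:
        yield db
    except Exception:
        db.rollback()   # undo any partial uploads, then let the caller log or re-queue
        raise
    else:
        db.commit()

# Usage sketch:
#   with uploading_transaction() as db:
#       header_id = hdr.upload(db)
#       for d in diags:
#           d.upload(db)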
Example #2
def gen_crypto_bib(db, confs_years, expand_crossrefs):
    if not expand_crossrefs:
        outname = "db/crypto_crossref.bib"
    else:
        outname = "db/crypto.bib"

    with open(outname, "w") as out:
        out.write(header.get_header(config, "gen.py", confs_years))
    
        mybibtex.generator.bibtex_gen(out, db, expand_crossrefs=expand_crossrefs, include_crossrefs=not expand_crossrefs)

        out.write("\n")
        out.write("\n")

        with open("db/crypto_misc.bib") as fin:
            for line in fin:
                out.write(line)
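Since the misc entries are appended verbatim, the final loop could equivalently use `shutil.copyfileobj`; this is only an alternative to the line-by-line copy above, not what the original does:

import shutil

with open("db/crypto_misc.bib") as fin:
    shutil.copyfileobj(fin, out)   # byte-for-byte copy of the misc entries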
Example #3
def gen(out, abbrev, short=0):
    def write_abbrev(d, path=[]):
        val = get_value(d, short=short)
        if val is not None:
            key = "".join(path)
            out.write("@string{{{} = {}{}}}\n".format(
                key, 
                " "*(max(0, 32-len(key)-11)), 
                val
            ))
        for k,v in d.iteritems():
            if k in ["", "@0", "@1", "@2", "@3"]:
                continue
            write_abbrev(v, path+[k])


    out.write(header.get_header(config, "gen.py"))

    write_abbrev(abbrev)
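For orientation, here is a hypothetical input to `gen` and the kind of `@string` line it would emit. The nested-dict shape and the behaviour of `get_value` (returning the quoted string stored at an entry) are assumptions; only the key concatenation and padding come from the code above:

# Hypothetical abbreviation tree: keys concatenate along the path, and the
# special keys ("", "@0".."@3") are skipped when recursing.
abbrev = {"acm": {"ccs": {"": '"ACM CCS"'}}}

# For the path ["acm", "ccs"], write_abbrev would emit something like
#   @string{acmccs =                "ACM CCS"}
# where the run of spaces after "=" (32 - len(key) - 11 of them) lines the
# values up in a fixed column.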
Example #4
def upload_results(job_submit):
    """
    Uploads Results for a given submit.

        Input:
            job_submit: A row from the job_submits table.
                Results from this job submission will be
                uploaded.

        Output:
            None
    """
    print "Attempting to upload results"
    print "\tJob ID: %d, Job submission ID: %d" % \
            (job_submit['job_id'], job_submit['id'])
    if debug.UPLOAD:
        upload.upload_timing_summary = {}
        starttime = time.time()
    try:
        # Connect to the DB
        db = database.Database('SPAN512', autocommit=False)
        # Prepare for upload
        dir = job_submit['output_dir']
        if not os.path.exists(dir) or not os.listdir(dir):
            errormsg = 'ERROR: Results directory, %s, does not exist or is empty for job_id=%d' %\
                       (dir, job_submit['job_id'])
            raise upload.UploadNonFatalError(errormsg)

        fitsfiles = get_fitsfiles(job_submit)
        data = datafile.autogen_dataobj(fitsfiles)
        version_number = get_version_number(dir)

        if debug.UPLOAD: 
            parsetime = time.time()
        # Upload results
        hdr = header.get_header(fitsfiles)
        
        print "\tHeader parsed."

        rat_inst_id_cache = ratings2.utils.RatingInstanceIDCache(dbname='nancay')
        cands, tempdir = candidates.get_candidates(version_number, dir, \
                                                   timestamp_mjd=data.timestamp_mjd, \
                                                   inst_cache=rat_inst_id_cache)
        print "\tPeriodicity candidates parsed."
        sp_cands = sp_candidates.get_spcandidates(version_number, dir, \
                                                  timestamp_mjd=data.timestamp_mjd)
        print "\tSingle pulse candidates parsed."

        for c in (cands + sp_cands):
            hdr.add_dependent(c)
        diags = diagnostics.get_diagnostics(data.obs_name, 
                                             data.beam_id, \
                                             data.obstype, \
                                             version_number, \
                                             dir)
        print "\tDiagnostics parsed."
        
        if debug.UPLOAD: 
            upload.upload_timing_summary['Parsing'] = \
                upload.upload_timing_summary.setdefault('Parsing', 0) + \
                (time.time()-parsetime)

        # Perform the upload
        header_id = hdr.upload(db)
        for d in diags:
            d.upload(db)
        print "\tDB upload completed and checked successfully. header_id=%d" % \
                    header_id


    except (upload.UploadNonFatalError):
        # Parsing error caught. Job attempt has failed!
        exceptionmsgs = traceback.format_exception(*sys.exc_info())
        errormsg  = "Error while checking results!\n"
        errormsg += "\tJob ID: %d, Job submit ID: %d\n\n" % \
                        (job_submit['job_id'], job_submit['id'])
        errormsg += "".join(exceptionmsgs)
        
        sys.stderr.write("Error while checking results!\n")
        sys.stderr.write("Database transaction will not be committed.\n")
        sys.stderr.write("\t%s" % exceptionmsgs[-1])

        queries = []
        arglists = []
        queries.append("UPDATE job_submits " \
                       "SET status='upload_failed', " \
                            "details=\"%s\", " \
                            "updated_at='%s' " \
                       "WHERE id=%d"%(errormsg.replace("\"","\'"), jobtracker.nowstr(), job_submit['id']))
        queries.append("UPDATE jobs " \
                       "SET status='failed', " \
                            "details='Error while uploading results', " \
                            "updated_at='%s' " \
                       "WHERE id=%d"%(jobtracker.nowstr(), job_submit['job_id']))
        jobtracker.query(queries)
        
        # Rolling back changes. 
        db.rollback()
    except (database.DatabaseConnectionError, \
               upload.UploadDeadlockError, database.DatabaseDeadlockError), e:
        # Connection error while uploading. We will try again later.
        sys.stderr.write(str(e))
        sys.stderr.write("\tRolling back DB transaction and will re-try later.\n")
        
        # Rolling back changes. 
        db.rollback()
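One detail worth noting: this variant interpolates `errormsg` straight into the SQL (escaping only double quotes), while the later variants pass it through placeholder arguments via `jobtracker.execute`. A parameterized form of the same updates, in the style those examples use, would be:

# Sketch: the same status updates with '?' placeholders, assuming the
# jobtracker.execute(queries, arglists) interface shown in the later examples,
# so errormsg needs no quote mangling.
queries = ["UPDATE job_submits SET status='upload_failed', details=?, updated_at=? WHERE id=?",
           "UPDATE jobs SET status='failed', details='Error while uploading results', "
           "updated_at=? WHERE id=?"]
arglists = [(errormsg, jobtracker.nowstr(), job_submit['id']),
            (jobtracker.nowstr(), job_submit['job_id'])]
jobtracker.execute(queries, arglists)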
Example #5
def upload_results(job_submit):
    """
    Uploads Results for a given submit.

        Input:
            job_submit: A row from the job_submits table.
                Results from this job submission will be
                uploaded.

        Output:
            None
    """
    print "Attempting to upload results"
    print "\tJob ID: %d, Job submission ID: %d" % \
            (job_submit['job_id'], job_submit['id'])
    if debug.UPLOAD:
        upload.upload_timing_summary = {}
        starttime = time.time()
    try:
        # Connect to the DB
        db = database.Database('default', autocommit=False)
        # Prepare for upload
        dir = job_submit['output_dir']
        if not os.path.exists(dir) or not os.listdir(dir):
            errormsg = 'ERROR: Results directory, %s, does not exist or is empty for job_id=%d' %\
                       (dir, job_submit['job_id'])
            raise upload.UploadNonFatalError(errormsg)

        fitsfiles = get_fitsfiles(job_submit)
        data = datafile.autogen_dataobj(fitsfiles)
        version_number = get_version_number(dir)

        if debug.UPLOAD:
            parsetime = time.time()
        # Upload results
        hdr = header.get_header(fitsfiles)

        print "\tHeader parsed."

        cands = candidates.get_candidates(version_number, dir)
        print "\tPeriodicity candidates parsed."
        sp_cands = sp_candidates.get_spcandidates(version_number, dir)
        print "\tSingle pulse candidates parsed."

        for c in (cands + sp_cands):
            hdr.add_dependent(c)
        diags = diagnostics.get_diagnostics(data.obs_name,
                                             data.beam_id, \
                                             data.obstype, \
                                             version_number, \
                                             dir)
        print "\tDiagnostics parsed."

        if debug.UPLOAD:
            upload.upload_timing_summary['Parsing'] = \
                upload.upload_timing_summary.setdefault('Parsing', 0) + \
                (time.time()-parsetime)

        # Perform the upload
        header_id = hdr.upload(db)
        for d in diags:
            d.upload(db)
        print "\tEverything uploaded and checked successfully. header_id=%d" % \
                    header_id
    except (upload.UploadNonFatalError):
        # Parsing error caught. Job attempt has failed!
        exceptionmsgs = traceback.format_exception(*sys.exc_info())
        errormsg = "Error while checking results!\n"
        errormsg += "\tJob ID: %d, Job submit ID: %d\n\n" % \
                        (job_submit['job_id'], job_submit['id'])
        errormsg += "".join(exceptionmsgs)

        sys.stderr.write("Error while checking results!\n")
        sys.stderr.write("Database transaction will not be committed.\n")
        sys.stderr.write("\t%s" % exceptionmsgs[-1])

        queries = []
        arglists = []
        queries.append("UPDATE job_submits " \
                       "SET status='upload_failed', " \
                            "details=?, " \
                            "updated_at=? " \
                       "WHERE id=?")
        arglists.append((errormsg, jobtracker.nowstr(), job_submit['id']))
        queries.append("UPDATE jobs " \
                       "SET status='failed', " \
                            "details='Error while uploading results', " \
                            "updated_at=? " \
                       "WHERE id=?")
        arglists.append((jobtracker.nowstr(), job_submit['job_id']))
        jobtracker.execute(queries, arglists)

        # Rolling back changes.
        db.rollback()
    except (database.DatabaseConnectionError, CornellFTP.CornellFTPTimeout,\
               upload.UploadDeadlockError, database.DatabaseDeadlockError), e:
        # Connection error while uploading. We will try again later.
        sys.stderr.write(str(e))
        sys.stderr.write(
            "\tRolling back DB transaction and will re-try later.\n")

        # Rolling back changes.
        db.rollback()
Example #6
def upload_results(dir):

    try:
        db = database.Database('default', autocommit=False)

        if not os.path.exists(dir) or not os.listdir(dir):
            errormsg = 'ERROR: Results directory, %s, does not exist or is empty' % dir
            raise upload.UploadNonFatalError(errormsg)

        pdm_dir = os.path.join(
            dir, "zerodm") if config.upload.upload_zerodm_periodicity else dir
        sp_dir = os.path.join(
            dir, "zerodm") if config.upload.upload_zerodm_singlepulse else dir
        fitsfiles = glob.glob(os.path.join(dir, "*.fits"))
        data = datafile.autogen_dataobj(fitsfiles)
        version_number = JobUploader.get_version_number(dir)

        hdr = header.get_header(fitsfiles)
        print "\tHeader parsed."

        rat_inst_id_cache = ratings2.utils.RatingInstanceIDCache(
            dbname='common3')  #!!!!
        #rat_inst_id_cache = ratings2.utils.RatingInstanceIDCache(dbname='MichellePalfaCands')
        #cands, tempdir = candidates.get_candidates(version_number, dir)
        cands, tempdir = candidates.get_candidates(version_number, pdm_dir, \
                                                   timestamp_mjd=data.timestamp_mjd, \
                                                   inst_cache=rat_inst_id_cache)
        print "\tPeriodicity candidates parsed."
        #sp_cands = sp_candidates.get_spcandidates(version_number, dir)
        sp_cands, tempdir_sp = sp_candidates.get_spcandidates(version_number, sp_dir, \
                                                              timestamp_mjd=data.timestamp_mjd, \
                                                              inst_cache=rat_inst_id_cache)
        print "\tSingle pulse candidates parsed."

        for c in (cands + sp_cands):
            hdr.add_dependent(c)
        diags = diagnostics.get_diagnostics(data.obs_name,
                 data.beam_id, \
                 data.obstype, \
                 version_number, \
                 pdm_dir, sp_dir)
        print "\tDiagnostics parsed."

        header_id = hdr.upload(db)
        for d in diags:
            d.upload(db)
        print "\tEverything uploaded and checked successfully. header_id=%d" % \
             header_id

    except (upload.UploadNonFatalError):
        exceptionmsgs = traceback.format_exception(*sys.exc_info())
        errormsg = "Error while checking results!\n"
        errormsg += "\tResults Dir: %s\n\n" % dir
        errormsg += "".join(exceptionmsgs)

        sys.stderr.write("Error while checking results!\n")
        sys.stderr.write("Database transaction will not be committed.\n")
        sys.stderr.write("\t%s" % exceptionmsgs[-1])

        # Rolling back changes.
        db.rollback()

    except (database.DatabaseConnectionError, CornellFTP.CornellFTPTimeout,\
               upload.UploadDeadlockError, database.DatabaseDeadlockError), e:
        # Connection error while uploading. We will try again later.
        sys.stderr.write(str(e))
        sys.stderr.write(
            "\tRolling back DB transaction and will re-try later.\n")

        # Rolling back changes.
        db.rollback()
Example #7
        if debug:
            pretty_print(output)


        #continue

        try:
            os.mkdir('output/%s_tps' % (settings.get('mefformset')))
            os.mkdir('output/%s' % (settings.get('mefformset')))
        except Exception:
            pass

        alist=[]
        with open('output/%s_tps/%s_output.xml' % (settings.get('mefformset'),mefform), 'w') as f:
            f.write(get_header(settings.get("mefformset"), mefform))
            for each in output.getchildren():
                if 'tps' in each.attrib:
                    alist.append(each.attrib['tps'].split('.')[-1])
                f.write('\t' + etree.tostring(each) + '\n')
            for ptfield in ptform_xml.xpath('//field[field_attributes/cid_mapping]'):
                if ptfield.attrib['id'] in alist:
                    continue
                altid = (ptfield.xpath("./field_attributes/image/refined_image/alternate_field_id/@value") or [''])[0]
                cid =  (ptfield.xpath("./field_attributes/cid_mapping/formml_field_id/@value") or [''])[0]
                f.write("\t<tps id='{0}'  type='{1}' {2} {3}/>\n".format(ptfield.attrib['id'],
                                                                            (ptfield.xpath("./field_attributes/main/type/@value") or [''])[0],
                                                                            "" if not altid else "altid='%s' " % altid.upper(),
                                                                            "" if not cid else "cid='%s' " % cid
                                                                            ))
Example #8
def upload_results(job_submit):
    """
    Uploads Results for a given submit.

        Input:
            job_submit: A row from the job_submits table.
                Results from this job submission will be
                uploaded.

        Output:
            None
    """
    print "Attempting to upload results"

    print "\tJob ID: %d, Job submission ID: %d\n\tOutput Dir: %s" % \
            (job_submit['job_id'], job_submit['id'], job_submit['output_dir'])

    if debug.UPLOAD:
        upload.upload_timing_summary = {}
        starttime = time.time()
    try:
        # Connect to the DB
        db = database.Database('default', autocommit=False)

        # Prepare for upload
        dir = job_submit['output_dir']

        # NEW Beluga - Untar the tarball
        import tarfile
        to_keep = os.listdir(job_submit['output_dir'])
        tarball = glob.glob(job_submit['output_dir'] + '/*00.tgz')[0]
        tar = tarfile.open(tarball, 'r:gz')
        tar.extractall(path=job_submit['output_dir'])
        tar.close()

        all_files = os.listdir(job_submit['output_dir'])
        to_del = set(all_files) - set(to_keep)

        if config.upload.upload_zerodm_periodicity or config.upload.upload_zerodm_singlepulse:
            to_keep_zerodm = os.listdir(job_submit['output_dir'] + '/zerodm')
            tarball = glob.glob(job_submit['output_dir'] +
                                '/zerodm/*zerodm.tgz')[0]
            tar = tarfile.open(tarball, 'r:gz')
            tar.extractall(path=job_submit['output_dir'] + '/zerodm')
            tar.close()
            all_files_zerodm = os.listdir(job_submit['output_dir'] + '/zerodm')
            to_del_zerodm = set(all_files_zerodm) - set(to_keep_zerodm)

        pdm_dir = os.path.join(
            dir, "zerodm") if config.upload.upload_zerodm_periodicity else dir
        sp_dir = os.path.join(
            dir, "zerodm") if config.upload.upload_zerodm_singlepulse else dir

        if not os.path.exists(dir) or not os.listdir(dir):
            errormsg = 'ERROR: Results directory, %s, does not exist or is empty for job_id=%d' %\
                       (dir, job_submit['job_id'])
            raise upload.UploadNonFatalError(errormsg)
        elif len(os.listdir(dir)) == 1 and os.listdir(dir)[0] == 'zerodm' \
                                       and not os.listdir(os.path.join(dir,os.listdir(dir)[0])):
            errormsg = 'ERROR: Results directory, %s, does not exist or is empty for job_id=%d' %\
                       (dir, job_submit['job_id'])
            raise upload.UploadNonFatalError(errormsg)

        fitsfiles = get_fitsfiles(job_submit)
        try:
            data = datafile.autogen_dataobj(fitsfiles)
        except ValueError:
            raise upload.UploadNonFatalError
        version_number = get_version_number(dir)

        if debug.UPLOAD:
            parsetime = time.time()
        # Upload results
        hdr = header.get_header(fitsfiles)

        print "\tHeader parsed."

        rat_inst_id_cache = ratings2.utils.RatingInstanceIDCache(
            dbname='common3')

        cands, tempdir = candidates.get_candidates(version_number, pdm_dir, \
                                                   timestamp_mjd=data.timestamp_mjd, \
                                                   inst_cache=rat_inst_id_cache)
        print "\tPeriodicity candidates parsed. (%d cands)" % len(cands)
        sp_cands, tempdir_sp = sp_candidates.get_spcandidates(version_number, sp_dir, \
                                                              timestamp_mjd=data.timestamp_mjd, \
                                                              inst_cache=rat_inst_id_cache)
        print "\tSingle pulse candidates parsed. (%d cands)" % len(sp_cands)

        diags = diagnostics.get_diagnostics(data.obs_name,
                                             data.beam_id, \
                                             data.obstype, \
                                             version_number, \
                                             pdm_dir, sp_dir)
        print "\tDiagnostics parsed."

        for c in (cands + sp_cands):
            hdr.add_dependent(c)

        if debug.UPLOAD:
            upload.upload_timing_summary['Parsing'] = \
                upload.upload_timing_summary.setdefault('Parsing', 0) + \
                (time.time()-parsetime)

        # Perform the upload
        header_id = hdr.upload(db)
        print "Header ID: ", header_id
        for d in diags:
            d.upload(db)
        print "\tDB upload completed and checked successfully. header_id=%d" % \
                    header_id

    except (upload.UploadNonFatalError):
        # Parsing error caught. Job attempt has failed!
        exceptionmsgs = traceback.format_exception(*sys.exc_info())
        errormsg = "Error while checking results!\n"
        errormsg += "\tJob ID: %d, Job submit ID: %d\n\n" % \
                        (job_submit['job_id'], job_submit['id'])
        errormsg += "".join(exceptionmsgs)

        sys.stderr.write("Error while checking results!\n")
        sys.stderr.write("Database transaction will not be committed.\n")
        sys.stderr.write("\t%s" % exceptionmsgs[-1])

        queries = []
        arglists = []
        queries.append("UPDATE job_submits " \
                       "SET status='upload_failed', " \
                            "details=?, " \
                            "updated_at=? " \
                       "WHERE id=?")
        arglists.append((errormsg, jobtracker.nowstr(), job_submit['id']))
        queries.append("UPDATE jobs " \
                       "SET status='failed', " \
                            "details='Error while uploading results', " \
                            "updated_at=? " \
                       "WHERE id=?")
        arglists.append((jobtracker.nowstr(), job_submit['job_id']))
        jobtracker.execute(queries, arglists)

        # Rolling back changes.
        db.rollback()
    except (database.DatabaseConnectionError, ratings2.database.DatabaseConnectionError,\
               CornellFTP.CornellFTPTimeout, upload.UploadDeadlockError,\
               database.DatabaseDeadlockError), e:
        # Connection error while uploading. We will try again later.
        sys.stderr.write(str(e))
        sys.stderr.write(
            "\tRolling back DB transaction and will re-try later.\n")

        # Rolling back changes.
        db.rollback()
Example #9
import os
import sys
import header
import preprocessor

for i in range(1, len(sys.argv)):
    path = sys.argv[i]
    # Only process arguments that are existing ".c" files.
    if path.endswith(".c") and os.path.isfile(path):
        with open(path) as f:
            content = f.read().split("\n")
        res = header.get_header(path, i)

        res += preprocessor.format_preprocessor(content)
        print(path[:-2])

        # Write the normalized source next to the original file.
        with open(path[:-2] + "_normed.c", "w+") as output:
            for line in res:
                output.write(line + "\n")
        print("\n".join(res))
    else:
        print(path)
        print("Warning: Not a valid file")
Example #10
        name = x[4]
        try:
            value = x[6]
        except IndexError:
            value = x[5]
        # path = x[2]
        cookie[name] = value
    return cookie

d = {"sessionid": "f40247e2d924e43a7dab6b56",
                       "currency": 3,
                       "subtotal": 1,
                       "fee": 2,
                       "total": 3,
                       "quantity": 1}
url = 'https://steamcommunity.com/market/buylisting/726564084060013124'
h = headers = {"Accept": "text/javascript, text/html, application/xml, text/xml, */*",
               "Accept-Encoding": "gzip,deflate,sdch",
               "Accept-Language": "en-US,en;q=0.8",
               "Host": "steamcommunity.com",
               "Referer": "http://steamcommunity.com/market/listings/730/P250%20|%20Boreal%20Forest%20%28Field-Tested%29",
               "Origin": "http://steamcommunity.com",
               "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0"
               }

r = requests.post(url, data=d, headers=get_header("http://steamcommunity.com/market/listings/730/P250%20|"
                                                  "%20Boreal%20Forest%20%28Field-Tested%29"), cookies=cookie_cutter())

print(r.status_code)
print(r.text)