def upload_results(dir):
    db = None
    try:
        db = database.Database('default', autocommit=False)

        if not os.path.exists(dir) or not os.listdir(dir):
            errormsg = 'ERROR: Results directory, %s, does not exist or is empty' % dir
            raise upload.UploadNonFatalError(errormsg)

        fitsfiles = glob.glob(os.path.join(dir, "*.fits"))
        data = datafile.autogen_dataobj(fitsfiles)
        version_number = JobUploader.get_version_number(dir)

        hdr = header.get_header(fitsfiles)
        print "\tHeader parsed."

        cands, tempdir = candidates.get_candidates(version_number, dir)
        print "\tPeriodicity candidates parsed."
        sp_cands = sp_candidates.get_spcandidates(version_number, dir)
        print "\tSingle pulse candidates parsed."

        for c in (cands + sp_cands):
            hdr.add_dependent(c)
        diags = diagnostics.get_diagnostics(data.obs_name,
                                            data.beam_id, \
                                            data.obstype, \
                                            version_number, \
                                            dir)
        print "\tDiagnostics parsed."

        header_id = hdr.upload(db)
        for d in diags:
            d.upload(db)
        print "\tEverything uploaded and checked successfully. header_id=%d" % \
                    header_id

    except (upload.UploadNonFatalError):
        exceptionmsgs = traceback.format_exception(*sys.exc_info())
        errormsg  = "Error while checking results!\n"
        errormsg += "\tResults Dir: %s\n\n" % dir
        errormsg += "".join(exceptionmsgs)

        sys.stderr.write("Error while checking results!\n")
        sys.stderr.write("Database transaction will not be committed.\n")
        sys.stderr.write("\t%s" % exceptionmsgs[-1])

        # Rolling back changes.
        db.rollback()

    except (database.DatabaseConnectionError, CornellFTP.CornellFTPTimeout, \
               upload.UploadDeadlockError, database.DatabaseDeadlockError), e:
        # Connection error while uploading. We will try again later.
        sys.stderr.write(str(e))
        sys.stderr.write("\tRolling back DB transaction and will re-try later.\n")

        # Rolling back changes (guard in case the DB connection itself
        # failed and `db` was never created).
        if db is not None:
            db.rollback()
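
Note that every variant in this collection opens its connection with autocommit=False and only ever calls db.rollback(); no success-path commit appears in any of the excerpts, so the transaction is presumably committed by the caller. A minimal sketch of that transaction pattern, with hypothetical names (run_in_transaction, connect and work are not from the examples, and it assumes the Database wrapper exposes a DB-API style commit()):

def run_in_transaction(connect, work):
    # Sketch only: commit() appears nowhere in the excerpts above.
    db = None
    try:
        db = connect()      # e.g. database.Database('default', autocommit=False)
        work(db)            # the header/candidate/diagnostic uploads
        db.commit()         # persist only once every upload succeeded
    except Exception:
        if db is not None:
            db.rollback()   # leave the database untouched on any failure
        raise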
Example #2
def upload_results(job_submit):
    """
    Uploads Results for a given submit.

        Input:
            job_submit: A row from the job_submits table.
                Results from this job submission will be
                uploaded.

        Output:
            None
    """
    print "Attempting to upload results"
    print "\tJob ID: %d, Job submission ID: %d" % \
            (job_submit['job_id'], job_submit['id'])
    if debug.UPLOAD:
        upload.upload_timing_summary = {}
        starttime = time.time()
    db = None
    try:
        # Connect to the DB
        db = database.Database('SPAN512', autocommit=False)
        # Prepare for upload
        dir = job_submit['output_dir']
        if not os.path.exists(dir) or not os.listdir(dir):
            errormsg = 'ERROR: Results directory, %s, does not exist or is empty for job_id=%d' %\
                       (dir, job_submit['job_id'])
            raise upload.UploadNonFatalError(errormsg)

        fitsfiles = get_fitsfiles(job_submit)
        data = datafile.autogen_dataobj(fitsfiles)
        version_number = get_version_number(dir)

        if debug.UPLOAD: 
            parsetime = time.time()
        # Upload results
        hdr = header.get_header(fitsfiles)
        
        print "\tHeader parsed."

        rat_inst_id_cache = ratings2.utils.RatingInstanceIDCache(dbname='nancay')
        cands, tempdir = candidates.get_candidates(version_number, dir, \
                                                   timestamp_mjd=data.timestamp_mjd, \
                                                   inst_cache=rat_inst_id_cache)
        print "\tPeriodicity candidates parsed."
        sp_cands = sp_candidates.get_spcandidates(version_number, dir, \
                                                  timestamp_mjd=data.timestamp_mjd)
        print "\tSingle pulse candidates parsed."

        for c in (cands + sp_cands):
            hdr.add_dependent(c)
        diags = diagnostics.get_diagnostics(data.obs_name, 
                                             data.beam_id, \
                                             data.obstype, \
                                             version_number, \
                                             dir)
        print "\tDiagnostics parsed."
        
        if debug.UPLOAD: 
            upload.upload_timing_summary['Parsing'] = \
                upload.upload_timing_summary.setdefault('Parsing', 0) + \
                (time.time()-parsetime)

        # Perform the upload
        header_id = hdr.upload(db)
        for d in diags:
            d.upload(db)
        print "\tDB upload completed and checked successfully. header_id=%d" % \
                    header_id


    except (upload.UploadNonFatalError):
        # Parsing error caught. Job attempt has failed!
        exceptionmsgs = traceback.format_exception(*sys.exc_info())
        errormsg  = "Error while checking results!\n"
        errormsg += "\tJob ID: %d, Job submit ID: %d\n\n" % \
                        (job_submit['job_id'], job_submit['id'])
        errormsg += "".join(exceptionmsgs)
        
        sys.stderr.write("Error while checking results!\n")
        sys.stderr.write("Database transaction will not be committed.\n")
        sys.stderr.write("\t%s" % exceptionmsgs[-1])

        queries = []
        queries.append("UPDATE job_submits " \
                       "SET status='upload_failed', " \
                            "details=\"%s\", " \
                            "updated_at='%s' " \
                       "WHERE id=%d" % (errormsg.replace('"', "'"),
                                        jobtracker.nowstr(),
                                        job_submit['id']))
        queries.append("UPDATE jobs " \
                       "SET status='failed', " \
                            "details='Error while uploading results', " \
                            "updated_at='%s' " \
                       "WHERE id=%d"%(jobtracker.nowstr(), job_submit['job_id']))
        jobtracker.query(queries)
        
        # Rolling back changes. 
        db.rollback()
    except (database.DatabaseConnectionError, \
               upload.UploadDeadlockError, database.DatabaseDeadlockError), e:
        # Connection error while uploading. We will try again later.
        sys.stderr.write(str(e))
        sys.stderr.write("\tRolling back DB transaction and will re-try later.\n")
        
        # Rolling back changes (guard in case the DB connection itself
        # failed and `db` was never created).
        if db is not None:
            db.rollback()
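
Example #2 interpolates errormsg straight into the UPDATE statement and escapes double quotes by hand, which also silently rewrites quotes inside the stored traceback. Example #3 below passes ? placeholders to jobtracker.execute instead; for comparison, the same failure-logging update in placeholder form (assuming Example #3's jobtracker API):

queries = ["UPDATE job_submits "
           "SET status='upload_failed', details=?, updated_at=? "
           "WHERE id=?"]
arglists = [(errormsg, jobtracker.nowstr(), job_submit['id'])]
# jobtracker.execute pairs each query with its argument tuple, so the
# driver handles quoting and errormsg is stored verbatim.
jobtracker.execute(queries, arglists)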
Example #3
def upload_results(job_submit):
    """
    Uploads Results for a given submit.

        Input:
            job_submit: A row from the job_submits table.
                Results from this job submission will be
                uploaded.

        Output:
            None
    """
    print "Attempting to upload results"
    print "\tJob ID: %d, Job submission ID: %d" % \
            (job_submit['job_id'], job_submit['id'])
    if debug.UPLOAD:
        upload.upload_timing_summary = {}
        starttime = time.time()
    db = None
    try:
        # Connect to the DB
        db = database.Database('default', autocommit=False)
        # Prepare for upload
        dir = job_submit['output_dir']
        if not os.path.exists(dir) or not os.listdir(dir):
            errormsg = 'ERROR: Results directory, %s, does not exist or is empty for job_id=%d' %\
                       (dir, job_submit['job_id'])
            raise upload.UploadNonFatalError(errormsg)

        fitsfiles = get_fitsfiles(job_submit)
        data = datafile.autogen_dataobj(fitsfiles)
        version_number = get_version_number(dir)

        if debug.UPLOAD:
            parsetime = time.time()
        # Upload results
        hdr = header.get_header(fitsfiles)

        print "\tHeader parsed."

        cands = candidates.get_candidates(version_number, dir)
        print "\tPeriodicity candidates parsed."
        sp_cands = sp_candidates.get_spcandidates(version_number, dir)
        print "\tSingle pulse candidates parsed."

        for c in (cands + sp_cands):
            hdr.add_dependent(c)
        diags = diagnostics.get_diagnostics(data.obs_name,
                                             data.beam_id, \
                                             data.obstype, \
                                             version_number, \
                                             dir)
        print "\tDiagnostics parsed."

        if debug.UPLOAD:
            upload.upload_timing_summary['Parsing'] = \
                upload.upload_timing_summary.setdefault('Parsing', 0) + \
                (time.time()-parsetime)

        # Perform the upload
        header_id = hdr.upload(db)
        for d in diags:
            d.upload(db)
        print "\tEverything uploaded and checked successfully. header_id=%d" % \
                    header_id
    except (upload.UploadNonFatalError):
        # Parsing error caught. Job attempt has failed!
        exceptionmsgs = traceback.format_exception(*sys.exc_info())
        errormsg = "Error while checking results!\n"
        errormsg += "\tJob ID: %d, Job submit ID: %d\n\n" % \
                        (job_submit['job_id'], job_submit['id'])
        errormsg += "".join(exceptionmsgs)

        sys.stderr.write("Error while checking results!\n")
        sys.stderr.write("Database transaction will not be committed.\n")
        sys.stderr.write("\t%s" % exceptionmsgs[-1])

        queries = []
        arglists = []
        queries.append("UPDATE job_submits " \
                       "SET status='upload_failed', " \
                            "details=?, " \
                            "updated_at=? " \
                       "WHERE id=?")
        arglists.append((errormsg, jobtracker.nowstr(), job_submit['id']))
        queries.append("UPDATE jobs " \
                       "SET status='failed', " \
                            "details='Error while uploading results', " \
                            "updated_at=? " \
                       "WHERE id=?")
        arglists.append((jobtracker.nowstr(), job_submit['job_id']))
        jobtracker.execute(queries, arglists)

        # Rolling back changes.
        db.rollback()
    except (database.DatabaseConnectionError, CornellFTP.CornellFTPTimeout,\
               upload.UploadDeadlockError, database.DatabaseDeadlockError), e:
        # Connection error while uploading. We will try again later.
        sys.stderr.write(str(e))
        sys.stderr.write(
            "\tRolling back DB transaction and will re-try later.\n")

        # Rolling back changes (guard in case the DB connection itself
        # failed and `db` was never created).
        if db is not None:
            db.rollback()
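
Examples #2, #3 and #6 all time the parsing stage by accumulating elapsed seconds into upload.upload_timing_summary, keyed by stage name. The same bookkeeping can be factored into a helper; a standalone sketch (timed_stage is hypothetical, not part of the pipeline):

import time

upload_timing_summary = {}

def timed_stage(name, func, *args, **kwargs):
    # Run one pipeline stage and add its elapsed time to the per-stage
    # total, mirroring the setdefault(...) + delta pattern used above.
    start = time.time()
    result = func(*args, **kwargs)
    upload_timing_summary[name] = \
        upload_timing_summary.setdefault(name, 0) + (time.time() - start)
    return result

A call like hdr = timed_stage('Parsing', header.get_header, fitsfiles) would then replace the inline timing blocks.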
Example #4
                    required=False,
                    default='Тестовая база.xlsx')
args = parser.parse_args()
basepath = args.path

# get the data from the API
try:
    api = ClientApi(args.token)
    vacancies = api.get_opened_vacancies_list()
    statuses = api.get_statuses_list()
except Exception:
    logging.error("Can't get data from the API")
    exit(1)

# get the list of applicants to process and add the required fields
candidates = get_candidates(args.path,
                            args.filename)  # TODO: add a proper interface
if candidates is None:
    logging.error("Can't get candidates from db")
    exit(1)
candidates = add_vacancy_id(candidates, vacancies)
if candidates is None:
    logging.error("Can't add vacancy_id")
    exit(1)
candidates = add_status_id(candidates, statuses)
if candidates is None:
    logging.error("Can't add status_id")
    exit(1)

with open(path.join(basepath, IMPORT_PROGRESS_FILE), 'w+') as file:
    for candidate in candidates:
        cv_data = api.upload_resume(candidate['cv'])['data']
Example #5
def upload_results(dir):

    db = None
    try:
        db = database.Database('default', autocommit=False)

        if not os.path.exists(dir) or not os.listdir(dir):
            errormsg = 'ERROR: Results directory, %s, does not exist or is empty' % dir
            raise upload.UploadNonFatalError(errormsg)

        pdm_dir = os.path.join(
            dir, "zerodm") if config.upload.upload_zerodm_periodicity else dir
        sp_dir = os.path.join(
            dir, "zerodm") if config.upload.upload_zerodm_singlepulse else dir
        fitsfiles = glob.glob(os.path.join(dir, "*.fits"))
        data = datafile.autogen_dataobj(fitsfiles)
        version_number = JobUploader.get_version_number(dir)

        hdr = header.get_header(fitsfiles)
        print "\tHeader parsed."

        rat_inst_id_cache = ratings2.utils.RatingInstanceIDCache(dbname='common3')
        cands, tempdir = candidates.get_candidates(version_number, pdm_dir, \
                                                   timestamp_mjd=data.timestamp_mjd, \
                                                   inst_cache=rat_inst_id_cache)
        print "\tPeriodicity candidates parsed."
        sp_cands, tempdir_sp = sp_candidates.get_spcandidates(version_number, sp_dir, \
                                                              timestamp_mjd=data.timestamp_mjd, \
                                                              inst_cache=rat_inst_id_cache)
        print "\tSingle pulse candidates parsed."

        for c in (cands + sp_cands):
            hdr.add_dependent(c)
        diags = diagnostics.get_diagnostics(data.obs_name,
                                            data.beam_id, \
                                            data.obstype, \
                                            version_number, \
                                            pdm_dir, sp_dir)
        print "\tDiagnostics parsed."

        header_id = hdr.upload(db)
        for d in diags:
            d.upload(db)
        print "\tEverything uploaded and checked successfully. header_id=%d" % \
             header_id

    except (upload.UploadNonFatalError):
        exceptionmsgs = traceback.format_exception(*sys.exc_info())
        errormsg = "Error while checking results!\n"
        errormsg += "\tResults Dir: %s\n\n" % dir
        errormsg += "".join(exceptionmsgs)

        sys.stderr.write("Error while checking results!\n")
        sys.stderr.write("Database transaction will not be committed.\n")
        sys.stderr.write("\t%s" % exceptionmsgs[-1])

        # Rolling back changes.
        db.rollback()

    except (database.DatabaseConnectionError, CornellFTP.CornellFTPTimeout,\
               upload.UploadDeadlockError, database.DatabaseDeadlockError), e:
        # Connection error while uploading. We will try again later.
        sys.stderr.write(str(e))
        sys.stderr.write(
            "\tRolling back DB transaction and will re-try later.\n")

        # Rolling back changes (guard in case the DB connection itself
        # failed and `db` was never created).
        if db is not None:
            db.rollback()
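
Examples #5 and #6 choose the per-product results directory with the config.upload.upload_zerodm_* switches. The repeated conditional reduces to a small helper; a sketch (results_dir is hypothetical, the flag names come from the examples):

import os

def results_dir(base_dir, use_zerodm):
    # Zero-DM products live in a "zerodm" subdirectory when the
    # corresponding upload flag is set; otherwise use the base directory.
    return os.path.join(base_dir, "zerodm") if use_zerodm else base_dir

# pdm_dir = results_dir(dir, config.upload.upload_zerodm_periodicity)
# sp_dir  = results_dir(dir, config.upload.upload_zerodm_singlepulse)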
Example #6
def upload_results(job_submit):
    """
    Uploads Results for a given submit.

        Input:
            job_submit: A row from the job_submits table.
                Results from this job submission will be
                uploaded.

        Output:
            None
    """
    print "Attempting to upload results"

    print "\tJob ID: %d, Job submission ID: %d\n\tOutput Dir: %s" % \
            (job_submit['job_id'], job_submit['id'], job_submit['output_dir'])

    if debug.UPLOAD:
        upload.upload_timing_summary = {}
        starttime = time.time()
    db = None
    try:
        # Connect to the DB
        db = database.Database('default', autocommit=False)

        # Prepare for upload
        dir = job_submit['output_dir']

        # NEW Beluga - Untar the tarball
        import tarfile
        to_keep = os.listdir(job_submit['output_dir'])
        tarball = glob.glob(job_submit['output_dir'] + '/*00.tgz')[0]
        tar = tarfile.open(tarball, 'r:gz')
        tar.extractall(path=job_submit['output_dir'])
        tar.close()

        all_files = os.listdir(job_submit['output_dir'])
        to_del = set(all_files) - set(to_keep)

        if config.upload.upload_zerodm_periodicity or config.upload.upload_zerodm_singlepulse:
            to_keep_zerodm = os.listdir(job_submit['output_dir'] + '/zerodm')
            tarball = glob.glob(job_submit['output_dir'] +
                                '/zerodm/*zerodm.tgz')[0]
            tar = tarfile.open(tarball, 'r:gz')
            tar.extractall(path=job_submit['output_dir'] + '/zerodm')
            tar.close()
            all_files_zerodm = os.listdir(job_submit['output_dir'] + '/zerodm')
            to_del_zerodm = set(all_files_zerodm) - set(to_keep_zerodm)

        pdm_dir = os.path.join(
            dir, "zerodm") if config.upload.upload_zerodm_periodicity else dir
        sp_dir = os.path.join(
            dir, "zerodm") if config.upload.upload_zerodm_singlepulse else dir

        if not os.path.exists(dir) or not os.listdir(dir):
            errormsg = 'ERROR: Results directory, %s, does not exist or is empty for job_id=%d' %\
                       (dir, job_submit['job_id'])
            raise upload.UploadNonFatalError(errormsg)
        elif len(os.listdir(dir)) == 1 and os.listdir(dir)[0] == 'zerodm' \
                                       and not os.listdir(os.path.join(dir,os.listdir(dir)[0])):
            errormsg = 'ERROR: Results directory, %s, does not exist or is empty for job_id=%d' %\
                       (dir, job_submit['job_id'])
            raise upload.UploadNonFatalError(errormsg)

        fitsfiles = get_fitsfiles(job_submit)
        try:
            data = datafile.autogen_dataobj(fitsfiles)
        except ValueError:
            raise upload.UploadNonFatalError("Could not generate a data "
                                             "object from the FITS files")
        version_number = get_version_number(dir)

        if debug.UPLOAD:
            parsetime = time.time()
        # Upload results
        hdr = header.get_header(fitsfiles)

        print "\tHeader parsed."

        rat_inst_id_cache = ratings2.utils.RatingInstanceIDCache(
            dbname='common3')

        cands, tempdir = candidates.get_candidates(version_number, pdm_dir, \
                                                   timestamp_mjd=data.timestamp_mjd, \
                                                   inst_cache=rat_inst_id_cache)
        print "\tPeriodicity candidates parsed. (%d cands)" % len(cands)
        sp_cands, tempdir_sp = sp_candidates.get_spcandidates(version_number, sp_dir, \
                                                              timestamp_mjd=data.timestamp_mjd, \
                                                              inst_cache=rat_inst_id_cache)
        print "\tSingle pulse candidates parsed. (%d cands)" % len(sp_cands)

        diags = diagnostics.get_diagnostics(data.obs_name,
                                             data.beam_id, \
                                             data.obstype, \
                                             version_number, \
                                             pdm_dir, sp_dir)
        print "\tDiagnostics parsed."

        for c in (cands + sp_cands):
            hdr.add_dependent(c)

        if debug.UPLOAD:
            upload.upload_timing_summary['Parsing'] = \
                upload.upload_timing_summary.setdefault('Parsing', 0) + \
                (time.time()-parsetime)

        # Perform the upload
        header_id = hdr.upload(db)
        print "Header ID: ", header_id
        for d in diags:
            d.upload(db)
        print "\tDB upload completed and checked successfully. header_id=%d" % \
                    header_id

    except (upload.UploadNonFatalError):
        # Parsing error caught. Job attempt has failed!
        exceptionmsgs = traceback.format_exception(*sys.exc_info())
        errormsg = "Error while checking results!\n"
        errormsg += "\tJob ID: %d, Job submit ID: %d\n\n" % \
                        (job_submit['job_id'], job_submit['id'])
        errormsg += "".join(exceptionmsgs)

        sys.stderr.write("Error while checking results!\n")
        sys.stderr.write("Database transaction will not be committed.\n")
        sys.stderr.write("\t%s" % exceptionmsgs[-1])

        queries = []
        arglists = []
        queries.append("UPDATE job_submits " \
                       "SET status='upload_failed', " \
                            "details=?, " \
                            "updated_at=? " \
                       "WHERE id=?")
        arglists.append((errormsg, jobtracker.nowstr(), job_submit['id']))
        queries.append("UPDATE jobs " \
                       "SET status='failed', " \
                            "details='Error while uploading results', " \
                            "updated_at=? " \
                       "WHERE id=?")
        arglists.append((jobtracker.nowstr(), job_submit['job_id']))
        jobtracker.execute(queries, arglists)

        # Rolling back changes.
        db.rollback()
    except (database.DatabaseConnectionError, ratings2.database.DatabaseConnectionError,\
               CornellFTP.CornellFTPTimeout, upload.UploadDeadlockError,\
               database.DatabaseDeadlockError), e:
        # Connection error while uploading. We will try again later.
        sys.stderr.write(str(e))
        sys.stderr.write(
            "\tRolling back DB transaction and will re-try later.\n")

        # Rolling back changes (guard in case the DB connection itself
        # failed and `db` was never created).
        if db is not None:
            db.rollback()
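
Example #6 records which files the two tarball extractions added (to_del and to_del_zerodm) but never deletes them in this excerpt, so the cleanup presumably happens after the upload completes. A hypothetical sketch of that step, reusing the sets computed above (remove_extracted is not from the example):

import os
import shutil

def remove_extracted(output_dir, to_del):
    # Delete only what the tarball extraction added, leaving the
    # original contents (to_keep) untouched.
    for name in to_del:
        path = os.path.join(output_dir, name)
        if os.path.isdir(path):
            shutil.rmtree(path)
        else:
            os.remove(path)

# remove_extracted(job_submit['output_dir'], to_del)
# remove_extracted(job_submit['output_dir'] + '/zerodm', to_del_zerodm)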