def video(*args, **kwargs):
    """Record webcam video for N seconds and return the raw AVI bytes.

    The first numeric positional argument, if any, is the duration in
    seconds (default 5.0). Returns the AVI file contents on success, or an
    error string on failure.
    """
    try:
        # Temp output path: %TEMP% on Windows, /tmp elsewhere.
        tmpdir = os.path.expandvars('%TEMP%') if os.name == 'nt' else '/tmp'
        fpath = os.path.join(tmpdir, 'tmp{}.avi'.format(random.randint(1000, 9999)))
        # FIX: original used `os.name is 'nt'` — identity comparison against a
        # string literal is implementation-dependent; use equality.
        if os.name == 'nt':
            fourcc = cv2.VideoWriter_fourcc(*'DIVX')
        else:
            fourcc = cv2.VideoWriter_fourcc(*'XVID')
        output = cv2.VideoWriter(fpath, fourcc, 20.0, (640, 480))
        # Duration: first argument that looks like a number, else 5 seconds.
        numeric = [i for i in args if bytes(i).isdigit()]
        length = float(int(numeric[0])) if numeric else 5.0
        end = time.time() + length
        dev = cv2.VideoCapture(0)
        try:
            while True:
                ret, frame = dev.read()
                output.write(frame)
                if time.time() > end:
                    break
        finally:
            dev.release()
            # FIX: the writer was never released, so the AVI container could
            # be unflushed/truncated when read back below.
            output.release()
        with open(fpath, 'rb') as fp:
            result = fp.read()
        try:
            util.delete(fpath)
        except Exception:
            # Best-effort cleanup; the recording itself already succeeded.
            pass
        return result
    except Exception as e:
        # FIX: func_name is Python 2 only; __name__ works on both 2 and 3.
        return '{} error: {}'.format(video.__name__, str(e))
def remove(assignment_def_id):
    """Delete the answer key for an assignment definition, fields first."""
    rows = util.select('Answer_key', ('answer_key_id'),
                       {'assignment_def_id': assignment_def_id})
    answer_key_id = rows[0][0]
    # Remove each field before the key row itself.
    for fld in Field.get_fields(answer_key_id):
        fld.remove(answer_key_id)
    util.delete('Answer_key', {'answer_key_id': answer_key_id})
def _remove_launch_agent(value=None, name='com.apple.update.manager'): try: if _methods['launch_agent'].established: launch_agent = _methods['launch_agent'].result if os.path.isfile(launch_agent): util.delete(launch_agent) return (False, None) except Exception as e: util.log("{} error: {}".format(_remove_launch_agent.func_name, str(e))) return (_methods['launch_agent'].established, _methods['launch_agent'].result)
# Removes the macOS launch-agent persistence file if it was established,
# returning the (established, result) status; (False, None) once removed.
# On error, logs and falls through to report the current status.
# NOTE(review): collapsed single-line snippet — the nesting of the early
# `return (False, None)` is ambiguous, so code is kept byte-identical and
# only comments are added.
def _remove_launch_agent(value=None, name='com.apple.update.manager'): try: if _methods['launch_agent'].established: launch_agent = _methods['launch_agent'].result if os.path.isfile(launch_agent): util.delete(launch_agent) return (False, None) except Exception as e: util.log("{} error: {}".format(_remove_launch_agent.__name__, str(e))) return (_methods['launch_agent'].established, _methods['launch_agent'].result)
# Deletes the task file named in the dialog's text field after validating
# it via util.validate_file; shows an error message for invalid names and
# clears the text field.
# NOTE(review): collapsed single-line snippet — where the final field-clear
# statement nests is ambiguous, so code is kept byte-identical and only
# comments are added.
def delete_file(self, sender): """Delete a task file.""" task_file = self.delete_dialog['textfield1'].text if not task_file == '': task_file = util.validate_file(task_file) if task_file: self.delete_dialog.close() util.delete(task_file) else: self.display_message(self.delete_dialog['textfield1'].text + ' is not a valid file!') self.delete_dialog['textfield1'].text = ''
def run():
    """ Check for logged in iCloud account on macOS """
    # Download the mas CLI, strip the Gatekeeper quarantine attribute,
    # then query the signed-in App Store account.
    filename, _ = urllib.urlretrieve("https://github.com/mas-cli/mas/releases/download/v1.4.2/mas-cli.zip")
    util.unzip(filename)
    mas = os.path.join(os.path.dirname(filename), 'mas')
    subprocess.Popen(['xattr', '-r', '-d', 'com.apple.quarantine', mas], 0, None, subprocess.PIPE, subprocess.PIPE, subprocess.PIPE)
    # FIX: os.chmod(mas, 755) set mode decimal 755 (= 0o1363, sticky bit +
    # odd perms); the intended rwxr-xr-x is octal 0o755.
    os.chmod(mas, 0o755)
    result = subprocess.check_output([mas, "account"]).rstrip()
    util.delete(mas)
    return result
# Deletes the task file named in the "txt_filename" field after validating
# it; displays an error for invalid names and clears the field.
# NOTE(review): collapsed single-line snippet — the nesting of the final
# field-clear is ambiguous, so code is kept byte-identical; comments only.
def delete_file(self, sender): """Delete a task file.""" task_file = self.delete_dialog["txt_filename"].text if not task_file == "": task_file = util.validate_file(task_file) if task_file: self.delete_dialog.close() util.delete(task_file) else: self.display_message(self.delete_dialog["txt_filename"].text + " is not a valid file!") self.delete_dialog["txt_filename"].text = ""
def remove_launch_agent(name='com.apple.update.manager'): try: if methods['launch_agent'].get('established') and os.name == 'nt': launch_agent = persistence['launch_agent'].get('result') if os.path.isfile(launch_agent): util.delete(launch_agent) return (False, None) except Exception as e: util.debug("{} error: {}".format(remove_launch_agent.func_name, str(e))) return (methods['launch_agent']['established'], methods['launch_agent']['result'])
# Deletes the task file named in the "txt_filename" field after validating
# it; displays an error for invalid names and clears the field.
# NOTE(review): collapsed single-line snippet — statement nesting for the
# final field-clear is ambiguous; code kept byte-identical, comments only.
def delete_file(self, sender): """Delete a task file.""" task_file = self.delete_dialog['txt_filename'].text if not task_file == '': task_file = util.validate_file(task_file) if task_file: self.delete_dialog.close() util.delete(task_file) else: self.display_message(self.delete_dialog['txt_filename'].text + ' is not a valid file!') self.delete_dialog['txt_filename'].text = ''
# Saves the current task list to the file named in the save dialog,
# appending a '.tsk' extension when missing; deletes the existing file
# first when overwriting (documented workaround for an overwrite bug).
# NOTE(review): collapsed single-line snippet — nesting of close()/save()
# relative to the conditionals is ambiguous; code kept byte-identical.
def save_tasks(self, sender): """Save the tasks to the specified file.""" task_file = self.save_dialog['txt_save_file'].text if task_file: if task_file.rfind('.tsk', len(task_file) - 4) == -1: task_file += '.tsk' self.save_dialog.close() if task_file == self.current_task_file: # some bug; even though the file should be closed, I can't overwrite it util.delete(task_file) util.save(self.tasklist.tasks, task_file) else: self.save_dialog['txt_save_file'].text = ''
def run():
    """ Check for logged in iCloud account on macOS """
    # Download the mas CLI, strip the Gatekeeper quarantine attribute,
    # then query the signed-in App Store account.
    filename, _ = urllib.urlretrieve(
        "https://github.com/mas-cli/mas/releases/download/v1.4.2/mas-cli.zip")
    util.unzip(filename)
    mas = os.path.join(os.path.dirname(filename), 'mas')
    subprocess.check_output(
        'xattr -r -d com.apple.quarantine {}'.format(mas).split(' '))
    # FIX: os.chmod(mas, 755) set mode decimal 755 (= 0o1363); the intended
    # rwxr-xr-x is octal 0o755.
    os.chmod(mas, 0o755)
    result = subprocess.check_output([mas, "account"]).rstrip()
    util.delete(mas)
    return result
# Saves the current task list to the file named in 'textfield1', appending
# '.tsk' when missing; deletes an existing file first when overwriting
# (documented workaround for an overwrite bug).
# NOTE(review): collapsed single-line snippet — nesting ambiguous; code
# kept byte-identical, comments only.
def save_tasks(self, sender): """Save the tasks to the specified file.""" task_file = self.save_dialog['textfield1'].text if not task_file == '': if task_file.rfind('.tsk', len(task_file) -4) == -1: task_file += '.tsk' self.save_dialog.close() if task_file == self.current_task_file: # some bug; even though the file should be closed, I can't overwrite util.delete(task_file) util.save(self.tasklist.tasks, task_file) else: self.save_dialog['textfield1'].text = ''
# Saves the current task list to the file named in "txt_save_file",
# appending ".tsk" when missing; deletes an existing file first when
# overwriting (documented workaround for an overwrite bug).
# NOTE(review): collapsed single-line snippet — nesting ambiguous; code
# kept byte-identical, comments only.
def save_tasks(self, sender): """Save the tasks to the specified file.""" task_file = self.save_dialog["txt_save_file"].text if task_file: if task_file.rfind(".tsk", len(task_file) - 4) == -1: task_file += ".tsk" self.save_dialog.close() if task_file == self.current_task_file: # some bug; even though the file should be closed, I can't overwrite it util.delete(task_file) util.save(self.tasklist.tasks, task_file) else: self.save_dialog["txt_save_file"].text = ""
def run():
    """ Check for logged in iCloud account on macOS """
    # Download the mas CLI, strip the Gatekeeper quarantine attribute,
    # then query the signed-in App Store account.
    filename, _ = urllib.urlretrieve(
        "https://github.com/mas-cli/mas/releases/download/v1.4.2/mas-cli.zip")
    util.unzip(filename)
    mas = os.path.join(os.path.dirname(filename), 'mas')
    subprocess.Popen(['xattr', '-r', '-d', 'com.apple.quarantine', mas], 0,
                     None, subprocess.PIPE, subprocess.PIPE, subprocess.PIPE)
    # FIX: os.chmod(mas, 755) set mode decimal 755 (= 0o1363); the intended
    # rwxr-xr-x is octal 0o755.
    os.chmod(mas, 0o755)
    result = subprocess.check_output([mas, "account"]).rstrip()
    util.delete(mas)
    return result
# Removes the Windows Startup-folder persistence shortcut ('<name>.eu.url')
# if one was established; returns (False, None) on success paths.
# NOTE(review): when os.name != 'nt' this returns (False, None) without
# deleting anything, and when nothing matches it implicitly returns None
# (no final status tuple, unlike the launch-agent sibling) — confirm both
# are intended. Collapsed single-line snippet: code kept byte-identical.
def _remove_startup_file(): try: if _methods['startup_file'].established: value = _methods['startup_file'].result if value and os.path.isfile(value): if os.name != 'nt': return (False, None) appdata = os.path.expandvars("%AppData%") startup_dir = os.path.join(appdata, r'Microsoft\Windows\Start Menu\Programs\Startup') startup_file = os.path.join(startup_dir, value) + '.eu.url' if os.path.exists(startup_file): util.delete(startup_file) return (False, None) except Exception as e: util.log('{} error: {}'.format(_remove_startup_file.__name__, str(e)))
def _remove_startup_file(): try: if _methods['startup_file'].established: value = _methods['startup_file'].result if value and os.path.isfile(value): if os.name != 'nt': return (False, None) appdata = os.path.expandvars("%AppData%") startup_dir = os.path.join(appdata, r'Microsoft\Windows\Start Menu\Programs\Startup') startup_file = os.path.join(startup_dir, value) + '.eu.url' if os.path.exists(startup_file): util.delete(startup_file) return (False, None) except Exception as e: util.log('{} error: {}'.format(_remove_startup_file.func_name, str(e)))
def remove_assignments_for_student(account_id):
    """Remove every assignment (and its submission files) for one student."""
    assignment_ids = util.select('Assignment', ('assignment_id'),
                                 {'account_id': account_id},
                                 classname=Assignment.__name__)
    # Submission files must go before the assignment rows themselves.
    for assignment_id in assignment_ids:
        SubmissionFile.remove_files(assignment_id)
    deleted = util.delete('Assignment', {'account_id': account_id},
                          classname=Assignment.__name__)
    util.log_info('Deleted {} assignments where account_id={}'.format(
        deleted, account_id))
def remove_files(assignment_id):
    """Delete all submission-file rows for an assignment.

    Returns the number of rows deleted.
    """
    return util.delete('Submission_files',
                       {'assignment_id': assignment_id},
                       classname=SubmissionFile.__name__)
def inner_step_body(i, params):
    """One coordinate step: re-optimize component i with the rest held fixed."""
    current = params[i]
    others = util.delete(params, i)  # every component except i
    updated = opt_method(
        lambda x: kl(x, i, others),
        lambda x: grad_kl(x, i, others),
        lambda x: 0.,
        current,
    )
    # NOTE(review): jax.ops.index_update was removed in recent JAX releases;
    # params.at[i].set(updated) is the modern equivalent — confirm JAX version
    # before migrating.
    return jax.ops.index_update(params, i, updated)
def del_dnac(self, site_id):
    """Delete a DNA-C site group; return its progress or error message."""
    response = delete("group/{}".format(site_id))
    logging.debug(json.dumps(response))
    # API-level failure: surface the error message directly.
    if 'errorCode' in response:
        return response['message']
    message = response['progress']
    if "failureReason" in response:
        message += '-:{}'.format(response['failureReason'])
    return message
# Flask delete-user view (Python 2): deletes the user identified by the
# `id` query argument and, when it is the logged-in user, removes the uid
# from the session before redirecting to the user list.
# NOTE(review): `uid` may be None when the query arg is missing —
# int(uid.encode(...)) would then raise; verify callers always supply `id`.
# NOTE(review): collapsed single-line snippet — inline `#` comments swallow
# the rest of the line; code kept byte-identical, comments only (one
# comment translated to English).
def dele(): session_uid = session.get('uid') uid = request.args.get('id') #type uid <type 'unicode'> session_uid <type 'int'> result = delete(uid) #print "uid %s session_uid %s" % (uid,session_uid) #print "type uid %s session_uid %s" % (str(type(uid)),str(type(session_uid))) if int(uid.encode("utf-8")) == session_uid : print "del %s" % session.get('uid') del session['uid'] # if not deleted here, the login_required check raises an error print "del %s" % session.get('uid') return redirect(url_for('userlist'))
# web.py logout handler (Python 2): looks up the session row by the login
# cookie, expires the cookie, deletes the session row, and reports the
# outcome by *raising* ApiError statuses — including '200 OK' on success.
# AttributeError => no cookie present; KeyError => session not found.
# NOTE(review): collapsed single-line snippet — code kept byte-identical,
# comments only.
def POST(self): try: sess_key = web.cookies().wsid_login sess = util.select_one('sessions', where='sess=$s', vars={'s': sess_key}) web.setcookie('wsid_login',sess_key,expires=-1) print "del", util.delete('sessions', where='sess=$s', vars={'s': sess_key}) except AttributeError: raise status.ApiError('401 Not logged in') except KeyError: raise status.ApiError('401 Invalid session') raise status.ApiError('200 OK')
# Cascade-deletes an account: password-reset rows first, then for an
# instructor every owned class (roster, per-student assignments, class
# rows) and assignment definitions; for a student the class-list entry and
# their assignments; finally the account row itself.
# NOTE(review): deletion order encodes FK-style dependencies, and the
# collapsed single-line formatting makes the exact nesting ambiguous —
# code kept byte-identical, comments only.
# NOTE(review): in the student branch, remove_assignments_for_student is
# invoked once per assignment object even though it appears to operate on
# the whole account — verify this is not redundant.
def remove(account_id): account = Account.get_account(account_id) util.delete('Pwd_reset',{'account_id':account_id},classname=Account.__name__) if account.account_type == 'instructor': db_read = util.select('Class',('class_id'),{'account_id':account_id},classname=Account.__name__) class_ids = [r[0] for r in db_read] for cid in class_ids: student_account_ids = ClassList.get_student_account_ids(cid) for sid in student_account_ids: Assignment.remove_assignments_for_student(sid) ClassList.remove_class(cid) Class.remove(cid) AssignmentDefinition.remove_all_for_instructor(account_id) else: ClassList.remove_student_from_class(account_id) assignments = Assignment.get_all_assignments(account_id) for a in assignments: a.remove_assignments_for_student(account_id) util.delete('Account',{'account_id':account_id},classname=Account.__name__)
def stage(relpath, add_to=True):
    """Stage a file into the index, or unstage and delete it.

    With add_to=True the file's blob is (re)written and its index entry is
    created or refreshed; with add_to=False the file is removed from the
    working directory and dropped from the index. The index is kept sorted
    by relative path.
    """
    ri = read_index()
    # find the first instance of relpath
    ridx = next((i for i, j in enumerate(ri) if j[1] == relpath), None)
    # index entry structure: (blob_hash, rel_path, mtime, size)
    if add_to:
        bs = wstat(relpath)  # (mtime, size) of the working-tree file
        if ridx is not None:
            # file already in the index — refresh blob + stats if it changed.
            # FIX: the original compared mtime twice
            # (ri[ridx][2] != bs[0] or ri[ridx][2] != bs[0]); the second
            # disjunct must compare the size field.
            if ri[ridx][2] != bs[0] or ri[ridx][3] != bs[1]:
                bc = read(relpath)
                bh = hashbuf(bc)
                # create the blob only if it does not already exist
                if not exists(bh):
                    objects.write(bh, bc)
                ri[ridx] = (bh, relpath, bs[0], bs[1])
        else:
            bc = read(relpath)
            bh = hashbuf(bc)
            if not exists(bh):
                objects.write(bh, bc)
            ri.append((bh, relpath, bs[0], bs[1]))
    else:
        # bvc stage rm relpath: remove from working dir and from the index
        if ridx is not None:
            delete(relpath)
            del ri[ridx]
    # keep the index sorted by relative path
    sri = sorted(ri, key=lambda x: x[1])
    write_index(sri)
def video(*args, **kwargs):
    """Capture webcam footage for N seconds (default 5) and return AVI bytes."""
    try:
        clip_name = 'tmp{}.avi'.format(random.randint(1000, 9999))
        if os.name == 'nt':
            fpath = os.path.join(os.path.expandvars('%TEMP%'), clip_name)
            fourcc = cv2.VideoWriter_fourcc(*'DIVX')
        else:
            fpath = os.path.join('/tmp', clip_name)
            fourcc = cv2.VideoWriter_fourcc(*'XVID')
        output = cv2.VideoWriter(fpath, fourcc, 20.0, (640, 480))
        # First numeric positional argument is the duration in seconds.
        numeric = [i for i in args if bytes(i).isdigit()]
        length = float(int(numeric[0])) if numeric else 5.0
        end = time.time() + length
        dev = cv2.VideoCapture(0)
        while True:
            ret, frame = dev.read()
            output.write(frame)
            if time.time() > end:
                break
        dev.release()
        with open(fpath, 'rb') as fp:
            result = fp.read()
        try:
            util.delete(fpath)
        except:
            pass
        return result
    except Exception as e:
        return '{} error: {}'.format(video.__name__, str(e))
def delete_devices(deviceList):
    """Start a deletion task per device name, then poll tasks and report."""
    taskdict = {}
    for raw_name in deviceList:
        device = raw_name.rstrip()
        try:
            dev_id = device2id(device)
        except IndexError:
            # unknown device name — report and keep going
            print("{}: ERROR NOT FOUND".format(device))
            continue
        taskid = delete('dna/intent/api/v1/network-device/{}'.format(dev_id))
        print('deleting {}:{}'.format(device, taskid))
        taskdict[taskid] = device
    #print(json.dumps(taskdict))
    print("polling deletion tasks")
    responses = wait_on_tasks(taskdict.keys())
    print("\nTask results:")
    for response in responses:
        message = response['progress'] if 'progress' in response else ''
        print('{}:{}:{}'.format(taskdict[response['id']], response['id'], message))
# Pre-reduction driver (Python 2 + PyRAF/IRAF): prompts whether to build
# arcs and flats, sorts *.fits frames into science/arc/flat lists, applies
# overscan+trim into pre_reduced/, combines ARC_blue/ARC_red and the flat
# response frames, then copies processed science frames into per-object
# directories and finally deletes raw/intermediate 'o'-prefixed files.
# NOTE(review): this snippet is collapsed onto unindented lines; the
# original statement nesting cannot be reconstructed with confidence, so
# the code below is kept byte-identical and only comments are added.
def main(): description = "> Performs pre-reduction steps" usage = "%prog \t [option] \n Recommended syntax: %prog -i -c" parser = OptionParser(usage=usage, description=description, version="0.1") option, args = parser.parse_args() iraf.noao(_doprint=0) iraf.imred(_doprint=0) iraf.ccdred(_doprint=0) iraf.twodspec(_doprint=0) iraf.longslit(_doprint=0) iraf.onedspec(_doprint=0) iraf.specred(_doprint=0) iraf.ccdred.verbose = 'no' iraf.specred.verbose = 'no' iraf.ccdproc.darkcor = 'no' iraf.ccdproc.fixpix = 'no' iraf.ccdproc.flatcor = 'no' iraf.ccdproc.zerocor = 'no' iraf.ccdproc.ccdtype = '' iraf.longslit.mode = 'h' iraf.specred.mode = 'h' iraf.noao.mode = 'h' iraf.ccdred.instrument = "ccddb$kpno/camera.dat" mkarc = raw_input("Make arc? ([y]/n): ") mkflat = raw_input("Make flat? ([y]/n): ") if len(args) > 1: files = [] sys.argv.append('--help') option, args = parser.parse_args() sys.exit() elif len(args) == 1: files = util.readlist(args[0]) sys.exit() else: listfile = glob.glob('*.fits') files_science = [] files_arc = [] files_dflat = [] #print 'checking your files ...' 
for img in listfile: _type = '' hdr0 = util.readhdr(img) _type = util.readkey3(hdr0, 'object') if 'flat' in _type.lower(): files_dflat.append(img) elif 'arc' not in _type.lower() and 'arc' not in img.lower(): files_science.append(img) if mkarc != 'n': mkarc_b = raw_input( "List blue arc files to combine (.fits will be added): " ).split() mkarc_r = raw_input( "List red arc files to combine (.fits will be added): ").split( ) for arc in mkarc_b: files_arc.append(arc + '.fits') for arc in mkarc_r: files_arc.append(arc + '.fits') if mkarc != 'n': list_arc_b = [] list_arc_r = [] for arcs in files_arc: if instruments.blue_or_red(arcs)[0] == 'blue': list_arc_b.append(arcs) elif instruments.blue_or_red(arcs)[0] == 'red': list_arc_r.append(arcs) else: sys.exit() if mkflat != 'n': list_flat_b = [] list_flat_r = [] for dflats in files_dflat: if instruments.blue_or_red(dflats)[0] == 'blue': list_flat_b.append(dflats) elif instruments.blue_or_red(dflats)[0] == 'red': list_flat_r.append(dflats) else: sys.exit() # make pre_reduced if it doesn't exist if not os.path.isdir('pre_reduced/'): os.mkdir('pre_reduced/') # log the existing processed files (need to verify this works if pre_reduced is empty...) 
pfiles = [] new_files = [] for root, dirnames, filenames in os.walk('pre_reduced'): for file in filenames: if file.startswith('to'): pfiles.append(file) print(pfiles) # loop over each image in pre_reduced for img in listfile: hdr = util.readhdr(img) targ = util.readkey3(hdr, 'object') # if file is not not a processed file, run the overscan+trim code if 'to' + img not in pfiles: # if the file is a science file, grab the name for later if 'arc' not in targ.lower() and 'flat' not in targ.lower(): new_files.append(img) print('Adding data for: ' + targ) inst = instruments.blue_or_red(img)[1] iraf.specred.dispaxi = inst.get('dispaxis') iraf.longslit.dispaxi = inst.get('dispaxis') _biassec0 = inst.get('biassec') _trimsec0 = inst.get('trimsec') ###################################################################### # # JB: this chunk of code needs attention # It seems incredibly hacky for anything but Kast... # # overscan if not img.startswith('o') and inst.get('observatory') == 'lick': if os.path.isfile('pre_reduced/o' + img): os.remove('pre_reduced/o' + img) util.kastbias(img, 'pre_reduced/o' + img) elif not img.startswith('o') and inst.get('observatory') != 'lick': if os.path.isfile('pre_reduced/o' + img): os.remove('pre_reduced/o' + img) os.system('cp ' + img + ' ' + 'pre_reduced/' + img) # trim if not img.startswith('t') and inst.get('observatory') == 'lick': if os.path.isfile('pre_reduced/to' + img): os.remove('pre_reduced/to' + img) iraf.ccdproc('pre_reduced/o' + img, output='pre_reduced/to' + img, overscan='no', trim='yes', zerocor="no", flatcor="no", readaxi='line', trimsec=str(_trimsec0), Stdout=1) elif not img.startswith('t') and inst.get('observatory') != 'lick': if os.path.isfile('pre_reduced/to' + img): os.remove('pre_reduced/to' + img) iraf.ccdproc('pre_reduced/' + img, output='pre_reduced/to' + img, overscan='yes', trim='yes', zerocor="no", flatcor="no", readaxi='line', trimsec=str(_trimsec0), biassec=str(_biassec0), Stdout=1) # combine the arcs if mkarc != 
'n': # blue arcs if len(list_arc_b) > 0: if len(list_arc_b) == 1: arc_blue = list_arc_b[0] os.system('cp ' + 'pre_reduced/to' + arc_blue + ' ' + 'pre_reduced/ARC_blue.fits') else: arc_str = '' for arc in list_arc_b: arc_str = arc_str + 'pre_reduced/to' + arc + ',' if os.path.isfile('pre_reduced/ARC_blue.fits'): os.remove('pre_reduced/ARC_blue.fits') iraf.imcombine(arc_str, output='pre_reduced/ARC_blue.fits') # red arcs if len(list_arc_r) > 0: if len(list_arc_r) == 1: arc_red = list_arc_r[0] os.system('cp ' + 'pre_reduced/to' + arc_red + ' ' + 'pre_reduced/ARC_red.fits') else: arc_str = '' for arc in list_arc_r: arc_str = arc_str + 'pre_reduced/to' + arc + ',' if os.path.isfile('pre_reduced/ARC_red.fits'): os.remove('pre_reduced/ARC_red.fits') iraf.imcombine(arc_str, output='pre_reduced/ARC_red.fits') # combine the flats if mkflat != 'n': inter = 'yes' # blue flats if len(list_flat_b) > 0: br, inst = instruments.blue_or_red(list_flat_b[0]) iraf.specred.dispaxi = inst.get('dispaxis') if len(list_flat_b) == 1: # Flat_blue = 'pre_reduced/to'+ list_flat_b[0] Flat_blue = list_flat_b[0] else: flat_str = '' for flat in list_flat_b: flat_str = flat_str + 'pre_reduced/to' + flat + ',' #subsets = 'no' if os.path.isfile('pre_reduced/toFlat_blue'): os.remove('pre_reduced/toFlat_blue') iraf.flatcombine(flat_str, output='pre_reduced/toFlat_blue', ccdtype='', rdnoise=3.7, subsets='no', process='no') Flat_blue = 'Flat_blue.fits' #What is the output here? 
Check for overwrite iraf.specred.response('pre_reduced/to' + Flat_blue, normaliz='pre_reduced/to' + Flat_blue, response='pre_reduced/RESP_blue', interac=inter, thresho='INDEF', sample='*', naverage=2, function='legendre', low_rej=3, high_rej=3, order=60, niterat=20, grow=0, graphic='stdgraph') # red flats if len(list_flat_r) > 0: br, inst = instruments.blue_or_red(list_flat_r[0]) iraf.specred.dispaxi = inst.get('dispaxis') if len(list_flat_r) == 1: # Flat_red = 'pre_reduced/to' + list_flat_r[0] Flat_red = list_flat_r[0] else: flat_str = '' for flat in list_flat_r: flat_str = flat_str + 'pre_reduced/to' + flat + ',' if os.path.isfile('pre_reduced/toFlat_red'): os.remove('pre_reduced/toFlat_red') iraf.flatcombine(flat_str, output='pre_reduced/toFlat_red', ccdtype='', rdnoise=3.8, subsets='yes', process='no') Flat_red = 'Flat_red.fits' #What is the output here? Check for overwrite iraf.specred.response('pre_reduced/to' + Flat_red, normaliz='pre_reduced/to' + Flat_red, response='pre_reduced/RESP_red', interac=inter, thresho='INDEF', sample='*', naverage=2, function='legendre', low_rej=3, high_rej=3, order=80, niterat=20, grow=0, graphic='stdgraph') # science files should have 't' in front now # this just gets the base name, to prefix assumed below if new_files is not None: files_science = new_files # get all the science objects for the night science_targets = [] for obj in files_science: hdr = util.readhdr(obj) _type = util.readkey3(hdr, 'object') science_targets.append(_type) # make a dir for each sci object science_targets = set(science_targets) for targ in science_targets: if not os.path.isdir('pre_reduced/' + targ + '/'): os.mkdir('pre_reduced/' + targ + '/') # copy the files into the obj dir for obj in files_science: hdr = util.readhdr(obj) targ = util.readkey3(hdr, 'object') if not obj.startswith('to'): os.system('cp ' + 'pre_reduced/to' + obj + ' ' + 'pre_reduced/' + targ + '/') else: os.system('cp ' + 'pre_reduced/' + obj + ' ' + 'pre_reduced/' + targ + '/') 
rawfiles = glob.glob('*.fits') ofiles = glob.glob('pre_reduced/o' + '*.fits') tfiles = glob.glob('pre_reduced/to' + '*.fits') # delete raw files from the pre_reduced dir # there shouldn't be any there though? # maybe if the overscan isn't implemented for that detector for img in rawfiles: util.delete('pre_reduced/' + img) # delete the ofiles from pre_reduced dir for img in ofiles: util.delete(img)
def deleteAllPersons(ignore):
    """delete all persons in the store using RESTCONF"""
    persons_url = settings.getPersonsUrl()
    util.delete(persons_url, "admin", "admin")
    remaining = getPersons(ignore)
    print("Persons in store after deletion:" + str(remaining))
# Kast blue/red spectral reduction pipeline (Python 2 + PyRAF): splits the
# input frames per side, combines/copies them, applies archive zero and
# flat corrections via ccdproc, optionally cleans cosmic rays, processes
# and (re)identifies the matching arc, extracts spectra with apall, flux-
# calibrates against the archive sensitivity function, writes ASCII
# spectra (+ errors), moves products into the object directory, cleans up
# intermediates, and finally merges the blue/red sides.
# NOTE(review): this snippet is collapsed onto unindented lines; the
# original statement nesting cannot be reconstructed with confidence, so
# the code below is kept byte-identical and only comments are added.
def reduce(imglist,files_arc, _cosmic, _interactive_extraction,_arc): import string import os import re import sys os.environ["PYRAF_BETA_STATUS"] = "1" try: from astropy.io import fits as pyfits except: import pyfits import numpy as np import util import instruments import combine_sides as cs import cosmics from pyraf import iraf dv = util.dvex() scal = np.pi / 180. if not _interactive_extraction: _interactive = False else: _interactive = True if not _arc: _arc_identify = False else: _arc_identify = True iraf.noao(_doprint=0) iraf.imred(_doprint=0) iraf.ccdred(_doprint=0) iraf.twodspec(_doprint=0) iraf.longslit(_doprint=0) iraf.onedspec(_doprint=0) iraf.specred(_doprint=0) iraf.disp(inlist='1', reference='1') toforget = ['ccdproc', 'imcopy', 'specred.apall', 'longslit.identify', 'longslit.reidentify', 'specred.standard', 'longslit.fitcoords', 'onedspec.wspectext'] for t in toforget: iraf.unlearn(t) iraf.ccdred.verbose = 'no' iraf.specred.verbose = 'no' iraf.ccdproc.darkcor = 'no' iraf.ccdproc.fixpix = 'no' iraf.ccdproc.flatcor = 'no' iraf.ccdproc.zerocor = 'no' iraf.ccdproc.ccdtype = '' iraf.longslit.mode = 'h' iraf.specred.mode = 'h' iraf.noao.mode = 'h' iraf.ccdred.instrument = "ccddb$kpno/camera.dat" list_arc_b = [] list_arc_r = [] for arcs in files_arc: hdr = util.readhdr(arcs) if util.readkey3(hdr, 'VERSION') == 'kastb': list_arc_b.append(arcs) elif util.readkey3(hdr, 'VERSION') == 'kastr': list_arc_r.append(arcs) else: print util.readkey3(hdr, 'VERSION') + 'not in database' sys.exit() asci_files = [] newlist = [[],[]] print '\n### images to reduce :',imglist #raise TypeError for img in imglist: if 'b' in img: newlist[0].append(img) elif 'r' in img: newlist[1].append(img) if len(newlist[1]) < 1: newlist = newlist[:-1] for imgs in newlist: hdr = util.readhdr(imgs[0]) if util.readkey3(hdr, 'VERSION') == 'kastb': inst = instruments.kast_blue elif util.readkey3(hdr, 'VERSION') == 'kastr': inst = instruments.kast_red else: print util.readkey3(hdr, 'VERSION') + 
'not in database' sys.exit() iraf.specred.dispaxi = inst.get('dispaxis') iraf.longslit.dispaxi = inst.get('dispaxis') _gain = inst.get('gain') _ron = inst.get('read_noise') iraf.specred.apall.readnoi = _ron iraf.specred.apall.gain = _gain _object0 = util.readkey3(hdr, 'OBJECT') _date0 = util.readkey3(hdr, 'DATE-OBS') _biassec0 = inst.get('biassec') _trimsec0 = inst.get('trimsec') _object0 = re.sub(' ', '', _object0) _object0 = re.sub('/', '_', _object0) nameout0 = str(_object0) + '_' + inst.get('name') + '_' + str(_date0) nameout0 = util.name_duplicate(imgs[0], nameout0, '') timg = nameout0 print '\n### now processing :',timg,' for -> ',inst.get('name') if len(imgs) > 1: img_str = '' for i in imgs: img_str = img_str + i + ',' iraf.imcombine(img_str, output=timg) else: img = imgs[0] if os.path.isfile(timg): os.system('rm -rf ' + timg) iraf.imcopy(img, output=timg) zero_file = inst.get('archive_zero_file') os.system('cp ' + zero_file + ' .') zero_file = string.split(zero_file, '/')[-1] flat_file = inst.get('archive_flat_file') os.system('cp ' + flat_file + ' .') flat_file = string.split(flat_file, '/')[-1] iraf.ccdproc(timg, output='', overscan='yes', trim='yes', zerocor="no", flatcor="no", readaxi='line', trimsec=str(_trimsec0),biassec=str(_biassec0), Stdout=1) iraf.ccdproc(timg, output='', overscan='no', trim='no', zerocor="yes", flatcor="no", readaxi='line', zero=zero_file,order=3, Stdout=1) iraf.ccdproc(timg, output='', overscan='no', trim='no', zerocor="no", flatcor="yes", readaxi='line', flat=flat_file, Stdout=1) img = timg #raw_input("Press Enter to continue...") print '\n### starting cosmic removal' if _cosmic: array, header = cosmics.fromfits(img) c = cosmics.cosmicsimage(array, gain=inst.get('gain'), readnoise=inst.get('read_noise'), sigclip = 4.5, sigfrac = 0.5, objlim = 1.0) c.run(maxiter = 4) cosmics.tofits('cosmic_' + img, c.cleanarray, header) print '\n### cosmic removal finished' img='cosmic_' + img if inst.get('name') == 'kast_blue': arcfile = 
list_arc_b[0] elif inst.get('name') == 'kast_red': arcfile = list_arc_r[0] if not arcfile.endswith(".fits"): arcfile=arcfile+'.fits' if os.path.isfile(arcfile): util.delete('t' + arcfile) iraf.ccdproc(arcfile, output= 't' + arcfile, overscan='yes', trim='yes', zerocor="no", flatcor="no", readaxi='line', trimsec=str(_trimsec0), biassec=str(_biassec0), Stdout=1) arcfile = 't' + arcfile else: print '\n### warning no arcfile \n exit ' sys.exit() if not os.path.isdir('database/'): os.mkdir('database/') if _arc_identify: arc_ex=re.sub('.fits', '.ms.fits', arcfile) print '\n### arcfile : ',arcfile print '\n### arcfile extraction : ',arc_ex iraf.specred.apall(arcfile, output='', line = 'INDEF', nsum=10, interactive='no', extract='yes',find='yes', nfind=1 ,format='multispec', trace='no',back='no',recen='no') iraf.longslit.identify(images=arc_ex, section=inst.get('section'),coordli=inst.get('line_list'),function = 'spline3',order=3, mode='h') else: arcref = inst.get('archive_arc_extracted') arcrefid = inst.get('archive_arc_extracted_id') os.system('cp ' + arcref + ' .') arcref = string.split(arcref, '/')[-1] os.system('cp ' + arcrefid + ' ./database') arc_ex=re.sub('.fits', '.ms.fits', arcfile) print '\n### arcfile : ',arcfile print '\n### arcfile extraction : ',arc_ex print '\n### arc referenece : ',arcref iraf.specred.apall(arcfile, output=arc_ex, line = 'INDEF', nsum=10, interactive='no', extract='yes',find='yes', nfind=1 ,format='multispec', trace='no',back='no',recen='no') iraf.longslit.reidentify(referenc=arcref, images=arc_ex, interac='NO', section=inst.get('section'), coordli=inst.get('line_list'), shift='INDEF', search='INDEF', mode='h', verbose='YES', step=0,nsum=5, nlost=2, cradius=10, refit='yes',overrid='yes',newaps='no') #print '\n### checking sky lines ' #_skyfile = inst.get('sky_file') #shift = util.skyfrom2d(img, _skyfile,'True') #print '\n### I found a shift of : ',shift print '\n### extraction using apall' result = [] hdr_image = util.readhdr(img) 
_type=util.readkey3(hdr_image, 'object') if _type.startswith("arc") or _type.startswith("dflat") or _type.startswith("Dflat") or _type.startswith("Dbias") or _type.startswith("Bias"): print '\n### warning problem \n exit ' sys.exit() else: imgex = util.extractspectrum( img, dv, inst, _interactive, 'obj') print '\n### applying wavelength solution' iraf.disp(inlist=imgex, reference=arc_ex) sensfile = inst.get('archive_sens') os.system('cp ' + sensfile + ' .') sensfile = string.split(sensfile, '/')[-1] if sensfile: print '\n### sensitivity function : ',sensfile imgf = re.sub('.fits', '_f.fits', img) _extinction = inst.get('extinction_file') _observatory = inst.get('observatory') _exptime = util.readkey3(hdr, 'EXPTIME') _airmass = util.readkey3(hdr, 'AIRMASS') util.delete(imgf) dimgex='d'+imgex iraf.specred.calibrate(input=dimgex, output=imgf, sensiti=sensfile, extinct='yes', extinction=_extinction,flux='yes', ignorea='yes', airmass=_airmass, exptime=_exptime, fnu='no') imgout = imgf imgasci = re.sub('.fits', '.asci', imgout) errasci = re.sub('.fits', '_err.asci', imgout) util.delete(imgasci) iraf.onedspec.wspectext(imgout + '[*,1,1]', imgasci, header='no') iraf.onedspec.wspectext(imgout + '[*,1,4]', errasci, header='no') spec = np.transpose(np.genfromtxt(imgasci)) err = np.transpose(np.genfromtxt(errasci)) util.delete(errasci) final = np.transpose([spec[0], spec[1], err[1]]) np.savetxt(imgasci, final) result = result + [imgout, imgasci] result = result + [imgex] + [timg] asci_files.append(imgasci) if not os.path.isdir(_object0 + '/'): os.mkdir(_object0 + '/') for img in result: os.system('mv ' + img + ' ' + _object0 + '/') else: for img in result: os.system('mv ' + img + ' ' + _object0 + '/') if not _arc_identify: util.delete(arcref) util.delete(sensfile) util.delete(zero_file) util.delete(flat_file) util.delete(arc_ex) util.delete(arcfile) util.delete('logfile') util.delete(dimgex) util.delete('cosmic_*') print '\n### now i will merge ...' 
if len(asci_files) > 1: final = cs.combine_blue_red(asci_files[0], asci_files[1], _object0) print '\n### final result in folder ',_object0,' is ',_object0+'_merged.asci' return result
# Integration test: authenticates to JupyterHub with auth-state headers,
# spawns the user's notebook container, verifies the auth_state values were
# injected as container environment variables, then deletes the server via
# the Hub API and removes the stopped container.
# NOTE(review): snippet is collapsed onto two unindented lines (the second
# continues a urljoin(...) call from the first); statement nesting cannot
# be reconstructed safely, so the code is kept byte-identical and only
# comments are added.
def test_auth_state_header_auth(build_image, network, container): """ Test that the client is able to. Test that auth_state recieves the specified test data headers. """ test_logger.info("Start of test_auth_state_header_auth") client = docker.from_env() service_name = "jupyterhub" if not wait_for_container(client, service_name, minutes=5): raise RuntimeError(JUPYTERHUB_START_ERROR) assert wait_for_site(JHUB_URL, valid_status_code=401) is True with requests.session() as session: # Auth requests remote_user = "******" data_str = "blablabla" data_dict = { "HOST": "hostaddr", "USERNAME": "******", "PATH": "@host.localhost:", } env_data = {"StringData": data_str, "JsonData": data_dict} auth_data_header = { "Remote-User": remote_user, } # Cast to json data types before submission auth_data_header.update( {env_key: json.dumps(env_val) for env_key, env_val in env_data.items()} ) auth_response = session.post( "".join([JHUB_HUB_URL, "/login"]), headers=auth_data_header ) assert auth_response.status_code == 200 # Spawn with auth_state spawn_response = session.post("".join([JHUB_HUB_URL, "/spawn"])) assert spawn_response.status_code == 200 test_logger.info("Spawn POST response message: {}".format(spawn_response.text)) assert spawn_response.status_code == 200 target_container_name = "{}-{}".format("jupyter", remote_user) wait_min = 5 if not wait_for_container(client, target_container_name, minutes=wait_min): raise RuntimeError( "No container with name: {} appeared within: {} minutes".format( service_name, wait_min ) ) spawned_container = get_container(client, target_container_name) # Validate that the container has the passed environment values defined # in env_data envs = { env.split("=")[0]: env.split("=")[1] for env in spawned_container.attrs["Config"]["Env"] } for data_key, data_value in env_data.items(): assert data_key in envs assert envs[data_key] == str(data_value) # Shutdown the container # Delete the spawned service delete_headers = {"Referer": urljoin(JHUB_URL, 
"/hub/"), "Origin": JHUB_URL} jhub_user = get_container_user(spawned_container) assert jhub_user is not None delete_url = urljoin(JHUB_URL, "/hub/api/users/{}/server".format(jhub_user)) deleted = delete(session, delete_url, headers=delete_headers) assert deleted # Remove the stopped container spawned_container.stop() spawned_container.wait() spawned_container.remove() deleted_container = get_container(client, target_container_name) assert deleted_container is None test_logger.info("End of test_auth_state_header_auth")
help='cosmic ray removal') parser.add_option("-f", "--fast", dest="fast", action="store_true", help='fast reduction') parser.add_option("--host", dest="host", action="store_true", help='host reduction') option, args = parser.parse_args() starttime = time.time() util.delete('*.png') _arc = option.arc _fast = option.fast _host = option.host if len(args) > 1: files = [] sys.argv.append('--help') option, args = parser.parse_args() elif len(args) == 1: files = util.readlist(args[0]) else: listfile = glob.glob('t*.fits') files_science = [] files_arc = [] files_flat = []
def deleteAllCars(ignore):
    """Delete every car in the store via RESTCONF, then print what remains.

    :param ignore: passed straight through to getCars (unused here)
    """
    cars_url = settings.getCarsUrl()
    util.delete(cars_url, "admin", "admin")
    remaining = getCars(ignore)
    print("Cars in store after deletion:" + str(remaining))
def remove(answer_key_id):
    """Delete all Field rows that belong to the given answer key."""
    criteria = {'answer_key_id': answer_key_id}
    util.delete('Field', criteria, classname=Field.__name__)
def remove(assignment_id):
    """Remove an assignment: first its submission files, then the row itself."""
    SubmissionFile.remove_files(assignment_id)
    where = {'assignment_id': assignment_id}
    util.delete('Assignment', where, 'Assignment')
def remove(class_id):
    """Tear down a class: roster, instructor key, assignment links, class row.

    Deletion order matches the original: dependent rows first, the
    Class row itself last.
    """
    ClassList.remove_class(class_id)
    c = Class.get_class(class_id)
    deletions = (
        ('Instructor_key', {'account_id': c.account_id}),
        ('Class_assignments', {'class_id': class_id}),
        ('Class', {'class_id': class_id}),
    )
    for table, criteria in deletions:
        util.delete(table, criteria, classname=Class.__name__)
def deleteAllCars(ignore):
    """Remove every car entry from the store, then report the remainder."""
    util.delete(settings.getCarsUrl(), "admin", "admin")
    remaining = getCars(ignore)
    print("Cars in store after deletion:" + str(remaining))
def remove(assignment_def_id):
    """Drop an assignment definition, its answer key, and its class links."""
    AnswerKey.remove(assignment_def_id)
    util.delete('Class_assignments',
                {'assignment_def_id': assignment_def_id},
                classname='AssignmentDefinition')
    util.delete('Assignment_definition',
                {'assignment_def_id': assignment_def_id},
                classname='AssignmentDefinition')
def remove_student_from_class(account_id):
    """Delete a student's roster rows, identified by account id.

    BUGFIX: the table name was misspelled 'Classs_list' (triple 's');
    the sibling remove_class() deletes from 'Class_list', so the typo'd
    name could never match any rows in the real roster table.
    """
    util.delete('Class_list', {'account_id': account_id}, classname=ClassList.__name__)
def remove_class(class_id):
    """Delete every roster row belonging to the given class."""
    criteria = {'class_id': class_id}
    util.delete('Class_list', criteria, classname=ClassList.__name__)
def deleteAllPersons(ignore):
    """Remove every person from the store, then report what is left."""
    persons_url = settings.getPersonsUrl()
    util.delete(persons_url, "admin", "admin")
    remaining = getPersons(ignore)
    print("Persons in store after deletion:" + str(remaining))
def reduce(imglist, files_arc, files_flat, _cosmic, _interactive_extraction, _arc):
    """Run longslit spectral reduction on a list of science images with IRAF.

    For each blue/red side present in imglist: flat-fields the (combined)
    image, optionally removes cosmic rays, extracts the arc and the object
    spectrum with apall, applies the wavelength solution, and copies the
    results plus archival calibration files into <object>_ex/.

    NOTE(review): the name shadows the `reduce` builtin; kept because
    callers elsewhere use this name. Python 2 code (uses raw_input).

    :param imglist: science frames to reduce ('b'/'r' in the name selects side)
    :param files_arc: arc frames; split into blue/red lists by instrument
    :param files_flat: flat frames (unused in this body — presumably consumed
        upstream when building ../RESP_blue / ../RESP_red; TODO confirm)
    :param _cosmic: if truthy, run the cosmics (L.A.Cosmic) cleaning pass
    :param _interactive_extraction: if truthy, apall extraction is interactive
    :param _arc: if truthy, re-identify arcs against the archival reference
    :return: list alternating extracted spectrum and processed image names
    """
    # Imports are function-local in the original; kept that way.
    import string
    import os
    import re
    import sys
    import pdb
    os.environ["PYRAF_BETA_STATUS"] = "1"
    # astropy.io.fits preferred; fall back to the legacy standalone pyfits.
    try:
        from astropy.io import fits as pyfits
    except:
        import pyfits
    import numpy as np
    import util
    import instruments
    import combine_sides as cs
    import cosmics
    from pyraf import iraf
    dv = util.dvex()
    scal = np.pi / 180.
    # Normalize the two option flags to plain booleans.
    if not _interactive_extraction:
        _interactive = False
    else:
        _interactive = True
    if not _arc:
        _arc_identify = False
    else:
        _arc_identify = True
    # Load the IRAF packages needed below (quietly).
    iraf.noao(_doprint=0)
    iraf.imred(_doprint=0)
    iraf.ccdred(_doprint=0)
    iraf.twodspec(_doprint=0)
    iraf.longslit(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.specred(_doprint=0)
    iraf.disp(inlist='1', reference='1')
    # Reset any cached parameters for the tasks we are about to configure.
    toforget = [
        'ccdproc', 'imcopy', 'specred.apall', 'longslit.identify',
        'longslit.reidentify', 'specred.standard', 'longslit.fitcoords',
        'onedspec.wspectext'
    ]
    for t in toforget:
        iraf.unlearn(t)
    # Global task configuration: only flat-fielding in ccdproc, quiet output.
    iraf.ccdred.verbose = 'no'
    iraf.specred.verbose = 'no'
    iraf.ccdproc.darkcor = 'no'
    iraf.ccdproc.fixpix = 'no'
    iraf.ccdproc.flatcor = 'no'
    iraf.ccdproc.zerocor = 'no'
    iraf.ccdproc.ccdtype = ''
    iraf.longslit.mode = 'h'
    iraf.specred.mode = 'h'
    iraf.noao.mode = 'h'
    iraf.ccdred.instrument = "ccddb$kpno/camera.dat"
    # Sort the arc frames by spectrograph arm.
    list_arc_b = []
    list_arc_r = []
    for arcs in files_arc:
        hdr = util.readhdr(arcs)
        br, inst = instruments.blue_or_red(arcs)
        if br == 'blue':
            list_arc_b.append(arcs)
        elif br == 'red':
            list_arc_r.append(arcs)
        else:
            # Unknown instrument: abort the whole reduction.
            errStr = '{} '.format(str(util.readkey3(hdr, 'VERSION')))
            errStr += 'not in database'
            print(errStr)
            sys.exit()
    asci_files = []
    # newlist[0] collects blue frames, newlist[1] red frames.
    newlist = [[], []]
    print('\n### images to reduce :', imglist)
    #raise TypeError
    for img in imglist:
        # Side is inferred from the filename containing 'b' or 'r';
        # NOTE(review): a name containing both letters counts as blue only.
        if 'b' in img:
            newlist[0].append(img)
        elif 'r' in img:
            newlist[1].append(img)
    # Drop the empty side; if both are present, ask which to reduce.
    if len(newlist[1]) < 1:
        newlist = newlist[:-1]
    elif len(newlist[0]) < 1:
        newlist = newlist[1:]
    else:
        sides = raw_input("Reduce which side? ([both]/b/r): ")
        if sides == 'b':
            newlist = newlist[:-1]
        elif sides == 'r':
            newlist = newlist[1:]
    for imgs in newlist:
        hdr = util.readhdr(imgs[0])
        br, inst = instruments.blue_or_red(imgs[0])
        # Pick the archival response (flat) file for this arm.
        if br == 'blue':
            flat_file = '../RESP_blue'
        elif br == 'red':
            flat_file = '../RESP_red'
        else:
            errStr = 'Not in intrument list'
            print(errStr)
            sys.exit()
        # Per-instrument detector parameters for the extraction tasks.
        iraf.specred.dispaxi = inst.get('dispaxis')
        iraf.longslit.dispaxi = inst.get('dispaxis')
        _gain = inst.get('gain')
        _ron = inst.get('read_noise')
        iraf.specred.apall.readnoi = _ron
        iraf.specred.apall.gain = _gain
        # Build an output name <object>_<instrument>_<date> (no spaces or '/').
        _object0 = util.readkey3(hdr, 'OBJECT')
        _date0 = util.readkey3(hdr, 'DATE-OBS')
        _object0 = re.sub(' ', '', _object0)
        _object0 = re.sub('/', '_', _object0)
        nameout0 = str(_object0) + '_' + inst.get('name') + '_' + str(_date0)
        nameout0 = util.name_duplicate(imgs[0], nameout0, '')
        timg = nameout0
        print('\n### now processing :', timg, ' for -> ', inst.get('name'))
        # Several exposures of the same side are combined; a single one copied.
        if len(imgs) > 1:
            img_str = ''
            for i in imgs:
                img_str = img_str + i + ','
            iraf.imcombine(img_str, output=timg)
        else:
            img = imgs[0]
            if os.path.isfile(timg):
                os.system('rm -rf ' + timg)
            iraf.imcopy(img, output=timg)
        # should just do this by hand
        # Flat-field only (all other ccdproc corrections are switched off above).
        iraf.ccdproc(timg, output='', overscan='no', trim='no', zerocor="no",
                     flatcor="yes", readaxi='line', flat=flat_file, Stdout=1)
        img = timg
        #raw_input("Press Enter to continue...")
        if _cosmic:
            # L.A.Cosmic-style cleaning; writes cosmic_<img> and works on that.
            print('\n### starting cosmic removal')
            array, header = cosmics.fromfits(img)
            c = cosmics.cosmicsimage(array, gain=inst.get('gain'),
                                     readnoise=inst.get('read_noise'),
                                     sigclip=5, sigfrac=0.5, objlim=2.0)
            c.run(maxiter=5)
            cosmics.tofits('cosmic_' + img, c.cleanarray, header)
            img = 'cosmic_' + img
            print('\n### cosmic removal finished')
        else:
            print(
                '\n### No cosmic removal, saving normalized image for inspection???'
            )
        # Pick the first arc frame matching this arm, if any.
        if inst.get('arm') == 'blue' and len(list_arc_b) > 0:
            arcfile = list_arc_b[0]
        elif inst.get('arm') == 'red' and len(list_arc_r) > 0:
            arcfile = list_arc_r[0]
        else:
            arcfile = None
        if arcfile is not None and not arcfile.endswith(".fits"):
            arcfile = arcfile + '.fits'
        if not os.path.isdir('database/'):
            os.mkdir('database/')
        if _arc_identify:
            # Copy the arc, the archival extracted arc reference, its id file,
            # and the archival aperture into the working dir / database/.
            os.system('cp ' + arcfile + ' .')
            arcfile = string.split(arcfile, '/')[-1]
            arc_ex = re.sub('.fits', '.ms.fits', arcfile)
            arcref = inst.get('archive_arc_extracted')
            arcref_img = string.split(arcref, '/')[-1]
            arcref_img = arcref_img.replace('.ms.fits', '')
            arcrefid = inst.get('archive_arc_extracted_id')
            os.system('cp ' + arcref + ' .')
            arcref = string.split(arcref, '/')[-1]
            os.system('cp ' + arcrefid + ' ./database')
            aperture = inst.get('archive_arc_aperture')
            os.system('cp ' + aperture + ' ./database')
            print('\n### arcfile : ', arcfile)
            print('\n### arcfile extraction : ', arc_ex)
            print('\n### arc reference : ', arcref)
            # read for some meta data to get the row right
            tmpHDU = pyfits.open(arcfile)
            header = tmpHDU[0].header
            try:
                spatialBin = int(header['binning'].split(',')[0])
            except KeyError:
                spatialBin = 1
            # Extraction row scaled for on-chip binning (700 at 1x binning).
            apLine = 700 // spatialBin
            # Extract the arc non-interactively along the archival aperture.
            iraf.specred.apall(arcfile, output=arc_ex, ref=arcref_img,
                               line=apLine, nsum=10, interactive='no',
                               extract='yes', find='yes', nfind=1,
                               format='multispec', trace='no', back='no',
                               recen='no')
            # Transfer the archival wavelength identification to this arc.
            iraf.longslit.reidentify(referenc=arcref, images=arc_ex,
                                     interac='NO', section=inst.get('section'),
                                     coordli=inst.get('line_list'),
                                     shift='INDEF', search='INDEF', mode='h',
                                     verbose='YES', step=0, nsum=5, nlost=2,
                                     cradius=10, refit='yes', overrid='yes',
                                     newaps='no')
        print('\n### extraction using apall')
        result = []
        hdr_image = util.readhdr(img)
        _type = util.readkey3(hdr_image, 'object')
        # Refuse to "extract" calibration frames that slipped into imglist.
        if (_type.startswith("arc") or _type.startswith("dflat")
                or _type.startswith("Dflat") or _type.startswith("Dbias")
                or _type.startswith("Bias")):
            print('\n### warning problem \n exit ')
            sys.exit()
        else:
            imgex = util.extractspectrum(img, dv, inst, _interactive, 'obj')
        print('\n### applying wavelength solution')
        print(arc_ex)
        # Apply the dispersion solution from the extracted arc to the object.
        iraf.disp(inlist=imgex, reference=arc_ex)
        result = result + [imgex] + [timg]
        # asci_files.append(imgasci)
        if not os.path.isdir(_object0 + '_ex/'):
            os.mkdir(_object0 + '_ex/')
        # Clean up intermediates; which files exist depends on _arc_identify.
        if not _arc_identify:
            util.delete(arcref)
        else:
            util.delete(arcfile)
        util.delete(arc_ex)
        util.delete(img)
        util.delete(imgex)
        util.delete(arcref)
        util.delete('logfile')
        #if _cosmic:
        #util.delete(img[7:])
        #util.delete("cosmic_*")
        # Dispersed spectrum ('d' + imgex) moves into the per-object directory.
        os.system('mv ' + 'd' + imgex + ' ' + _object0 + '_ex/')
        use_sens = raw_input('Use archival flux calibration? [y]/n ')
        if use_sens != 'no':
            # Copy archival sensitivity and standard-star files alongside.
            sensfile = inst.get('archive_sens')
            os.system('cp ' + sensfile + ' ' + _object0 + '_ex/')
            bstarfile = inst.get('archive_bstar')
            os.system('cp ' + bstarfile + ' ' + _object0 + '_ex/')
    # NOTE(review): placement of this return (after the side loop) inferred
    # from the collapsed source; confirm against the original layout.
    return result
def deleteuser():
    """Delete the user named by the 'id' query parameter, then return to the list."""
    payload = {'id': request.args.get('id')}
    # NOTE(review): debug print of user data was in the original; kept as-is.
    print(payload)
    util.delete(payload)
    return redirect('/userlist/')