def file(self, file_name, type=None):
    """Creates a new writable file in the results area and registers it.

    The on-disk name is built from the RESULTS_FMT pattern; the file
    information is stored in the results model held by the 'res'
    attribute under the caller-provided name.

    Args:
        self: The object pointer
        file_name: The name of the file
        type: The file's type; "b" opens the file in binary mode

    Returns:
        A python file object opened for writing

    See:
        results_model: avi.models.results_model
    """
    storage_path = wh().get().RESULTS_PATH
    disk_name = wh().get().RESULTS_FMT % {"task": "res",
                                          "date": str(round(time.time())),
                                          "name": file_name}
    full_name = os.path.join(storage_path, disk_name)
    # Binary mode only when explicitly requested.
    mode = "wb" if type == "b" else "w"
    handle = open(full_name, mode)
    info = file_manager().save_file_info(file_name, full_name,
                                         self.res.job_id,
                                         self.task_name,
                                         timezone.now())
    self.res.resources.add(info)
    return handle
def start(self, data):
    """This method runs the save_user_data job.

    It will receive the user file and it will store it in the user
    space using a django FileSystemStorage, registering the stored
    file through the file_manager.

    Args:
        self: The object pointer.
        data: A dictionary containing the user file to be saved under
            the 'file' key.

    Returns:
        The job_data attribute. The ok attribute will be True if
        everything went correctly, False otherwise.

    @see file_manager
    @link avi.utils.data.file_manager.file_manager
    """
    from django.core.files.storage import FileSystemStorage
    fs = FileSystemStorage()
    f = data['file']
    file_name = wh().get().USER_FMT % {"user": "******",
                                       "date": str(round(time.time())),
                                       "name": f.name}
    full_name = os.path.join(wh().get().USER_PATH, file_name)
    # BUG FIX: FileSystemStorage.save() may alter the target name to
    # avoid collisions; register the name it actually used instead of
    # the requested one (the return value was previously discarded).
    saved_name = fs.save(full_name, f)
    fm = file_manager()
    fm.save_file_info(f.name, saved_name, -1, "user", timezone.now())
    self.job_data.ok = True
    self.job_data.data = {}
    self.job_data.data['status'] = 'success'
    return self.job_data
def save_vot(self, fname, data):
    """Saves a VOTable file

    Saves the given data in a VOTable file and saves the file
    information in the results_model stored in 'res'.

    Args:
        self: The object pointer
        fname: The name of the file to be saved
        data: The data to be saved (an astropy VOTable object with a
            to_xml() method)

    See:
        results_model: avi.models.results_model
    """
    fm = file_manager()
    path = wh().get().RESULTS_PATH
    # BUG FIX: keep the caller-provided name; the formatted name is
    # only the on-disk name. The previous code overwrote 'fname', so
    # the resource was registered under the formatted name, unlike
    # file(), which registers the original name.
    disk_name = wh().get().RESULTS_FMT % {"task": "res",
                                          "date": str(round(time.time())),
                                          "name": fname}
    full_name = os.path.join(path, disk_name)
    model = fm.save_file_info(fname, full_name,
                              self.res.job_id,
                              self.task_name,
                              timezone.now())
    self.res.resources.add(model)
    data.to_xml(full_name)
def start(self, data):
    """This method runs the save_samp_data job.

    It will receive the samp data and it will store it in the user
    space using the file_manager.

    Args:
        self: The object pointer.
        data: A dictionary containing the samp data to be saved.

    Returns:
        The job_data attribute. The ok attribute will be True if
        everything went correctly, False otherwise.

    @see file_manager
    @link avi.utils.data.file_manager.file_manager
    """
    # FIXME:
    manager = file_manager()
    dest_name = wh().get().USER_FMT % {"user": "******",
                                       "date": str(round(time.time())),
                                       "name": data['name']}
    # The samp payload arrives URL-quoted; decode it before storage.
    manager.save_file_plain_data(unquote(data['data']),
                                 dest_name,
                                 wh().get().USER_PATH,
                                 -1, "user", timezone.now())
    self.job_data.ok = True
    self.job_data.data = {'status': 'success'}
    return self.job_data
def __get_data(self, data, data_type):
    """Returns the data in its proper type.

    Converts the given string value to the type named by data_type.
    Table/file data types are resolved to a full path inside the
    matching storage area instead.

    Args:
        self: The object pointer.
        data: The data value to be converted.
        data_type: The type of the data value to be returned.

    Returns:
        The data value in its given type, or "" when the value cannot
        be parsed or the type is unknown.
    """
    if data_type == "string":
        return data
    # File-like types map to a configured storage root.
    path_attrs = {"gaia_table": "GAIA_PATH",
                  "hsa_table": "HSA_PATH",
                  "hsa_fits": "HSA_PATH",
                  "results_data": "RESULTS_PATH",
                  "user_data": "USER_PATH"}
    root_attr = path_attrs.get(data_type)
    if root_attr is not None:
        return os.path.join(getattr(wh().get(), root_attr), data)
    # Numeric types share the same parse-or-empty behaviour.
    converters = {"float": float,
                  "integer": int,
                  "long": int,
                  "complex": complex}
    convert = converters.get(data_type)
    if convert is None:
        # Unknown data_type: same fallback as an unparsable value.
        return ""
    try:
        return convert(data)
    except ValueError:
        return ""
def get(self, request, resource_id):
    """Serves a resource as a file download.

    Args:
        self: The object pointer
        request: HttpRequest object
        resource_id: The id of the resource

    Returns:
        A HttpResponse (or StreamingHttpResponse) with the resource,
        or None if no resource with that id exists
    """
    res = resource_model.objects.filter(pk=resource_id)
    if not res:
        return None
    resource = res[0]
    full_name = os.path.join(resource.path, resource.name)
    file_type = 'application/xml'
    ext = os.path.splitext(resource.name)[1]
    if ext == ".fits" or ext == ".tar":
        file_type = 'application/x-tar'
    elif ext not in (".xml", ".vot"):
        # BUG FIX: the previous condition (ext != ".xml" or ext != ".vot")
        # was always true, so .xml/.vot downloads also went through
        # guess_type (which returns None for .vot).
        file_type = mimetypes.guess_type(full_name)[0]
    size = os.path.getsize(full_name)
    disposition = 'attachment; filename="%s"' % resource.name
    # NOTE: the file is only opened in the branch that actually reads
    # it; the previous code opened a handle up front and leaked it on
    # the production and streaming paths.
    if wh().get().production:
        # In production the front-end server sends the file itself.
        response = HttpResponse()
        response['Content-Disposition'] = disposition
        response['X-Sendfile'] = full_name
        return response
    if size <= 20:
        # Tiny files are served in a single response body.
        response = HttpResponse(open(full_name, 'rb'),
                                content_type=file_type)
        response['Content-Disposition'] = disposition
        response['Content-Length'] = size
        return response
    # Larger files are streamed in chunks.
    chunk_size = 8192
    response = StreamingHttpResponse(FileWrapper(open(full_name, 'rb'),
                                                 chunk_size),
                                     content_type=file_type)
    response['Content-Length'] = size
    response['Content-Disposition'] = disposition
    return response
def get_herschel_data(self, log, data):
    """Does a query to the herschel archive.

    It will read the input contained in the data parameter and it will
    query the herschel archive through the interface_manager. Then it
    will save the results using the file_manager.

    Args:
        self: The object pointer
        log: The log
        data: The input data to the query

    Raises:
        task_exception: avi.task.task.task_exception

    See:
        interface_manager: avi.core.interface.interface_manager.interface_manager

    See also:
        file_manager: avi.utils.data.file_manager.file_manager
    """
    log.debug('get_herschel_data method')
    im = risea().get().interface_manager
    fm = file_manager()
    cm = coordinates_manager()
    jm = json_manager()
    if not im:
        log.error('There is no interface manager initialized!')
        raise err("There is no interface manager initialized!")
    try:
        # Coordinate resolution: either resolve an object name through
        # the Simbad/Ned services, or take explicit equatorial (ra/dec)
        # or galactic (l/b) coordinates from the input data.
        ra = None
        dec = None
        if data.get('name') and data.get('name_coord') == 'name':
            log.info("Name attr %s found, retrieving coordinates from " \
                     + "Simbad/Ned databases", data['name'])
            coords = simbad().get_object_coordinates(data['name'])
            if not coords:
                # Fall back to Ned when Simbad does not know the name.
                coords = ned().get_object_coordinates(data['name'])
                if not coords:
                    log.error('Name %s not found in Simbad/Ned data bases',
                              data['name'])
                    # NOTE(review): err() receives log-style arguments
                    # here; presumably it formats them — confirm against
                    # the err definition.
                    raise err('Name %s not found in Simbad/Ned data bases',
                              data['name'])
            v_ra = coords['ra']
            v_dec = coords['dec']
        else:
            log.info("Retrieving coordinates from the provided data...")
            v_ra = data.get('ra')
            v_dec = data.get('dec')
            if not v_ra or not v_dec:
                log.info("No equatorial coordinates found!")
                log.info("Reading galactic coordinates from data...")
                v_l = data.get('l')
                v_b = data.get('b')
                if not v_l or not v_b:
                    log.error('No valid coordinates found')
                    raise err('No valid coordinates found')
                # Convert galactic coordinates to ICRS equatorial.
                coords = cm.gal_to_icrs(float(v_l), float(v_b))
                ra = coords['ra']
                dec = coords['dec']
            else:
                try:
                    ra = float(v_ra)
                    dec = float(v_dec)
                except ValueError:
                    # Not plain degrees; parse via the coordinates
                    # manager (presumably sexagesimal input — confirm).
                    coords = cm.icrs_degrees(v_ra, v_dec)
                    ra = coords['ra']
                    dec = coords['dec']
        src = None
        shape = data['shape']
        if shape != 'cone' and shape != 'box' and shape != 'polygon':
            log.error("Unknown shape!")
            raise err("Unknown shape!")
        log.info("Shape: %s", shape)
        table = data['table']
        if not table or table == "":
            # Default herschel observation table.
            table = "v_active_observation"
        log.info("Table: %s", "hsa.%s" % (table))
        if not data['positional_images']:
            # Source-table branch: query for positional sources and
            # store the returned VOTable text.
            log.info("Retrieving positional sources from " +
                     "the herschel archive...")
            if shape == 'cone':
                if not data['radius']:
                    log.error("No radius provided")
                    raise err("No radius provided")
                src = im._archive_herschel_get_circle(ra, dec,
                                                      data['radius'],
                                                      table)
            elif shape == 'box':
                if not data['width'] or not data['height']:
                    log.error("No dimensions provided")
                    raise err("No dimansions provided")
                src = im._archive_herschel_get_box(ra, dec,
                                                   data['width'],
                                                   data['height'],
                                                   table)
            elif shape == 'polygon':
                vertexes = jm.get_vertexes(data)
                src = im._archive_herschel_get_polygon(ra, dec,
                                                       vertexes, table)
            if src != None:
                # Save the raw archive response under the configured
                # sources file-name pattern.
                if not data.get('output_file'):
                    file_name = wh().get() \
                        .SOURCES_FMT%{"mission":"hsa",
                                      "date":str(round(time.time())),
                                      "name":"data"}
                else:
                    file_name = wh().get() \
                        .SOURCES_FMT%{"mission":"hsa",
                                      "date":str(round(time.time())),
                                      "name":data['output_file']}
                fm.save_file_plain_data(src, "%s.vot" % (file_name),
                                        wh().get().HSA_PATH,
                                        self.task_id, "hsa",
                                        timezone.now())
                #"%f_%f_%s_%s.vot" \
                #%(ra,dec,shape,table))
            else:
                log.error(
                    "Something went wrong while querying the archive!")
                raise err(
                    "Something went wrong while querying the archive!")
            log.info("Everything done!")
            return
        # Image branch: retrieve maps for the requested region.
        log.info('Retrieving maps from the herschel archive...')
        if shape == 'cone':
            if not data['radius']:
                log.error("No radius provided")
                raise err("No radius provided")
            if not data.get('output_file'):
                im.archive_get_maps(ra, dec, data['radius'],
                                    data['level'],
                                    data['instrument'],
                                    id=self.task_id)
            else:
                log.info("fileeeeeeeeeee")
                im.archive_get_maps(ra, dec, data['radius'],
                                    data['level'],
                                    data['instrument'],
                                    id=self.task_id,
                                    name=data['output_file'])
            log.info("Everything done!")
            return
        elif shape == 'box':
            if not data['width'] or not data['height']:
                log.error("No dimensions provided")
                raise err("No dimansions provided")
            if not data.get('output_file'):
                im.archive_get_maps_box(ra, dec, data['width'],
                                        data['height'], data['level'],
                                        data['instrument'],
                                        id=self.task_id)
            else:
                im.archive_get_maps_box(ra, dec, data['width'],
                                        data['height'], data['level'],
                                        data['instrument'],
                                        id=self.task_id,
                                        name=data['output_file'])
            log.info("Everything done!")
            return
        elif shape == 'polygon':
            vertexes = jm.get_vertexes(data)
            if not data.get('output_file'):
                im.archive_get_maps_polygon(ra, dec, vertexes,
                                            data['level'],
                                            data['instrument'],
                                            id=self.task_id)
            else:
                im.archive_get_maps_polygon(ra, dec, vertexes,
                                            data['level'],
                                            data['instrument'],
                                            id=self.task_id,
                                            name=data['output_file'])
            log.info("Everything done!")
            return
        # Unreachable for valid shapes; kept as a safety net.
        log.error("Something went wrong...")
        raise err("Something went wrong...")
    except Exception:
        # Re-raise any failure as the task error type, preserving the
        # formatted traceback text.
        log.error(traceback.format_exc())
        raise err(traceback.format_exc())
def run(self):
    """Runs the query to the herschel archive.

    If the task_data contains the 'input_file' key it will read that
    value and call get_herschel_data() once per input parameter found
    in the input_file.

    If the task_data contains the 'adql' key it will query the archive
    through the interface_manager using that query.

    Otherwise it will call get_herschel_data() with the input from
    task_data.

    Args:
        self: The object pointer.

    Raises:
        task_exception: avi.task.task.task_exception

    See:
        interface_manager: avi.core.interface.interface_manager.interface_manager

    See also:
        get_herschel_data: get_herschel_data()
    """
    # NOTE: a dead local stub `def get_herschel_data(log, data)` that
    # shadowed nothing (all calls go through self.) has been removed.
    log = logger().get_log('herschel_query_task')
    data = self.task_data.data
    jm = json_manager()
    if data.get('input_file') and data.get('name_coord') == 'file':
        log.info('There is an input file')
        try:
            d = jm.read_herschel_input(data['input_file'])
            for i in d:
                if i.get('name'):
                    i['name_coord'] = 'name'
                if i.get('wavelength'):
                    # Map the requested wavelength to the matching
                    # point-source catalogue table.
                    wl = int(i['wavelength'])
                    if wl in (70, 100, 160):
                        # BUG FIX: the key was misspelled 'tablee', so
                        # get_herschel_data() (which reads data['table'])
                        # never saw this selection.
                        i['table'] = "hsa.pacs_point_source_%s" % (
                            str(wl).zfill(3))
                        #"cat_hppsc_%s"%(str(wl).zfill(3))
                    elif wl in (250, 350, 500):
                        i['table'] = "hsa.spire_point_source_%s" % (wl)
                        #"cat_spsc_%i"%(wl)
                if i.get('positional_source'):
                    # positional_source == 'False' means "images off",
                    # i.e. positional-sources mode on.
                    i['positional_images'] = i['positional_source'] == 'False'
                else:
                    i['positional_images'] = True
                self.get_herschel_data(log, i)
        except Exception:
            log.error("Exception while retrieving data from herschel")
            log.error(traceback.format_exc())
            raise err(traceback.format_exc())
        # NOTE: the input file is intentionally not removed here
        # (unlike the gaia task); see the commented-out os.remove in
        # the original.
        return
    elif data.get('adql') and data.get('name_coord') == 'adql':
        log.info('ADQL query')
        im = risea().get().interface_manager
        fm = file_manager()
        adql = data['adql']
        if not im:
            log.error('There is no interface manager initialized!')
            raise err("There is no interface manager initialized!")
        src = im._archive_herschel_get_adql(adql)
        if src is not None:
            if not data.get('output_file'):
                file_name = wh().get().SOURCES_FMT % {
                    "mission": "hsa",
                    "date": str(round(time.time())),
                    "name": "data"}
            else:
                file_name = wh().get().SOURCES_FMT % {
                    "mission": "hsa",
                    "date": str(round(time.time())),
                    "name": data['output_file']}
            fm.save_file_plain_data(src, "%s.vot" % (file_name),
                                    wh().get().HSA_PATH,
                                    self.task_id, "hsa",
                                    timezone.now())
            log.info("Everything done!")
        return
    else:
        if data.get('shape') == 'polygon':
            jm.set_vertexes(data, data['polygon'])
            log.info("added vertexes %s", str(data))
        self.get_herschel_data(log, data)
        return
def start(self, data):
    """This method runs the get_algorithms job.

    If the algorithms are not loaded it will load them. Then it will
    retrieve all the algorithm_info_models, paginate them and return
    their data grouped by algorithm group, each group sorted by its
    position.

    Args:
        self: The object pointer.
        data: A dictionary containing the input data for the job.

    Returns:
        The job_data attribute. The ok attribute will be True if there
        are algorithms retrieved, False otherwise.

    @see algorithm_info_model
    @link avi.models.algorithm_info_model
    """
    log = logger().get_log('algorithm_manager')
    wh_f = wh_frontend_config().get()
    if not wh().get().ALGORITHMS_LOADED:
        # Lazy one-time initialization of the algorithm registry.
        from avi.core.algorithm.algorithm_manager import algorithm_manager
        algorithm_manager().init()
        wh().get().ALGORITHMS_LOADED = True
    from avi.models import algorithm_info_model, algorithm_group_model
    all_ms = algorithm_info_model.objects.all().order_by(
        'name_view', 'name', 'pk')
    self.job_data.data = {}
    self.job_data.ok = all_ms is not None
    if not all_ms:
        return self.job_data
    # Paginate the algorithms, clamping the current page into range.
    pg = Paginator(all_ms, wh_f.MAX_ALG_PER_PAGE)
    page = wh_f.CURRENT_ALG_PAGE
    if page < 1:
        wh_f.CURRENT_ALG_PAGE = 1
    elif page > pg.num_pages:
        wh_f.CURRENT_ALG_PAGE = pg.num_pages
    ms = pg.page(wh_f.CURRENT_ALG_PAGE)
    # Paginate the groups with the same page index.
    all_ms_g = algorithm_group_model.objects.all().order_by(
        'name_view', 'name', 'pk')
    pg_g = Paginator(all_ms_g, wh_f.MAX_ALG_PER_PAGE)
    page = wh_f.CURRENT_ALG_PAGE
    if page < 1:
        wh_f.CURRENT_ALG_PAGE = 1
    elif page > pg_g.num_pages:
        # BUG FIX: clamp against the group paginator (pg_g); the
        # previous code compared/assigned pg.num_pages by copy-paste.
        wh_f.CURRENT_ALG_PAGE = pg_g.num_pages
    ms_g = pg_g.page(wh_f.CURRENT_ALG_PAGE)
    # Build one bucket per group, ordered by group position, then
    # distribute the algorithms into their groups.
    groups = []
    for g in ms_g:
        groups.append({"group": g, "algorithms": []})
    groups.sort(key=lambda x: x["group"].position, reverse=False)
    for j in ms:
        for g in groups:
            if j.algorithm_group == g["group"].name:
                g["algorithms"].append((j.pk, j.name, j.name_view,
                                        j.algorithm_type,
                                        j.algorithm_group, j.position))
    for g in groups:
        # BUG FIX: sort by the algorithm position (index 5); index 4
        # is the group name, which is constant within a group.
        g["algorithms"].sort(key=lambda x: x[5], reverse=False)
    log.debug("%d algorithm groups on page %d",
              len(groups), wh_f.CURRENT_ALG_PAGE)
    res = {}
    res["algorithms"] = groups
    res["max_pages"] = pg.num_pages
    res["current_page"] = wh_f.CURRENT_ALG_PAGE
    res["next_page"] = wh_f.CURRENT_ALG_PAGE + 1
    res["prev_page"] = wh_f.CURRENT_ALG_PAGE - 1
    self.job_data.data = res
    return self.job_data
def run(self):
    """Runs the query to the gaia archive.

    If the task_data contains the 'input_file' key it will read that
    value and call get_gaia_data() once per input parameter found in
    the input_file.

    If the task_data contains the 'adql' key it will query the archive
    through the interface_manager using that query.

    Otherwise it will call get_gaia_data() with the input from
    task_data.

    Args:
        self: The object pointer.

    Raises:
        task_exception: avi.task.task.task_exception

    See:
        interface_manager: avi.core.interface.interface_manager.interface_manager

    See also:
        get_gaia_data: get_gaia_data()
    """
    log = logger().get_log('gaia_query_task')
    jm = json_manager()
    data = self.task_data.data
    log.info("%s", str(data))
    if data.get('input_file') and data.get('name_coord') == 'file':
        log.info('There is an input file')
        try:
            d = jm.read_gaia_input(data['input_file'])
            for i in d:
                if i.get('name'):
                    i['name_coord'] = 'name'
                self.get_gaia_data(log, i)
        except Exception:
            log.error("Exception while retrieving data from gaia")
            log.error(traceback.format_exc())
            raise err(traceback.format_exc())
        finally:
            # The uploaded input file is temporary; always remove it,
            # even when a query fails part-way through.
            os.remove(data['input_file'])
        return
    elif data.get('adql') and data.get('name_coord') == 'adql':
        log.info('ADQL query')
        im = risea().get().interface_manager
        fm = file_manager()
        adql = data['adql']
        if not im:
            log.error('There is no interface manager initialized!')
            raise err("There is no interface manager initialized!")
        # IDIOM FIX: identity comparison with None (was `src != None`).
        src = im._archive_gaia_get_adql(adql)
        if src is not None:
            if not data.get('output_file'):
                file_name = wh().get().SOURCES_FMT % {
                    "mission": "gaia",
                    "date": str(round(time.time())),
                    "name": "data"}
            else:
                file_name = wh().get().SOURCES_FMT % {
                    "mission": "gaia",
                    "date": str(round(time.time())),
                    "name": data['output_file']}
            fm.save_file_plain_data(src, "%s.vot" % (file_name),
                                    wh().get().GAIA_PATH,
                                    self.task_id, "gaia",
                                    timezone.now())
            log.info("Everything done!")
        return
    else:
        if data.get('shape') == 'polygon':
            jm.set_vertexes(data, data['polygon'])
        self.get_gaia_data(log, data)
        return
def test_data_file(self):
    """Exercises data_file: model creation, plain file creation, plot
    addition, FITS storage and VOTable storage."""
    wh().get().RESULTS_PATH = os.path.dirname(os.path.realpath(__file__))

    test_log("Testing the initialization...", self)
    job_id = 1
    fname = "test_data.dat"
    # No results model exists until a data_file is constructed.
    self.assertFalse(results_model.objects.filter(job_id=job_id))
    d = data_file(1)
    model = results_model.objects.filter(job_id=job_id)
    self.assertEqual(len(model), 1)

    test_log("Testing a simple file creation...", self)
    file_name = os.path.join(wh().get().RESULTS_PATH, fname)
    with d.file(fname) as handle:
        handle.write("test")
    model = results_model.objects.filter(job_id=job_id)
    stored = model[0].resources.all().filter(name=fname)[0]
    self.assertEqual(wh().get().RESULTS_PATH, stored.path)
    self.assertEqual(fname, stored.name)
    self.assertTrue(os.path.isfile(file_name))
    with open(file_name) as handle:
        self.assertEqual(handle.read(), "test")
    os.remove(file_name)

    test_log("Testing a plot addition...", self)
    plot = plot_model.objects.create(name="test_plot", job_id=job_id,
                                     alg_name="test", script="", html="")
    self.assertFalse(model[0].plots.all().filter(name="test_plot"))
    d.add_plot(plot)
    self.assertEqual(len(model[0].plots.all().filter(name="test_plot")), 1)

    test_log("Testing a FITS file storage...", self)
    hdul = fits.HDUList([fits.PrimaryHDU(np.arange(100.0))])
    d.save_fits(fname, hdul)
    self.assertEqual(len(model[0].resources.all().filter(name=fname)), 2)
    self.assertTrue(os.path.isfile(file_name))
    os.remove(file_name)

    test_log("Testing a VOTable file storage...", self)
    votable = VOTableFile()
    resource = Resource()
    votable.resources.append(resource)
    table = Table(votable)
    resource.tables.append(table)
    table.fields.extend([
        Field(votable, name="filename", datatype="char", arraysize="*"),
        Field(votable, name="matrix", datatype="double", arraysize="2x2")])
    table.create_arrays(2)
    table.array[0] = ('test_1', [[1, 0], [0, 1]])
    table.array[1] = ('test_2', [[0.5, 0.3], [0.2, 0.1]])
    d.save_vot(fname, votable)
    self.assertEqual(len(model[0].resources.all().filter(name=fname)), 3)
    self.assertTrue(os.path.isfile(file_name))
    os.remove(file_name)