def _load_stars_from_folder(self, path): """Load all files with a certain suffix as light curves""" if not path.endswith("/"): path += "/" # Get all light curve files (all files which end with certain suffix if not self.suffix: stars_list = [] for suffix in self.SUFFIXES: stars_list += glob.glob("{}*{}".format(path, suffix)) else: stars_list = glob.glob("{}*{}".format(path, self.suffix)) found_files = len(stars_list) if found_files == 0: if self.suffix: raise InvalidFilesPath( "There are no stars in %s with %s suffix" % (path, self.suffix)) else: raise InvalidFilesPath( "There are no stars in %s with any of supported suffix: %s" % (path, self.SUFFIXES)) files_limit = self.files_limit or found_files if self.suffix in self.FITS_SUFFIX: return self._loadFromFITS(stars_list, files_limit) stars = self._loadDatFiles( [s for s in stars_list if s.endswith("dat")], files_limit) stars += self._loadFromFITS( [s for s in stars_list if s.endswith("fits")], files_limit) return stars
def save_query(self, query, fi_name="query_file.txt", PATH=".",
               DELIM=None, overwrite=False):
    '''
    Save queries into the file which can be loaded for another query

    Parameters
    ----------
    query : list
        List of dictionaries which contains query params
    fi_name : str
        Name of the output file
    PATH : str
        Folder for the output file
    DELIM : str
        Column delimiter; falls back to ``self.DELIMITER`` when not given
    overwrite : bool
        Truncate the file instead of appending

    Returns
    -------
    None
    '''
    if not query:
        # Nothing to save; also avoids IndexError on query[0] below.
        return

    header = list(query[0].keys())
    path = os.path.join(PATH, fi_name)

    if not DELIM:
        DELIM = self.DELIMITER

    try:
        query_file = open(path, "w+" if overwrite else "a+")
    except IOError as err:
        raise InvalidFilesPath(err)

    try:
        # In "a+" mode the position starts at EOF, so readline() would
        # always return "" and the header would be duplicated on every
        # append.  Seek to the start before probing for an existing header.
        query_file.seek(0)
        if not query_file.readline().startswith("#"):
            query_file.write("#" + DELIM.join(header) + "\n")

        for que in query:
            if len(que) != len(header):
                raise Exception(
                    "Number of header params and values have to be the same.\nGot query %s and header %s \nCheck the query file if there are no missing value in any column or if there is a whitespace." % (que, header))
            # Writes always land at EOF because the file is in append mode.
            query_file.write(DELIM.join(str(que[key]) for key in que) + "\n")
    finally:
        # Close even when a row fails validation.
        query_file.close()
def saveIntoFile(obj, path=".", file_name="saved_object.pickle", folder_name=None):
    """
    This method serialize object (save it into file)

    Parameters
    ----------
    obj : object
        Object to serialize
    path : str
        Path to the folder
    file_name : str
        Name of result file
    folder_name : str
        Optional sub-folder of *path* to place the file in; created
        when it does not exist

    Returns
    -------
    None

    Raises
    ------
    InvalidFilesPath
        If the destination cannot be written
    """
    if folder_name:
        # The original concatenated the file name into the directory path
        # (os.makedirs(path_with_name + folder_name)), creating a bogus
        # "<path>/<file>.pickle<folder>" directory and then failing to
        # open the real target.  Build the sub-folder from path instead.
        folder = os.path.join(path, folder_name)
        os.makedirs(folder, exist_ok=True)
        path_with_name = os.path.join(folder, file_name)
    else:
        path_with_name = os.path.join(path, file_name)

    try:
        with open(path_with_name, "wb") as output:
            pickle.dump(obj, output, pickle.HIGHEST_PROTOCOL)
        # Report the actual destination (including any sub-folder).
        print("Object has been saved into %s" % path_with_name)
    except IOError:
        raise InvalidFilesPath("Path: %s\tfile name: %s" % (path, file_name))
def _loadLcFromDat(cls, file_name): """ Load Light curve from dat file of light curve Parameters ----------- file_with_path : str Name of the light curve file with its path Returns -------- List of tuples of (time, mag, err) """ try: dat = np.loadtxt(file_name, usecols=(cls.TIME_COL, cls.MAG_COL, cls.ERR_COL), skiprows=0) except IndexError: dat = np.loadtxt(file_name, usecols=(cls.TIME_COL, cls.MAG_COL, cls.ERR_COL), skiprows=2) except IOError, Argument: raise InvalidFilesPath("\nCannot open light curve file\n %s" % Argument)
def _load_stars_from_folder(self): """Load all files with a certain suffix as light curves""" # Check whether the path ends with "/" sign, if not add if not (self.path.endswith("/")): self.path = self.path + "/" # Get all light curve files (all files which end with certain suffix if not self.suffix: starsList = [] for suffix in self.SUFFIXES: starsList += glob.glob("%s*%s" % (self.path, suffix)) else: starsList = glob.glob("%s*%s" % (self.path, self.suffix)) numberOfFiles = len(starsList) if (numberOfFiles == 0): if self.suffix: raise InvalidFilesPath( "There are no stars in %s with %s suffix" % (self.path, self.suffix)) else: raise InvalidFilesPath( "There are no stars in %s with any of supported suffix: %s" % (self.path, self.SUFFIXES)) if (numberOfFiles < self.files_limit): self.files_limit = None else: numberOfFiles = self.files_limit if self.suffix in self.FITS_SUFFIX: return self._loadFromFITS(starsList, numberOfFiles) stars = self._loadDatFiles([s for s in starsList if s.endswith("dat")], numberOfFiles) stars += self._loadFromFITS( [s for s in starsList if s.endswith("fits")], numberOfFiles) return stars
def save_lists_query(self, query=None, fi_name="query_file.txt", PATH=".",
                     DELIM=None, overwrite=False, header=None):
    '''
    Save queries into the file which can be loaded for another query

    Parameters
    ----------
    query : list
        List of lists which contains query values (a flat list is
        treated as a single row)
    fi_name : str
        Name of the output file
    PATH : str
        Folder for the output file
    DELIM : str
        Column delimiter; falls back to ``self.DELIMITER`` when not given
    overwrite : bool
        Truncate the file instead of appending
    header : list
        Optional column names written as a leading "#" line

    Returns
    -------
    None
    '''
    # Replaces the shared mutable default argument (query=[]).
    if query is None:
        query = []

    path = os.path.join(PATH, fi_name)

    if not DELIM:
        DELIM = self.DELIMITER

    # A flat list of values is wrapped into a single-row query.
    if not check_depth(query, 2, ifnotraise=False):
        query = [query]

    # NOTE(review): this refuses to save rows when no header was given —
    # presumably intentional, but confirm against callers.
    if not header and query[0]:
        return False

    try:
        query_file = open(path, "w+" if overwrite else "a+")
    except IOError as err:
        raise InvalidFilesPath(err)

    try:
        # In "a+" mode the position starts at EOF, so readline() would
        # always return "" and the header would be re-written on every
        # append.  Seek to the start before probing.
        query_file.seek(0)
        if header and not query_file.readline():
            # The original omitted the newline here, gluing the first
            # data row onto the header line.
            query_file.write("#" + DELIM.join(str(it) for it in header) + "\n")

        for line in query:
            # Writes always land at EOF because the file is in append mode.
            query_file.write(DELIM.join(str(it) for it in line) + "\n")
    finally:
        # Close even when a row fails to serialize.
        query_file.close()
def _loadLcFromDat(cls, file_name): """ Load Light curve from dat file of light curve Parameters ----------- file_with_path : str Name of the light curve file with its path Returns -------- List of tuples of (time, mag, err) """ try: dat = np.loadtxt(file_name, usecols=(cls.TIME_COL, cls.MAG_COL, cls.ERR_COL), skiprows=0) except IndexError: dat = np.loadtxt(file_name, usecols=(cls.TIME_COL, cls.MAG_COL, cls.ERR_COL), skiprows=2) except IOError as Argument: raise InvalidFilesPath("\nCannot open light curve file\n %s" % Argument) mag, time, err = dat.T if not (len(mag) == len(time) == len(err)): raise InvalidFile( "Length of columns in light curve file is not the same") else: clean_dat = [] for x, y, z in zip(mag, time, err): if (x not in cls.BAD_VALUES and y not in cls.BAD_VALUES and z not in cls.BAD_VALUES): clean_dat.append([ round(x, cls.ROUND_DIGITS), round(y, cls.ROUND_DIGITS), round(z, cls.ROUND_DIGITS) ]) return clean_dat
def queryStar(self, query):
    """Download stars for *query*, filter each one, and log per-star status.

    Parameters
    ----------
    query : dict
        Query parameters passed to the provider; mutated here — its
        "name" key is overwritten with each processed star's name
        before every ``statusFile`` call.

    Returns
    -------
    tuple
        (stars, status) where *stars* is the list returned by the
        provider (possibly empty) and *status* is the status dict of
        the LAST processed star (see TODO below).
    """
    # Per-star status flags, written to the status file in this order.
    status = collections.OrderedDict(
        (("found", False), ("lc", False), ("passed", False)))
    try:
        provider = StarsProvider().getProvider(self.obth_method, query)
        # Force single-process mode on providers that support it.
        if hasattr(provider, "multiproc"):
            provider.multiproc = False
        stars = provider.getStars()
    except QueryInputError:
        raise
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception as e:
        # Best-effort: a failed download degrades to an empty result
        # instead of aborting the whole run.
        warn(str(e))
        warn("Couldn't download any star for query %s" % query
             )
        stars = []

    # TODO: status dict is reused across stars, so only the status of
    # the last star is reflected in the returned value.
    for one_star in stars:
        status["found"] = True

        # Probe for a light curve via attribute access (EAFP).
        contain_lc = True
        try:
            one_star.lightCurve.time
        except AttributeError:
            contain_lc = False

        if contain_lc:
            # TODO
            # Optionally persist the star's space coordinates, taken
            # from the first filter only.
            if self.save_coords and self.stars_filters:
                spc = self.stars_filters[
                    0].getSpaceCoordinates([one_star]).values
                if len(spc):
                    self._saveCoords([one_star.name] + spc[0].tolist())

            # Try to apply filters to the star
            try:
                passed = self.filterStar(one_star, query)
                status["lc"] = True
                status["passed"] = passed
            except (KeyboardInterrupt, SystemExit):
                raise
            except IOError as err:
                # File-system problems are fatal for the run.
                raise InvalidFilesPath(err)
            except Exception as err:
                # Any other filtering failure is logged and skipped.
                self.failProcedure(query, err)
                warn(
                    "Something went wrong during filtering:\n\t%s" % err)
        else:
            status["lc"] = False
            status["passed"] = False

        # Record this star's outcome (note: mutates the caller's query).
        query["name"] = one_star.name
        self.statusFile(query, status)

    # No star at all: log one all-False status line for the query.
    if not stars:
        query["name"] = ""
        self.statusFile(query, status)
    return stars, status