def setUp(self):
    file_name = f'{root_dir}/test_database/current_holdings.txt'
    self.sell_only = False
    self.portfolio = ASI.read_from_File(file_name)
    self.logger = self.logs['logger'].getLogger(
        'ASI.Test_Manage_Stocks_Portfolio')
    self.manage_portfolio.update_sector_funds_available = self.sector_funds_available
    self.manage_portfolio.update_total_sector_funds = self.total_sector_funds
    self.manage_portfolio.update_portfolio = self.portfolio
    self.manage_portfolio.update_universe = self.universe
    self.manage_portfolio.update_logger = self.logger
    self.manage_portfolio._sell_only = self.sell_only
def get_cnfi_image(dn, ht=250):
    doy = dn.timetuple().tm_yday
    year = dn.timetuple().tm_year

    # Grab all files (need to extend to search doy-1 to doy+1)
    files = glob.glob('/rdata/airglow/imaging/cnfi01/hka/%i/%03i/6300_*.tif' % (year, doy))
    files.sort()
    a, i = get_cnfi_dn(files, dn)

    # If the time difference is too big, try the previous/next day.
    # NOTE: this assumes get_cnfi_dn returns a signed offset in seconds
    # (closest image time minus dn), so a > 0 means the closest image
    # is after dn and the previous day should be searched.
    if abs(a) > 30*60.:
        if a > 0:
            # Try previous day
            files = glob.glob('/rdata/airglow/imaging/cnfi01/hka/%i/%03i/6300_*.tif' % (year, doy-1))
        else:
            # Try next day
            files = glob.glob('/rdata/airglow/imaging/cnfi01/hka/%i/%03i/6300_*.tif' % (year, doy+1))
        files.sort()
        a, i = get_cnfi_dn(files, dn)

    # Exit out if nothing is within 30 minutes
    if abs(a) > 30*60.:
        print(a)
        # Return the same number of values as the success path below
        return np.nan, np.nan, np.nan, np.nan, np.nan, np.nan

    # Load the closest image
    fn = files[i]
    d = Image.open(fn)
    im = np.asarray(d)
    dt = d.info['UniversalTime']
    print('SSUSI:', dn, 'CNFI:', dt)

    # Load the appropriate calibration image
    instr_info = asiinfo.get_instr_info('cnfi01', dt)
    cal_file = np.load(instr_info['cal_file'])
    az = cal_file['az']
    el = cal_file['el']
    rx_lat = cal_file['rx_lat']
    rx_lon = cal_file['rx_lon']
    lat, lon = ASI.ConvertAzEl2LatLon(az, el, ht, rx_lat, rx_lon)
    apex_ht, apex_lon = load_cnfi_proj()

    return lat, lon, apex_ht, apex_lon, im, dt
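# A minimal driver sketch for get_cnfi_image above. The conjunction time is
# illustrative, and the function's own module-level imports (glob, PIL.Image,
# ASI, asiinfo, get_cnfi_dn, load_cnfi_proj) are assumed to be in scope.
import datetime
import numpy as np

dn = datetime.datetime(2013, 7, 15, 9, 30)  # hypothetical SSUSI pass time
lat, lon, apex_ht, apex_lon, im, dt = get_cnfi_image(dn, ht=250)
if np.isscalar(lat) and np.isnan(lat):
    print('No CNFI image within 30 minutes of', dn)
else:
    print('Closest CNFI image taken at', dt, 'with shape', im.shape)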
#!/usr/bin/python3
import sys
import os

sys.path.append(os.path.join(os.path.dirname(__file__), "astrolove"))
sys.path.append("/usr/lib/astrolove")

import ASI
import time

print(ASI.list())

c = ASI.Camera(0)
print(c.prop())

c.set({'width': 640, 'height': 480, 'start_x': 320, 'start_y': 240})
s = c.stat()
assert s['width'] == 640
assert s['height'] == 480
assert s['start_x'] == 320
assert s['start_y'] == 240

# Only for color cameras
c.set({'type': 1})

c.start()
print(c.stat())
for i in range(5):
    time.sleep(0.5)
    s = c.stat()
    im = c.get_image()
    # vals[7] is presumably a temperature reading in tenths of a degree
    print(s['captured'], s['vals'][7] / 10.0, s['width'], s['height'],
          s['type'], im.shape)
    c.set({'start_x': 320 - 20 * (i + 1)})
def list(self):
    self.asi_list = ASI.list()
    return self.asi_list
def setUpClass(cls):
    time_now = time.time()
    screen_lock = Semaphore(value=1)
    asctime = datetime.datetime.fromtimestamp(time_now).strftime('%Y-%m-%d')

    # Setup logging to file
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s %(name)-10s %(levelname)-8s %(message)s',
        datefmt='%Y-%m-%d %H:%M',
        filename=f'{asctime} test_ASI log.log',
        filemode='a')

    # Define a handler to write INFO messages or higher to system output
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)

    # Setup output format for console
    formatter = logging.Formatter('%(name)-27s : %(levelname)-8s %(message)s')
    console.setFormatter(formatter)

    # Add the handler to the root logger
    logging.getLogger('').addHandler(console)
    system_logs = {'logger': logging}
    cls.logs = system_logs

    cls.manage_portfolio = ASI.Manage_Stocks_Portfolio(
        thread_lock=screen_lock, logs=system_logs, testing=True)

    # Import testing data
    file_path = f'{root_dir}/test_database/'
    cls.sector_funds_available = ASI.read_from_File(
        file_path + 'sector_funds_available.txt')
    cls.total_sector_funds = ASI.read_from_File(
        file_path + 'total_sector_funds.txt')

    file_path = f'{root_dir}/test_database/universe/'
    growth_stocks = ASI.read_from_File(file_path + 'growth_stocks_universe.txt')
    dividend_stocks = ASI.read_from_File(
        file_path + 'dividend_stocks_universe.txt')

    universe = dict()
    if isinstance(growth_stocks, dict) and isinstance(dividend_stocks, dict):
        try:
            del growth_stocks['None']
        except KeyError:
            pass
        try:
            del dividend_stocks['None']
        except KeyError:
            pass
        universe.update({'dividend_stocks': dividend_stocks})
        universe.update({'growth_stocks': growth_stocks})
        cls.universe = universe
    else:
        universe.update({'dividend_stocks': dict()})
        universe.update({'growth_stocks': dict()})
        universe['dividend_stocks'].update({'General': ['UVXY', 'SPXL']})
        universe['growth_stocks'].update({'General': ['UVXY', 'SPXL']})
        print('Error importing trading universe!')
        cls.universe = universe
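# For reference, the trading universe assembled in setUpClass above is a
# two-level dict: universe name -> sector -> list of tickers. A minimal
# sketch of that shape; the 'General' sector and the UVXY/SPXL tickers come
# from the fallback branch, while any other sectors are whatever
# read_from_File returns.
example_universe = {
    'growth_stocks': {'General': ['UVXY', 'SPXL']},
    'dividend_stocks': {'General': ['UVXY', 'SPXL']},
}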
def test_positive_integer(self):
    value = 48
    results = ASI.is_negative_number(value)
    self.assertFalse(results,
                     'incorrect results for positive integer value')
def test_negative_integer(self):
    value = -13
    results = ASI.is_negative_number(value)
    self.assertTrue(results,
                    'incorrect results for negative integer value')
def test_negative_num_less_than_zero(self):
    value = -0.05637
    results = ASI.is_negative_number(value)
    self.assertTrue(results,
                    'incorrect results for negative value less than zero')
def test_positive_num_less_than_one(self):
    value = 0.00456
    results = ASI.is_negative_number(value)
    self.assertFalse(results,
                     'incorrect results for positive value less than one')
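# For context, a minimal sketch of the helper these four tests exercise,
# assuming ASI.is_negative_number reduces to a strict sign check (the real
# implementation may also validate non-numeric input):
def is_negative_number(value):
    """Return True when value is strictly negative (sketch, not ASI's code)."""
    return value < 0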
def process_instr(inst, year, doy, do_DB=True):
    # Function to process a single day of ASI (all-sky imaging) data. A
    # movie and a keogram will be generated. If requested, the information
    # will be added into the airglow database.
    #
    # INPUTS:
    #   inst - instrument code to process
    #   year, doy - year and day of year of date to be processed
    #
    # OPTIONAL INPUTS:
    #   do_DB - put information into the airglow SQL database (default=True)
    #
    # OUTPUTS:
    #   warnings - warning message if processing crashed (a traceback).
    #              If no issues, an empty string will be returned.
    #              TODO: Mimic FPIprocess warnings
    #
    # HISTORY:
    #   Written by Jonathan J. Makela on 15 July 2013
    #   Reworked to use instruments as the input, rather than site, on 22 July 2013

    # Create the process_dn date
    process_dn = datetime.datetime(year, 1, 1) + datetime.timedelta(days=doy - 1)
    warnings = ''

    # Create needed string variables
    datestr = process_dn.strftime('%Y%m%d')

    # User to scp to
    scp_user = '******'
    web_stub = 'SummaryImages/'
    file_stub = '/home/airglowgroup/data/SummaryImages/'

    inst = inst.lower()

    # Find out where the instrument was
    site = asiinfo.get_site_of(inst, process_dn)

    # Get the info for the instrument on this date
    inst_info = asiinfo.get_instr_info(inst, process_dn)
    site_info = asiinfo.get_site_info(site)
    filters = inst_info['filters']
    filter_names = inst_info['filter_names']
    unwarp_ht = inst_info['unwarp_ht']
    inst_id = inst_info['sql_inst_id']
    horz = inst_info['horizon']
    t_lat = inst_info['t_lat']
    t_lon = inst_info['t_lon']
    nfile = inst_info['cal_file']
    kernel_size = inst_info['kernel_size']
    site_id = site_info['sql_id']
    site_name = site_info['Name']
    show_countries = site_info['borders']
    dark_flag = inst_info['ignore_dark']

    # Load in the data from the requested calibration file
    npzfile = np.load(nfile)
    el = npzfile['el']
    az = npzfile['az']
    rx_lat = npzfile['rx_lat']
    rx_lon = npzfile['rx_lon']
    if 'nf' in inst:
        # Get CNFI's mask that used to be on el/az to apply to the data
        maskDat = npzfile['mask']
    else:
        maskDat = None
    npzfile.close()

    # Determine the times between which files will be accepted.
    # Use local-time noon on doy to local-time noon on doy+1
    local = pytz.timezone(site_info['Timezone'])
    start_dt = datetime.datetime(year, 1, 1) + datetime.timedelta(days=doy - 1, hours=12)
    stop_dt = datetime.datetime(year, 1, 1) + datetime.timedelta(days=doy - 1, hours=36)
    start_dt = local.localize(start_dt)
    stop_dt = local.localize(stop_dt)

    # Location of files
    data_stub = '/rdata/airglow/imaging/' + inst + '/' + site + '/'

    warn_flag = True

    # Loop through each requested filter
    for (fils, fil_name, un_ht, in_id, target_lat, target_lon) in zip(
            filters, filter_names, unwarp_ht, inst_id, t_lat, t_lon):
        # Variable to hold the files that were created
        files_created = []
        try:
            con = None

            # Create a list of relevant filenames by searching in adjacent
            # days' directories also, since we might have problems with UT/LT,
            # time zones, etc.
            files = []
            temps = []
            darks = []
            for day_offset in [-1, 0, 1]:  # search folders around the day of interest
                dir_dt = process_dn + datetime.timedelta(days=day_offset)

                # Create the YYYYMMDD date format
                yearstr = dir_dt.strftime('%Y')
                doystr = dir_dt.strftime('%j')

                # Create the directory name for the data to be processed
                data_dir = data_stub + yearstr + '/' + doystr + '/'

                # Look in the data directory, and grab the files if they were
                # taken between the start and stop times.
                #fns = glob.glob(data_dir + site.upper() + '_' + fils + '*.tif')
                #dks = glob.glob(data_dir + site.upper() + '_[dD][aA][rR][kK]_*.tif')
                fns = glob.glob(data_dir + '*_' + fils + '*.tif')
                dks = glob.glob(data_dir + '*_[dD][aA][rR][kK]_*.tif')
                if len(fns) == 0:
                    # Fall back to the old data format if nothing is returned
                    fns = glob.glob(data_dir + fils + '*.tif')
                    dks = glob.glob(data_dir + '[dD][aA][rR][kK]_*.tif')
                for fn in fns:
                    d = Image.open(fn)
                    dtime = local.localize(d.info['LocalTime'])
                    if dtime > start_dt and dtime < stop_dt:
                        files.append(fn)
                        temps.append(d.info['CCDTemperature'])
                for dk in dks:
                    d = Image.open(dk)
                    dtime = local.localize(d.info['LocalTime'])
                    if dtime > start_dt and dtime < stop_dt:
                        darks.append(dk)

            files.sort()
            darks.sort()
            if len(darks) == 0 or dark_flag:
                darks = None

            # Go through images for each filter
            if len(files) != 0:
                files.sort()
                warn_flag = False

                # Warn if the CCD is too hot
                cool_point = asiinfo.get_instr_info(inst, process_dn)['ccd_temp_set']
                over_limit = sum(np.array(temps) >= cool_point + 3)
                if over_limit > 0:
                    warnings = warnings + 'CCD OVERHEATING: %s- %03i of %03i images over set limit\n' % (
                        fils, over_limit, len(files))

                # Output names
                movie_name = inst + '_' + site + '_' + datestr + '_' + fils + 'movie.avi'
                keo_name = inst + '_' + site + '_' + datestr + '_' + fils + 'keogram.png'

                # Convert from the el/az to lat/lon for the requested unwarp height
                lat, lon = ASI.ConvertAzEl2LatLon(az, el, un_ht, rx_lat, rx_lon, horizon=horz)
                min_lat = np.min(lat[np.isfinite(lat)])
                max_lat = np.max(lat[np.isfinite(lat)])
                min_lon = np.min(lon[np.isfinite(lon)])
                max_lon = np.max(lon[np.isfinite(lon)])

                # Create the map
                m = Basemap(llcrnrlon=min_lon - 0.5, llcrnrlat=min_lat - 0.5,
                            urcrnrlon=max_lon + 0.5, urcrnrlat=max_lat + 0.5,
                            projection='merc', area_thresh=1000, resolution='i')

                # Create the mask used to mask the image data
                if maskDat is not None:
                    mask = np.isnan(lat) | maskDat
                else:
                    mask = np.isnan(lat)

                # Get lat and lon without nans so we can plot
                lat1, lon1 = ASI.ConvertAzEl2LatLon(
                    az, el, un_ht, rx_lat, rx_lon,
                    horizon=-10.0)  # -10 so there are no nans

                # Create the movie
                ASI.MapMovie(files, m, lat1, lon1, mask, movie_name=movie_name,
                             darks=darks, sitename=site_name,
                             kernel_size=kernel_size,
                             displayCountries=show_countries, filt=fil_name)

                # SCP the movie over to airglow (call was Popen)
                subprocess.call(['scp', movie_name, scp_user + ':' + file_stub])
                files_created.append(movie_name)

                # Create the keogram
                f = ASIDisplay.Keogram(files, lat, lon, target_lat, target_lon,
                                       sitename=site_name, filt=fil_name,
                                       darks=darks)
                f.savefig(keo_name)
                subprocess.call(['scp', keo_name, scp_user + ':' + file_stub])
                files_created.append(keo_name)

                # Add info to the database, if requested
                if do_DB:
                    # Start and stop time of observations
                    d = Image.open(files[0])
                    startut = d.info['UniversalTime']
                    d = Image.open(files[-1])
                    stoput = d.info['UniversalTime']

                    # Open and populate the database (see
                    # http://zetcode.com/databases/mysqlpythontutorial/ for help)
                    con = mdb.connect(host='webhost.engr.illinois.edu',
                                      db='airglowgroup_webdatabase',
                                      read_default_file='/home/airglow/.my.cnf')
                    cur = con.cursor()

                    # First find out if the entry is in there (i.e., we are
                    # just updating the png and avi files)
                    sql_cmd = 'SELECT id FROM DataSet WHERE Site = %d and Instrument = %d and StartUTTime = "%s"' % (
                        site_id, in_id, startut)
                    cur.execute(sql_cmd)
                    rows = cur.fetchall()
                    if len(rows) == 0:
                        sql_cmd = 'INSERT INTO DataSet (Site, Instrument, StartUTTime, StopUTTime, SummaryImage, SummaryMovie) VALUES(%d, %d, "%s", "%s", "%s", "%s")' % (
                            site_id, in_id, startut, stoput,
                            web_stub + keo_name, web_stub + movie_name)
                        cur.execute(sql_cmd)
                    else:
                        # Entry exists. Update it
                        sql_cmd = 'UPDATE DataSet SET SummaryImage="%s", SummaryMovie="%s" WHERE Site = %d and Instrument = %d and StartUTTime = "%s"' % (
                            web_stub + keo_name, web_stub + movie_name,
                            site_id, in_id, startut)
                        cur.execute(sql_cmd)
            else:
                print('No files used for ' + fils + ' on this day.')

        except:
            print('Something bad happened...')
            warnings = warnings + traceback.format_exc() + '\n'

        finally:
            if con:
                con.close()

            # Delete the files that were created
            for f in files_created:
                os.remove(f)

    if warn_flag:
        warnings = warnings + 'No files in folders!\n'

    return warnings
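# A minimal driver sketch for process_instr above. The instrument code and
# date are illustrative, and do_DB=False keeps the run off the SQL database
# so no credentials are needed.
warnings = process_instr('cnfi01', 2013, 196, do_DB=False)
if warnings:
    print('Processing finished with warnings:\n' + warnings)
else:
    print('Processing finished cleanly.')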