def do_pipeline(): # Cleaning up old files from the database and the disk DataFileManager.delete_old_files() OverlayManager.delete_old_files() # Check for new feedback surveys or comments, and email them to Flaxen FeedbackHistory.send_feedback_forms() FeedbackQuestionaire.send_feedback_survey() #Downloading the latest datafiles for our models. See the appropriate functions #pl_download/models.py.DataFileManager.get_latest_wave_watch_files() and #pl_download/models.py.DataFileManager.fetch_new_files() respectively try: #Sometimes even though the file downloads this process hangs and fails. #The try catch is a stop-gap fix so that the pipeline doesn't stop here #When it fails in that manner the file is downloaded and can be used wave_watch_files = DataFileManager.get_latest_wave_watch_files() except Exception: print '-' * 60 traceback.print_exc(file=sys.stdout) print '-' * 60 sst_files = DataFileManager.fetch_new_files() # not calling as a task so it runs inline wind_files = DataFileManager.get_wind_file() # If no new files were returned, don't plot or tile anything. try: #This try catch is also for the wave watch timeout bug if not wave_watch_files and not sst_files and not wind_files: print "No New Files Available, Quitting." return None except Exception: print '-' * 60 traceback.print_exc(file=sys.stdout) print '-' * 60 # get the list of plotting tasks based on the files we just downloaded. plot_task_list = OverlayManager.get_tasks_for_base_plots_for_next_few_days() list_of_chains = [] for pt in plot_task_list: if pt.args[0] != 4 and pt.args[0] != 6 and pt.args[0] != 7: # chaining passes the result of first function to second function list_of_chains.append(chain(pt, tile_overlay.s())) else: #Use the Wavewatch tiler for Wavewatch files list_of_chains.append(chain(pt, tile_wave_watch_overlay.s())) job = group(item for item in list_of_chains) print "jobs:" for each in job: print each #and run the group. result = job.apply_async() return result
def do_pipeline(): print "TASKS: before deleting files" DataFileManager.delete_old_files() print "TASKS: after deleting Datafiles" OverlayManager.delete_old_files() print "TASKS: after deleting Overlays" wave_watch_files = DataFileManager.get_latest_wave_watch_files() print "TASKS: after getting wave files" other_files = DataFileManager.fetch_new_files( ) # not calling as a task so it runs inline print "TASKS: after getting sst/currents files" # If no new files were returned, don't plot or tile anything. if not wave_watch_files and not other_files: return None # get the list of plotting tasks based on the files we just downloaded. plot_task_list = OverlayManager.get_tasks_for_base_plots_for_next_few_days( ) list_of_chains = [] for pt in plot_task_list: if pt.args[0] != 4: # chaining passes the result of first function to second function list_of_chains.append(chain(pt, tile_overlay.s())) else: #Use the Wavewatch tiler for Wavewatch files list_of_chains.append(chain(pt, tile_wave_watch_overlay.s())) job = group(item for item in list_of_chains) print "jobs:" for each in job: print each #and run the group. result = job.apply_async() return result
# NOTE(review): this appears to be a timing/backfill fragment that plots and
# tiles the latest wave, SST and wind files inline while reporting elapsed
# minutes per product. `sst` and `wind` are tested before being assigned
# here, so they are presumably flags bound earlier in the enclosing scope —
# TODO confirm against the full file.

# Fetch/refresh wave files, then look up the newest WAVE record.
wave = DataFileManager.get_latest_wave_watch_files()
# NOTE(review): this immediately overwrites the line above; `.latest()`
# returns a single DataFile, yet `wave[0]` below indexes it — verify that
# DataFile supports indexing, or that the second assignment is stale code.
wave = DataFile.objects.filter(type='WAVE').latest('model_date')

tiles = []
begin = time.time()
#first entry is day-1 at 12pm
#need to offset 16 to match with sst plot
#NOTE it increments in 1 hour changes
tiles += OverlayManager.make_wave_watch_plot(4, 16, wave[0])
tiles += OverlayManager.make_wave_watch_plot(6, 16, wave[0])
# Tile each generated wave plot inline (not as a Celery task).
for t in tiles:
    tile_wave_watch_overlay(t)
finish = time.time()
# Elapsed wall-clock time in minutes.
totalTime = (finish - begin)/ 60
print "Time taken for Waves = " + str(round(totalTime, 2)) + " minutes"

if sst:
    # NOTE(review): `sst` is rebound from flag to the downloaded file list.
    sst = DataFileManager.fetch_new_files()
    tiles = []
    #first entry is day at 4am
    #NOTE it increments in 4 hour changes
    begin = time.time()
    tiles += OverlayManager.make_plot(1, 0, sst[0])
    tiles += OverlayManager.make_plot(3, 0, sst[0])
    for t in tiles:
        tile_overlay(t)
    finish = time.time()
    totalTime = (finish - begin)/ 60
    print "Time taken for SST = " + str(round(totalTime, 2)) + " minutes"

if wind:
    winds = DataFileManager.get_wind_file()
    # NOTE(review): as with `wave` above, this overwrites the fetch result
    # with a single latest WIND record; the branch may continue beyond the
    # visible chunk — confirm in the full file.
    winds = DataFile.objects.filter(type='WIND').latest('model_date')
def testFetchingFiles(self):
    """Smoke-test that fetching new Currents & SST data files succeeds.

    Only asserts that ``DataFileManager.fetch_new_files()`` returns a
    non-None value; presumably this performs a real download rather than
    using a fixture — TODO confirm, as that makes the test network-dependent.
    """
    print "Running Currents & SS Temperature Download Test: "
    result = DataFileManager.fetch_new_files()
    self.assertIsNotNone(result)