def main():
    """Run a main program of the KSU F****r."""
    # No subcommand given: show usage and quit successfully.
    if len(sys.argv) < 2:
        help()
        return Constants.EXIT_SUCCESS

    cmd = sys.argv[1]

    # One-off bootstrap helpers (kept for reference): a PageDownloader can
    # build the estimated-student DB automatically via determine_studentID(),
    # or by hand through its DB manager, e.g.
    #   mgr = PageDownloader().get_db_manager()
    #   mgr.register_studentIDs_ranging("g0846002", "g0847498")  # entrance_year=2008
    #   ... one register_studentIDs_ranging() call per entrance year up to 2015 ...
    #   mgr.label_traced_students_ranging("g1144010", "g1145505", datetime.date(2015, 7, 14))

    if cmd == "download_all":
        # Download all student data using the estimated student DB above.
        PageDownloader().download_all()
    elif cmd == "upload_to_s3":
        uploader = Uploader()
        uploader.run("tmp")
        # uploader.run(Constants.CC_DOMAIN)
        # uploader.run(Constants.CSE_DOMAIN)
    elif cmd == "analyze_HTMLs":
        # Analyze and save downloaded HTMLs into "cse_student_DB.db".
        analyzer = StudentAnalyzer(Constants.STUDENT_TABLE_NAME)
        analyzer.analyze_HTMLs()
        # analyzer.analyze_images()
    elif cmd == "create_index_DB":
        analyzer = StudentAnalyzer(Constants.STUDENT_TABLE_NAME)
        analyzer.create_index_DB()
    else:
        help()

    return Constants.EXIT_SUCCESS
def uploadfile(data, result):
    """Asynchronously run the upload job.

    :param data: parameter carried in by the GET request
    :param result: record looked up from the database for that parameter
                   (md5 and url); falsy when nothing was found
    """
    if not result:
        # No DB record: treat the request as a test file and remove it
        # from the waiting-upload directory.
        filename = data.split('\\')[-1]
        path = os.path.join(WaitingUploadPath, filename)
        try:
            os.remove(path)
            print("已删除测试文件%s" % filename)
        except Exception as ex:
            # Best-effort cleanup: report the problem and carry on.
            print(ex)
        return

    connector_reday_for_upload(result)
    upload = Uploader(result)
    upload.run()
def main(args):
    """
    Main function - launches the program.

    :param args:
        The Parser arguments
    :type args:
        Parser object

    :returns:
        List

    :example:
        >>> ["The latitude and longitude values must be valid numbers", 1]
    """
    v = VerbosityMixin()
    if args:
        # Optional clipping bounds, shared by 'process' and 'download'.
        if 'clip' in args:
            bounds = convert_to_float_list(args.clip)
        else:
            bounds = None

        if args.subs == 'process':
            verbose = True if args.verbose else False
            force_unzip = True if args.force_unzip else False
            stored = process_image(args.path, args.bands, verbose, args.pansharpen,
                                   args.ndvi, force_unzip, args.ndvigrey, bounds)
            if args.upload:
                u = Uploader(args.key, args.secret, args.region)
                u.run(args.bucket, get_file(stored), stored)
            return ["The output is stored at %s" % stored]

        elif args.subs == 'search':
            try:
                if args.start:
                    args.start = reformat_date(parse(args.start))
                if args.end:
                    args.end = reformat_date(parse(args.end))
                if args.latest > 0:
                    # --latest implies: search the last 365 days, newest first.
                    args.limit = 25
                    end = datetime.now()
                    start = end - relativedelta(days=+365)
                    args.end = end.strftime("%Y-%m-%d")
                    args.start = start.strftime("%Y-%m-%d")
            except (TypeError, ValueError):
                return ["Your date format is incorrect. Please try again!", 1]
            s = Search()
            try:
                lat = float(args.lat) if args.lat else None
                lon = float(args.lon) if args.lon else None
            except ValueError:
                return ["The latitude and longitude values must be valid numbers", 1]
            result = s.search(paths_rows=args.pathrow,
                              lat=lat,
                              lon=lon,
                              limit=args.limit,
                              start_date=args.start,
                              end_date=args.end,
                              cloud_max=args.cloud)
            if result['status'] == 'SUCCESS':
                if args.latest > 0:
                    # Keep only the N most recent scenes.
                    datelist = []
                    for i in range(0, result['total_returned']):
                        datelist.append((result['results'][i]['date'], result['results'][i]))
                    datelist.sort(key=lambda tup: tup[0], reverse=True)
                    datelist = datelist[:args.latest]
                    result['results'] = []
                    for i in range(0, len(datelist)):
                        result['results'].append(datelist[i][1])
                    result['total_returned'] = len(datelist)
                else:
                    v.output('%s items were found' % result['total'], normal=True, arrow=True)
                if result['total'] > 100:
                    return ['Over 100 results. Please narrow your search', 1]
                else:
                    v.output(json.dumps(result, sort_keys=True, indent=4), normal=True, color='green')
                    return ['Search completed!']
            elif result['status'] == 'error':
                return [result['message'], 1]

        elif args.subs == 'download':
            d = Downloader(download_dir=args.dest)
            try:
                bands = convert_to_integer_list(args.bands)
                if args.pansharpen:
                    bands.append(8)
                if args.ndvi:
                    # NDVI needs only the red and near-infrared bands.
                    bands = [4, 5]
                downloaded = d.download(args.scenes, bands)
                if args.process:
                    force_unzip = True if args.force_unzip else False
                    # FIX: dict.iteritems() is Python 2 only; items() works on both.
                    for scene, src in downloaded.items():
                        if args.dest:
                            path = join(args.dest, scene)
                        else:
                            path = join(settings.DOWNLOAD_DIR, scene)
                        # Keep using Google if the image is before 2015
                        if src == 'google':
                            path = path + '.tar.bz'
                        stored = process_image(path, args.bands, False, args.pansharpen,
                                               args.ndvi, force_unzip, bounds=bounds)
                        if args.upload:
                            try:
                                u = Uploader(args.key, args.secret, args.region)
                            except NoAuthHandlerFound:
                                return ["Could not authenticate with AWS", 1]
                            except URLError:
                                return ["Connection timeout. Probably the region parameter is incorrect", 1]
                            u.run(args.bucket, get_file(stored), stored)
                    return ['The output is stored at %s' % stored, 0]
                else:
                    return ['Download Completed', 0]
            except IncorrectSceneId:
                return ['The SceneID provided was incorrect', 1]
def main(args):
    """
    Main function - launches the program.

    :param args:
        The Parser arguments
    :type args:
        Parser object

    :returns:
        List of [message] or [message, exit_code]
    """
    v = VerbosityMixin()
    if args:
        if args.subs == 'process':
            verbose = True if args.verbose else False
            stored = process_image(args.path, args.bands, verbose, args.pansharpen)
            if args.upload:
                u = Uploader(args.key, args.secret, args.region)
                u.run(args.bucket, get_file(stored), stored)
            return ["The output is stored at %s" % stored]

        elif args.subs == 'search':
            try:
                if args.start:
                    args.start = reformat_date(parse(args.start))
                if args.end:
                    args.end = reformat_date(parse(args.end))
            except (TypeError, ValueError):
                # FIX: message previously read "You date format ..." (typo).
                return ["Your date format is incorrect. Please try again!", 1]
            s = Search()
            try:
                lat = float(args.lat) if args.lat else None
                lon = float(args.lon) if args.lon else None
            except ValueError:
                return ["The latitude and longitude values must be valid numbers", 1]
            result = s.search(paths_rows=args.pathrow,
                              lat=lat,
                              lon=lon,
                              limit=args.limit,
                              start_date=args.start,
                              end_date=args.end,
                              cloud_max=args.cloud)
            if result['status'] == 'SUCCESS':
                v.output('%s items were found' % result['total'], normal=True, arrow=True)
                if result['total'] > 100:
                    return ['Over 100 results. Please narrow your search', 1]
                else:
                    v.output(json.dumps(result, sort_keys=True, indent=4), normal=True, color='green')
                    return ['Search completed!']
            elif result['status'] == 'error':
                return [result['message'], 1]

        elif args.subs == 'download':
            d = Downloader(download_dir=args.dest)
            try:
                if d.download(args.scenes, convert_to_integer_list(args.bands)):
                    if args.process:
                        if args.dest:
                            path = join(args.dest, args.scenes[0])
                        else:
                            path = join(settings.DOWNLOAD_DIR, args.scenes[0])
                        # Keep using Google if the image is before 2015
                        if (int(args.scenes[0][12]) < 5 or not args.bands):
                            path = path + '.tar.bz'
                        stored = process_image(path, args.bands, False, args.pansharpen)
                        if args.upload:
                            try:
                                u = Uploader(args.key, args.secret, args.region)
                            except NoAuthHandlerFound:
                                return ["Could not authenticate with AWS", 1]
                            except URLError:
                                return ["Connection timeout. Probably the region parameter is incorrect", 1]
                            u.run(args.bucket, get_file(stored), stored)
                        return ["The output is stored at %s" % stored]
                    else:
                        return ['Download Completed', 0]
            except IncorrectSceneId:
                return ['The SceneID provided was incorrect', 1]
def main(args):
    """
    Main function - launches the program.

    :param args:
        The Parser arguments
    :type args:
        Parser object

    :returns:
        List

    :example:
        >>> ["The latitude and longitude values must be valid numbers", 1]
    """
    v = VerbosityMixin()
    if args:
        # Optional clipping bounds, shared by 'process' and 'download'.
        if 'clip' in args:
            bounds = convert_to_float_list(args.clip)
        else:
            bounds = None

        if args.subs == 'process':
            verbose = True if args.verbose else False
            force_unzip = True if args.force_unzip else False
            stored = process_image(args.path, args.bands, verbose, args.pansharpen,
                                   args.ndvi, force_unzip, args.ndvigrey, bounds)
            if args.upload:
                u = Uploader(args.key, args.secret, args.region)
                u.run(args.bucket, get_file(stored), stored)
            return ["The output is stored at %s" % stored]

        elif args.subs == 'search':
            try:
                if args.start:
                    args.start = reformat_date(parse(args.start))
                if args.end:
                    args.end = reformat_date(parse(args.end))
                if args.latest > 0:
                    # --latest implies: search the last 365 days, newest first.
                    args.limit = 25
                    end = datetime.now()
                    start = end - relativedelta(days=+365)
                    args.end = end.strftime("%Y-%m-%d")
                    args.start = start.strftime("%Y-%m-%d")
            except (TypeError, ValueError):
                return ["Your date format is incorrect. Please try again!", 1]
            s = Search()
            try:
                lat = float(args.lat) if args.lat else None
                lon = float(args.lon) if args.lon else None
            except ValueError:
                return ["The latitude and longitude values must be valid numbers", 1]
            address = args.address
            if address and (lat and lon):
                return ["Cannot specify both address and latitude-longitude"]
            result = s.search(paths_rows=args.pathrow,
                              lat=lat,
                              lon=lon,
                              address=address,
                              limit=args.limit,
                              start_date=args.start,
                              end_date=args.end,
                              cloud_max=args.cloud)
            if result['status'] == 'SUCCESS':
                if args.json:
                    # Raw machine-readable output requested.
                    return json.dumps(result)
                if args.latest > 0:
                    # Keep only the N most recent scenes.
                    datelist = []
                    for i in range(0, result['total_returned']):
                        datelist.append((result['results'][i]['date'], result['results'][i]))
                    datelist.sort(key=lambda tup: tup[0], reverse=True)
                    datelist = datelist[:args.latest]
                    result['results'] = []
                    for i in range(0, len(datelist)):
                        result['results'].append(datelist[i][1])
                    result['total_returned'] = len(datelist)
                else:
                    v.output('%s items were found' % result['total'], normal=True, arrow=True)
                if result['total'] > 100:
                    return ['Over 100 results. Please narrow your search', 1]
                else:
                    v.output(json.dumps(result, sort_keys=True, indent=4), normal=True, color='green')
                    return ['Search completed!']
            elif result['status'] == 'error':
                return [result['message'], 1]

        elif args.subs == 'download':
            d = Downloader(download_dir=args.dest)
            try:
                bands = convert_to_integer_list(args.bands)
                if args.process:
                    if args.pansharpen:
                        bands.append(8)
                    if args.ndvi or args.ndvigrey:
                        # NDVI needs only the red and near-infrared bands.
                        bands = [4, 5]
                    if not args.bands:
                        bands = [4, 3, 2]
                downloaded = d.download(args.scenes, bands)
                if args.process:
                    if not args.bands:
                        args.bands = '432'
                    force_unzip = True if args.force_unzip else False
                    # FIX: dict.iteritems() is Python 2 only; items() works on both.
                    for scene, src in downloaded.items():
                        if args.dest:
                            path = join(args.dest, scene)
                        else:
                            path = join(settings.DOWNLOAD_DIR, scene)
                        # Keep using Google if the image is before 2015
                        if src == 'google':
                            path = path + '.tar.bz'
                        stored = process_image(path, args.bands, False, args.pansharpen,
                                               args.ndvi, force_unzip, args.ndvigrey, bounds=bounds)
                        if args.upload:
                            try:
                                u = Uploader(args.key, args.secret, args.region)
                            except NoAuthHandlerFound:
                                return ["Could not authenticate with AWS", 1]
                            except URLError:
                                return ["Connection timeout. Probably the region parameter is incorrect", 1]
                            u.run(args.bucket, get_file(stored), stored)
                    return ['The output is stored at %s' % stored, 0]
                else:
                    return ['Download Completed', 0]
            except IncorrectSceneId:
                return ['The SceneID provided was incorrect', 1]
def uploader_thread(self, clientsocket, address):
    # Per-client thread entry point: announce the connection, then hand
    # it off to a dedicated Uploader instance.
    print("(S) Just threaded a new client")
    worker = Uploader(self, clientsocket, address)
    worker.run()
def main(args):
    """
    Main function - launches the program.

    :param args:
        The Parser arguments
    :type args:
        Parser object

    :returns:
        List

    :example:
        >>> ["The latitude and longitude values must be valid numbers", 1]
    """
    v = VerbosityMixin()
    if args:
        # Optional clipping bounds, shared by 'process' and 'download'.
        if 'clip' in args:
            bounds = convert_to_float_list(args.clip)
        else:
            bounds = None

        if args.subs == 'process':
            verbose = True if args.verbose else False
            force_unzip = True if args.force_unzip else False
            stored = process_image(args.path, args.bands, verbose, args.pansharpen,
                                   args.ndvi, force_unzip, args.ndvigrey, bounds)
            if args.upload:
                u = Uploader(args.key, args.secret, args.region)
                u.run(args.bucket, get_file(stored), stored)
            return ["The output is stored at %s" % stored]

        elif args.subs == 'search':
            try:
                if args.start:
                    args.start = reformat_date(parse(args.start))
                if args.end:
                    args.end = reformat_date(parse(args.end))
                if args.latest > 0:
                    # --latest implies: search the last 365 days, newest first.
                    args.limit = 25
                    end = datetime.now()
                    start = end - relativedelta(days=+365)
                    args.end = end.strftime("%Y-%m-%d")
                    args.start = start.strftime("%Y-%m-%d")
            except (TypeError, ValueError):
                return ["Your date format is incorrect. Please try again!", 1]
            s = Search()
            try:
                if args.lat is not None:
                    lat = float(args.lat)
                else:
                    lat = None
                if args.lon is not None:
                    lon = float(args.lon)
                else:
                    lon = None
            except ValueError:
                return ["The latitude and longitude values must be valid numbers", 1]
            address = args.address
            if address and (lat and lon):
                return ["Cannot specify both address and latitude-longitude"]
            result = s.search(paths_rows=args.pathrow,
                              lat=lat,
                              lon=lon,
                              address=address,
                              limit=args.limit,
                              start_date=args.start,
                              end_date=args.end,
                              cloud_max=args.cloud,
                              geojson=args.geojson)
            # A geojson search result may come back without a 'status' key.
            if 'status' in result:
                if result['status'] == 'SUCCESS':
                    if args.json:
                        # Raw machine-readable output requested.
                        return json.dumps(result)
                    if args.latest > 0:
                        # Keep only the N most recent scenes.
                        datelist = []
                        for i in range(0, result['total_returned']):
                            datelist.append((result['results'][i]['date'], result['results'][i]))
                        datelist.sort(key=lambda tup: tup[0], reverse=True)
                        datelist = datelist[:args.latest]
                        result['results'] = []
                        for i in range(0, len(datelist)):
                            result['results'].append(datelist[i][1])
                        result['total_returned'] = len(datelist)
                    else:
                        v.output('%s items were found' % result['total'], normal=True, arrow=True)
                    if result['total'] > 100:
                        return ['Over 100 results. Please narrow your search', 1]
                    else:
                        v.output(json.dumps(result, sort_keys=True, indent=4), normal=True, color='green')
                        return ['Search completed!']
                elif result['status'] == 'error':
                    return [result['message'], 1]
            if args.geojson:
                return json.dumps(result)

        elif args.subs == 'download':
            d = Downloader(download_dir=args.dest, usgs_user=args.username, usgs_pass=args.password)
            try:
                bands = convert_to_integer_list(args.bands)
                if args.process:
                    if args.pansharpen:
                        bands.append(8)
                    if args.ndvi or args.ndvigrey:
                        # NDVI needs only the red and near-infrared bands.
                        bands = [4, 5]
                    if not args.bands:
                        bands = [4, 3, 2]
                files = d.download(args.scenes, bands)
                if args.process:
                    if not args.bands:
                        args.bands = '432'
                    force_unzip = True if args.force_unzip else False
                    for f in files:
                        stored = process_image(f, args.bands, False, args.pansharpen,
                                               args.ndvi, force_unzip, args.ndvigrey, bounds=bounds)
                        if args.upload:
                            try:
                                u = Uploader(args.key, args.secret, args.region)
                            except NoAuthHandlerFound:
                                return ["Could not authenticate with AWS", 1]
                            except URLError:
                                return ["Connection timeout. Probably the region parameter is incorrect", 1]
                            u.run(args.bucket, get_file(stored), stored)
                    return ['The output is stored at %s' % stored, 0]
                else:
                    return ['Download Completed', 0]
            except IncorrectSceneId:
                return ['The SceneID provided was incorrect', 1]
            except (RemoteFileDoesntExist, USGSInventoryAccessMissing) as e:
                # FIX: BaseException.message was removed in Python 3 (PEP 352);
                # str(e) yields the same text on both Python 2 and 3.
                return [str(e), 1]
def main(args):
    """
    Main function - launches the program.

    :param args:
        The Parser arguments
    :type args:
        Parser object

    :returns:
        List

    :example:
        >>> ["The latitude and longitude values must be valid numbers", 1]
    """
    v = VerbosityMixin()
    if args:
        if args.subs == 'process':
            verbose = True if args.verbose else False
            force_unzip = True if args.force_unzip else False
            stored = process_image(args.path, args.bands, verbose, args.pansharpen,
                                   args.ndvi, force_unzip, args.ndvi1)
            if args.upload:
                u = Uploader(args.key, args.secret, args.region)
                u.run(args.bucket, get_file(stored), stored)
            return ["The output is stored at %s" % stored]

        elif args.subs == 'search':
            try:
                if args.start:
                    args.start = reformat_date(parse(args.start))
                if args.end:
                    args.end = reformat_date(parse(args.end))
            except (TypeError, ValueError):
                # FIX: message previously read "You date format ..." (typo).
                return ["Your date format is incorrect. Please try again!", 1]
            s = Search()
            try:
                lat = float(args.lat) if args.lat else None
                lon = float(args.lon) if args.lon else None
            except ValueError:
                return ["The latitude and longitude values must be valid numbers", 1]
            result = s.search(paths_rows=args.pathrow,
                              lat=lat,
                              lon=lon,
                              limit=args.limit,
                              start_date=args.start,
                              end_date=args.end,
                              cloud_max=args.cloud)
            if result['status'] == 'SUCCESS':
                v.output('%s items were found' % result['total'], normal=True, arrow=True)
                if result['total'] > 100:
                    return ['Over 100 results. Please narrow your search', 1]
                else:
                    v.output(json.dumps(result, sort_keys=True, indent=4), normal=True, color='green')
                    return ['Search completed!']
            elif result['status'] == 'error':
                return [result['message'], 1]

        elif args.subs == 'download':
            d = Downloader(download_dir=args.dest)
            try:
                bands = convert_to_integer_list(args.bands)
                if args.pansharpen:
                    bands.append(8)
                if args.ndvi:
                    # NDVI needs only the red and near-infrared bands.
                    bands = [4, 5]
                downloaded = d.download(args.scenes, bands)
                if args.process:
                    force_unzip = True if args.force_unzip else False
                    # FIX: dict.iteritems() is Python 2 only; items() works on both.
                    for scene, src in downloaded.items():
                        if args.dest:
                            path = join(args.dest, scene)
                        else:
                            path = join(settings.DOWNLOAD_DIR, scene)
                        # Keep using Google if the image is before 2015
                        if src == 'google':
                            path = path + '.tar.bz'
                        stored = process_image(path, args.bands, False, args.pansharpen,
                                               args.ndvi, force_unzip)
                        if args.upload:
                            try:
                                u = Uploader(args.key, args.secret, args.region)
                            except NoAuthHandlerFound:
                                return ["Could not authenticate with AWS", 1]
                            except URLError:
                                return ["Connection timeout. Probably the region parameter is incorrect", 1]
                            u.run(args.bucket, get_file(stored), stored)
                        v.output("The output is stored at %s" % stored, normal=True, arrow=True)
                    return ['Image Processing Completed', 0]
                else:
                    return ['Download Completed', 0]
            except IncorrectSceneId:
                return ['The SceneID provided was incorrect', 1]