def check_if_already_has_license(self): """ returns boolean value indicating whether there is already a file in the current working directory that has 'license' or similar in the filename """ common_license_filenames = [ 'license', 'copying', 'unlicense', 'copying.lesser', 'license.txt', 'unlicense.txt', 'copying.txt' ] cwd_files = os.listdir(self.cwd) possible_licenses = [ x for x in cwd_files if x.lower() in common_license_filenames ] if len(possible_licenses) != 0: print 'This repository appears to contain license information already.' print 'If you would like to apply a new license using garnish, first' print 'remove any license files such as LICENSE, COPYING, or UNLICENSE.' if not self.args.q: print '\n Conflicting files: \n {0}'.format( '\n '.join(possible_licenses)) print '\nNo changes have been made.' logging.info('A license file is already present: {0}'.format( possible_licenses[0])) logging.info('Exiting.') exit(bad=True) else: return False
def __main__():
    """Entry point: parse CLI arguments, run main() under a timer and
    exit with its status."""
    # Keep the parser module-global so other helpers can reach it.
    global parser
    parser = args_options()
    parsed = parser.parse_args()
    with timer():
        exit(*main(parsed))
def process_argv(argv, profile):
    """Normalise docopt-style arguments into a flat options dict."""
    refresh = argv["--refresh-time"]
    if ":" in refresh:
        try:
            argv["--refresh-time"] = utils.time_string_to_seconds(refresh)
        except ValueError:
            log.critical("Incorrect value for -r --refresh-time ({}).".format(argv["--refresh-time"]))
            utils.exit(1)
    if argv["<chan>"] is None:
        argv["<chan>"] = "4chan"
    main_dir_path = os.path.join(profile.dir_path, argv["<chan>"])
    cache_dir_path = os.path.join(main_dir_path, "cache")
    # Default the download directory under the chan directory, otherwise
    # expand the user-supplied path.
    dl_dir = argv["--download-dir"]
    if dl_dir is None:
        argv["--download-dir"] = os.path.join(main_dir_path, "threads")
    else:
        argv["--download-dir"] = os.path.expanduser(dl_dir)
    return {
        "colour_output": argv["--colour"],
        "thread_dl_dir_path": argv["--download-dir"],
        "output_oneline": argv["--oneline"],
        "single_run": argv["--single-run"],
        "core_dl_sleep_time": argv["--sleep-time"],
        "use_cache_on_first_run": argv["--use-cache"],
        "refresh_time": int(argv["--refresh-time"]),
        "no_duplicate_threads": argv["--no-duplicates"],
        "chan": argv["<chan>"],
        "cache_dir_path": cache_dir_path,
        "main_dir_path": main_dir_path,
    }
def get_user_id_and_create_if_necessary(self, name):
    """Returns user ID and create user if necessary.

    All-in-one wonder method. Searching for the user ID of a given
    user. If the user does not exist: create the user first and then
    return the user ID.

    See also: :meth:`database.create_user`, :meth:`database.get_user_id`

    Args:
        name (:obj:`str`): User name.

    Returns:
        int: Numeric userID.
    """
    import utils
    # Normalise the name once instead of re-converting it for every
    # database call (the original called utils.nicename three times).
    nice = utils.nicename(name, self.config['conversion_table'])
    userID = self.get_user_id(nice)
    if not userID:
        self.create_user(nice)
        # - Now loading the newly created userID
        userID = self.get_user_id(nice)
        if not userID:
            utils.exit(
                'OOOH F**K. Created new user for %s but resulting ID was None in get_user_id_and_create_user_if_necessary.' % name)
    # - Return the id
    return userID
def process_image(path, bands=None, verbose=False, pansharpen=False, force_unzip=None):
    """Construct a Process for ``path`` and run it.

    :param path: The path to the image that has to be processed
    :param bands: List of bands that has to be processed. (optional)
    :param verbose: Sets the level of verbosity. Default is False.
    :param pansharpen: Whether to pansharpen the image. Default is False.
    :returns: (String) path to the processed image
    """
    try:
        band_list = convert_to_integer_list(bands)
        processor = Process(path, bands=band_list, verbose=verbose,
                            force_unzip=force_unzip)
    except IOError:
        exit("Zip file corrupted", 1)
    except FileDoesNotExist as e:
        exit(e.message, 1)
    return processor.run(pansharpen)
def get_cities(self): """Loading city information from the database. Loads all active cities from the database which can then be used to loop over or whatever you'll do with it. Returns: list: A list containing one dict per city where each dict consists of the keys 'name' and 'ID'. .. todo:: Would be nice to return cityclass objects or something. However, needs some effort as I have to chnage a few lines of code. """ print ' * %s' % 'looking active cities' sql = 'SELECT ID, name FROM %swetterturnier_cities WHERE active = 1 ' + \ 'ORDER BY ID' cur = self.cursor() cur.execute(sql % self.prefix) data = cur.fetchall() if len(data) == 0: utils.exit( 'Cannot load city data from database in database.current_tournament' ) res = [] for elem in data: res.append({'name': str(elem[1]), 'ID': int(elem[0])}) return res
def check_if_already_has_license(self): """ returns boolean value indicating whether there is already a file in the current working directory that has 'license' or similar in the filename """ common_license_filenames = [ "license", "copying", "unlicense", "copying.lesser", "license.txt", "unlicense.txt", "copying.txt", ] cwd_files = os.listdir(self.cwd) possible_licenses = [x for x in cwd_files if x.lower() in common_license_filenames] if len(possible_licenses) != 0: print "This repository appears to contain license information already." print "If you would like to apply a new license using garnish, first" print "remove any license files such as LICENSE, COPYING, or UNLICENSE." if not self.args.q: print "\n Conflicting files: \n {0}".format("\n ".join(possible_licenses)) print "\nNo changes have been made." logging.info("A license file is already present: {0}".format(possible_licenses[0])) logging.info("Exiting.") exit(bad=True) else: return False
def main():
    # Main pygame event/render loop; runs until the window is closed
    # (QUIT) or Escape is pressed — both paths leave via utils.exit().
    # NOTE(review): indentation reconstructed from a flattened source
    # line — verify branch nesting against the original file.
    while True:
        ms=pygame.time.get_ticks()  # frame start time, used for fps below
        for event in pygame.event.get():
            if event.type==QUIT:
                utils.exit()
            elif event.type == pygame.MOUSEBUTTONDOWN:
                # allow any button - left, right or middle
                glow_off()
                if click():
                    break
                display(); pygame.display.flip()
                # no pointer in case button pressed
                bu=buttons.check()
                if bu<>'':do_button(bu)
            elif event.type==KEYDOWN:
                if event.key==K_ESCAPE:
                    utils.exit()
                if event.key==K_x:
                    # toggle the version/fps overlay
                    g.version_display=not g.version_display
        if g.player_n==0 and not buttons.active('green'):
            buttons.on("back")
        display()
        aim.do()
        aim.glow(); player.glow()
        if g.version_display:
            # Overlay: app version, window size and current frame rate.
            g.message=g.app+' Version '+g.ver
            g.message+=' '+str(g.w)+' x '+str(g.h)
            g.message+=' '+str(g.frame_rate)+'fps'
            utils.message(g.screen,g.font1,g.message)
        mx,my=pygame.mouse.get_pos()
        if my>g.pointer.get_height():
            g.screen.blit(g.pointer,(mx,my))
        pygame.display.flip()
        g.clock.tick(40)
        # Derive the effective frame rate from this frame's elapsed time.
        d=pygame.time.get_ticks()-ms; g.frame_rate=int(1000/d)
def get_username_by_id(self, userID):
    """Returns username given a user ID.

    Returns the username for a given user ID. If the user cannot be
    found, the return value will be False. There is a vice versa
    function called :py:meth:`database.get_user_id`.

    Args:
        userID (:obj:`int`): Numeric user ID.

    Returns:
        bool or str: False if user cannot be identified, else string username.
    """
    try:
        userID = int(userID)
    except (TypeError, ValueError):
        # Narrowed from a bare except: only non-integer input is a
        # caller error; anything else should propagate.
        utils.exit(
            'Got wrong input to get_username_by_id. Was no integer!')
    cur = self.db.cursor()
    cur.execute('SELECT user_login FROM %susers WHERE ID = %d' %
                (self.prefix, userID))
    data = cur.fetchone()
    if not data:
        return False
    return str(data[0])
def main():
    """Validate fis CLI arguments and the configuration file, checking
    the bucket and the fis service reachability."""
    utils.check_login_user()
    # parse input option
    argv = [encode.convert_to_unicode(a) for a in sys.argv[1:]]
    args = get_parser().parse_args(argv)
    # Collect every optional argument that was actually supplied.
    kwargs = {}
    for attr in ('name', 'metadata', 'description', 'file_name',
                 'fpga_image_id', 'image_id', 'page', 'size'):
        value = getattr(args, attr)
        if value is not None:
            kwargs[attr] = value
    try:
        utils.check_param(**kwargs)
    except Exception as e:
        utils.exit('Error: %s' % encode.exception_to_unicode(e))
    # read and check config file
    config.read_config_and_verify()
    access_key = os.getenv('OS_ACCESS_KEY')
    secret_key = os.getenv('OS_SECRET_KEY')
    bucket_name = os.getenv('OS_BUCKET_NAME')
    region_id = os.getenv('OS_REGION_ID')
    domain_id = os.getenv('OS_DOMAIN_ID')
    project_id = os.getenv('OS_PROJECT_ID')
    obs_endpoint = os.getenv('OS_OBS_ENDPOINT')
    vpc_endpoint = os.getenv('OS_VPC_ENDPOINT')
    fis_endpoint = os.getenv('OS_FIS_ENDPOINT')
    try:
        # configure intranet dns of ecs
        config.configure_intranet_dns_ecs(region_id)
        # check bucket
        utils._check_bucket_acl_location(bucket_name, access_key, secret_key,
                                         obs_endpoint, region_id, domain_id)
        # check fis
        rest.fpga_image_relation_list(access_key, secret_key, project_id,
                                      region_id, fis_endpoint)
    except Exception as e:
        utils.exit('Error: %s' % encode.exception_to_unicode(e))
    if kwargs:
        print('fis argument(s) and config file are OK')
    else:
        print('fis config file is OK')
def process_image(path, bands=None, verbose=False, pansharpen=False, ndvi=False,
                  force_unzip=None, ndvigrey=False, bounds=None):
    """Construct the matching processor for ``path`` and run it.

    :param path: The path to the image that has to be processed
    :param bands: List of bands that has to be processed. (optional)
    :param verbose: Sets the level of verbosity. Default is False.
    :param pansharpen: Whether to pansharpen the image. Default is False.
    :returns: (String) path to the processed image
    """
    # Keyword arguments shared by every processor variant.
    common = dict(dst_path=settings.PROCESSED_IMAGE, verbose=verbose,
                  force_unzip=force_unzip, bounds=bounds)
    try:
        band_list = convert_to_integer_list(bands)
        if pansharpen:
            processor = PanSharpen(path, bands=band_list, **common)
        elif ndvigrey:
            processor = NDVI(path, **common)
        elif ndvi:
            processor = NDVIWithManualColorMap(path, **common)
        else:
            processor = Simple(path, bands=band_list, **common)
    except IOError:
        exit("Zip file corrupted", 1)
    except FileDoesNotExist as e:
        exit(e.message, 1)
    return processor.run()
def get_map_params(indent, geo_sw_ne, marker_diameter_km, operators, kw_range, connection_types):
    """Aggregate charging stations for the given viewport and filters and
    return a JSON payload with clustered points and facet counts."""
    enter(indent, 'get_map_params', '')
    geo_bounds = geo_sw_ne.to_geo_bounds()
    cur_geohash_precision = GeoHashPrecisionFinder.find_geohash_bits_from_width_geo_bounds_kms(
        geo_bounds
    )
    debug(indent, 'get_map_params', 'got precision ' + str(cur_geohash_precision)
          + ' for ' + str(geo_bounds.width))
    es_result = es.aggregate_search_with_filter(cur_geohash_precision, geo_bounds,
                                                operators, kw_range, connection_types)
    # Aggregate all points
    geo_clustering = GeoClustering()
    points = geo_clustering.compute_clusters(
        indent + 1, geo_sw_ne, marker_diameter_km, es_result.geo_hash_to_count)
    debug(indent, 'get_map_params', 'after clustering call')
    result_points = []
    # PEP 8 fix: compare against None with "is", not "==".
    if points is None:
        debug(indent, 'get_map_params', 'no results for bounds')
    else:
        debug(indent, 'get_map_params', 'found ' + str(len(points)) + ' points')
        for point in points:
            geo_point = point.get_point()
            result_points.append({
                "latitude": geo_point.latitude,
                "longitude": geo_point.longitude,
                "count": point.count
            })
    result = {
        "points": result_points,
        "operators": es_result.operator_to_count,
        "kw_min_max": {
            "min": es_result.kw_min_max.min,
            "max": es_result.kw_min_max.max
        },
        "connection_types": es_result.connection_type_to_count
    }
    exit(indent, 'get_map_params', str(len(result_points)))
    return jsonify(result)
def import_chan_module(chan_name):
    """Import the board-fetching module for ``chan_name`` and publish it
    as the module-global ``get_board``; exits when the chan is not
    supported."""
    global get_board
    module = "chans._{}.get_board".format(chan_name)
    try:
        get_board = importlib.import_module(module)
    except ImportError:
        log.critical("Required module for chan {} missing: {}.".format(
            chan_name, module))
        utils.exit(1)
def clip(self):
    """ Clip images based on bounds provided
    Implementation is borrowed from
    https://github.com/brendan-ward/rasterio/blob/e3687ce0ccf8ad92844c16d913a6482d5142cf48/rasterio/rio/convert.py
    """
    self.output("Clipping", normal=True)
    # create new folder for clipped images
    path = check_create_folder(join(self.scene_path, 'clipped'))
    try:
        # Clip every scene band plus the QA band.
        temp_bands = copy(self.bands)
        temp_bands.append('QA')
        for i, band in enumerate(temp_bands):
            band_name = self._get_full_filename(band)
            band_path = join(self.scene_path, band_name)
            self.output("Band %s" % band, normal=True, color='green', indent=1)
            with rasterio.open(band_path) as src:
                # Reproject the requested lon/lat bounds into the source
                # raster's CRS before computing the read window.
                bounds = transform_bounds(
                    {
                        'proj': 'longlat',
                        'ellps': 'WGS84',
                        'datum': 'WGS84',
                        'no_defs': True
                    },
                    src.crs, *self.bounds)
                if disjoint_bounds(bounds, src.bounds):
                    # Requested bounds miss the raster entirely; clamp
                    # them to the raster's own bounds.
                    bounds = adjust_bounding_box(src.bounds, bounds)
                window = src.window(*bounds)
                out_kwargs = src.meta.copy()
                out_kwargs.update({
                    'driver': 'GTiff',
                    'height': window[0][1] - window[0][0],
                    'width': window[1][1] - window[1][0],
                    'transform': src.window_transform(window)
                })
                with rasterio.open(join(path, band_name), 'w', **out_kwargs) as out:
                    out.write(src.read(window=window))
        # Copy MTL to the clipped folder
        copyfile(join(self.scene_path, self.scene + '_MTL.txt'),
                 join(path, self.scene + '_MTL.txt'))
        return path
    except IOError as e:
        exit(e.message, 1)
def ava(configLoc):
    # Compile and run the configured Java project: javac first, then
    # java, streaming and logging all of the program's output.
    projectConfig = ProjectConfig.readProjectConfigs(configLoc)
    dest = projectConfig[utils.PROJECT][utils.DEST]
    # Build the classpath: output dir first, then each configured entry.
    cp = dest
    for path in projectConfig[utils.PROJECT][utils.CP]:
        cp += ":" + path
    compileFiles = projectConfig[utils.PROJECT][utils.COMPILE]
    javac = ["javac", "-cp", cp, "-d", dest, *compileFiles]
    utils.out(utils.LINE_H, "ava: ", utils.CMD, " ".join(javac), softest=utils.Q)
    errLines = 0
    # Run javac; every line captured on stderr counts as an error line.
    for line in utils.execute(javac, stdout=None, stderr=SubProcess.PIPE):
        utils.out(utils.LINE_H, "javac: ", utils.ERR, line, end="", softest=utils.S)
        errLines += 1
    if errLines == 0:
        utils.out(utils.LINE_H, "ava: ", utils.AFFIRM,
                  "Compiled " + ", ".join([os.path.basename(file) for file in compileFiles]) + " without any errors",
                  softest=utils.Q)
    else:
        # Compilation failed; stop before attempting to run.
        utils.exit()
    java = ["java", "-cp", cp, projectConfig[utils.PROJECT][utils.RUN]]
    javaString = " ".join(java)
    utils.out(utils.LINE_H, "ava: ", utils.CMD, javaString, softest=utils.Q)
    # Matches the output accumulated so far when it ends in a Java stack
    # trace (exception line plus "at ..." frames), so exception output
    # can be coloured as an error.
    exceptionMatcher = re.compile(r"^.*Exception[^\n]+(\s+at [^\n]+)*\s*\Z",
                                  re.MULTILINE | re.DOTALL)
    runningLine = ""
    utils.openLog(configLoc, projectConfig[utils.PROJECT][utils.HOME], javaString)
    for line in utils.execute(java, stderr=SubProcess.STDOUT):
        runningLine += line
        outputColor = utils.ERR if exceptionMatcher.match(
            runningLine) else utils.OUT
        utils.out(utils.LINE_H, "java: ", outputColor, line, end="", softest=utils.S)
        utils.log(line)
    utils.closeLog()
    utils.exit()
def __main__():
    """CLI entry point: print search results directly for json/geojson
    output, otherwise run main() timed and exit with its status."""
    global parser
    parser = args_options()
    args = parser.parse_args()
    # BUG FIX: hasattr() is True for any argparse attribute that was
    # *defined*, even when its value is False, so the original branch
    # fired for every search. Test the flag values instead.
    if args.subs == 'search' and (getattr(args, 'json', False) or
                                  getattr(args, 'geojson', False)):
        print(main(args))
    else:
        with timer():
            exit(*main(args))
def process_image(path, bands=None, verbose=False, pansharpen=False):
    """Build a Process for ``path`` and run it, exiting on broken or
    missing input archives."""
    try:
        parsed_bands = convert_to_integer_list(bands)
        processor = Process(path, bands=parsed_bands, verbose=verbose)
    except IOError:
        exit("Zip file corrupted", 1)
    except FileDoesNotExist as e:
        exit(e.message, 1)
    return processor.run(pansharpen)
def findProjectConfigFile(name):
    """Walk up from the current directory until a project configuration
    file called ``name`` is found; exits with instructions when the
    filesystem root is reached without finding one."""
    location = '.'
    while True:
        candidate = location + '/' + name
        if os.path.isfile(candidate):
            return os.path.relpath(candidate)
        if location == '/':
            utils.out(utils.LINE_H, "ava: ", utils.ERR,
                      "Project configuration file (", name,
                      ") not found in parent directory", softest=utils.Q)
            utils.out(utils.LINE_H, "ava: ", utils.ERR, "Run with ",
                      utils.CMD, "--make-project-config", utils.ERR, " or ",
                      utils.CMD, "-m", utils.ERR,
                      " to create a project configuration file", softest=utils.Q)
            utils.exit()
        location = os.path.abspath(os.path.join(location, os.pardir))
def check_system():
    """ Simple sanity check. """
    # Required settings entries must be present before anything runs.
    if not utils.ensure_keys(["name", "logger_config", "mathml"], settings):
        utils.exit(utils.ENVIRONMENT_ENTRY_NOT_FOUND, "")
    # makedirs(exist_ok=True) avoids the check-then-create race the
    # original os.path.exists()/os.mkdir() pair had.
    os.makedirs("logs", exist_ok=True)
def usage(what=None):
    """Script usage.

    Args:
        what (:obj:`str`): String to specify which usage should be used.

    .. todo:: A bug! Change iputcheck, add propper usage.
    """
    import utils, sys, getobs, database
    # Load city metadata so the usage text can list valid -c/--city values.
    config = readconfig('config.conf')
    db = database.database(config)
    cities = db.get_cities()
    IDs, names, hashes = [], [], []
    for i in list(range(len(cities))):
        IDs.append(cities[i]['ID'])
        names.append(cities[i]['name'])
        hashes.append(cities[i]['hash'])
    # NOTE(review): string line breaks below reconstructed from a
    # flattened source line — confirm against the original layout.
    if what == None:
        print """
        Run into the usage from the inputcheck module with None type.
        You should set an explcit 'what' type when calling the usage
        so that I can give you a propper exit statement and some
        explanation which input options are allowed.
        """
    else:
        print """
        Sorry, wrong usage for type %s.
        Allowed inputs for this script are:
        -u/--user:      A userID or a user_login name. Most script accept
                        this and compute the points or whatever it is
                        for this user only.
        -s/--users:     A list of user names, seperated by commas, no spaces!
        -c/--city:      City can be given by its ID, name or hash
                        IDs:    %s
                        names:  %s
                        hashes: %s
        -a/--alldates   Cannot be combined with the -t/--tdate option.
                        If set loop over all available dates.
        -t/--tdate:     Tournament date in days since 1970-01-01
        -p/--param:     A list of paramIDs.
        -n/--filename   A filename for exporting tables/data.
        -d/--dates:     A range of dates seperated by ","
        -a/--alldates:  ignores -t input. Takes all tournament dates
                        from the database to compute the points.
        -f/--force:     Please DO NOT USE. This was for testing purpuses
                        to bypass some securety features of the scripts!!!!
                        But these securety features are there for some
                        reason. So please do not use.
        """ % (what, IDs, names, hashes)
    utils.exit('This is/was the usage (what: %s).' % what)
def _read_bands(self):
    """ Reads a band with rasterio """
    loaded = []
    try:
        for band, band_path in zip(self.bands, self.bands_path):
            loaded.append(rasterio.open(band_path).read_band(1))
    except IOError as e:
        exit(e.message, 1)
    return loaded
def clip(self):
    """ Clip images based on bounds provided
    Implementation is borrowed from
    https://github.com/brendan-ward/rasterio/blob/e3687ce0ccf8ad92844c16d913a6482d5142cf48/rasterio/rio/convert.py
    """
    self.output("Clipping", normal=True)
    # create new folder for clipped images
    path = check_create_folder(join(self.scene_path, 'clipped'))
    try:
        # Clip every scene band plus the QA band.
        temp_bands = copy(self.bands)
        temp_bands.append('QA')
        for i, band in enumerate(temp_bands):
            band_name = self._get_full_filename(band)
            band_path = join(self.scene_path, band_name)
            self.output("Band %s" % band, normal=True, color='green', indent=1)
            with rasterio.open(band_path) as src:
                # Reproject the requested lon/lat bounds into the source
                # raster's CRS before computing the read window.
                bounds = transform_bounds(
                    {
                        'proj': 'longlat',
                        'ellps': 'WGS84',
                        'datum': 'WGS84',
                        'no_defs': True
                    },
                    src.crs,
                    *self.bounds
                )
                if disjoint_bounds(bounds, src.bounds):
                    # Requested bounds miss the raster entirely; clamp
                    # them to the raster's own bounds.
                    bounds = adjust_bounding_box(src.bounds, bounds)
                window = src.window(*bounds)
                out_kwargs = src.meta.copy()
                out_kwargs.update({
                    'driver': 'GTiff',
                    'height': window[0][1] - window[0][0],
                    'width': window[1][1] - window[1][0],
                    'transform': src.window_transform(window)
                })
                with rasterio.open(join(path, band_name), 'w', **out_kwargs) as out:
                    out.write(src.read(window=window))
        # Copy MTL to the clipped folder
        copyfile(join(self.scene_path, self.scene + '_MTL.txt'),
                 join(path, self.scene + '_MTL.txt'))
        return path
    except IOError as e:
        exit(e.message, 1)
def setup_template(self): if self.args.custom_header: try: self.template = open(args.custom_header, 'r').readlines() except IOError: print 'Could not open ', self.args.custom_header exit(bad=True) else: template_resource = 'data/header-template' template = pkg_resources.resource_stream('garnish/', template_resource) self.template = template.readlines()
def __main__(): global parser parser = args_options() args = parser.parse_args() if args.subs == 'search': if args.json: print main(args) sys.exit(0) else: with timer(): exit(*main(args))
def loadfilecontent(self, file):
    """Read the whole bet file into ``self.raw_lines``; exits when the
    file does not exist."""
    import os
    import utils
    self._file = file
    if not os.path.isfile(file):
        utils.exit(
            'In importbets.loadfilecontent error: cannot find file %s' % file)
    # "with" guarantees the handle is closed even if readlines() fails;
    # the original leaked the handle on a read error.
    with open(file, 'r') as fid:
        self.raw_lines = fid.readlines()
def prepare_script_path(func_log, scriptdir):
    """Return the platform-specific path of the PPI listing script,
    aborting on unsupported platforms."""
    get_ppi_script_ext = None
    if utils.systemName in ('Linux', 'Darwin'):
        # use the shell variant on unix-like systems
        get_ppi_script_ext = '.sh'
    elif utils.systemName == 'Windows':
        # BUG FIX: the original tested ``in ('Windows')``, which is a
        # substring test against the *string* 'Windows' (the parentheses
        # do not make a tuple), so e.g. 'Win' would also have matched.
        get_ppi_script_ext = '.bat'
    else:
        func_log.critical('This platform is not supported!')
        utils.exit()
    ppi_script_filename = 'list_gender_birth_guid' + get_ppi_script_ext
    ppi_script_path = utils.join(scriptdir, ppi_script_filename)
    func_log.info('ppi script is %s' % ppi_script_path)
    return ppi_script_path
def handle_profiles(name, create, remove, list_profiles):
    """Resolve the profile named ``name`` (defaulting to "default") and
    perform the requested list/create/remove action."""
    if name is None:
        name = "default"
    profile = profiles.Profile(name)
    if list_profiles:
        profile.list_all()
        utils.exit(0)
    if name == "default":
        # The default profile is created on demand, silently.
        with contextlib.suppress(FileExistsError):
            profile.create()
        return profile
    if create:
        profile.create()
        utils.exit(0)
    if not profile.exists():
        log.critical("Profile {} doesn't exist.".format(profile.name))
        utils.exit(1)
    if remove:
        print(
            "Are you sure you want to remove profile {}?\n"
            "All contents of {} will be deleted!".format(profile.name, profile.dir_path)
        )
        reply = input("y/N: ").strip().lower()
        if reply in ("y", "yes"):
            profile.remove()
        else:
            print("Doing nothing.")
        utils.exit(0)
    return profile
def compute_clusters(self, indent, geo_sw_ne, max_diameter_km, geo_hash_aggregations):
    """Cluster geohash aggregation buckets for the given viewport and
    return the clustered points."""
    geo_bounds = geo_sw_ne.to_geo_bounds()
    enter(indent, 'GeoClustering.compute_clusters', '')
    # Reuse the bounds computed above; the original converted twice and
    # discarded the first result.
    result = self.compute_clusters_with_geohash_precision(
        indent + 1, max_diameter_km, geo_bounds, geo_hash_aggregations)
    exit(indent, 'GeoClustering.compute_clusters', '')
    return result
def mainloop(options, profile):
    # Top-level retry loop: clears (or keeps) the board cache, then runs
    # main() forever, backing off after repeated download failures.
    log.info("Executing mainloop() ...")
    board_cache_dir_path = os.path.join(options["cache_dir_path"], "boards")
    if options["use_cache_on_first_run"]:
        log.info(
            "--use-cache flag enabled, not removing the "
            " contents of cache directory {}.".format(board_cache_dir_path)
        )
    else:
        try:
            utils.delete_files_in_dir(board_cache_dir_path)
        except FileNotFoundError:
            # A missing cache directory on first run is not an error.
            log.info(
                "FileNotFoundError while trying to remove the contents "
                "of cache directory {}.".format(board_cache_dir_path)
            )
        else:
            log.info("Successfully removed contents of cache directory {}.".format(board_cache_dir_path))
    dl_error_count = 0
    while True:
        # After 10 consecutive download failures, pause before retrying.
        if dl_error_count >= 10:
            print()
            error_wait_seconds = 60
            log.warning(
                "Failed to fetch data 10 times, waiting "
                "{} seconds before retrying ...".format(error_wait_seconds)
            )
            utils.sleep(error_wait_seconds)
            dl_error_count = 0
        utils.clear_terminal()
        log.info("Executing main() ...")
        try:
            main(options, profile)
        except core.DownloadError as err:
            # Transient network failure: count it and retry immediately.
            dl_error_count += 1
            log.error("{}. Retrying ...".format(err))
            continue
        except core.ConfigError as err:
            # Broken configuration is fatal.
            log.critical("{}.".format(err))
            utils.exit(1)
        else:
            log.info("main() executed successfully.")
        if options["single_run"]:
            log.info("--single-run flag enabled, exiting ...")
            print()
            utils.exit(0)
        # Successful pass: reset the failure counter, wait for the next
        # refresh and clear the per-run board cache.
        dl_error_count = 0
        utils.sleep(options["refresh_time"])
        utils.delete_files_in_dir(board_cache_dir_path)
def get_map():
    """HTTP endpoint: compute clustered map markers for the requested
    viewport and optional operator/kW/connection-type filters."""
    indent = 0
    swLatitude = float(request.args['swLatitude'])
    swLongitude = float(request.args['swLongitude'])
    neLatitude = float(request.args['neLatitude'])
    neLongitude = float(request.args['neLongitude'])
    markerDiameterKM = float(request.args['markerDiameterKM'])
    # "key in mapping" replaces has_key(), which was removed in
    # Python 3; "in" works identically on Python 2 as well.
    if 'operators' in request.args:
        operators = request.args['operators'].split(",")
    else:
        operators = []
    if 'minKw' in request.args:
        min_kw = float(request.args['minKw'])
    else:
        min_kw = -1
    if 'maxKw' in request.args:
        max_kw = float(request.args['maxKw'])
    else:
        max_kw = -1
    # Only build a kW filter when both ends of the range were supplied.
    if min_kw >= 0 and max_kw >= 0:
        kw_range = Range(min_kw, max_kw)
    else:
        kw_range = None
    if 'connectionTypes' in request.args:
        connection_types = request.args['connectionTypes'].split(",")
    else:
        connection_types = []
    geo_sw_ne = GeoSwNe(swLatitude, swLongitude, neLatitude, neLongitude)
    result = get_map_params(indent + 1, geo_sw_ne, markerDiameterKM,
                            operators, kw_range, connection_types)
    exit(indent, 'get_map', '')
    return result
def statistics(db, typ, ID, city, tdate, function=False, betdata=False):
    """Apply ``function`` (min/max/np.median/...) per day and parameter
    over bet data.

    Args:
        db: database handle providing get_parameter_names/get_parameter_id/
            get_bet_data.
        betdata: optional pre-fetched data, a list of two per-day dicts
            mapping parameter name to values; when falsy the data is
            loaded from the database.

    Returns:
        list or bool: two dicts (day 1, day 2) mapping parameter name to
        the aggregated value, or False when no data is available.
    """
    if not function:
        utils.exit("No function given (min/max/np.median/...)")
    # BUG FIX: ``bet`` was never initialised in the original, so the
    # first assignment (and the final return) raised NameError.
    bet = [{}, {}]
    params = db.get_parameter_names()
    for day in range(1, 3):
        for param in params:
            paramID = db.get_parameter_id(param)
            if not betdata:
                data = db.get_bet_data(typ, ID, city['ID'], paramID, tdate, day)
            else:
                data = betdata[day - 1][param]
            # A boolean result signals "no data" from the data source.
            if type(data) == bool:
                return False
            bet[day - 1][param] = function(data)
    return bet
def __insert_bet_to_db__(self, userID, paramID, tdate, bdate, value):
    """Insert or update a single bet value (stored as int(value*10))."""
    from pywetterturnier import utils
    # 'x' marks "no bet" and is silently skipped.
    if value == 'x':
        return
    try:
        value = int(round(float(value) * 10.))
    except (TypeError, ValueError):
        # Narrowed from a bare except: only conversion failures should
        # abort with this message; other errors must propagate.
        utils.exit('could not convert value to float')
    sql = 'INSERT INTO ' + self.db_bets + ' (userID, cityID, paramID, ' + \
          'tdate, betdate, value) VALUES ' + \
          '({0:d},{1:d},{2:d},{3:d},{4:d},{5:d}) ON DUPLICATE KEY UPDATE value = {5:d}' \
          .format(int(userID), int(self.cityID), int(paramID), int(tdate), int(bdate), value)
    cur = self.db.cursor()
    cur.execute(sql)
def adapter_connect(host, mode, retry_timeout, raise_ex=False):
    """ Connect to backend. """
    if host is None:
        utils.exit(utils.ENVIRONMENT_ENTRY_NOT_FOUND,
                   'settings["backend_host"] missing - please, specify it in settings.py.')
    try:
        _logger.info("Connecting to indexer [%s, %s].", host, mode)
        return solr.SolrInterface(host, mode=mode, retry_timeout=retry_timeout)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate instead of being swallowed.
        _logger.exception("Connecting to indexer failed.")
        if raise_ex:
            raise
    return None
def download_images(dataset_name):
    """Fetch the image archive for ``dataset_name`` from Google Drive,
    unzip it next to the data, and delete the archive."""
    file_id = ""
    if dataset_name == 'flickr8k':
        file_id = '16FR_-ftYTX4kyf81jwDnX9oM3K5GA3DL'
    else:
        # Only flickr8k is hosted; anything else must be fetched by hand.
        exit('[ERROR] Images not found on Google Drive. Try manually downloading them.', -1)
    images_path = os.path.join('..', 'data', dataset_name, 'images.zip')
    gdd.download_file_from_google_drive(file_id, dest_path=images_path, unzip=True)
    os.remove(images_path)
    exit('[SUCCESS] ' + dataset_name + ' images have been downloaded', 1)
def parse_command_line(env):
    """ Command line. """
    import getopt
    opts = None
    try:
        # Long-option-only parsing (no short options); leftover
        # positionals are ignored.
        options = [
            "help", "dataset=", "operation=", "count=", "verbose",
            "debug", "production", "input=", "parallel"
        ]
        input_options = sys.argv[1:]
        opts, _1 = getopt.getopt(input_options, "", options)
    except getopt.GetoptError, e:
        # Python 2 except syntax; print usage, then abort with the
        # parse-error status code.
        _help(None)
        utils.exit(utils.ARGS_PARSE_ERROR, str(e))
def main():
    """fis CLI entry point: parse arguments and dispatch to the chosen
    subcommand handler, printing help when called bare."""
    parser = get_parser()
    if len(sys.argv) <= 1:
        parser.print_help()
        return
    argv = list(map(encode.convert_to_unicode, sys.argv[1:]))
    args = parser.parse_args(argv)
    if args.subcmd.startswith('fpga-image'):
        # Subcommands that touch the service need a verified config.
        config.read_config_and_verify()
    elif args.subcmd == 'help':
        args.subcommands = subcommands
        args.parser = parser
    try:
        args.func(args)
    except Exception as e:
        utils.exit('Error: %s' % encode.exception_to_unicode(e))
def get_filepath(self, save=False, title='open file', filetype='file',
                 quit=True, allownew=True, **kwargs):
    """Prompt the user for a file path via a dialog and store it on self.

    ``save`` selects a save-as dialog instead of an open dialog.
    ``filetype`` is used in error messages and as the attribute name the
    chosen path is stored under. Extra kwargs (defaultextension,
    initialdir, ...) are forwarded to the dialog. When the user cancels,
    the program quits.
    NOTE(review): the ``quit``/``allownew`` parameters are currently
    unused by the body — confirm intended behaviour.

    TODO: figure out how to disallow new files being made/ allow
    """
    # File types are hard coded to csv/tsv.
    if save:
        fpath = utils.asksaveasfilename(
            title=title,
            filetypes=(("csv files", "*.csv"), ("all files", "*.*")),
            **kwargs)
    else:
        fpath = utils.askopenfilename(
            title=title,
            filetypes=(("all files", "*.*"),
                       ("tsv files", "*.tsv"),
                       ("csv files", "*.csv"),),
            **kwargs)
    # An empty path means the dialog was cancelled.
    if fpath == '' or len(fpath) == 0:
        print('no %s file selected. quitting' % filetype)
        utils.exit()
    setattr(self, filetype, fpath)
    man_log.debug('selected %s to be %s.' % (filetype, fpath))
    return fpath
def process_image(path, bands=None, verbose=False, pansharpen=False, ndvi=False,
                  force_unzip=None, ndvigrey=False, bounds=None):
    """ Handles constructing and image process.

    :param path: The path to the image that has to be processed
    :type path: String
    :param bands: List of bands that has to be processed. (optional)
    :type bands: List
    :param verbose: Sets the level of verbosity. Default is False.
    :type verbose: boolean
    :param pansharpen: Whether to pansharpen the image. Default is False.
    :type pansharpen: boolean

    :returns: (String) path to the processed image
    """
    try:
        bands = convert_to_integer_list(bands)
        # Arguments common to every processor variant.
        shared = dict(dst_path=settings.PROCESSED_IMAGE, verbose=verbose,
                      force_unzip=force_unzip, bounds=bounds)
        if pansharpen:
            p = PanSharpen(path, bands=bands, **shared)
        elif ndvigrey:
            p = NDVI(path, **shared)
        elif ndvi:
            p = NDVIWithManualColorMap(path, **shared)
        else:
            p = Simple(path, bands=bands, **shared)
    except IOError as e:
        exit(e.message, 1)
    except FileDoesNotExist as e:
        exit(e.message, 1)
    return p.run()
def __download_file__(self): """Downloading the html file and store locally """ import subprocess as sub import utils print ' * Downloading file %s' % self.rawfile cmd = ['wget','http://www.wetterturnier.de/mitteltipps.php', '-O',self.rawfile] p = sub.Popen(cmd,stdout=sub.PIPE,stderr=sub.PIPE) p.communicate() # - Error? if not p.returncode == 0: print 'ERROR: %s' % err print 'OUTPUT: %s' % out utils.exit('Problems downloading the mitteltipps file')
def get_filepath(self, save=False, title='open file', filetype='file',
                 quit=True, allownew=True, **kwargs) -> str:
    """Ask the user for a file path with a dialog and remember it.

    When ``save`` is true a save-as dialog is shown, otherwise an open
    dialog. ``filetype`` names the attribute the path is stored under
    and labels error messages. Remaining kwargs (defaultextension,
    initialdir, initialfile, ...) pass straight to the dialog. A
    cancelled dialog quits the program.
    NOTE(review): ``quit``/``allownew`` are accepted but unused by the
    body — confirm intended behaviour.

    TODO: figure out how to disallow new files being made/ allow
    """
    # Dialog file types are hard coded to csv/tsv.
    dialog = utils.asksaveasfilename if save else utils.askopenfilename
    if save:
        filetypes = (("csv files", "*.csv"), ("all files", "*.*"))
    else:
        filetypes = (("all files", "*.*"),
                     ("tsv files", "*.tsv"),
                     ("csv files", "*.csv"),)
    fpath = dialog(title=title, filetypes=filetypes, **kwargs)
    # Empty result means the user cancelled the dialog.
    if fpath == '' or len(fpath) == 0:
        print('no %s file selected. quitting' % filetype)
        utils.exit()
    setattr(self, filetype, fpath)
    man_log.debug('selected %s to be %s.' % (filetype, fpath))
    return fpath
def adapter_connect(host, mode, retry_timeout, raise_ex=False):
    """Connect to the backend indexer; returns None on failure unless
    ``raise_ex`` is set, in which case the error is re-raised."""
    if host is None:
        utils.exit(
            utils.ENVIRONMENT_ENTRY_NOT_FOUND,
            'settings["backend_host"] missing - please, specify it in settings.py.'
        )
    try:
        _logger.info("Connecting to indexer [%s, %s].", host, mode)
        return solr.SolrInterface(host, mode=mode, retry_timeout=retry_timeout)
    except:
        _logger.exception("Connecting to indexer failed.")
        if raise_ex:
            raise
        return None
def __connect__(self):
    """Open database connection.

    In case python is not able to open the database connection
    the script will stop immediately.

    Returns:
        MySQLdb: Database handler object.
    """
    print ' * Establishing database connection'
    # Connection parameters all come from the loaded config dict.
    host = self.config['mysql_host']
    user = self.config['mysql_user']
    passwd = self.config['mysql_pass']
    db = self.config['mysql_db']
    try:
        res = MySQLdb.connect(host=host, user=user, passwd=passwd, db=db)
    except Exception as e:
        # Any connection problem is fatal for the whole script.
        print e
        utils.exit("Could not connect to the database. Stop.")
    return res
def validate_cli_arguments(self):
    """
    Checks for obvious conflicts in options given by the user.
    If any are found, stops execution via ``exit(bad=True)``.
    """
    help = "Try -h for help information."  # NOTE(review): shadows the builtin ``help``
    log_msg = "Exiting. Failed validate_cli_arguments()"

    # The requested license must be one this tool knows how to apply.
    if self.args.license not in self.supported_licenses:
        print "You have entered an invalid license name."
        print help
        logging.error("License not supported.")
        logging.error(log_msg)
        exit(bad=True)

    # Adding and removing headers at the same time is contradictory.
    if self.args.remove_headers and self.args.add_headers:
        print "You have used conflicting header options."
        print help
        logging.error("Cannot simultaneously add and remove headers.")
        logging.error(log_msg)
        exit(bad=True)
def process_image(path, bands=None, verbose=False, pansharpen=False, force_unzip=None, ndvi=False, cloudmask=False):
    """ Handles constructing and image process.

    :param path: The path to the image that has to be processed
    :type path: String
    :param bands: List of bands that has to be processed. (optional)
    :type bands: List
    :param verbose: Sets the level of verbosity. Default is False.
    :type verbose: boolean
    :param pansharpen: Whether to pansharpen the image. Default is False.
    :type pansharpen: boolean

    :returns: (String) path to the processed image
    """
    # A string ``ndvi`` (the NDVI mode name) switches the whole run into
    # NDVI mode, which always works on bands 4 and 5.
    ndvi_mode = isinstance(ndvi, str)
    try:
        bands = convert_to_integer_list(bands)
        selected = [4, 5] if ndvi_mode else bands
        p = Process(path, bands=selected, verbose=verbose, force_unzip=force_unzip)
    except IOError:
        exit("Zip file corrupted", 1)
    except FileDoesNotExist as e:
        exit(e.message, 1)

    if ndvi_mode:
        return [p.run_ndvi(mode=ndvi, cmask=cloudmask)]
    return [p.run_rgb(pansharpen)]
def parse_command_line( env ):
    """ Command line.

    Parses sys.argv with getopt (long options only); on a parse error
    prints the usage text via _help and exits with ARGS_PARSE_ERROR.
    """
    import getopt
    opts = None
    try:
        # Long-form options only -- the short-option string is empty.
        options = ["help", "dataset=", "operation=", "count=", "verbose",
                   "debug", "production", "input=", "parallel"]
        input_options = sys.argv[1:]
        opts, _1 = getopt.getopt(input_options, "", options)
    except getopt.GetoptError, e:
        _help(None)
        utils.exit(utils.ARGS_PARSE_ERROR, str(e))
def main(options, profile):
    """Run one scan: fetch matching threads for every match group, print
    them, persist seen thread URLs, and print a summary.

    ``options`` is the dict built by process_argv; ``profile`` supplies the
    per-profile directories.  Exits via utils.exit(1) on a config error.
    """
    try:
        match_groups = parse_config.parse_file(os.path.join(options["main_dir_path"], "chtf-conf.yaml"))
    except parse_config.ConfigError as err:
        log.critical("{}.".format(err))
        utils.exit(1)
    if options["output_oneline"]:
        # Pad every group name to the longest one so columns line up.
        longest_name_len = len(max([match_group["name"] for match_group in match_groups], key=len))
    # URLs that matched on the previous run (used to mark "new" threads).
    found_threads_urls_prev = []
    url_cache_file_path = os.path.join(options["cache_dir_path"], "prev_urls")
    with contextlib.suppress(FileNotFoundError):
        for line in utils.read_lines_from_file(url_cache_file_path):
            found_threads_urls_prev.append(line.strip())
    all_found_threads_amount = 0
    new_found_threads_amount = 0
    colour = TerminalColour(options["colour_output"])
    board_cache_dir_path = os.path.join(options["cache_dir_path"], "boards")
    found_threads_urls = []
    date_start = datetime.datetime.now()
    for match_group, threads in core.generate_threads(
        options["chan"], match_groups, board_cache_dir_path, options["core_dl_sleep_time"]
    ):
        if options["output_oneline"]:
            oneline_match_name = "{:{}} ".format(match_group["name"], longest_name_len)
            print(" {}".format(oneline_match_name), end="")
        else:
            oneline_match_name = ""
            print(":: {}".format(match_group["name"]), end="")
        sys.stdout.flush()
        found_threads_amount = 0
        for thread in threads:
            log.debug("Thread {} matches keyword {}.".format(thread["url_short"], thread["matching_keyword"]))
            found_threads_amount += 1
            if options["no_duplicate_threads"]:
                # NOTE(review): the duplicate is appended again before
                # skipping -- looks intentional for later counting, but
                # confirm against the original file.
                if thread["url_short"] in found_threads_urls:
                    found_threads_urls.append(thread["url_short"])
                    continue
            if not options["output_oneline"] and found_threads_amount == 1:
                print()
            else:
                utils.clear_terminal_line()
            found_threads_urls.append(thread["url_short"])
            thread_date = datetime.datetime.fromtimestamp(thread["timestamp"])
            term_len = utils.get_terminal_line_len()
            output_prefix = " "
            if thread["url_short"] not in found_threads_urls_prev:
                # Highlight threads not seen on the previous run.
                output_prefix = " {}!{} ".format(colour.get("IGreen"), colour.get("Reset"))
                if options["colour_output"]:
                    # The colour code len for the new thread indicator ("!").
                    term_len += 11
            output_page = thread["page"]
            if options["colour_output"]:
                # Green for front pages, yellow for middle, red when the
                # thread is about to fall off the board.
                if output_page <= 3:
                    page_colour = colour.get("IGreen")
                elif output_page <= 7:
                    page_colour = colour.get("IYellow")
                elif output_page >= 8:
                    page_colour = colour.get("IRed")
            else:
                page_colour = ""
            output = (
                "{prefix}{match_name}/{board:<3} {date} {replies:<3} "
                "{page_col}{page:<2}{reset} {url:<45} ".format(
                    board=thread["board"] + "/",
                    replies=thread["replies"],
                    page=output_page,
                    url=thread["url"],
                    page_col=page_colour,
                    date=utils.pretty_date_delta(thread_date),
                    reset=colour.get("Reset"),
                    prefix=output_prefix,
                    match_name=oneline_match_name,
                )
            )
            thread_subject = thread.get("subject", False)
            if thread_subject:
                # Force ASCII, strip HTML and unescape entities for terminal output.
                thread_subject = thread_subject.encode("ascii", "replace").decode("ascii", "replace")
                thread_subject = utils.strip_html_tags(thread_subject)
                thread_subject = html.unescape(thread_subject)
                output += "sub: {}".format(thread_subject)
            thread_comment = thread.get("comment", False)
            if thread_comment:
                thread_comment = thread_comment.encode("ascii", "replace").decode("ascii", "replace")
                thread_comment = thread_comment.replace("<br>", " ")
                thread_comment = utils.strip_html_tags(thread_comment)
                thread_comment = html.unescape(thread_comment)
                if thread_subject:
                    output += " | "
                output += "com: {}".format(thread_comment)
            if options["colour_output"]:
                # The colour code len for the page number.
                term_len += 11
            # Truncate to one terminal line.
            print(output[:term_len])
            if thread["url_short"] not in found_threads_urls_prev:
                new_found_threads_amount += 1
            if match_group["urlsavelast"]:
                # Append the url to a per-group, per-day history file.
                urlsavelast_dir_path = os.path.join(
                    options["main_dir_path"],
                    "urls_last",
                    match_group["name"],
                    date_start.strftime("%Y"),
                    date_start.strftime("%Y-%m"),
                )
                with contextlib.suppress(FileExistsError):
                    os.makedirs(urlsavelast_dir_path)
                urlsavelast_file_path = os.path.join(urlsavelast_dir_path, date_start.strftime("%Y-%m-%d"))
                utils.append_data_to_file(thread["url"] + "\n", urlsavelast_file_path)
                log.info("Saved thread url {} to file {}".format(thread["url"], urlsavelast_file_path))
            if match_group["browser"]:
                try:
                    utils.open_in_web_browser(thread["url"])
                except utils.Error as err:
                    log.error("{}.".format(err))
        if found_threads_amount == 0:
            utils.clear_terminal_line()
            continue
        else:
            all_found_threads_amount += found_threads_amount
    # Rewrite the url cache with this run's results.
    with contextlib.suppress(FileNotFoundError):
        os.remove(url_cache_file_path)
    for url in found_threads_urls:
        utils.append_data_to_file(url + "\n", url_cache_file_path)
    # Threads that matched last run but not this one.
    dead_threads_amount = 0
    for url in found_threads_urls_prev:
        if url not in found_threads_urls:
            dead_threads_amount += 1
    date_end = datetime.datetime.now()
    date_next_refresh = date_end + datetime.timedelta(seconds=options["refresh_time"])
    print(
        "\n"
        "{} thread{}, {} unique; {} new; "
        "{} that matched on the previous run but not now.\n"
        "\n"
        "Start time: {}.\n"
        "End time: {}.\n"
        "Next refresh: {}.".format(
            all_found_threads_amount,
            "" if all_found_threads_amount == 1 else "s",
            len(set(found_threads_urls)),
            "no" if new_found_threads_amount == 0 else new_found_threads_amount,
            dead_threads_amount,
            date_start.strftime("%Y-%m-%d %H:%M:%S"),
            date_end.strftime("%Y-%m-%d %H:%M:%S"),
            date_next_refresh.strftime("%Y-%m-%d %H:%M:%S"),
        ),
        end="",
    )
def run_rgb(self, pansharpen=True):
    """ Executes the image processing.

    :param pansharpen: Whether the process should also run pansharpenning. Default is True
    :type pansharpen: boolean

    :returns: (String) the path to the processed image
    """
    self.output("* Image processing started for bands %s" % "-".join(map(str, self.bands)), normal=True)
    # Read cloud coverage from mtl file (best effort; 0 if absent/unreadable).
    cloud_cover = 0
    try:
        with open(self.scene_path + "/" + self.scene + "_MTL.txt", "rU") as mtl:
            lines = mtl.readlines()
            for line in lines:
                if "CLOUD_COVER" in line:
                    cloud_cover = float(line.replace("CLOUD_COVER = ", ""))
                    break
    except IOError:
        pass
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        with rasterio.drivers():
            bands = []
            # Add band 8 for pansharpenning
            if pansharpen:
                self.bands.append(8)
            bands_path = []
            for band in self.bands:
                bands_path.append(join(self.scene_path, self._get_full_filename(band)))
            try:
                for i, band in enumerate(self.bands):
                    bands.append(self._read_band(bands_path[i]))
            except IOError as e:
                exit(e.message, 1)
            src = rasterio.open(bands_path[-1])
            # Get pixel size from source
            self.pixel = src.affine[0]
            # Only collect src data that is needed and delete the rest
            src_data = {"transform": src.transform, "crs": src.crs, "affine": src.affine, "shape": src.shape}
            del src
            crn = self._get_boundaries(src_data)
            dst_shape = src_data["shape"]
            # Destination grid derived from the reprojected corner coordinates.
            dst_corner_ys = [crn[k]["y"][1][0] for k in crn.keys()]
            dst_corner_xs = [crn[k]["x"][1][0] for k in crn.keys()]
            y_pixel = abs(max(dst_corner_ys) - min(dst_corner_ys)) / dst_shape[0]
            x_pixel = abs(max(dst_corner_xs) - min(dst_corner_xs)) / dst_shape[1]
            dst_transform = (min(dst_corner_xs), x_pixel, 0.0, max(dst_corner_ys), 0.0, -y_pixel)
            # Delete crn since no longer needed
            del crn
            new_bands = []
            for i in range(0, 3):
                new_bands.append(numpy.empty(dst_shape, dtype=numpy.uint16))
            if pansharpen:
                bands[:3] = self._rescale(bands[:3])
                # Extra destination buffer for band 8.
                new_bands.append(numpy.empty(dst_shape, dtype=numpy.uint16))
            self.output("Projecting", normal=True, arrow=True)
            for i, band in enumerate(bands):
                self.output("band %s" % self.bands[i], normal=True, color="green", indent=1)
                reproject(
                    band,
                    new_bands[i],
                    src_transform=src_data["transform"],
                    src_crs=src_data["crs"],
                    dst_transform=dst_transform,
                    dst_crs=self.dst_crs,
                    resampling=RESAMPLING.nearest,
                )
            # Bands are no longer needed
            del bands
            if pansharpen:
                new_bands = self._pansharpenning(new_bands)
                # Drop band 8 appended above so the filename reflects RGB only.
                del self.bands[3]
            self.output("Final Steps", normal=True, arrow=True)
            output_file = "%s_bands_%s" % (self.scene, "".join(map(str, self.bands)))
            if pansharpen:
                output_file += "_pan"
            output_file += ".TIF"
            output_file = join(self.dst_path, output_file)
            output = rasterio.open(
                output_file,
                "w",
                driver="GTiff",
                width=dst_shape[1],
                height=dst_shape[0],
                count=3,
                dtype=numpy.uint8,
                nodata=0,
                transform=dst_transform,
                photometric="RGB",
                crs=self.dst_crs,
            )
            for i, band in enumerate(new_bands):
                # Color Correction
                new_bands[i] = self._color_correction(band, self.bands[i], 0, cloud_cover)
            self.output("Writing to file", normal=True, color="green", indent=1)
            for i, band in enumerate(new_bands):
                output.write_band(i + 1, img_as_ubyte(band))
                # Free each band as soon as it is written.
                new_bands[i] = None
            return output_file
def run_ndvi(self, mode="grey", cmask=False):
    """
    :param mode: Whether to create greyscale GTiff or colorized GTiff
    :type mode string, either 'grey' or 'color'
    :param cmask: when True, also read the QA band and paint cloud/cirrus
        pixels as 255 in the NDVI output
    :returns: (String) the path to the processed image
    """
    self.output("* Image processing started for NDVI", normal=True)
    # Read radiance conversion factors from mtl file.
    # NOTE(review): if the MTL file is missing (IOError is swallowed) the
    # add_B3/mult_B3/add_B4/mult_B4 names are never bound and their use
    # below raises NameError -- confirm against the original file.
    try:
        with open(self.scene_path + "/" + self.scene + "_MTL.txt", "rU") as mtl:
            lines = mtl.readlines()
            for line in lines:
                if "REFLECTANCE_ADD_BAND_3" in line:
                    add_B3 = float(line.replace("REFLECTANCE_ADD_BAND_3 = ", ""))
                elif "REFLECTANCE_MULT_BAND_3" in line:
                    mult_B3 = float(line.replace("REFLECTANCE_MULT_BAND_3 = ", ""))
                elif "REFLECTANCE_ADD_BAND_4" in line:
                    add_B4 = float(line.replace("REFLECTANCE_ADD_BAND_4 = ", ""))
                elif "REFLECTANCE_MULT_BAND_4" in line:
                    mult_B4 = float(line.replace("REFLECTANCE_MULT_BAND_4 = ", ""))
    except IOError:
        pass
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        with rasterio.drivers():
            bands = []
            bands_path = []
            if cmask:
                # QA band carries the cloud/cirrus quality bits.
                self.bands.append("QA")
            for band in self.bands:
                bands_path.append(join(self.scene_path, self._get_full_filename(band)))
            try:
                for i, band in enumerate(self.bands):
                    bands.append(self._read_band(bands_path[i]))
            except IOError as e:
                exit(e.message, 1)
            src = rasterio.open(bands_path[-1])
            # Get pixel size from source
            self.pixel = src.affine[0]
            # Only collect src data that is needed and delete the rest
            src_data = {"transform": src.transform, "crs": src.crs, "affine": src.affine, "shape": src.shape}
            del src
            crn = self._get_boundaries(src_data)
            dst_shape = src_data["shape"]
            dst_corner_ys = [crn[k]["y"][1][0] for k in crn.keys()]
            dst_corner_xs = [crn[k]["x"][1][0] for k in crn.keys()]
            y_pixel = abs(max(dst_corner_ys) - min(dst_corner_ys)) / dst_shape[0]
            x_pixel = abs(max(dst_corner_xs) - min(dst_corner_xs)) / dst_shape[1]
            dst_transform = (min(dst_corner_xs), x_pixel, 0.0, max(dst_corner_ys), 0.0, -y_pixel)
            # Delete crn since no longer needed
            del crn
            new_bands = []
            for i in range(0, 3):
                new_bands.append(numpy.empty(dst_shape, dtype=numpy.float32))
            self.output("Projecting", normal=True, arrow=True)
            for i, band in enumerate(bands):
                self.output("band %s" % self.bands[i], normal=True, color="green", indent=1)
                reproject(
                    band,
                    new_bands[i],
                    src_transform=src_data["transform"],
                    src_crs=src_data["crs"],
                    dst_transform=dst_transform,
                    dst_crs=self.dst_crs,
                    resampling=RESAMPLING.nearest,
                )
            # Bands are no longer needed
            del bands
            self.output("Calculating NDVI", normal=True, arrow=True)
            output_file = "%s_NDVI" % (self.scene)
            # Mask pixels outside the scene tile (both bands zero).
            tilemask = numpy.empty_like(new_bands[-1], dtype=numpy.bool)
            tilemask[(new_bands[1] + new_bands[0] == 0)] = True
            # Convert DN to TOA reflectance before the NDVI ratio.
            new_bands[0] = new_bands[0] * mult_B3 + add_B3
            new_bands[1] = new_bands[1] * mult_B4 + add_B4
            ndvi = numpy.true_divide((new_bands[1] - new_bands[0]), (new_bands[1] + new_bands[0]))
            # Scale [-1, 1] -> [0, 255] for 8-bit output.
            ndvi = ((ndvi + 1) * 255 / 2).astype(numpy.uint8)
            ndvi[tilemask] = 0
            if cmask:
                # clouds are indicated when Bit 15&16 = 11 or 10
                ndvi[(new_bands[-1].astype(numpy.uint16) & 49152) >= 49152] = 255
                # cirrus are indicated when Bit 13&14 = 11 or 10
                ndvi[(new_bands[-1].astype(numpy.uint16) & 12288) >= 12288] = 255
            self.output("Final Steps", normal=True, arrow=True)
            if mode == "grey":
                output_file += "_grey.TIF"
                output_file = join(self.dst_path, output_file)
                output = rasterio.open(
                    output_file,
                    "w",
                    driver="GTiff",
                    width=dst_shape[1],
                    height=dst_shape[0],
                    count=1,
                    dtype=numpy.uint8,
                    nodata=0,
                    transform=dst_transform,
                    crs=self.dst_crs,
                )
                self.output("Writing to file", normal=True, color="green", indent=1)
                output.write_band(1, ndvi)
            elif mode == "color":
                self.output("Converting to RGB", normal=True, color="green", indent=1)
                output_file += "_color.TIF"
                output_file = join(self.dst_path, output_file)
                rgb = self._index2rgb(index_matrix=ndvi)
                del ndvi
                output = rasterio.open(
                    output_file,
                    "w",
                    driver="GTiff",
                    width=dst_shape[1],
                    height=dst_shape[0],
                    count=3,
                    dtype=numpy.uint8,
                    nodata=0,
                    transform=dst_transform,
                    photometric="RGB",
                    crs=self.dst_crs,
                )
                self.output("Writing to file", normal=True, color="green", indent=1)
                for i in range(0, 3):
                    output.write_band(i + 1, rgb[i])
                # colorbar: render the 0-255 colour ramp as a separate PNG legend.
                self.output("Creating colorbar", normal=True, color="green", indent=1)
                output_file2 = "%s_colorbar.png" % (self.scene)
                output_file2 = join(self.dst_path, output_file2)
                colorrange = numpy.array(range(0, 256))
                colorbar = self._index2rgb(index_matrix=colorrange)
                rgbArray = numpy.zeros((1, 256, 3), "uint8")
                rgbArray[..., 0] = colorbar[0]
                rgbArray[..., 1] = colorbar[1]
                rgbArray[..., 2] = colorbar[2]
                rgbArray = rgbArray.repeat(30, 0)
                cbfig = pyplot.figure(figsize=(10, 1.5))
                image = pyplot.imshow(rgbArray, extent=[-1, 1, 0, 1], aspect=0.1)
                pyplot.xticks(numpy.arange(-1, 1.1, 0.2))
                pyplot.xlabel("NDVI")
                pyplot.yticks([])
                pyplot.savefig(output_file2, dpi=300, bbox_inches="tight", transparent=True)
            return output_file
            # NOTE(review): continuation of a function whose ``def`` is above
            # this chunk -- tail of a Process construction inside a try.
            bounds=bounds,
        )
    except IOError:
        exit("Zip file corrupted", 1)
    except FileDoesNotExist as e:
        exit(e.message, 1)

    return p.run()


def __main__():
    """Script entry point: parse CLI args and dispatch to main()."""
    global parser
    parser = args_options()
    args = parser.parse_args()
    if args.subs == "search":
        if args.json:
            # JSON output is printed raw, without the timer wrapper.
            print main(args)
            sys.exit(0)
        # NOTE(review): attachment of this ``else`` is ambiguous in the
        # collapsed source; it most plausibly pairs with ``if args.json``.
        else:
            with timer():
                exit(*main(args))


if __name__ == "__main__":
    try:
        __main__()
    except (KeyboardInterrupt, pycurl.error):
        exit("Received Ctrl + C... Exiting! Bye.", 1)
def main():
    """Interactive Launchcraft installer: builds a custom Minecraft profile,
    optionally installing Forge, mods, resource packs and shader packs.

    Exits via utils.exit() on any unrecoverable condition.
    """
    # Refuse to run with an outdated script version.
    if utils.DATA['version'] != VERSION:
        print('Your version of Launchcraft ({}) does not match the minimum version of Launchcraft ({}). Please update.'.format(VERSION, utils.DATA['version']))
        utils.exit()
    print('This script will ask you yes or no questions.')
    print('Any answers in square brackets (e.g. [1.7.10]), or that are capitalized (e.g. [Y/n]) are the default answers, and will be selected when you press enter.')
    utils.print_separator()
    version = raw_input('Which version of Minecraft would you like to use? [1.7.10]: ').lower()
    if version == '':
        version = '1.7.10'
    if version not in utils.DATA['versions']:
        print("Invalid version selected.")
        utils.exit()
    # Mod metadata for the chosen Minecraft version.
    utils.MODS = utils.DATA['versions'][version]
    JAR_DIR = os.path.join(VERSIONS_DIR, version)
    FORGE_VERSION = '{}-Forge{}'.format(version, utils.MODS['mods']['forge']['version'])
    FORGE_DIR = os.path.join(VERSIONS_DIR, FORGE_VERSION)
    print('Entering directory "{}".'.format(MINECRAFT_DIR))
    try:
        os.chdir(MINECRAFT_DIR)
    except:
        print('Failed to enter minecraft directory, please install minecraft first.')
        utils.exit()
    utils.print_separator()
    # Set the directory to which the custom profile will be installed.
    profile_name = raw_input('What would you like to call the profile being created? [launchcraft]: ').lower()
    if profile_name == '':
        profile_name = 'launchcraft'
    PROFILE_DIR = os.path.join(VERSIONS_DIR, profile_name)
    print('Creating profile {}'.format(profile_name))
    # Delete the old profile directory so we can start from scratch.
    try:
        shutil.rmtree(PROFILE_DIR)
        print('Removed old profile directory.')
    except OSError as ex:
        if ex.errno == errno.ENOENT:
            # Nothing to delete -- fine on a first run.
            print('No old profile directory found.')
        else:
            print(ex)
            print('Failed to remove old profile directory, exiting...')
            utils.exit()
    utils.print_separator()
    forge = utils.query_yes_no('Would you like to use Forge?', default='no')
    if forge:
        if os.path.exists(FORGE_DIR):
            print('The required Forge version has been detected on your system.')
            message = 'reinstall'
        else:
            print('The required Forge version has not been detected on your system.')
            message = 'install'
        # Ask the user whether or not they need Forge.
        if utils.query_yes_no('Do you need to {} Forge?'.format(message), default='no'):
            forge = utils.MODS['mods']['forge']
            name = forge['name']
            version = forge['version']
            jarName = 'forge.jar'
            if sys.platform == 'win32' or sys.platform == 'cygwin':
                os.chdir(BASE_DIR)
            # Download the Forge installer.
            print('Downloading {} version {}'.format(name, version))
            utils.downloadFile(forge['url'], jarName)
            if sys.platform == 'win32' or sys.platform == 'cygwin':
                # On Windows the user must run the installer manually.
                print('You must now run the {} that has been downloaded to your Launchcraft directory.'.format(jarName))
                utils.exit()
            else:
                # Run the installer so the user can install Forge.
                print('You will now be asked to install Forge version {}.'.format(version))
                with open(os.devnull, 'w') as devnull:
                    subprocess.call('java -jar {}'.format(jarName), shell=True, stdout=devnull)
                os.remove(jarName)
    utils.print_separator()
    JAR_FILE = os.path.join(PROFILE_DIR, '{}.jar'.format(profile_name))
    JSON_FILE = os.path.join(PROFILE_DIR, '{}.json'.format(profile_name))
    if forge:
        # Forge-based profile: copy the Forge version dir and rename its jar.
        print('Using Forge {} as the base for the profile'.format(utils.MODS['mods']['forge']['version']))
        if not os.path.exists(MOD_DIR):
            os.makedirs(MOD_DIR)
        utils.INSTALLED_MODS.append('forge')
        JAR_DIR = FORGE_DIR
        print('Creating new profile directory.')
        shutil.copytree(FORGE_DIR, PROFILE_DIR)
        print('Renaming Forge jar.')
        shutil.move(os.path.join(PROFILE_DIR, '{}.jar'.format(FORGE_VERSION)), JAR_FILE)
        SOURCE_JSON_FILE = '{}.json'.format(FORGE_VERSION)
        print('Entering newly created profile directory.')
        os.chdir(PROFILE_DIR)
    else:
        # Vanilla profile: download jar/json straight from Mojang's S3.
        print('Using Minecraft {} as the base for the profile'.format(version))
        # Create the profile directory.
        try:
            print('Creating new profile directory.')
            os.makedirs(PROFILE_DIR)
        except OSError as ex:
            print(ex)
            print('Failed to create new profile directory, exiting...')
            utils.exit()
        print('Entering newly created profile directory.')
        os.chdir(PROFILE_DIR)
        print('Downloading "{0}.jar" and "{0}.json".'.format(version))
        utils.downloadFile('https://s3.amazonaws.com/Minecraft.Download/versions/{0}/{0}.jar'.format(version), '{}.jar'.format(profile_name))
        utils.downloadFile('https://s3.amazonaws.com/Minecraft.Download/versions/{0}/{0}.json'.format(version), '{}.json'.format(version))
        SOURCE_JSON_FILE = '{}.json'.format(version)
    # Rewrite the version json under the new profile id.
    print('Creating "{}.json".'.format(profile_name))
    with open('{}'.format(SOURCE_JSON_FILE), "r") as file:
        data = json.load(file)
    data['id'] = profile_name
    with open(JSON_FILE, "w") as file:
        json.dump(data, file, indent=4)
    print('Deleting "{}".'.format(SOURCE_JSON_FILE))
    os.remove(SOURCE_JSON_FILE)
    utils.print_separator()
    if utils.query_yes_no('Do you want to install mods?', default='no'):
        print('Which mods would you like to install?')
        toInstall = utils.printAskOptions(utils.MODS['mods'])
        print('Installing mods.')
        print('')
        for mod in toInstall:
            modData = utils.MODS['mods'][mod]
            skip = False
            conflicts = [i for i in modData['conflicts'] if i in utils.INSTALLED_MODS]
            if mod == 'forge':
                # Forge was handled above, never as a regular mod.
                continue
            # Do not install forge-dependant mods if Forge is not installed.
            if 'forge' in modData['deps'] and 'forge' not in utils.INSTALLED_MODS:
                print('Skipping {} due to missing Forge'.format(modData['name']))
                skip = True
            # Skip conflicting mods
            elif conflicts:
                conflicting_mods = ""
                for i in conflicts:
                    conflicting_mods += utils.MODS['mods'][i]['name'] + ", "
                print('Skipping {} because it conflicts with {}'.format(modData['name'], conflicting_mods[:-2]))
                skip = True
            if skip:
                print('')
                continue
            utils.installDep(mod, JAR_FILE)
            print('')
        utils.removeMETAINF(JAR_FILE)
        utils.print_separator()
    if utils.query_yes_no('Do you want to install texture packs?', default='no'):
        if not os.path.exists(RESOURCEPACK_DIR):
            os.makedirs(RESOURCEPACK_DIR)
        print("What texture packs would you like to install?")
        toInstall = utils.printAskOptions(utils.MODS['resourcepacks'])
        print('Installing resourcepacks.')
        print('')
        for pack in toInstall:
            packData = utils.MODS['resourcepacks'][pack]
            utils.installResourcePack(pack)
            print('')
        utils.print_separator()
    if utils.query_yes_no('Do you want to install shader packs?', default='no'):
        if not os.path.exists(SHADERPACK_DIR):
            os.makedirs(SHADERPACK_DIR)
        print("What shader packs would you like to install?")
        toInstall = utils.printAskOptions(utils.MODS['shaderpacks'])
        print('Installing shaderpacks.')
        print('')
        for pack in toInstall:
            packData = utils.MODS['shaderpacks'][pack]
            utils.installShaderPack(pack)
            print('')
        utils.print_separator()
    print('Completed successfully!')
    utils.exit()


# NOTE(review): placement of this tail is ambiguous in the collapsed source;
# it reads as the script's final pause-before-exit at module level.
try:
    input('Press any key to exit...')
except:
    pass
        # NOTE(review): continuation of a function whose ``def`` is above this
        # chunk -- the else-branch of a try that builds the processor object.
        else:
            p = Simple(path, bands=bands, dst_path=settings.PROCESSED_IMAGE, verbose=verbose, force_unzip=force_unzip, bounds=bounds)
    except IOError:
        exit('Zip file corrupted', 1)
    except FileDoesNotExist as e:
        exit(e.message, 1)

    return p.run()


def __main__():
    """Script entry point: parse CLI args and dispatch to main()."""
    global parser
    parser = args_options()
    args = parser.parse_args()
    if args.subs == 'search':
        if args.json:
            # JSON output is printed raw, without the timer wrapper.
            print main(args)
            sys.exit(0)
        # NOTE(review): attachment of this ``else`` is ambiguous in the
        # collapsed source; it most plausibly pairs with ``if args.json``.
        else:
            with timer():
                exit(*main(args))


if __name__ == "__main__":
    try:
        __main__()
    except (KeyboardInterrupt, pycurl.error):
        exit('Received Ctrl + C... Exiting! Bye.', 1)
    # NOTE(review): continuation of a function whose ``def`` is above this
    # chunk (a file-logging setup routine); ``main_dir_path``,
    # ``file_formatter``, ``log_info`` and ``log_debug`` are its locals/params.
    if log_info:
        # INFO-and-above messages go to info.log inside the profile dir.
        info_file_path = os.path.join(main_dir_path, "info.log")
        info_file_handler = logging.FileHandler(info_file_path)
        info_file_handler.setLevel(logging.INFO)
        info_file_handler.setFormatter(file_formatter)
        log.addHandler(info_file_handler)
    if log_debug:
        # DEBUG-and-above messages go to debug.log.
        debug_file_path = os.path.join(main_dir_path, "debug.log")
        debug_file_handler = logging.FileHandler(debug_file_path)
        debug_file_handler.setLevel(logging.DEBUG)
        debug_file_handler.setFormatter(file_formatter)
        log.addHandler(debug_file_handler)


# Top-level driver: parse docopt argv, configure logging, run the main loop.
try:
    argv = docopt.docopt(__doc__, version="0.0.0")
    setup_terminal_logging(argv["--info"])
    profile = handle_profiles(argv["<profile>"], argv["add"], argv["remove"], argv["list-profiles"])
    options = process_argv(argv, profile)
    setup_file_logging(options["main_dir_path"], argv["--log-info"], argv["--log-debug"])
    log.debug("argv: {}.".format(argv))
    log.info("Profile {}, dir: {}.".format(profile.name, profile.dir_path))
    log.info("Main dir: {}".format(options["main_dir_path"]))
    log.debug("Options: {}.".format(options))
    mainloop(options, profile)
except KeyboardInterrupt:
    log.info("KeyboardInterrupt was raised, exiting ...")
    print("\nInterrupted by the user, exiting ...")
    utils.exit(1)
from ast import literal_eval
from os import stat
import utils
import logging
from __init__ import secretdir,scriptdir
from loggers import func_log

# Name of the temporary file holding personal (PPI) data.
PPIfilename = "coinsPersonal.tmp"

# Pick the platform-specific helper-script extension.
get_ppi_script_ext = None
if utils.systemName in ('Linux', 'Darwin'):
    # Unix-like platforms use the shell script variant.
    get_ppi_script_ext = '.sh'
elif utils.systemName in ('Windows'):
    # NOTE(review): ('Windows') is a plain string, not a 1-tuple, so this is a
    # substring test ('Win' would also match) -- should be ('Windows',).
    get_ppi_script_ext = '.bat'
else:
    func_log.critical('This platform is not supported!')
    utils.exit()

get_ppi_script_filename = 'list_gender_birth_guid' + get_ppi_script_ext
get_ppi_script_path = utils.join(scriptdir, get_ppi_script_filename)


class UrsiDataManager(object):
    def __init__(self, secret_dir_path, first_time_enter, PPIfilename, ppiscript = None, func_log = logging):
        # Whether this is the first time the secret store is entered.
        self.first_time_enter = first_time_enter
        # NOTE(review): ``path`` is not imported in this chunk (only
        # ``from os import stat``) -- presumably ``os.path``; verify upstream.
        self.temp_file_path = path.join(secret_dir_path, PPIfilename)
        self.func_log = func_log
        self.data_list = []
        self.ppiscript = ppiscript
        # prepare the file. If it doesn't exist, prepare it. If it exists, no need for doing anything
        # NOTE(review): __init__ appears truncated here; the preparation code
        # continues beyond this chunk.
def run(self, pansharpen=True):
    """Execute the RGB image processing pipeline.

    :param pansharpen: whether to also pansharpen using band 8 (default True)
    :returns: (String) the path to the processed GTiff
    """
    self.output("* Image processing started for bands %s" % "-".join(map(str, self.bands)), normal=True)

    # Read cloud coverage from mtl file (best effort; 0 if absent/unreadable).
    cloud_cover = 0
    try:
        with open(self.scene_path + '/' + self.scene + '_MTL.txt', 'rU') as mtl:
            lines = mtl.readlines()
            for line in lines:
                if 'CLOUD_COVER' in line:
                    cloud_cover = float(line.replace('CLOUD_COVER = ', ''))
                    break
    except IOError:
        pass

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        with rasterio.drivers():
            bands = []

            # Add band 8 for pansharpenning
            if pansharpen:
                self.bands.append(8)

            bands_path = []
            for band in self.bands:
                bands_path.append(join(self.scene_path, self._get_full_filename(band)))

            try:
                for i, band in enumerate(self.bands):
                    bands.append(self._read_band(bands_path[i]))
            except IOError as e:
                exit(e.message, 1)

            src = rasterio.open(bands_path[-1])

            # Get pixel size from source
            self.pixel = src.affine[0]

            # Only collect src data that is needed and delete the rest
            src_data = {
                'transform': src.transform,
                'crs': src.crs,
                'affine': src.affine,
                'shape': src.shape
            }
            del src

            crn = self._get_boundaries(src_data)
            dst_shape = src_data['shape']
            # Destination grid derived from the reprojected corner coordinates.
            dst_corner_ys = [crn[k]['y'][1][0] for k in crn.keys()]
            dst_corner_xs = [crn[k]['x'][1][0] for k in crn.keys()]
            y_pixel = abs(max(dst_corner_ys) - min(dst_corner_ys)) / dst_shape[0]
            x_pixel = abs(max(dst_corner_xs) - min(dst_corner_xs)) / dst_shape[1]
            dst_transform = (min(dst_corner_xs), x_pixel, 0.0, max(dst_corner_ys), 0.0, -y_pixel)
            # Delete crn since no longer needed
            del crn

            new_bands = []
            for i in range(0, 3):
                new_bands.append(numpy.empty(dst_shape, dtype=numpy.uint16))

            if pansharpen:
                bands[:3] = self._rescale(bands[:3])
                # Extra destination buffer for band 8.
                new_bands.append(numpy.empty(dst_shape, dtype=numpy.uint16))

            self.output("Projecting", normal=True, arrow=True)
            for i, band in enumerate(bands):
                self.output("band %s" % self.bands[i], normal=True, color='green', indent=1)
                reproject(band, new_bands[i], src_transform=src_data['transform'], src_crs=src_data['crs'],
                          dst_transform=dst_transform, dst_crs=self.dst_crs, resampling=RESAMPLING.nearest)

            # Bands are no longer needed
            del bands

            if pansharpen:
                new_bands = self._pansharpenning(new_bands)
                # Drop band 8 appended above so the filename reflects RGB only.
                del self.bands[3]

            self.output("Final Steps", normal=True, arrow=True)

            output_file = '%s_bands_%s' % (self.scene, "".join(map(str, self.bands)))
            if pansharpen:
                output_file += '_pan'
            output_file += '.TIF'
            output_file = join(self.dst_path, output_file)

            output = rasterio.open(output_file, 'w', driver='GTiff',
                                   width=dst_shape[1], height=dst_shape[0],
                                   count=3, dtype=numpy.uint8,
                                   nodata=0, transform=dst_transform,
                                   photometric='RGB', crs=self.dst_crs)

            for i, band in enumerate(new_bands):
                # Color Correction
                band = self._color_correction(band, self.bands[i], 0, cloud_cover)
                output.write_band(i+1, img_as_ubyte(band))
                # Free each band as soon as it is written.
                new_bands[i] = None

            self.output("Writing to file", normal=True, color='green', indent=1)
            return output_file