def _work(self):
    """Run the handler's work() and render the result, converting known
    errors into an error response and logging a structured warning."""
    try:
        self.work()
        self._render_result(self.errno, self.errmsg, self.response_data)
    except error.BaseError as e:
        # Known application error: render it and log a warning
        self._render_result(e.errno, e.errmsg, {})
        warning = {
            "uri": self.request.uri,
            "logid": self.logid,
            "errno": e.errno,
            "errmsg": e.errmsg,
            "args": str(e.args),
            "trace": traceback.format_exc(),
            "ex_type": type(e)
        }
        log.warning(warning)
        sys.stderr.write(pprint.pformat(warning))
    except Exception as e:
        # Unexpected error: respond with a generic errno and log as fatal
        errno = error.ERRNO_UNKNOWN
        self._render_result(errno, str(e), "")
        warning = {
            "uri": self.request.uri,
            "logid": self.logid,
            "errno": errno,
            "errmsg": str(e),
            "args": str(e.args),
            "trace": traceback.format_exc(),
            "ex_type": type(e)
        }
        log.fatal("internal_error: %s", warning)
        sys.stderr.write(pprint.pformat(warning))
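# A hedged sketch of a concrete handler plugging into _work(); BaseHandler,
# its attribute contract (errno/errmsg/response_data) and the class name
# DemoHandler are assumptions for illustration, not part of the original code.
class DemoHandler(BaseHandler):
    def work(self):
        # populate the fields that _work() renders on success
        self.errno = 0
        self.errmsg = "ok"
        self.response_data = {"hello": "world"}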
def download_images(base_url, img_dir, save_dir, imgs):
    exts = ['.jpg', '.png', '.JPG', '.PNG']
    imgs_to_download, imgs_to_exclude = find_imgs_to_downloads(save_dir, imgs, exts)
    if imgs_to_exclude:
        print(f'Ignoring {len(imgs_to_exclude)} images')
    n_imgs_to_download = len(imgs_to_download)
    if not imgs_to_download:
        print('Nothing to download')
        return
    print(f'Trying to download {n_imgs_to_download} images')
    loop = asyncio.get_event_loop()
    # Download only the images that are not already present locally
    failed_imgs = loop.run_until_complete(
        download_images_async_opt(base_url, img_dir, save_dir, imgs_to_download, exts))
    n_failed_imgs = len(failed_imgs)
    if failed_imgs:
        print(f'Failed to download {n_failed_imgs} images')
        log.warning(f'Failed images: {",".join(failed_imgs)}')
    n_downloaded = n_imgs_to_download - n_failed_imgs
    if n_downloaded:
        print(f'Successfully downloaded {n_downloaded} images')
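# A hedged usage sketch; the URL, directories and image names below are
# hypothetical placeholders.
if __name__ == '__main__':
    download_images(
        base_url='https://example.com',
        img_dir='images',
        save_dir='./downloads',
        imgs=['cat.jpg', 'dog.png'],
    )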
def checkParamAsString(self, key):
    """Return the request parameter `key` as a non-empty string,
    raising a BaseError if it is missing or empty."""
    p = self.getParamAsString(key, None)
    if p is None or len(p) == 0:
        errmsg = "error_param:'{}'".format(key)
        log.warning(errmsg)
        raise error.BaseError(errno=error.ERRNO_PARAM, errmsg=errmsg)
    return p
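# Extending the DemoHandler sketch above: a hedged example of validating a
# required request parameter; 'user_id' is a hypothetical parameter name.
class DemoParamHandler(DemoHandler):
    def work(self):
        user_id = self.checkParamAsString('user_id')  # raises error.BaseError when missing or empty
        self.errno, self.errmsg = 0, "ok"
        self.response_data = {'user_id': user_id}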
def delete_cert():
    log.warning('Deleting mitmproxy certificate...')
    if error_code := subprocess.call('certutil -delstore -user Root mitmproxy', shell=True):
        log.error(
            f'Certificate could not be deleted: {hex(error_code)} - {str(FormatMessage(error_code)).strip()}'
        )
def get_grid_points(user_axis, crs_axis):
    """
    Computes the number of grid points corresponding to the axis.
    :param UserAxis user_axis: the axis as given by the user
    :param CrsAxis crs_axis: the corresponding crs axis
    :return: int gridPoints
    """
    # if no interval on the axis (slice), then 1 single grid point
    if user_axis.interval.high is None:
        return 1

    if isinstance(user_axis, RegularUserAxis):
        # number of geo-intervals over resolution
        if user_axis.type != UserAxisType.DATE:
            # number_of_grid_points = (geo_max - geo_min) / resolution
            grid_points = abs(
                (decimal.Decimal(str(user_axis.interval.high)) - decimal.Decimal(str(user_axis.interval.low)))
                / decimal.Decimal(str(user_axis.resolution)))

            # The resolution in the ingredient file can heavily influence this calculation,
            # so warn when the result is not close to an integer
            if abs(grid_points.to_integral_value() - grid_points) > HighPixelAjuster.THRESHOLD:
                log.warning("The computed number of grid points is not an integer for axis " + user_axis.name +
                            ". This usually indicates that the resolution is not correct.")

            grid_points = HighPixelAjuster.adjust_high(grid_points)

            if user_axis.resolution < 0:
                # Negative axis (e.g: latitude) min <--- max
                return int(math.floor(grid_points))
            else:
                # Positive axis (e.g: longitude) min ---> max
                return int(math.ceil(grid_points))
        else:
            time_difference = user_axis.interval.high - user_axis.interval.low
            # As time always points to the future (min --> max)
            if crs_axis.is_time_day_axis():
                # days ((seconds / 86400) / resolution)
                grid_points = abs(
                    (decimal.Decimal(str(time_difference)) / decimal.Decimal(DateTimeUtil.DAY_IN_SECONDS))
                    / decimal.Decimal(str(user_axis.resolution)))
                grid_points = HighPixelAjuster.adjust_high(grid_points)
                return int(math.ceil(grid_points))
            else:
                # seconds (seconds / resolution)
                grid_points = abs(
                    decimal.Decimal(str(time_difference)) / decimal.Decimal(str(user_axis.resolution)))
                grid_points = HighPixelAjuster.adjust_high(grid_points)
                return int(math.ceil(grid_points))
    else:
        # number of direct positions (i.e: irregular axis with coefficients [0, 1, 3, 5, 8, 15])
        return len(user_axis.directPositions)
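# Worked example (a sketch) of the regular-axis formula above: a longitude
# axis spanning [-180, 180] at resolution 0.1 has (180 - (-180)) / 0.1 = 3600
# grid points; decimal arithmetic avoids float rounding surprises.
import decimal
import math

grid_points = abs((decimal.Decimal("180") - decimal.Decimal("-180")) / decimal.Decimal("0.1"))
assert int(math.ceil(grid_points)) == 3600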
def patch_origin_client(self):
    origin = Client('Origin', 'Origin.exe', 'libeay32.dll', 'EVP_DigestVerifyFinal')
    eadesktop = Client('EA Desktop', 'EADesktop.exe', 'libcrypto-1_1-x64.dll', 'EVP_DigestVerifyFinal')

    client = origin

    try:
        client_process = Pymem(client.PROCESS_NAME)
    except ProcessNotFound:
        client = eadesktop
        try:
            client_process = Pymem(client.PROCESS_NAME)
        except ProcessNotFound:
            log.warning('Origin/EA Desktop process not found. Patching aborted')
            return

    if client_process.process_id == self.last_client_pid:
        log.debug(f'{client.NAME} client is already patched')
        return

    log.info(f'Patching {client.NAME} client')

    try:
        dll_module = next(m for m in client_process.list_modules() if m.name.lower() == client.DLL_NAME)
    except StopIteration:
        log.error(f'{client.DLL_NAME} is not loaded. Patching aborted')
        return

    # The rest should complete without issues in most cases.

    # Get the Export Address Table symbols
    # noinspection PyUnresolvedReferences
    dll_symbols = PE(dll_module.filename).DIRECTORY_ENTRY_EXPORT.symbols

    # Get the symbol of the EVP_DigestVerifyFinal function
    verify_func_symbol = next(s for s in dll_symbols if s.name.decode('ascii') == client.FUNCTION_NAME)

    # Calculate the final address in memory
    verify_func_addr = dll_module.lpBaseOfDll + verify_func_symbol.address

    # Instructions to patch. We return 1 to force successful response validation.
    patch_instructions = bytes([
        0x66, 0xB8, 0x01, 0,  # mov ax, 0x1
        0xC3                  # ret
    ])
    client_process.write_bytes(verify_func_addr, patch_instructions, len(patch_instructions))

    # Validate the written memory
    read_instructions = client_process.read_bytes(verify_func_addr, len(patch_instructions))

    if read_instructions != patch_instructions:
        log.error('Failed to patch the instruction memory')
        return

    # At this point we know that patching was successful
    self.last_client_pid = client_process.process_id
    log.info(f'Patching {client.NAME} was successful')
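# A hedged sketch of how this patcher might be driven so the client stays
# patched across restarts; the addon object and the 5-second interval are
# illustrative assumptions, not part of the original code.
import time

def run_patcher(addon, interval_seconds=5):
    while True:
        addon.patch_origin_client()  # re-patches only when the client PID changes
        time.sleep(interval_seconds)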
def intercept_entitlements(flow: HTTPFlow):
    if BaseAddon.host_and_path_match(
            flow,
            host=EpicAddon.ecom_host,
            path=r"^/ecommerceintegration/api/public/v2/identities/\w+/entitlements$"
    ) or BaseAddon.host_and_path_match(
            flow,
            host=EpicAddon.api_host,
            path=r"^/epic/ecom/v1/identities/\w+/entitlements"
    ):
        log.info('Intercepted an Entitlements request from Epic Games')

        url = urlparse(flow.request.url)
        sandbox_id = parse_qs(url.query)['sandboxId'][0]

        # Get the game in the config with namespace that matches the sandboxId
        game = get_epic_game(sandbox_id)

        try:
            # Get the entitlements from request params
            entitlementNames = parse_qs(url.query)['entitlementName']
        except KeyError:
            log.warning(
                'No entitlement names were provided, '
                'responding with entitlements defined in the config file'
            )
            # Get the game's entitlements
            entitlements = game['entitlements'] if game is not None and 'entitlements' in game else []
            # Map the list of objects to the list of strings
            entitlementNames = [entitlement['id'] for entitlement in entitlements]

        for entitlement in entitlementNames:
            log.debug(f'\t{sandbox_id}:{entitlement}')

        # Filter out blacklisted entitlements
        blacklist = [dlc['id'] for dlc in game['blacklist']] if game is not None and 'blacklist' in game else []
        entitlementNames = [e for e in entitlementNames if e not in blacklist]

        injected_entitlements: List[EpicEntitlement] = [{
            'id': entitlementName,  # Not true, but irrelevant
            'entitlementName': entitlementName,
            'namespace': sandbox_id,
            'catalogItemId': entitlementName,
            'entitlementType': "AUDIENCE",
            'grantDate': "2021-01-01T00:00:00.000Z",
            'consumable': False,
            'status': "ACTIVE",
            'useCount': 0,
            'entitlementSource': "LauncherWeb"
        } for entitlementName in entitlementNames]

        log.info(f'Injecting {len(injected_entitlements)} entitlements')

        original_entitlements: List[EpicEntitlement] = json.loads(flow.response.text)
        merged_entitlements = original_entitlements + injected_entitlements
        EpicAddon.modify_response(flow, merged_entitlements)
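# get_epic_game is referenced above but not shown; a minimal sketch of what
# it might look like, assuming a config object shaped like the one used in
# the older handler further below (platforms['epic'] entries keyed by 'namespace').
def get_epic_game(sandbox_id):
    return next(
        (game for game in config.platforms['epic'] if game['namespace'] == sandbox_id),
        None
    )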
def __save_config_file(self, version: int):
    json_dict = {
        'config_version': version
    } | {key: self.__getattribute__(key) for key in self.__keys}  # dict merge requires Python 3.9+

    with open(config_path, 'w') as writer:
        writer.write(json.dumps(json_dict, indent=2))

    log.warning('Updated the config file with new properties')
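# A sketch of the file this produces, assuming __keys contains the
# hypothetical properties 'log_level' and 'platforms':
#
# {
#   "config_version": 2,
#   "log_level": "DEBUG",
#   "platforms": { ... }
# }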
def import_jsonschema():
    """
    Import jsonschema which is used for validating the options in the ingredients file
    """
    try:
        import jsonschema
        return jsonschema
    except ImportError:
        log.warning(
            "The jsonschema package is not installed, ingredient file validation will be skipped. "
            "To enable validation please install jsonschema (sudo pip3 install jsonschema)")
        return None
def import_glob():
    """
    Import the glob module appropriate for the running Python version:
    glob2 on Python 2, the standard glob module on Python 3
    """
    try:
        if sys.version_info[0] < 3:
            import glob2 as glob
        else:
            import glob
        return glob
    except ImportError:
        log.warning(
            "The glob2 package is not installed, file listing on Python 2 will not work. "
            "To enable it please install glob2 (sudo pip install glob2)")
        return None
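# A hedged sketch of how these optional-import helpers are meant to be used;
# 'options' and 'ingredients_schema' are illustrative names.
jsonschema = import_jsonschema()
if jsonschema is not None:
    jsonschema.validate(instance=options, schema=ingredients_schema)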
def install_cert():
    log.warning('Installing mitmproxy certificate...')

    # Init dummy config to generate the certificate
    ProxyConfig(Options())

    crtPath = Path.home().joinpath('.mitmproxy', 'mitmproxy-ca-cert.cer')
    log.debug(f'certificate path: "{crtPath}"')

    if error_code := subprocess.call(
            f'certutil -addstore -user Root "{crtPath}"', shell=True):
        log.error(
            f'Certificate could not be installed: {hex(error_code)} - {str(FormatMessage(error_code)).strip()}'
        )
        # noinspection PyProtectedMember,PyUnresolvedReferences
        os._exit(1)
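# A hedged sketch of the intended lifecycle: install the certificate on
# startup and remove it again on exit; wiring delete_cert through atexit
# is an illustrative choice, not necessarily how the original app does it.
import atexit

install_cert()
atexit.register(delete_cert)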
def intercept_entitlements(flow: HTTPFlow):
    if BaseAddon.host_and_path_match(
            flow,
            host=EpicAddon.ecom_host,
            path=r"^/ecommerceintegration/api/public/v2/identities/\w+/entitlements$"
    ):
        log.info('Intercepted an Entitlements request from Epic Games')

        url = urlparse(flow.request.url)
        sandbox_id = parse_qs(url.query)['sandboxId'][0]

        try:
            # Get the entitlements from request params
            entitlementNames = parse_qs(url.query)['entitlementName']
        except KeyError:
            log.warning(
                'No entitlement names were provided, '
                'responding with entitlements defined in the config file'
            )
            # Get the game in the config with namespace that matches the sandboxId
            game = next((game for game in config.platforms['epic'] if game['namespace'] == sandbox_id), None)
            # Get the game's entitlements
            entitlements = game['entitlements'] if game is not None else []
            # Map the list of objects to the list of strings
            entitlementNames = [entitlement['id'] for entitlement in entitlements]

        for entitlement in entitlementNames:
            log.debug(f'\t{sandbox_id}:{entitlement}')

        result = [{
            'id': entitlementName,  # Not true, but irrelevant
            'entitlementName': entitlementName,
            'namespace': sandbox_id,
            'catalogItemId': entitlementName,
            'entitlementType': "AUDIENCE",
            'grantDate': "2021-01-01T00:00:00.000Z",
            'consumable': False,
            'status': "ACTIVE",
            'useCount': 0,
            'entitlementSource': "eos"
        } for entitlementName in entitlementNames]

        EpicAddon.modify_response(flow, result)
def validate(self):
    """
    Implementation of the base recipe validate method
    """
    super(Recipe, self).validate()

    if 'coverage' not in self.options:
        raise RecipeValidationException("No coverage parameter supplied in the recipe parameters.")
    else:
        # NOTE: only general coverage supports this grid coverage type
        if 'grid_coverage' not in self.options['coverage']:
            self.options['coverage']['grid_coverage'] = False
        else:
            self.options['coverage']['grid_coverage'] = bool(self.options['coverage']['grid_coverage'])

    if 'crs' not in self.options['coverage']:
        raise RecipeValidationException("No crs parameter in the coverage parameter of the recipe parameters.")
    if 'slicer' not in self.options['coverage']:
        raise RecipeValidationException("No slicer parameter in the coverage parameter of the recipe parameters.")
    if 'type' not in self.options['coverage']['slicer']:
        raise RecipeValidationException("No type parameter in the slicer parameter of the recipe parameters.")
    if 'bands' not in self.options['coverage']['slicer'] \
            and (self.options['coverage']['slicer']['type'] == GRIBToCoverageConverter.RECIPE_TYPE
                 or self.options['coverage']['slicer']['type'] == NetcdfToCoverageConverter.RECIPE_TYPE):
        raise RecipeValidationException(
            "The netcdf/grib slicer requires the existence of a bands parameter inside the slicer parameter.")
    if 'axes' not in self.options['coverage']['slicer']:
        raise RecipeValidationException("No axes parameter in the slicer parameter of the recipe parameters.")

    for name, axis in self.options['coverage']['slicer']['axes'].items():
        if "min" not in axis:
            raise RecipeValidationException("No min value given for axis " + name)
        if "type" in axis and axis["type"] == "ansidate":
            # backwards compatibility: support axis type 'ansidate' after moving to 'date'
            axis["type"] = UserAxisType.DATE
        if "type" in axis and not UserAxisType.valid_type(axis["type"]):
            raise RecipeValidationException(
                "Invalid axis type \"" + axis["type"] + "\" for axis " + name + ". Only \"" +
                UserAxisType.DATE + "\" and \"" + UserAxisType.NUMBER + "\" are supported.")
        if "resolution" not in axis and "irregular" in axis and not axis["irregular"]:
            raise RecipeValidationException("No resolution value given for regular axis " + name)
        if "directPositions" not in axis and "irregular" in axis and axis["irregular"]:
            log.warning("No direct positions found for irregular axis, assuming slice.")
            # NOTE: if directPositions was not specified, the file does not contain the irregular axis,
            # so the irregular axis must be fetched from the file name and treated as a slice with coefficient [0].
            # However, [0] could be miscalculated with arrow, so set it to [None] and return [0] later
            axis["directPositions"] = AbstractToCoverageConverter.DIRECT_POSITIONS_SLICING

    if "metadata" in self.options['coverage'] and "type" not in self.options['coverage']['metadata']:
        raise RecipeValidationException("No type given for the metadata parameter.")

    if "metadata" in self.options['coverage'] and "type" in self.options['coverage']['metadata']:
        if not ExtraMetadataSerializerFactory.is_encoding_type_valid(self.options['coverage']['metadata']['type']):
            raise RecipeValidationException(
                "No valid type given for the metadata parameter, accepted values are xml and json")

    if "metadata" in self.options['coverage']:
        supported_recipe = (self.options['coverage']['slicer']['type'] == "netcdf"
                            or self.options['coverage']['slicer']['type'] == "gdal")
        if not supported_recipe:
            # global metadata auto is supported for the netCDF/GDAL recipes only
            if "global" in self.options['coverage']['metadata']:
                # NOTE: if global is not specified in the netCDF ingredient file, it is considered auto,
                # which means all global attributes of the netCDF file are extracted to create global metadata
                if self.options['coverage']['metadata']['global'] == "auto":
                    raise RecipeValidationException(
                        "Global auto metadata only supported in general recipe with slicer's type: netcdf/gdal.")

            # bands metadata auto is supported for the netCDF recipe only
            if "bands" in self.options['coverage']['metadata']:
                bands_metadata = self.options['coverage']['metadata']['bands']
                if bands_metadata == "auto":
                    raise RecipeValidationException(
                        "Bands auto metadata only supported in general recipe with slicer's type: netcdf.")
                elif type(bands_metadata) is dict:
                    # Check if any band is specified with "auto"
                    for key, value in bands_metadata.items():
                        if value == "auto":
                            raise RecipeValidationException(
                                "Band auto metadata only supported in general recipe with slicer's type: netcdf, "
                                "violated for band '" + key + "'.")