def __init__(self, pc_change_52_week, high_52_week, low_52_week, moving_avg_50_days, moving_avg_200_days):
    """Hold 52-week range and moving-average statistics.

    ``pc_change_52_week`` is stored as supplied; every other value is
    converted via ``parse_float`` before being stored.
    """
    self.pc_change_52_week = pc_change_52_week
    # Convert and store the numeric fields in declaration order.
    for attr, raw in (
        ('high_52_week', high_52_week),
        ('low_52_week', low_52_week),
        ('moving_avg_50_days', moving_avg_50_days),
        ('moving_avg_200_days', moving_avg_200_days),
    ):
        setattr(self, attr, parse_float(raw))
def __init__(self, total_cash, total_cash_per_share, total_debt, total_debt_per_equity, curr_ratio, book_val_per_share):
    """Hold balance-sheet statistics.

    ``total_cash`` and ``total_debt`` are stored as supplied; the
    per-share/ratio values are converted via ``parse_float``.
    """
    self.total_cash = total_cash
    self.total_cash_per_share = parse_float(total_cash_per_share)
    self.total_debt = total_debt
    # Remaining fields are all numeric ratios.
    for attr, raw in (
        ('total_debt_per_equity', total_debt_per_equity),
        ('curr_ratio', curr_ratio),
        ('book_val_per_share', book_val_per_share),
    ):
        setattr(self, attr, parse_float(raw))
def listen(self, tail=''):
    """Poll the transport and log its peek buffer as characters.

    ``tail`` may supply up to two tokens: the number of polls
    (default 30) and the sleep between polls in seconds (default 1).
    """
    # Append placeholder tokens so both positions always exist even
    # when ``tail`` is empty or has a single token.
    count_tok, delay_tok = (tail + ' x x').split()[:2]
    polls = util.parse_int(count_tok, 30)
    delay = util.parse_float(delay_tok, 1)
    for _ in range(polls):
        log.i([chr(b) for b in self.transport.peek()])
        time.sleep(delay)
def get_optimization_params(parameters):
    """Extract optimization-related entries from *parameters*.

    Keys containing ``pitch_0``, ``print_distance_0`` or
    ``step_count_0`` are normalised with ``format_key`` and their
    values converted with ``parse_float``. Non-string keys are
    skipped.

    :param parameters: mapping of raw parameter names to raw values
    :return: dict of normalised key -> float value
    """
    ret = {}
    for key, value in parameters.items():
        # isinstance (not ``type(k) != str``) is the idiomatic check
        # and also accepts str subclasses.
        if not isinstance(key, str):
            continue
        if any(field in key for field in ("pitch_0", "print_distance_0", "step_count_0")):
            ret[format_key(key)] = parse_float(value)
    return ret
def __init__(self, avg_vol_3_day, avg_vol_10_day, shares_outstanding, shares_float, shares_short, short_ratio, short_pc_of_float, shares_short_prior_month):
    """Hold share-volume and short-interest statistics.

    Only ``short_ratio`` is converted via ``parse_float``; every other
    value is stored exactly as supplied.
    """
    # (attribute name, raw value, convert-to-float?) in declaration order.
    for attr, raw, numeric in (
        ('avg_vol_3_day', avg_vol_3_day, False),
        ('avg_vol_10_day', avg_vol_10_day, False),
        ('shares_outstanding', shares_outstanding, False),
        ('shares_float', shares_float, False),
        ('shares_short', shares_short, False),
        ('short_ratio', short_ratio, True),
        ('short_pc_of_float', short_pc_of_float, False),
        ('shares_short_prior_month', shares_short_prior_month, False),
    ):
        setattr(self, attr, parse_float(raw) if numeric else raw)
def _gps_position_to_decimal(dms_str, direction):
    """Convert a "degrees:minutes:seconds" point to a decimal float.

    The direction value can be N or S for a latitude, or E or W for a
    longitude. E.g. "87:43:41","W" is returned as -87.728056.

    Returns None if the supplied string cannot be parsed.
    """
    try:
        direction = direction.strip().upper()
        parts = dms_str.split(':')
        if len(parts) != 3:
            raise ValueError('Expected format deg:mm:ss')
        degrees = parse_float(parts[0])
        minutes = parse_float(parts[1])
        seconds = parse_float(parts[2])
        total_seconds = (minutes * 60.0) + seconds
        dec_fraction = total_seconds / 3600.0
        multiplier = -1.0 if direction in ('W', 'S') else 1.0
        return (degrees + dec_fraction) * multiplier
    except (ValueError, TypeError, AttributeError):
        # ValueError: bad format or unparsable number;
        # TypeError: arithmetic on None (if parse_float returns None);
        # AttributeError: non-string ``direction``/``dms_str``.
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt — narrowed to the failures we mean to map
        # to None.
        return None
def parse_parameters(parameters):
    """Collect known print parameters from *parameters*.

    String keys are normalised with ``format_key`` and kept only if
    they appear in ``PrintParams.parameters()``. Values are converted
    via ``parse_float`` and grouped per key; any group with a single
    value is padded with a trailing ``None``.

    :param parameters: mapping of raw parameter names to raw values
    :return: defaultdict of normalised key -> list of floats (+ None pad)
    """
    ret = defaultdict(list)
    for key, value in parameters.items():
        # isinstance is the idiomatic type check (was ``type(k) is not str``).
        if not isinstance(key, str):
            continue
        new = format_key(key)
        if new not in PrintParams.parameters():
            continue
        ret[new].append(parse_float(value))
    # Iterate values only — the key was unused in the original loop.
    for group in ret.values():
        if len(group) == 1:
            group.append(None)
    return ret
def __init__(self, market_cap, ent_val, trailing_pe, forward_pe, peg_ratio, price_per_sales_ttm, price_per_book_mrq, ent_val_per_revenue, ent_val_per_ebitda):
    """Hold valuation statistics.

    ``market_cap`` and ``ent_val`` are stored as supplied; every other
    value is converted via ``parse_float``.
    """
    self.market_cap = market_cap
    self.ent_val = ent_val
    # All remaining fields are numeric ratios/multiples.
    for attr, raw in (
        ('trailing_pe', trailing_pe),
        ('forward_pe', forward_pe),
        ('peg_ratio', peg_ratio),
        ('price_per_sales_ttm', price_per_sales_ttm),
        ('price_per_book_mrq', price_per_book_mrq),
        ('ent_val_per_revenue', ent_val_per_revenue),
        ('ent_val_per_ebitda', ent_val_per_ebitda),
    ):
        setattr(self, attr, parse_float(raw))
def __init__(self, prev_close, open, bid, ask, days_range, week_range_52, volume, avg_volume, market_cap, beta, pe_ratio_ttm, eps_ttm, earning_date, frwd_div_and_yield, ex_div_date, one_yr_target):
    """Hold quote-summary fields.

    Numeric fields are converted via ``parse_float``; range/date/text
    fields are stored as supplied. Note ``prev_close`` is exposed as
    ``previous_close``. (The ``open`` parameter shadows the builtin,
    but is part of the public signature and kept as-is.)
    """
    # (attribute name, raw value, convert-to-float?) in declaration order.
    for attr, raw, numeric in (
        ('previous_close', prev_close, True),
        ('open', open, True),
        ('bid', bid, False),
        ('ask', ask, False),
        ('days_range', days_range, False),
        ('week_range_52', week_range_52, False),
        ('volume', volume, True),
        ('avg_volume', avg_volume, True),
        ('market_cap', market_cap, False),
        ('beta', beta, True),
        ('pe_ratio_ttm', pe_ratio_ttm, True),
        ('eps_ttm', eps_ttm, True),
        ('earning_date', earning_date, False),
        ('frwd_div_and_yield', frwd_div_and_yield, False),
        ('ex_div_date', ex_div_date, False),
        ('one_yr_target', one_yr_target, True),
    ):
        setattr(self, attr, parse_float(raw) if numeric else raw)
def diagnostics_submit():
    """Accept a POSTed diagnostic report and persist it.

    Aborts with 404 if any required form field is missing; otherwise
    builds and stores a ``DiagnosticReport`` and returns an empty body.
    """
    required = {
        'device', 'operating-system', 'udid', 'version', 'git-revision',
        'build-date-epoch', 'time-since-launch', 'preprocessor-avg-runtime',
    }
    if not required.issubset(set(request.form.keys())):
        abort(404)
    form = request.form
    report = DiagnosticReport()
    report.submit_date = datetime.datetime.utcnow()
    # String fields: strip trailing whitespace before storing.
    report.device = form['device'].rstrip()
    report.system = form['operating-system'].rstrip()
    report.udid = form['udid'].rstrip()
    report.version = form['version'].rstrip()
    report.gitrev = form['git-revision'].rstrip()
    # Numeric fields go through the project parse helpers.
    report.build_date = parse_date_epoch(form['build-date-epoch'])
    report.time_since_launch = parse_float(form['time-since-launch'])
    report.preprocessor_avg_runtime = parse_int(form['preprocessor-avg-runtime'])
    report.put()
    return ''
def image():
    """Flask view: serve a processed image.

    Reads all processing options from the query string, validates and
    type-converts them, enforces public-access limits for anonymous
    users, checks folder view permissions, honours conditional (ETag)
    requests with a 304, and finally returns the processed image data.
    Errors are mapped to the appropriate HTTP status codes.

    NOTE(review): the ``unicode(...)`` calls below indicate this module
    targets Python 2.
    """
    logger.debug(request.method + ' ' + request.url)
    try:
        logged_in = session_logged_in()
        allow_uncache = app.config['BENCHMARKING'] or app.config['DEBUG']
        args = request.args
        # Get URL parameters for the image
        src = args.get('src', '')
        page = args.get('page', None)
        iformat = args.get('format', None)
        template = args.get('tmp', None)
        width = args.get('width', None)
        height = args.get('height', None)
        halign = args.get('halign', None)
        valign = args.get('valign', None)
        autosizefit = args.get('autosizefit', None)
        rotation = args.get('angle', None)
        flip = args.get('flip', None)
        top = args.get('top', None)
        left = args.get('left', None)
        bottom = args.get('bottom', None)
        right = args.get('right', None)
        autocropfit = args.get('autocropfit', None)
        fill = args.get('fill', None)
        quality = args.get('quality', None)
        sharpen = args.get('sharpen', None)
        ov_src = args.get('overlay', None)
        ov_size = args.get('ovsize', None)
        ov_opacity = args.get('ovopacity', None)
        ov_pos = args.get('ovpos', None)
        icc_profile = args.get('icc', None)
        icc_intent = args.get('intent', None)
        icc_bpc = args.get('bpc', None)
        colorspace = args.get('colorspace', None)
        strip = args.get('strip', None)
        dpi = args.get('dpi', None)
        tile = args.get('tile', None)
        # Get URL parameters for handling options
        attach = args.get('attach', None)
        xref = args.get('xref', None)
        stats = args.get('stats', None)
        # Get protected admin/internal parameters.
        # 'cache' is only honoured for logged-in users or when
        # benchmarking/debug config allows it; 'recache' only in the latter.
        cache = args.get('cache', '1') if logged_in or allow_uncache else '1'
        recache = args.get('recache', None) if allow_uncache else None
        # eRez compatibility mode
        src = erez_params_compat(src)
        # Tweak strings as necessary and convert non-string parameters
        # to the correct data types
        try:
            # Image options
            if page is not None:
                page = parse_int(page)
            if iformat is not None:
                iformat = iformat.lower()
            if template is not None:
                template = template.lower()
            if width is not None:
                width = parse_int(width)
            if height is not None:
                height = parse_int(height)
            if halign is not None:
                halign = halign.lower()
            if valign is not None:
                valign = valign.lower()
            if autosizefit is not None:
                autosizefit = parse_boolean(autosizefit)
            if rotation is not None:
                rotation = parse_float(rotation)
            if flip is not None:
                flip = flip.lower()
            if top is not None:
                top = parse_float(top)
            if left is not None:
                left = parse_float(left)
            if bottom is not None:
                bottom = parse_float(bottom)
            if right is not None:
                right = parse_float(right)
            if autocropfit is not None:
                autocropfit = parse_boolean(autocropfit)
            if fill is not None:
                fill = parse_colour(fill)
            if quality is not None:
                quality = parse_int(quality)
            if sharpen is not None:
                sharpen = parse_int(sharpen)
            if ov_size is not None:
                ov_size = parse_float(ov_size)
            if ov_pos is not None:
                ov_pos = ov_pos.lower()
            if ov_opacity is not None:
                ov_opacity = parse_float(ov_opacity)
            if icc_profile is not None:
                icc_profile = icc_profile.lower()
            if icc_intent is not None:
                icc_intent = icc_intent.lower()
            if icc_bpc is not None:
                icc_bpc = parse_boolean(icc_bpc)
            if colorspace is not None:
                colorspace = colorspace.lower()
            if strip is not None:
                strip = parse_boolean(strip)
            if dpi is not None:
                dpi = parse_int(dpi)
            if tile is not None:
                tile = parse_tile_spec(tile)
            # Handling options
            if attach is not None:
                attach = parse_boolean(attach)
            if xref is not None:
                validate_string(xref, 0, 1024)
            if stats is not None:
                stats = parse_boolean(stats)
            # Admin/internal options
            if cache is not None:
                cache = parse_boolean(cache)
            if recache is not None:
                recache = parse_boolean(recache)
        except (ValueError, TypeError) as e:
            # Any conversion failure is the client's fault
            raise httpexc.BadRequest(unicode(e))
        # Package and validate the parameters
        try:
            # #2694 Enforce public image limits - perform easy parameter checks
            if not logged_in:
                width, height, autosizefit = _public_image_limits_pre_image_checks(
                    width, height, autosizefit, tile, template
                )
            # Store and normalise all the parameters
            image_attrs = ImageAttrs(src, -1, page, iformat, template, width,
                                     height, halign, valign, rotation, flip,
                                     top, left, bottom, right, autocropfit,
                                     autosizefit, fill, quality, sharpen,
                                     ov_src, ov_size, ov_pos, ov_opacity,
                                     icc_profile, icc_intent, icc_bpc,
                                     colorspace, strip, dpi, tile)
            image_engine.finalise_image_attrs(image_attrs)
        except ValueError as e:
            raise httpexc.BadRequest(unicode(e))
        # Get/create the database ID (from cache, validating path on create)
        image_id = data_engine.get_or_create_image_id(
            image_attrs.filename(),
            return_deleted=False,
            on_create=on_image_db_create_anon_history
        )
        if (image_id == 0):
            raise DoesNotExistError()  # Deleted
        elif (image_id < 0):
            raise DBError('Failed to add image to database')
        image_attrs.set_database_id(image_id)
        # Require view permission or file admin
        permissions_engine.ensure_folder_permitted(
            image_attrs.folder_path(),
            FolderPermission.ACCESS_VIEW,
            get_session_user()
        )
        # Ditto for overlays
        if ov_src:
            permissions_engine.ensure_folder_permitted(
                filepath_parent(ov_src),
                FolderPermission.ACCESS_VIEW,
                get_session_user()
            )
        # v1.17 If this is a conditional request with an ETag, see if we can just return a 304
        if 'If-None-Match' in request.headers and not recache:
            etag_valid, modified_time = _etag_is_valid(
                image_attrs,
                request.headers['If-None-Match'],
                False
            )
            if etag_valid:
                # Success HTTP 304
                return make_304_response(image_attrs, False, modified_time)
        # Get the requested image data
        image_wrapper = image_engine.get_image(
            image_attrs,
            'refresh' if recache else cache
        )
        if (image_wrapper is None):
            raise DoesNotExistError()
        # #2694 Enforce public image limits - check the dimensions
        # of images that passed the initial parameter checks
        if not logged_in:
            try:
                _public_image_limits_post_image_checks(
                    image_attrs.width(), image_attrs.height(),
                    image_attrs.template(), image_wrapper.data(),
                    image_wrapper.attrs().format()
                )
            except ValueError as e:
                raise httpexc.BadRequest(unicode(e))  # As for the pre-check
        # Success HTTP 200
        return make_image_response(image_wrapper, False, stats, attach, xref)
    except httpexc.HTTPException:
        # Pass through HTTP 4xx and 5xx
        raise
    except ServerTooBusyError:
        logger.warn(u'503 Too busy for ' + request.url)
        raise httpexc.ServiceUnavailable()
    except ImageError as e:
        logger.warn(u'415 Invalid image file \'' + src + '\' : ' + unicode(e))
        raise httpexc.UnsupportedMediaType(unicode(e))
    except SecurityError as e:
        # In debug mode surface the original error instead of masking it
        if app.config['DEBUG']:
            raise
        log_security_error(e, request)
        raise httpexc.Forbidden()
    except DoesNotExistError as e:
        # First time around the ID will be set. Next time around it
        # won't but we should check whether the disk file now exists.
        if image_attrs.database_id() > 0 or path_exists(image_attrs.filename(), require_file=True):
            image_engine.reset_image(image_attrs)
        logger.warn(u'404 Not found: ' + unicode(e))
        raise httpexc.NotFound(unicode(e))
    except Exception as e:
        if app.config['DEBUG']:
            raise
        logger.error(u'500 Error for ' + request.url + '\n' + unicode(e))
        raise httpexc.InternalServerError(unicode(e))
def parse(self, response):
    """Scrapy callback: extract grocery items from a product-listing page.

    Yields one dict per product tile (regular or sponsored) with the
    name, parsed size (ounces/pounds/count), price, price-per-unit and
    the page's section/subsection, then marks this URL finished and
    schedules the next aisle URL via ``create_parse_request``.
    """
    url = response.url
    self.logger.info(f"Inside parse for {url}")
    GROCERY_SELECTOR = '[data-automation-id="productTile"]'
    SPONSORED_SELECTOR = '[data-automation-id="sponsoredProductTile"]'
    GROCERIES_SELECTOR = GROCERY_SELECTOR + ',' + SPONSORED_SELECTOR
    # metadata row: indices 1 and 2 hold section/subsection
    # (presumably a DB row — confirm against get_url_metadata).
    metadata = get_url_metadata(self.cursor, url)
    section = metadata[1]
    subsection = metadata[2]
    for grocery in response.css(GROCERIES_SELECTOR):
        NAME_SELECTOR = '[data-automation-id="name"] ::attr(name)'
        name = grocery.css(NAME_SELECTOR).extract_first()
        # parse the ounces off of the name
        decimal_regex = "([\d]+[.]?[\d]*|[.\d]+)"
        ounces = re.findall(decimal_regex + "\s*o(?:z|unces?)", name, re.IGNORECASE)
        pounds = re.findall(decimal_regex + "\s*(?:pound|lb)s?", name, re.IGNORECASE)
        count = re.findall("([\d]+)\s*(?:c(?:t|ount)|p(?:k|ack))", name, re.IGNORECASE)
        # NOTE(review): stashing per-item match lists on ``self`` (and the
        # mismatched 'ounce' vs local 'ounces') looks unintended — these
        # attributes are overwritten on every loop iteration. Confirm
        # nothing else reads them.
        self.ounce = ounces
        self.pounds = pounds
        self.count = count
        # Check if the arrays returned from re.findall are empty;
        # fall back to 0 when no size was found in the name.
        if ounces:
            ounces = parse_float(ounces[0])
        else:
            ounces = 0
        if pounds:
            pounds = parse_float(pounds[0])
        else:
            pounds = 0
        if count:
            count = parse_float(count[0])
        else:
            count = 0
        # Normalise: pounds dominate (16 oz/lb); otherwise multiply
        # a per-item ounce figure by the pack count.
        if pounds != 0:
            ounces = 16*pounds
        elif count != 0:
            ounces *= count
        # inspect_response(response,self)
        SALEPRICE_SELECTOR = '[data-automation-id="salePrice"] ::text'
        PRICE_SELECTOR = '[data-automation-id="price"] ::text'  # NOTE(review): unused
        PRICE_PER_UNIT_SELECTOR = '[data-automation-id="price-per-unit"] ::text'
        # NOTE(review): name is re-extracted here; the following
        # ounces/pounds/count self-assignments are no-ops.
        name = grocery.css(NAME_SELECTOR).extract_first()
        name = clean_string(name, "\"")
        ounces = ounces
        pounds = pounds
        count = count
        price = str(handle_none(grocery.css(SALEPRICE_SELECTOR).extract_first())).replace('$', '')
        ppu = convert_ppu(grocery.css(PRICE_PER_UNIT_SELECTOR).extract_first())
        yield {
            'name': name,
            'ounces': ounces,
            'pounds': pounds,
            'count': count,
            'price': price,
            'price-per-unit': ppu,
            'section': section,
            'subsection': subsection,
            'url': url,
        }
    # Mark this URL done and move on to the next stored aisle URL.
    finish_url(self.conn, self.store_id, url)
    next_url = get_next_url(self.cursor, 1, store_id=self.store_id, filter="aisle=")
    print(f"next_url - {next_url}")
    if next_url is None:
        print("No more urls - finishing")
    else:
        request = create_parse_request(next_url,
                                       self.parse,
                                       EC.element_to_be_clickable(
                                           (By.CSS_SELECTOR, '[aria-current="page"]')),
                                       meta_url=next_url)
        yield request
def wait(self, tail=''):
    """Sleep for the number of seconds parsed from *tail* (default 10)."""
    seconds = util.parse_float(tail, 10.0)
    log.i('Waiting for {} seconds'.format(seconds))
    time.sleep(seconds)
def __init__(self, pc_held_by_insiders, pc_held_by_institutions):
    """Hold insider/institution ownership percentages.

    Both inputs are converted via ``parse_float`` and stored under the
    shorter ``pc_insiders`` / ``pc_institutions`` names.
    """
    self.pc_insiders, self.pc_institutions = (
        parse_float(pc_held_by_insiders),
        parse_float(pc_held_by_institutions),
    )
def parse(self, response):
    """Scrapy callback: extract grocery items and follow pagination.

    Stores the next pagination URL (if a "next" button exists), yields
    one dict per product tile with name, parsed size, price and
    price-per-unit, then marks this URL finished and requests the next
    stored URL via SplashRequest.
    """
    GROCERY_SELECTOR = '[data-automation-id="productTile"]'
    SPONSORED_SELECTOR = '[data-automation-id="sponsoredProductTile"]'
    GROCERIES_SELECTOR = GROCERY_SELECTOR + ',' + SPONSORED_SELECTOR
    NEXT_BUTTON = '[data-automation-id="nextButton"]'
    # Handle pagination
    url = response.url
    print(f"working on url - {url}")
    # metadata row: indices 1 and 2 hold section/subsection
    # (presumably a DB row — confirm against get_url_metadata).
    metadata = get_url_metadata(self.cursor, url)
    section = metadata[1]
    subsection = metadata[2]
    next_page = response.css(NEXT_BUTTON).get()
    if next_page is not None:
        # inspect_response(response,self)
        page_string = "&page="
        page_str_len = len(page_string)  # NOTE(review): unused
        next_page_url = get_next_pagination(page_string, url)
        # Queue the next page of this listing for a later crawl.
        store_url(self.conn, next_page_url, self.store_id, lookup_category("", section, subsection), section, subsection)
    for grocery in response.css(GROCERIES_SELECTOR):
        NAME_SELECTOR = '[data-automation-id="name"] ::attr(name)'
        # NOTE(review): using instance attributes (self.name/ounces/...)
        # as per-item scratch variables inside a generator is fragile —
        # concurrent/re-entrant parses would clobber each other. Locals
        # would be safer; confirm nothing else reads these attributes.
        self.name = grocery.css(NAME_SELECTOR).extract_first()
        # parse the ounces off of the name
        decimal_regex = "([\d]+[.]?[\d]*|[.\d]+)"
        self.ounces = re.findall(decimal_regex + "\s*o(?:z|unces?)", self.name, re.IGNORECASE)
        self.pounds = re.findall(decimal_regex + "\s*(?:pound|lb)s?", self.name, re.IGNORECASE)
        self.count = re.findall("([\d]+)\s*(?:c(?:t|ount)|p(?:k|ack))", self.name, re.IGNORECASE)
        # Check if the arrays returned from re.findall are empty;
        # fall back to 0 when no size was found in the name.
        if self.ounces:
            self.ounces = parse_float(self.ounces[0])
        else:
            self.ounces = 0
        if self.pounds:
            self.pounds = parse_float(self.pounds[0])
        else:
            self.pounds = 0
        if self.count:
            self.count = parse_float(self.count[0])
        else:
            self.count = 0
        # Normalise: pounds dominate (16 oz/lb); otherwise multiply
        # a per-item ounce figure by the pack count.
        if self.pounds != 0:
            self.ounces = 16*self.pounds
        elif self.count != 0:
            self.ounces *= self.count
        # inspect_response(response,self)
        SALEPRICE_SELECTOR = '[data-automation-id="salePrice"] ::text'
        PRICE_SELECTOR = '[data-automation-id="price"] ::text'  # NOTE(review): unused
        PRICE_PER_UNIT_SELECTOR = '[data-automation-id="price-per-unit"] ::text'
        name = grocery.css(NAME_SELECTOR).extract_first()
        name = clean_string(name, "\"")
        ounces = self.ounces
        pounds = self.pounds
        count = self.count
        price = str(handle_none(grocery.css(SALEPRICE_SELECTOR).extract_first())).replace('$', '')
        ppu = convert_ppu(grocery.css(PRICE_PER_UNIT_SELECTOR).extract_first())
        url = response.url
        yield {
            'name': name,
            'ounces': ounces,
            'pounds': pounds,
            'count': count,
            'price': price,
            'price-per-unit': ppu,
            'section': section,
            'subsection': subsection,
            'url': url,
        }
    # Mark this URL done and move on to the next stored URL.
    finish_url(self.conn, self.store_id, url)
    next_url = get_next_url(self.cursor, 1)
    print(f"next_url - {next_url}")
    if next_url is None:
        print("No more urls - finishing")
    else:
        yield SplashRequest(next_url, self.parse,
                            endpoint='render.html',
                            args={
                                'wait': 10,
                                'section': section,
                                'subsection': subsection
                            })