def test_build_params(self):
    """build_params keeps keyword arguments and drops None-valued ones."""
    cases = [
        (dict(one=1, two=2), {'one': 1, 'two': 2}),
        (dict(), {}),
        (dict(a='a', b=None, c='b'), {'a': 'a', 'c': 'b'}),
    ]
    for kwargs, expected in cases:
        self.assertEqual(build_params(**kwargs), expected)
def __init__(self, filter=False, *largs, **kwargs):
    """Collect the widget's JS options from ``kwargs``.

    :param filter: bool, enables the filter plugin; when True, the
        filter plugin's options are also collected from ``kwargs``.
    """
    self.filter = filter
    self.js_multiselect_params = build_params(kwargs, self.JS_ATTRS)
    if filter:
        self.js_filter_plugin_attrs = build_params(kwargs,
                                                   self.JS_PLUGIN_FILTER_ATTRS)
    super(jQueryUIMultiSelect, self).__init__(*largs, **kwargs)
def initialize_transaction(self, amount, email, callback_url=None,
                           reference=None, plan=None, invoice_limit=None,
                           metadata=None, subaccount=None,
                           transaction_charge=None, bearer=None,
                           channels=None):
    """Initialize a payment transaction via POST.

    :param amount: amount to charge.
    :param email: customer email address.
    :param callback_url: URL to redirect to after payment.
    :param reference: unique transaction reference.
    :param plan: subscription plan code.
    :param invoice_limit: number of invoices to raise for the plan.
    :param metadata: extra data to attach to the transaction.
    :param subaccount: subaccount code to split payment into.
    :param transaction_charge: flat fee override for the split.
    :param bearer: who bears the transaction charge.
    :param channels: payment channels to allow; defaults to
        ``['card', 'bank']`` when omitted.
    """
    # Fix: the default was a shared mutable list (``['card', 'bank']``),
    # a classic Python pitfall.  A None sentinel keeps the effective
    # default identical for all existing callers.
    if channels is None:
        channels = ['card', 'bank']
    params = build_params(callback_url=callback_url, reference=reference,
                          amount=amount, email=email, plan=plan,
                          invoice_limit=invoice_limit, metadata=metadata,
                          subaccount=subaccount,
                          transaction_charge=transaction_charge,
                          bearer=bearer, channels=channels)
    self.ctx.post(self.url, json=params)
def create_invoice(self, customer, amount, due_date, description=None,
                   line_items=None, tax=None, currency='NGN', metadata=None,
                   send_notification=True, draft=False, has_invoice=False,
                   invoice_number=None):
    """Create an invoice via POST; None-valued fields are omitted."""
    fields = dict(customer=customer,
                  amount=amount,
                  due_date=due_date,
                  description=description,
                  line_items=line_items,
                  tax=tax,
                  currency=currency,
                  metadata=metadata,
                  send_notification=send_notification,
                  draft=draft,
                  has_invoice=has_invoice,
                  invoice_number=invoice_number)
    self.ctx.post(self.url, json=build_params(**fields))
def list_invoices(self, customer=None, paid=None, status=None,
                  currency=None, include_archive=None):
    """List invoices via GET, filtering by any non-None arguments."""
    filters = dict(customer=customer,
                   paid=paid,
                   status=status,
                   currency=currency,
                   include_archive=include_archive)
    self.ctx.get(self.url, payload=build_params(**filters))
def create_customer(self, email, first_name=None, last_name=None,
                    phone=None, metadata=None):
    """Create a customer record via POST; None-valued fields are omitted."""
    fields = dict(email=email,
                  first_name=first_name,
                  last_name=last_name,
                  phone=phone,
                  metadata=metadata)
    self.ctx.post(self.url, json=build_params(**fields))
def create_refund(self, transaction, amount=None, currency=None,
                  customer_note=None, merchant_note=None):
    """Create a refund for a transaction via POST.

    :param transaction: the transaction id/reference to refund.
    :param amount: partial refund amount (full refund when None).
    :param currency: currency of the refund.
    :param customer_note: note shown to the customer.
    :param merchant_note: internal note kept for the merchant.
    """
    # Bug fix: merchant_note was hard-coded to None, so the caller's
    # value was silently dropped from the request payload.
    params = build_params(transaction=transaction, amount=amount,
                          currency=currency, customer_note=customer_note,
                          merchant_note=merchant_note)
    self.ctx.post(self.url, json=params)
def create_page(self, name, description=None, amount=None, slug=None,
                redirect_url=None, custom_fields=None):
    """Create a payment page via POST to the API's /page endpoint."""
    page_fields = dict(name=name,
                       description=description,
                       amount=amount,
                       slug=slug,
                       redirect_url=redirect_url,
                       custom_fields=custom_fields)
    url = '{host}/page'.format(host=self.api_url)
    self.ctx.post(url, json=build_params(**page_fields))
def freq():
    """Flask view: estimate dominant frequency (plus phase, SNR, optional
    spectrum, histogram and SEGY link) from a seismic image supplied by
    URL or inline base64 payload.

    All inputs come from the request query string; returns a JSON result.
    Raises InvalidUsage (HTTP 410) on bad input or failed analysis.
    """
    # Params from inputs.
    url = request.args.get('url')                     # remote image location
    b64 = request.args.get('image')                   # or inline base64 image
    method = request.args.get('method') or 'xing'     # one of auto|fft|xing
    avg = request.args.get('avg') or 'mean'           # averaging: mean|trim
    region = request.args.get('region')               # crop box 'l,t,r,b'
    ntraces = request.args.get('ntraces') or '10'
    trace_spacing = request.args.get('trace_spacing') or 'regular'
    bins = request.args.get('bins') or '11'
    t_min = request.args.get('tmin') or '0'
    t_max = request.args.get('tmax') or '1'
    dt_param = request.args.get('dt') or 'auto'       # 'orig', 'auto', or float
    # Booleans.
    spectrum = request.args.get('spectrum') or 'false'
    segy = request.args.get('segy') or 'false'
    # Any value not in this map (e.g. 'true', '1', 'yes') counts as True.
    nope = {i: False for i in ('none', 'false', 'no', '0')}
    spectrum = nope.get(spectrum.lower(), True)
    segy = nope.get(segy.lower(), True)
    # Condition or generate params.
    ntraces = int(ntraces)
    bins = int(bins)
    t_min = float(t_min)
    t_max = float(t_max)
    uuid1 = str(uuid.uuid1())          # job id echoed in every response
    if region:
        region = [int(n) for n in region.split(',')]
    else:
        region = []
    # Fetch and crop image.
    if url:
        try:
            r = requests.get(url)
            im = Image.open(BytesIO(r.content))
        except Exception:
            payload = {'job_uuid': uuid1}
            payload['parameters'] = utils.build_params(method, avg,
                                                       t_min, t_max,
                                                       dt_param, region,
                                                       trace_spacing,
                                                       url=url)
            mess = 'Unable to open image from target URI.'
            raise InvalidUsage(mess, status_code=410, payload=payload)
    elif b64:
        try:
            im = Image.open(BytesIO(base64.b64decode(b64)))
        except Exception:
            payload = {'job_uuid': uuid1}
            payload['parameters'] = utils.build_params(method, avg,
                                                       t_min, t_max,
                                                       dt_param, region,
                                                       trace_spacing,
                                                       url=url)
            mess = 'Could not decode payload image. Check base64 encoding.'
            raise InvalidUsage(mess, status_code=410, payload=payload)
    else:
        # Neither ?url= nor ?image= was supplied.
        payload = {'job_uuid': uuid1}
        payload['parameters'] = utils.build_params(method, avg,
                                                   t_min, t_max,
                                                   dt_param, region,
                                                   trace_spacing,
                                                   url=url)
        mess = 'You must provide an image.'
        raise InvalidUsage(mess, status_code=410, payload=payload)
    if region:
        try:
            im = im.crop(region)
        except Exception:
            mess = 'Improper crop parameters '
            raise InvalidUsage(mess, status_code=410)
    width, height = im.size[0], im.size[1]
    # Calculate dt and interpolate if necessary.
    if dt_param[:4].lower() == 'orig':
        # Keep the native sampling implied by the image height.
        # NOTE(review): 'target' is never assigned on this branch, yet it
        # is used below in result['result']['img_size'] — looks like a
        # latent NameError when dt=orig; confirm.
        dt = (t_max - t_min) / (height - 1)
    else:
        if dt_param[:4].lower() == 'auto':
            # Try the coarsest standard sample rates first.
            dts = [0.0005, 0.001, 0.002, 0.004, 0.008]
            for dt in sorted(dts, reverse=True):
                target = int(1 + (t_max - t_min) / dt)
                # Accept the first one that is larger than the current height.
                if target >= height:
                    break
            # dt and target are set
        else:
            dt = float(dt_param)
            target = int((t_max - t_min) / dt)
        # If dt is not orig, we need to inpterpolate.
        im = im.resize((width, target), Image.ANTIALIAS)
    # Set up the image.
    grey = geophysics.is_greyscale(im)
    i = np.asarray(im) - 128          # center 0-255 pixel values around zero
    i = i.astype(np.int8)
    if (not grey) and (i.ndim == 3):
        # Colour image: collapse RGB to a luma-weighted intensity.
        r, g, b = i[..., 0], i[..., 1], i[..., 2]
        i = np.sqrt(0.299 * r**2. + 0.587 * g**2. + 0.114 * b**2.)
    elif i.ndim == 3:
        # Greyscale stored with a channel axis: take one channel.
        i = i[..., 0]
    else:
        # Already a 2-D array; no-op.
        i = i
    # Get SEGY file link, if requested.
    if segy:
        try:
            databytes = BytesIO()
            write_segy(i, databytes, dt, t_min)
            databytes.seek(0)
        except:
            # NOTE(review): bare except — best-effort SEGY export; failure
            # is only logged and the 'segy' result key is simply omitted.
            print('Write SEGY failed')
        else:
            file_link = utils.get_url(databytes, uuid1)
    # Do analysis.
    print("Starting analysis")
    # Dispatch table mapping the ?method= parameter to an estimator.
    m = {'auto': geophysics.freq_from_autocorr,
         'fft': geophysics.freq_from_fft,
         'xing': geophysics.freq_from_crossings}
    traces = geophysics.get_trace_indices(i.shape[1], ntraces,
                                          trace_spacing)
    specs, f_list, p_list, snr_list, mis, mas = geophysics.analyse(
        i, t_min, t_max, traces, m[method])
    print("Finished analysis")
    # Compute statistics.
    print("***** f_list:", f_list)
    fsd, psd = np.nanstd(f_list), np.nanstd(p_list)
    fn, pn = len(f_list), len(p_list)
    # Frequency average: trimmed mean only when there are enough samples.
    if avg.lower() == 'trim' and fn > 4:
        f = geophysics.trim_mean(f_list, 0.2)
        if np.isnan(f):
            f = 0
    elif avg.lower() == 'mean' or (avg == 'trim' and fn <= 4):
        f = np.nanmean(f_list)
    else:
        mess = 'avg parameter must be trim or mean'
        raise InvalidUsage(mess, status_code=410)
    # Phase average, same policy as frequency.
    if avg.lower() == 'trim' and pn > 4:
        p = geophysics.trim_mean(p_list, 0.2)
    elif avg.lower() == 'mean' or (avg == 'trim' and pn <= 4):
        p = np.nanmean(p_list)
    else:
        mess = 'avg parameter must be trim or mean'
        raise InvalidUsage(mess, status_code=410)
    snrsd = np.nanstd(snr_list)
    snr = np.nanmean(snr_list)
    # Spectrum.
    print("Starting spectrum")
    try:
        # Average the per-trace spectra into one curve.
        spec = np.nanmean(np.dstack(specs), axis=-1)
        fs = i.shape[0] / (t_max - t_min)
        freq = np.fft.rfftfreq(i.shape[0], 1/fs)
        f_min = np.amin(mis)
        f_max = np.amax(mas)
    except:
        # NOTE(review): bare except — converts any spectrum failure into
        # an InvalidUsage response.
        print("Failed spectrum")
        # Probably the image is not greyscale.
        payload = {'job_uuid': uuid1}
        payload['parameters'] = utils.build_params(method, avg,
                                                   t_min, t_max,
                                                   dt_param, region,
                                                   trace_spacing,
                                                   url=url)
        mess = 'Analysis error. Probably the colorbar is not greyscale.'
        raise InvalidUsage(mess, status_code=410, payload=payload)
    # Histogram.
    if bins:
        hist = np.histogram(i, bins=bins)
    else:
        hist = None
    # Construct the result and return.
    result = {'job_uuid': uuid1}
    result['status'] = 'success'
    result['message'] = ''
    result['result'] = {}
    result['result']['freq'] = {'peak': np.round(f, 2),
                                'sd': np.round(fsd, 2),
                                'n': fn,
                                'min': np.round(f_min, 2),
                                'max': np.round(f_max, 2)}
    result['result']['phase'] = {'avg': np.round(p, 2),
                                 'sd': np.round(psd, 2),
                                 'n': pn}
    result['result']['snr'] = {'avg': np.round(snr, 2),
                               'sd': np.round(snrsd, 2)}
    result['result']['greyscale'] = grey
    result['result']['dt'] = dt
    result['result']['img_size'] = {'original_height': height,
                                    'width': width,
                                    'resampled_height': target}
    if segy:
        result['result']['segy'] = file_link
    if spectrum:
        result['result']['spectrum'] = spec.tolist()
        result['result']['frequencies'] = freq.tolist()
    if hist:
        result['result']['histogram'] = {'counts': hist[0].tolist(),
                                         'bins': hist[1].tolist()}
    result['parameters'] = utils.build_params(method, avg, t_min, t_max,
                                              dt_param, region,
                                              trace_spacing, url=url)
    return jsonify(result)
def freq():
    """Flask view: estimate dominant frequency, phase and SNR from a
    seismic image fetched from the ``url`` query parameter.

    All inputs come from the request query string. Returns a JSON result;
    fetch/analysis failures return a 'failed' JSON result, and bad crop
    or avg parameters raise InvalidUsage (HTTP 410).
    """
    # Params from inputs.
    url = request.args.get('url')                     # remote image location
    method = request.args.get('method') or 'xing'     # one of auto|fft|xing
    avg = request.args.get('avg') or 'mean'           # averaging: mean|trim
    region = request.args.get('region')               # crop box 'l,t,r,b'
    ntraces = request.args.get('ntraces') or '10'
    trace_spacing = request.args.get('trace_spacing') or 'regular'
    bins = request.args.get('bins') or '9'
    t_min = request.args.get('tmin') or '0'
    t_max = request.args.get('tmax') or '1'
    dt_param = request.args.get('dt') or 'auto'       # 'orig', 'auto', or float
    # Booleans.
    spectrum = request.args.get('spectrum') or 'false'
    segy = request.args.get('segy') or 'false'
    # Any value not in this map (e.g. 'true', '1', 'yes') counts as True.
    nope = {i: False for i in ('none', 'false', 'no', '0')}
    spectrum = nope.get(spectrum.lower(), True)
    segy = nope.get(segy.lower(), True)
    # Condition or generate params.
    ntraces = int(ntraces)
    # Fix: np.histogram requires an integer when bins is a scalar count;
    # float(bins) made the histogram call fail.
    bins = int(bins)
    t_min = float(t_min)
    t_max = float(t_max)
    uuid1 = str(uuid.uuid1())          # job id echoed in every response
    if region:
        region = [int(n) for n in region.split(',')]
    else:
        region = []
    # Fetch and crop image.
    try:
        r = requests.get(url)
        im = Image.open(BytesIO(r.content))
    except Exception:
        # Fix: reuse the job id generated above; previously a fresh,
        # non-string uuid.uuid1() object was put in the failure payload.
        result = {'job_uuid': uuid1}
        result['status'] = 'failed'
        m = 'Error. Unable to open image from target URI. '
        result['message'] = m
        # Fix: pass dt_param, consistent with the success path below.
        result['parameters'] = utils.build_params(method, avg,
                                                  t_min, t_max,
                                                  dt_param, region,
                                                  trace_spacing,
                                                  url=url)
        return jsonify(result)
    if region:
        try:
            im = im.crop(region)
        except Exception:
            m = 'Improper crop parameters '
            # Fix: region is a list; str() avoids a TypeError while still
            # reporting the offending value.
            raise InvalidUsage(m + str(region), status_code=410)
    width, height = im.size[0], im.size[1]
    # Calculate dt and interpolate if necessary.
    if dt_param[:4].lower() == 'orig':
        # Keep the native sampling implied by the image height.
        dt = (t_max - t_min) / (height - 1)
        # Fix: 'target' was undefined on this branch but is reported in
        # img_size below; no resampling, so it equals the original height.
        target = height
    else:
        if dt_param[:4].lower() == 'auto':
            # Try the coarsest standard sample rates first.
            dts = [0.0005, 0.001, 0.002, 0.004, 0.008]
            for dt in sorted(dts, reverse=True):
                target = int(1 + (t_max - t_min) / dt)
                # Accept the first one that is larger than the current height.
                if target >= height:
                    break
        else:
            dt = float(dt_param)
            target = int((t_max - t_min) / dt)
        # If dt is not orig, we need to interpolate.
        im = im.resize((width, target), Image.ANTIALIAS)
    # Set up the image.
    grey = geophysics.is_greyscale(im)
    i = np.asarray(im) - 128          # center 0-255 pixel values around zero
    i = i.astype(np.int8)
    # Fix: guard on ndim (as the sibling implementation does) so 2-D
    # greyscale arrays don't crash the RGB branch or lose data to i[..., 0].
    if (not grey) and (i.ndim == 3):
        # Colour image: collapse RGB to a luma-weighted intensity.
        r, g, b = i[..., 0], i[..., 1], i[..., 2]
        i = np.sqrt(0.299 * r**2. + 0.587 * g**2. + 0.114 * b**2.)
    elif i.ndim == 3:
        # Greyscale stored with a channel axis: take one channel.
        i = i[..., 0]
    # Get SEGY file link, if requested.
    if segy:
        try:
            databytes = BytesIO()
            write_segy(i, databytes, dt, t_min)
            databytes.seek(0)
        except Exception:
            # Best-effort export: failure is logged and the 'segy' result
            # key is simply omitted. (Was a bare except.)
            print('Write SEGY failed')
        else:
            file_link = utils.get_url(databytes, uuid1)
    # Do analysis.
    # Dispatch table mapping the ?method= parameter to an estimator.
    m = {'auto': geophysics.freq_from_autocorr,
         'fft': geophysics.freq_from_fft,
         'xing': geophysics.freq_from_crossings}
    traces = geophysics.get_trace_indices(i.shape[1], ntraces,
                                          trace_spacing)
    specs, f_list, p_list, snr_list, mis, mas = geophysics.analyse(
        i, t_min, t_max, traces, m[method.lower()])
    # Compute statistics.
    fsd, psd = np.nanstd(f_list), np.nanstd(p_list)
    fn, pn = len(f_list), len(p_list)
    # Frequency average: trimmed mean only when there are enough samples.
    if avg.lower() == 'trim' and fn > 4:
        f = geophysics.trim_mean(f_list, 0.2)
    elif avg.lower() == 'mean' or (avg == 'trim' and fn <= 4):
        f = np.nanmean(f_list)
    else:
        m = 'avg parameter must be trim or mean'
        raise InvalidUsage(m, status_code=410)
    # Phase average, same policy as frequency.
    if avg.lower() == 'trim' and pn > 4:
        p = geophysics.trim_mean(p_list, 0.2)
    elif avg.lower() == 'mean' or (avg == 'trim' and pn <= 4):
        p = np.nanmean(p_list)
    else:
        m = 'avg parameter must be trim or mean'
        raise InvalidUsage(m, status_code=410)
    snrsd = np.nanstd(snr_list)
    snr = np.nanmean(snr_list)
    # Spectrum.
    try:
        # Average the per-trace spectra into one curve.
        spec = np.mean(np.dstack(specs), axis=-1)
        fs = i.shape[0] / (t_max - t_min)
        freq = np.fft.rfftfreq(i.shape[0], 1/fs)
        f_min = np.amin(mis)
        f_max = np.amax(mas)
    except Exception:
        # Probably the image is not greyscale. (Was a bare except.)
        # Fix: reuse the job id; previously a fresh uuid.uuid1() object.
        result = {'job_uuid': uuid1}
        result['status'] = 'failed'
        m = 'Analysis error. Probably the colorbar is not greyscale.'
        result['message'] = m
        # Fix: pass dt_param, consistent with the success path below.
        result['parameters'] = utils.build_params(method.lower(),
                                                  avg.lower(),
                                                  t_min, t_max,
                                                  dt_param, region,
                                                  trace_spacing,
                                                  url=url)
        return jsonify(result)
    # Histogram.
    if bins:
        hist = np.histogram(i, bins=bins)
    else:
        hist = None
    # Construct the result and return.
    result = {'job_uuid': uuid1}
    result['status'] = 'success'
    result['message'] = ''
    result['result'] = {}
    result['result']['freq'] = {'peak': np.round(f, 2),
                                'sd': np.round(fsd, 2),
                                'n': fn,
                                'min': np.round(f_min, 2),
                                'max': np.round(f_max, 2)}
    result['result']['phase'] = {'avg': np.round(p, 2),
                                 'sd': np.round(psd, 2),
                                 'n': pn}
    result['result']['snr'] = {'avg': np.round(snr, 2),
                               'sd': np.round(snrsd, 2)}
    result['result']['greyscale'] = grey
    result['result']['dt'] = dt
    result['result']['img_size'] = {'original_height': height,
                                    'width': width,
                                    'resampled_height': target}
    if segy:
        result['result']['segy'] = file_link
    if spectrum:
        result['result']['spectrum'] = spec.tolist()
        result['result']['frequencies'] = freq.tolist()
    if hist:
        result['result']['histogram'] = {'counts': hist[0].tolist(),
                                         'bins': hist[1].tolist()}
    result['parameters'] = utils.build_params(method, avg, t_min, t_max,
                                              dt_param, region,
                                              trace_spacing, url=url)
    return jsonify(result)