def test_store():
    """Writes are readable back, overwrites win, and keys stay independent."""
    store.set('color', 'Blue')
    assert store.get('color') == 'Blue'

    # overwriting an existing key returns the newest value
    store.set('color', 'Red')
    assert store.get('color') == 'Red'

    # a second key must not disturb the first one
    store.set('size', '42')
    assert store.get('size') == '42'
    assert store.get('color') == 'Red'
def test_store(tmpdir):
    """Same round-trip checks as the basic test, but against an isolated
    store directory supplied by pytest's ``tmpdir`` fixture.

    Fix: removed the leftover debug ``print(tmpdir)``.
    """
    # STORE_DIR expects a plain string path, not a py.path LocalPath
    os.environ['STORE_DIR'] = str(tmpdir)

    store.set('color', 'Blue')
    assert store.get('color') == 'Blue'

    # overwriting an existing key returns the newest value
    store.set('color', 'Red')
    assert store.get('color') == 'Red'

    # a second key must not disturb the first one
    store.set('size', '42')
    assert store.get('size') == '42'
    assert store.get('color') == 'Red'
def find():
    """Look up the item named in the posted form and return it as JSON,
    enriched with the browser URL of the camera that holds it.

    Fix: removed two leftover debug ``print`` calls.
    """
    item = request.form['item']
    response = store.get(item)
    # map the item's camera id to its IP and build the browser URL
    response['ip'] = '{}/browserfs.html'.format(
        cameras_to_ip[response['camera']])
    return jsonify(response)
def test_get(self): ''' Проверка функции формирования интересов пользователя :return: list(интересы) ''' pattern = r'^\[[,"\w].*\]$' getter = store.get(self.conn.conn, 'list:interests') leng = len(getter) match = re.match(pattern, getter) self.assertEqual(match.endpos, leng)
def get(label):
    """
    @api {get} /:label Get counter value
    @apiName GetCounter
    @apiGroup Counter
    @apiParam {String} label Counter label.
    @apiSuccess {Number} counter Current value, 0 if label doesn't exist.
    """
    return jsonify(counter=store.get(label))
def lemma_details_page(request, lemma_id):
    """Render the detail page for a single lemma.

    Fetches the lemma document, extracts its bibliography, filters its
    relations down to other lemma entries, and renders the template with
    external references, COinS metadata, occurrence counts, annotations
    and the TLA edition constants.

    Fix: the context dict listed the ``'tlaVersion'`` key twice (the
    duplicate silently overwrote the first, identical entry); removed the
    duplicate and the commented-out split lambda.
    """
    lemma = store.get('lemma', lemma_id)
    # Workaround (translated from German): rewrite ";" to "|" where a
    # capital letter follows, then split on "|" -- i.e. avoid splitting
    # on every ";" found in the bibliography text.
    bibl = glom(
        lemma,
        Coalesce(
            (
                (
                    'passport.bibliography.0.bibliographical_text_field.0',
                    lambda x: re.sub(r';\s*([A-Z])', r'|\1', x).split(
                        '|'
                    )
                ),
                [str.strip]),
            default=[]))
    # keep only relations that point at other lemma entries
    lemma['relations'] = {
        predicate: [
            relation for relation in relations
            if relation.get('eclass') == 'BTSLemmaEntry'
        ]
        for predicate, relations in lemma.get('relations', {}).items()
    }
    return render(
        request, 'details/lemma.html', {
            'lemma': lemma,
            'bibl': bibl,
            'ext': {
                provider: format_ext_refs(provider, refs)
                for provider, refs in (
                    lemma.get('externalReferences') or {}).items()
            },
            'coins': coins_openurl_kev(lemma),
            'occurrences': {
                'corpus': occurrence_count(lemma_id),
            },
            'annotations': render_annotations(lemma_annotations(lemma_id)),
            'tlaVersion': tlaVersion,
            'tlaTitle': tlaTitle,
            'tlaIssue': tlaIssue,
            'tlaReleaseDate': tlaReleaseDate,
            'tlaEditor': tlaEditor,
            'tlaBaseURL': tlaBaseURL,
            'dateToday': datetime.now().strftime("%d.%m.%Y"),
        })
def get_html(label):
    """Render the HTML counter page for *label* with its current value."""
    value = store.get(label)
    return render_template("counter.html", label=label, counter=value)
def get_interests(cid):
    """Return the decoded interest list for client *cid*, or [] if absent."""
    raw = store.get(redis_client.conn, "i:%s" % cid)
    if not raw:
        return []
    return json.loads(raw)
def get_interests(store, cid):
    """Fetch key ``i:<cid>`` from *store* and JSON-decode it; [] if empty."""
    raw = store.get("i:%s" % cid)
    if not raw:
        return []
    return json.loads(raw)
import store
from project import class_decls
from project import function_lib

# this is probably tucked away somewhere in the project's startup script
store.analyze(class_decls)
store.analyze(function_lib)

# get the synthesized type from the store
Counter = store.get('Counter')
# make an instance
a_counter = Counter()
# increment the counter
a_counter.value += 1
# now, at_exit there will be (an anonymous) Counter instance in the store

# let's try with something richer
Person = store.get('Person')
toby = Person(name='Toby', city='SF')

# later...
from store import query_facade as q
# Fixes: the facade was imported as `q` but queried through an undefined
# name `p`; also `class=Person` is a SyntaxError (`class` is a reserved
# keyword), so the keyword is passed via dict unpacking instead.
toby2 = q.match(**{'class': Person}, name='Toby')
print(toby2.city)  # -> 'SF'
def es_get(request, index, _id):
    """Return the stored document at (index, _id) as a JSON response.

    ``safe=False`` allows non-dict top-level payloads (e.g. lists).
    """
    document = store.get(index, _id)
    return JsonResponse(document, safe=False)
def populate_textword_occurrences(hits, **params):
    """
    Take text word search results and the original search parameters and
    enrich the results: highlight the hit token (and any other tokens
    matching the active filters), attach the parent text with
    site-relative node URLs, and pull out editor/date metadata.

    Fixes: three bare ``except:`` clauses narrowed to the lookup errors
    that actually occur on missing metadata; unused ``enumerate`` index
    and commented-out code removed.

    :param hits: sentence-shaped hit dicts with 'tokens', 'id' and a
        'context' holding the parent 'textId'
    :rtype: list
    """
    occurrences = []
    filters = {
        k: params.get(k)
        for k in ["lemma", "transcription", "hieroglyphs", "sentence_id"]
    }
    for hit in hits:
        sentence = hit
        for token in sentence['tokens']:
            if token.get('id') == hit['id']:
                # the token that produced the hit itself
                token['highlight'] = 1
            else:
                # secondary highlight: token matches every active filter
                match = True
                for k, vv in filters.items():
                    if vv:
                        match = match and any(
                            map(lambda v: token.get(k) == v, vv))
                # NOTE(review): `filters` is built from the four keys above
                # and never contains 'translation', so this branch appears
                # dead -- confirm whether translation/trans_lang params
                # were meant to be part of `filters` before removing it.
                if 'translation' in filters:
                    match = match and any(
                        map(
                            lambda l: any(
                                map(
                                    lambda t: t in filters.get(
                                        'translations').get(l, "").lower(),
                                    filters.get('trans_lang', []))),
                            filters.get('translation')))
                if match:
                    token['highlight'] = 2
        text = store.get(
            'text',
            hit.get('context', {}).get('textId'),
        )
        if text:
            # rewrite stored node paths into site-relative URLs
            for path in text.get('paths', []):
                for node in path:
                    node["url"] = "/{}/{}".format({
                        "BTSTCObject": "object",
                        "BTSText": "text",
                    }.get(node.get('eclass')), node['id'])
            try:
                editor = text['edited']['name']
            except (KeyError, TypeError):
                editor = '(not edited)'
            try:
                dateEdited = text['edited']['date']
            except (KeyError, TypeError):
                dateEdited = '(not edited)'
            try:
                date = text['passport']['date'][0]['date'][0]['date'][0][
                    'name']
            except (KeyError, IndexError, TypeError):
                date = '(not edited)'
            occurrences.append({
                "occurrence": hit,
                "sentence": sentence,
                "text": text,
                "editor": editor,
                "date": date,
                "dateEdited": dateEdited,
            })
        else:
            print('text not found: ', hit.get('location', {}).get("text"))
    return occurrences
def on_resume(self): """when the app is recall from sleep get the screen we was on and set it """ store = get_store.get_store("saving") self.sm.current = str(store.get('pause')["value"])