def selectionmaker(action: str, one=None, two=None) -> JSON_STR:
	"""
	dispatcher for "/selection/..." requests

	routes 'make', 'clear', and 'fetch' to their handlers; anything
	else yields an empty JSON string

	:param action: the path component naming the sub-request
	:param one: optional first argument (used by 'clear')
	:param two: optional second argument (used by 'clear')
	:return: the JSON string produced by the dispatched handler
	"""
	one = depunct(one)
	two = depunct(two)

	dispatch = {
		'make': (selectionmade, [request.args]),
		'clear': (clearselections, [one, two]),
		'fetch': (getcurrentselections, None),
	}

	try:
		handler, arguments = dispatch[action]
	except KeyError:
		return json.dumps(str())

	j = handler(*arguments) if arguments else handler()

	if hipparchia.config['JSONDEBUGMODE']:
		print('/selection/{f}/\n\t{j}'.format(f=action, j=j))

	return j
def textgetter(action: str, one=None, two=None, three=None, four=None, five=None) -> JSON_STR:
	"""
	dispatcher for "/text/..." requests

	the five positional path components are sanitized and then passed
	through to the handler picked by 'action'; unknown actions yield
	an empty JSON string

	:param action: the path component naming the sub-request
	:param one: through :param five: citation components ('.' and '|' allowed in three/four/five)
	:return: the JSON string produced by the dispatched handler
	"""
	one = depunct(one)
	two = depunct(two)
	three = depunct(three, allowedpunctuationsting='.|')
	four = depunct(four, allowedpunctuationsting='.|')
	five = depunct(five, allowedpunctuationsting='.|')

	fiveargs = [one, two, three, four, five]
	fourargs = [one, two, three, four]

	dispatch = {
		'index': (buildindexto, fiveargs),
		'vocab': (generatevocabfor, fiveargs),
		'vocab_rawloc': (vocabfromrawlocus, fiveargs),
		'index_rawloc': (indexfromrawlocus, fiveargs),
		'make': (textmaker, fourargs),
		'make_rawloc': (texmakerfromrawlocus, fourargs),
	}

	try:
		handler, arguments = dispatch[action]
	except KeyError:
		return json.dumps(str())

	j = handler(*arguments)

	if hipparchia.config['JSONDEBUGMODE']:
		print('/text/{f}\n\t{j}'.format(f=action, j=j))

	return j
def sampleworkcitation(authorid: str, workid: str) -> JSON_STR:
	"""
	called by loadsamplecitation() in autocomplete.js

	we are using the manual input style on the web page
	so we need some hint on how to do things: check the end line for a sample citation

	"In Timarchum (w001)" yields...

	127.0.0.1 - - [04/Apr/2021 13:48:53] "GET /get/json/samplecitation/gr0026/001 HTTP/1.1" 200 -
	/get/json/samplecitation {"firstline": "1.1", "lastline": "196.7"}

	:param authorid:
	:param workid:
	:return: JSON dict with 'firstline' and 'lastline' citations
	"""
	returnvals = dict()
	returnvals['firstline'] = str()
	returnvals['lastline'] = str()

	authorid = depunct(authorid)
	workid = depunct(workid)

	# validate the author/work pair BEFORE opening a db connection:
	# the old code opened the connection first and then leaked it
	# when it bailed out on a bad id combination
	try:
		ao = authordict[authorid]
		wo = workdict[authorid + 'w' + workid]
	except KeyError:
		returnvals['firstline'] = 'no such author/work combination'
		return json.dumps(returnvals)

	dbconnection = ConnectionObject()
	dbcursor = dbconnection.cursor()

	# disallow title ('t') lines and the top level so the sample points at real text
	toplevel = wo.availablelevels - 1
	firstlineindex = returnfirstorlastlinenumber(wo.universalid, dbcursor, disallowt=True, disallowlevel=toplevel)
	flo = dblineintolineobject(grabonelinefromwork(authorid, firstlineindex, dbcursor))

	lastlineidx = returnfirstorlastlinenumber(wo.universalid, dbcursor, findlastline=True)
	llo = dblineintolineobject(grabonelinefromwork(authorid, lastlineidx, dbcursor))

	returnvals['firstline'] = flo.prolixlocus()
	returnvals['lastline'] = llo.prolixlocus()

	results = json.dumps(returnvals)

	dbconnection.connectioncleanup()

	return results
def responsegetter(fnc: str, param: str) -> FlaskResponse:
	"""
	dispatcher for "/get/response/..." requests

	unknown function names bounce you back to the front page

	:param fnc: the path component naming the sub-request
	:param param: the single argument handed to the handler
	:return: a flask response object
	"""
	param = depunct(param)

	dispatch = {
		'cookie': cookieintosession,
		'vectorfigure': fetchstoredimage,
	}

	try:
		handler = dispatch[fnc]
	except KeyError:
		return redirect(url_for('frontpage'))

	return handler(param)
def getauthinfo(authorid: str) -> JSON_STR:
	"""
	show local info about the author one is considering in the selection box

	the output is a JSON-encoded blob of HTML: the author summary
	followed by one line per work, sorted by work id

	:param authorid: a (sanitized) author universalid
	:return: JSON string of the assembled HTML
	"""
	authorid = depunct(authorid)
	theauthor = authordict[authorid]

	if len(theauthor.listofworks) > 1:
		heading = '<br /><br /><span class="italic">work numbers:</span><br />'
	else:
		heading = '<br /><span class="italic">work:</span><br />'

	# present the works in universalid order
	worksbyid = {w.universalid: w for w in theauthor.listofworks}
	workinfo = [woformatworkinfo(worksbyid[wid]) for wid in sorted(worksbyid)]

	htmlfragments = [formatauthinfo(theauthor), heading] + workinfo

	return json.dumps('\n'.join(htmlfragments))
def setsessionvariable(thevariable, thevalue) -> JSON_STR:
	"""
	accept a variable name and value: hand it off to the parser/setter

	returns:

	[{"latestdate": "1"}]
	[{"spuria": "no"}]
	etc.

	:return: JSON echo of the variable/value pair, or a null result
	"""
	nullresult = json.dumps([{'none': 'none'}])

	# '-' must survive for the date spinner; '_' for names like 'converted_date'
	thevalue = depunct(thevalue, '-_')

	if thevalue == 'null':
		# the js sends 'null' if you catch it in the middle of swapping values:
		# 127.0.0.1 - - [09/Mar/2021 09:50:42] "GET /setsessionvariable/browsercontext/null HTTP/1.1" 500 -
		return nullresult

	if 'authorssummary' not in session:
		# cookies are not enabled
		return nullresult

	modifysessionvariable(thevariable, thevalue)

	return json.dumps([{thevariable: thevalue}])
def supplyhints(category, _) -> JSON_STR:
	"""
	return JSON to fill a hint box with constantly updated values

	the json encodes a list of strings

	from https://api.jqueryui.com/autocomplete/
	The Autocomplete plugin does not filter the results, instead a query string is added with a term field,
	which the server-side script should use for filtering the results. if the source option is set to
	"https://example.com" and the user types foo, a GET request would be made to https://example.com?term=foo

	so you have to have '?term='; and you can't do a bare '/?term=' either:
	you need an anchor like '/h?term=', and the '?term=' must be read via request.args.get()

	:return: JSON list of hint strings
	"""
	query = request.args.get('term', str())

	hintsources = {
		'author': offerauthorhints,
		'authgenre': augenrelist,
		'workgenre': wkgenrelist,
		'authlocation': offeraulocationhints,
		'worklocation': offerprovenancehints,
		'lemmata': offerlemmatahints,
	}

	fnc = hintsources.get(category)
	if fnc is None:
		return json.dumps(list())

	# only author hints are allowed to keep '[' in the query
	punct = '[' if category == 'author' else None
	hintlist = fnc(depunct(query, punct))

	if hipparchia.config['JSONDEBUGMODE']:
		print('/hints/{f}\n\t{j}'.format(f=category, j=hintlist))

	return json.dumps(hintlist)
def dispatchvectorsearch(vectortype: str, searchid: str, one=None, two=None, three=None) -> JSON_STR:
	"""
	dispatcher for "/vectors/..." requests

	:param vectortype: which vector search to run ('nearestneighborsquery', 'analogies', ...)
	:param searchid: the progress-poll id supplied by the client
	:param one: first search term
	:param two: second search term (only 'analogies' uses it)
	:param three: third search term (only 'analogies' uses it)
	:return: JSON string with the search results (or a refusal message)
	"""
	if not hipparchia.config['SEMANTICVECTORSENABLED']:
		so = SearchObject(str(), str(), str(), str(), str(), session)
		oo = SearchOutputObject(so)
		target = 'searchsummary'
		message = '[semantic vectors have not been enabled]'
		return oo.generatenulloutput(itemname=target, itemval=message)

	pollid = validatepollid(searchid)
	one = depunct(one)
	two = depunct(two)
	three = depunct(three)

	simple = [pollid, one]
	triple = [pollid, one, two, three]

	knownfunctions = {
		'nearestneighborsquery': {'bso': simple, 'pref': 'CONCEPTMAPPINGENABLED'},
		'analogies': {'bso': triple, 'pref': 'VECTORANALOGIESENABLED'},
		'topicmodel': {'bso': simple, 'pref': 'TOPICMODELINGENABLED'},
		'vectortestfunction': {'bso': simple, 'pref': 'TESTINGVECTORBUTTONENABLED'},
		'unused': {'fnc': lambda: str(), 'bso': None, 'pref': None},
	}

	# (bugfix) an unrecognized vectortype used to raise an unguarded KeyError
	# on the subscript below; refuse it cleanly instead
	if vectortype not in knownfunctions:
		return json.dumps('this type of search has not been enabled')

	if not knownfunctions[vectortype]['pref'] or not hipparchia.config[knownfunctions[vectortype]['pref']]:
		return json.dumps('this type of search has not been enabled')

	bso = knownfunctions[vectortype]['bso']

	# triple (4 items) vs simple (2 items) determines which builder to use
	so = None
	if len(bso) == 4:
		so = buildtriplelemmasearchobject(*bso)
	if len(bso) == 2:
		so = buildsinglelemmasearchobject(*bso)

	so.vectorquerytype = vectortype

	progresspolldict[pollid] = ProgressPoll(pollid)
	so.poll = progresspolldict[pollid]
	so.poll.activate()
	so.poll.statusis('Preparing to vectorize')

	if hipparchia.config['EXTERNALVECTORHELPER']:
		j = externalvectors(so)
	else:
		j = pythonvectors(so)

	if hipparchia.config['JSONDEBUGMODE']:
		print('/vectors/{f}\n\t{j}'.format(f=vectortype, j=j))

	# drop the poll reference so the SearchObject can be garbage collected
	try:
		del so.poll
	except AttributeError:
		pass

	return j
def infogetter(fnc: str, one=None, two=None, three=None) -> JSON_STR:
	"""
	dispatcher for "/get/json" requests

	handlers either take no arguments or a fixed slice of the
	sanitized path components; unknown names yield an empty JSON string

	:param fnc: the path component naming the sub-request
	:param one: through :param three: optional citation components
	:return: the JSON string produced by the dispatched handler
	"""
	one = depunct(one)
	two = depunct(two)
	three = depunct(three, allowedpunctuationsting='.|')

	dispatch = {
		'sessionvariables': (getsessionvariables, None),
		'worksof': (findtheworksof, [one]),
		'workstructure': (findworkstructure, [one, two, three]),
		'samplecitation': (sampleworkcitation, [one, two]),
		'authorinfo': (getauthinfo, [one]),
		'searchlistcontents': (getsearchlistcontents, None),
		'genrelistcontents': (getgenrelistcontents, None),
		'vectorranges': (returnvectorsettingsranges, None),
		'helpdata': (loadhelpdata, None),
	}

	try:
		handler, arguments = dispatch[fnc]
	except KeyError:
		return json.dumps(str())

	j = handler(*arguments) if arguments else handler()

	if hipparchia.config['JSONDEBUGMODE']:
		print('/get/json/{f}\n\t{j}'.format(f=fnc, j=j))

	return j
def dictsearch(searchterm) -> JSON_STR:
	"""
	look up words

	return dictionary entries

	json packing

	:param searchterm: the (possibly regex-flavored) headword to seek
	:return: JSON dict with 'newhtml' and 'newjs'
	"""
	returndict = dict()

	searchterm = searchterm[:hipparchia.config['MAXIMUMLEXICALLENGTH']]
	probeforsessionvariables()

	dbconnection = ConnectionObject()
	dbcursor = dbconnection.cursor()

	if hipparchia.config['UNIVERSALASSUMESBETACODE']:
		searchterm = replacegreekbetacode(searchterm.upper())

	# '^$.' survive so simple regex anchoring still works
	allowedpunct = '^$.'
	seeking = depunct(searchterm, allowedpunct)
	seeking = seeking.lower()
	seeking = re.sub('[σς]', 'ϲ', seeking)
	stripped = stripaccents(seeking)

	# don't turn 'injurius' into '[iiII]n[iiII][uuVV]r[iiII][uuVV]s'
	# that will happen if you call stripaccents() prematurely
	stripped = re.sub(r'[uv]', '[uvUV]', stripped)
	stripped = re.sub(r'[ij]', '[ijIJ]', stripped)

	# a latin letter anywhere in the query means the latin dictionary
	if re.search(r'[a-z]', seeking):
		usedictionary = 'latin'
		usecolumn = 'entry_name'
	else:
		usedictionary = 'greek'
		usecolumn = 'unaccented_entry'

	# (bugfix) this availability check was duplicated verbatim in the original;
	# one copy suffices, and the early return now also closes the db connection
	# instead of leaking it
	if not session['available'][usedictionary + '_dictionary']:
		returndict['newhtml'] = 'cannot look up {w}: {d} dictionary is not installed'.format(d=usedictionary, w=seeking)
		dbconnection.connectioncleanup()
		return json.dumps(returndict)

	limit = hipparchia.config['CAPONDICTIONARYFINDS']

	foundtuples = headwordsearch(stripped, limit, usedictionary, usecolumn)

	# example:
	# results are presorted by ID# via the postgres query
	# foundentries [('scrofa¹', 43118), ('scrofinus', 43120), ('scrofipascus', 43121), ('Scrofa²', 43119), ('scrofulae', 43122)]

	returnlist = list()

	if len(foundtuples) == limit:
		returnlist.append('[stopped searching after {lim} finds]<br>'.format(lim=limit))

	if len(foundtuples) > 0:
		if len(foundtuples) == 1:
			# sending '0' to browserdictionarylookup() will hide the count number
			usecounter = False
		else:
			usecounter = True

		wordobjects = [probedictionary(setdictionarylanguage(f[0]) + '_dictionary',
		                               'entry_name', f[0], '=', dbcursor=dbcursor, trialnumber=0)
		               for f in foundtuples]
		wordobjects = flattenlistoflists(wordobjects)
		outputobjects = [lexicalOutputObject(w) for w in wordobjects]

		# very top: list the finds
		if usecounter:
			findstemplate = '({n}) <a class="nounderline" href="#{w}_{wdid}">{w}</a>'
			findslist = [findstemplate.format(n=f[0]+1, w=f[1][0], wdid=f[1][1]) for f in enumerate(foundtuples)]
			returnlist.append('\n<br>\n'.join(findslist))

		# the actual entries
		count = 0
		for oo in outputobjects:
			count += 1
			if usecounter:
				entry = oo.generatelexicaloutput(countervalue=count)
			else:
				entry = oo.generatelexicaloutput()
			returnlist.append(entry)
	else:
		returnlist.append('[nothing found]')

	if session['zaplunates']:
		returnlist = [attemptsigmadifferentiation(x) for x in returnlist]
		returnlist = [abbreviatedsigmarestoration(x) for x in returnlist]

	returndict['newhtml'] = '\n'.join(returnlist)
	returndict['newjs'] = '\n'.join([dictionaryentryjs(), insertlexicalbrowserjs()])

	jsondict = json.dumps(returndict)

	dbconnection.connectioncleanup()

	return jsondict
def knownforms(language, lexicalid, xrefid, headword) -> JSON_STR:
	"""
	display all known forms of...

	you are supposed to be sent here via the principle parts click from a lexical entry

	this means you have access to a BaseFormMorphology() object

	that is how/why you know the parameters already

	:param language: 'greek' or 'latin' (anything else falls back to 'greek')
	:param lexicalid: dictionary entry id; must look like a float
	:param xrefid: morphology cross-reference id; must look like an int
	:param headword: the dictionary headword
	:return: JSON dict with 'newhtml' and 'newjs'
	"""
	# sanitize all input...
	headword = headword[:hipparchia.config['MAXIMUMLEXICALLENGTH']]

	knownlanguages = ['greek', 'latin']
	if language not in knownlanguages:
		language = 'greek'

	try:
		lexicalid = str(float(lexicalid))
	except ValueError:
		lexicalid = 'invalid_user_input'

	try:
		xrefid = str(int(xrefid))
	except ValueError:
		xrefid = 'invalid_user_input'

	headword = depunct(headword)
	headword = re.sub(r'[σς]', 'ϲ', headword)

	try:
		bfo = BaseFormMorphology(headword, xrefid, language, lexicalid, session)
	except Exception:
		# (bugfix) was a bare 'except:' which would also swallow
		# SystemExit/KeyboardInterrupt; Exception is broad enough here
		consolewarning('could not initialize BaseFormMorphology() object')
		return 'could not initialize BaseFormMorphology() object'

	# if this is active a click on the word will do a lemmatized lookup of it
	# topofoutput = """
	# <div class="center">
	# 	<span class="verylarge">All known forms of <lemmatizable headform="{w}">{w}</lemmatizable></span>
	# </div>
	# """

	# if this is active a click on the word will return you to the dictionary entry for it
	topofoutput = """
	<div class="center">
	<span class="verylarge">All known forms of <dictionaryidsearch entryid="{eid}" language="{lg}">{w}</dictionaryidsearch></span>
	</div>
	"""

	returnarray = list()

	if bfo.iammostlyconjugated():
		returnarray.append(topofoutput.format(w=bfo.headword, eid=bfo.lexicalid, lg=bfo.language))
		returnarray = returnarray + bfo.buildhtmlverbtablerows(session)

	if bfo.iamdeclined():
		returnarray.append(topofoutput.format(w=bfo.headword, eid=bfo.lexicalid, lg=bfo.language))
		returnarray = returnarray + bfo.buildhtmldeclinedtablerows()

	returndict = dict()
	returndict['newhtml'] = '\n'.join(returnarray)
	returndict['newjs'] = morphologychartjs()

	if session['zaplunates']:
		returndict['newhtml'] = attemptsigmadifferentiation(returndict['newhtml'])
		returndict['newhtml'] = abbreviatedsigmarestoration(returndict['newhtml'])

	jsondict = json.dumps(returndict)

	return jsondict
def reverselexiconsearch(searchid, searchterm) -> JSON_STR:
	"""
	attempt to find all of the greek/latin dictionary entries that might go with the english search term

	'ape' will drive this crazy; what is needed is a lookup for only the senses

	this can be built into the dictionary

	:param searchid: the progress-poll id supplied by the client
	:param searchterm: the english word to hunt for in the translations
	:return: JSON dict with 'newhtml' and 'newjs'
	"""
	searchterm = searchterm[:hipparchia.config['MAXIMUMLEXICALLENGTH']]

	pollid = validatepollid(searchid)
	progresspolldict[pollid] = ProgressPoll(pollid)
	activepoll = progresspolldict[pollid]
	activepoll.activate()
	activepoll.statusis('Searching lexical entries for "{t}"'.format(t=searchterm))

	probeforsessionvariables()

	returndict = dict()
	returnarray = list()

	seeking = depunct(searchterm)

	# which dictionaries to probe depends on the active corpora
	if justlatin():
		searchunder = [('latin', 'hi')]
	elif justtlg():
		searchunder = [('greek', 'tr')]
	else:
		searchunder = [('greek', 'tr'), ('latin', 'hi')]

	limit = hipparchia.config['CAPONDICTIONARYFINDS']

	entriestuples = list()
	for s in searchunder:
		usedict = s[0]
		translationlabel = s[1]
		# first see if your term is mentioned at all
		wordobjects = reversedictionarylookup(seeking, usedict, limit)
		entriestuples += [(w.entry, w.id) for w in wordobjects]

	if len(entriestuples) == limit:
		returnarray.append('[stopped searching after {lim} finds]\n<br>\n'.format(lim=limit))

	entriestuples = list(set(entriestuples))

	# sort by total corpus hit count, descending
	unsortedentries = [(querytotalwordcounts(e[0]), e[0], e[1]) for e in entriestuples]
	entries = list()
	for e in unsortedentries:
		hwcountobject = e[0]
		term = e[1]
		idval = e[2]
		if hwcountobject:
			entries.append((hwcountobject.t, term, idval))
		else:
			entries.append((0, term, idval))
	entries = sorted(entries, reverse=True)
	entriestuples = [(e[1], e[2]) for e in entries]

	# now we retrieve and format the entries
	if entriestuples:
		# summary of entry values first
		countobjectdict = {e: querytotalwordcounts(e[0]) for e in entriestuples}
		summary = list()
		count = 0
		for c in countobjectdict.keys():
			count += 1
			try:
				totalhits = countobjectdict[c].t
			except AttributeError:
				# (bugfix) was a bare 'except:'; the count object can be a
				# non-object (falsy) with no '.t' — that is the only failure here
				totalhits = 0
			# c[0]: the word; c[1]: the id
			summary.append((count, c[0], c[1], totalhits))

		summarytemplate = """
		<span class="sensesum">({n})
			<a class="nounderline" href="#{w}_{wdid}">{w}</a>
			<span class="small">({t:,})</span>
		</span>
		"""

		summary = sorted(summary, key=lambda x: x[3], reverse=True)
		summary = [summarytemplate.format(n=e[0], w=e[1], wdid=e[2], t=e[3]) for e in summary]
		returnarray.append('\n<br />\n'.join(summary))

		# then the entries proper
		dbconnection = ConnectionObject()
		dbconnection.setautocommit()
		dbcursor = dbconnection.cursor()

		wordobjects = [probedictionary(setdictionarylanguage(e[0]) + '_dictionary',
		                               'entry_name', e[0], '=', dbcursor=dbcursor, trialnumber=0)
		               for e in entriestuples]
		wordobjects = flattenlistoflists(wordobjects)
		outputobjects = [lexicalOutputObject(w) for w in wordobjects]

		if len(outputobjects) > 1:
			usecounter = True
		else:
			usecounter = False

		count = 0
		for oo in outputobjects:
			count += 1
			if usecounter:
				entry = oo.generatelexicaloutput(countervalue=count)
			else:
				entry = oo.generatelexicaloutput()
			returnarray.append(entry)

		# (bugfix) the original never released this connection
		dbconnection.connectioncleanup()
	else:
		returnarray.append('<br />[nothing found under "{skg}"]'.format(skg=seeking))

	returndict['newhtml'] = '\n'.join(returnarray)
	returndict['newjs'] = '\n'.join([dictionaryentryjs(), insertlexicalbrowserjs()])

	jsondict = json.dumps(returndict)

	del progresspolldict[pollid]

	return jsondict
def findbyform(observedword, authorid=None) -> JSON_STR:
	"""
	this function sets off a chain of other functions:
	find dictionary form
	find the other possible forms
	look up the dictionary form
	return a formatted set of info

	:param observedword: the surface form clicked/typed by the user
	:param authorid: optional author universalid used to scope prevalence data
	:return: JSON dict with 'newhtml' and 'newjs'
	"""
	# an authorid that is not in the author dictionary is as good as none at all
	if authorid and authorid not in authordict:
		authorid = None

	observedword = observedword[:hipparchia.config['MAXIMUMLEXICALLENGTH']]

	probeforsessionvariables()

	dbconnection = ConnectionObject()
	dbcursor = dbconnection.cursor()

	# canned HTML snippets for the various failure modes
	sanitationerror = '[empty search: <span class="emph">{w}</span> was sanitized into nothingness]'
	dberror = '<br />[the {lang} morphology data has not been installed]'
	notfounderror = '<br />[could not find a match for <span class="emph">{cw}</span> in the morphology table]'
	nodataerror = '<br /><br />no prevalence data for {w}'

	# the next is pointless because: 'po/lemon' will generate a URL '/parse/po/lemon'
	# that will 404 before you can get to replacegreekbetacode()
	# this is a bug in the interaction between Flask and the JS
	# if hipparchia.config['UNIVERSALASSUMESBETACODE']:
	# 	observedword = replacegreekbetacode(observedword.upper())

	# the next makes sense only in the context of pointedly invalid input
	w = depunct(observedword)
	w = w.strip()
	w = tidyupterm(w)
	w = re.sub(r'[σς]', 'ϲ', w)

	# python seems to know how to do this with greek...
	w = w.lower()
	retainedgravity = w
	cleanedword = removegravity(retainedgravity)

	# index clicks will send you things like 'αὖ²'
	cleanedword = re.sub(r'[⁰¹²³⁴⁵⁶⁷⁸⁹]', str(), cleanedword)

	# the search syntax is '=' and not '~', so the next should be avoided unless a lot of refactoring will happen
	# cleanedword = re.sub(r'[uv]', r'[uv]', cleanedword)
	# cleanedword = re.sub(r'[ij]', r'[ij]', cleanedword)

	# a collection of HTML items that the JS will just dump out later; i.e. a sort of pseudo-page
	returndict = dict()

	# EAFP check that sanitation left at least one character
	# NOTE(review): this early return skips dbconnection.connectioncleanup() — looks like a leak; confirm
	try:
		cleanedword[0]
	except IndexError:
		returndict['newhtml'] = sanitationerror.format(w=observedword)
		return json.dumps(returndict)

	# a latin first letter means the word is treated as latin throughout
	isgreek = True
	if re.search(r'[a-z]', cleanedword[0]):
		cleanedword = stripaccents(cleanedword)
		isgreek = False

	morphologyobject = lookformorphologymatches(cleanedword, dbcursor)
	# print('findbyform() mm',morphologyobject.getpossible()[0].transandanal)
	# φέρεται --> morphologymatches [('<possibility_1>', '1', 'φέρω', '122883104', '<transl>fero</transl><analysis>pres ind mp 3rd sg</analysis>')]

	if morphologyobject:
		oo = multipleWordOutputObject(cleanedword, morphologyobject, authorid)
		returndict['newhtml'] = oo.generateoutput()
	else:
		# no morphology match: explain why, then try to show prevalence data anyway
		newhtml = list()
		if isgreek and not session['available']['greek_morphology']:
			newhtml.append(dberror.format(lang='Greek'))
		elif not isgreek and not session['available']['latin_morphology']:
			newhtml.append(dberror.format(lang='Latin'))
		else:
			newhtml.append(notfounderror.format(cw=cleanedword))

		# NOTE(review): when 'prev' is falsy this appends the (possibly None) retainedgravity
		# lookup AND the nodataerror; a None in the list is what trips the TypeError below.
		# presumably the first branch was meant to reassign 'prev' — confirm against upstream
		prev = getobservedwordprevalencedata(cleanedword)
		if not prev:
			newhtml.append(getobservedwordprevalencedata(retainedgravity))
		if not prev:
			newhtml.append(nodataerror.format(w=retainedgravity))
		else:
			newhtml.append(prev)

		# TypeError here is used as control flow: a None element in newhtml
		# collapses the whole answer to '[nothing found]'
		try:
			returndict['newhtml'] = '\n'.join(newhtml)
		except TypeError:
			returndict['newhtml'] = '[nothing found]'

	returndict['newjs'] = '\n'.join([dictionaryentryjs(), insertlexicalbrowserjs()])

	jsondict = json.dumps(returndict)

	dbconnection.connectioncleanup()

	return jsondict
def selectionmade(requestargs: MultiDict) -> JSON_STR:
	"""
	once a choice is made, parse and register it inside session['selections']
	then return the human readable version of the same for display
	on the page

	'_AT_' syntax is used to restrict the scope of a search

	"GET /selection/make/_?auth=lt0474&work=001&locus=13|4&endpoint= HTTP/1.1"
	request.args ImmutableMultiDict([('auth', 'lt0474'), ('work', '001'), ('locus', '13|4'), ('endpoint', '')])

	"GET /selection/make/_?auth=lt0474&work=001&locus=10&endpoint=20&raw=t HTTP/1.1"
	request.args ImmutableMultiDict([('auth', 'lt0474'), ('work', '001'), ('locus', '10'), ('endpoint', '20'), ('raw', 't')])

	"GET /selection/make/_?auth=lt0474&work=001&exclude=t HTTP/1.1"
	request.args ImmutableMultiDict([('auth', 'lt0474'), ('work', '001'), ('exclude', 't')])

	:param requestargs: the query-string arguments of the request
	:return: JSON describing the currently active selections
	"""
	probeforsessionvariables()

	# sanitize every argument before it can touch the session
	uid = depunct(requestargs.get('auth', str()))
	workid = depunct(requestargs.get('work', str()))
	genre = depunct(requestargs.get('genre', str()))
	auloc = depunct(requestargs.get('auloc', str()))

	# 'raw' and 'exclude' are boolean-ish flags: only 't'/'f' characters survive
	rawdataentry = re.sub('[^tf]', str(), requestargs.get('raw', str()))
	exclude = re.sub('[^tf]', str(), requestargs.get('exclude', str()))

	# citation components may legitimately contain '|', ',' and '.'
	allowedpunct = '|,.'
	locus = depunct(requestargs.get('locus', str()), allowedpunct)
	endpoint = depunct(requestargs.get('endpoint', str()), allowedpunct)

	allowedpunct = '.-?():'
	wkprov = depunct(requestargs.get('wkprov', str()), allowedpunct)

	allowedpunct = '.'
	wkgenre = depunct(requestargs.get('wkgenre', str()), allowedpunct)

	# every selection type is stored under a '...selections' or '...exclusions' session key
	if exclude != 't':
		suffix = 'selections'
		other = 'exclusions'
	else:
		suffix = 'exclusions'
		other = 'selections'

	# raw input uses '.' between citation levels; internal format uses '|'
	if rawdataentry == 't':
		locus = re.sub(r'\.', '|', locus)
		endpoint = re.sub(r'\.', '|', endpoint)

	# the selection box might contain stale info if you deselect a corpus while items are still in the box
	uid = selectionisactive(uid)

	# drop any genre/location values that are not present in the active corpora
	if genre and genre not in returnactivelist(authorgenresdict):
		genre = str()

	if wkgenre and wkgenre not in returnactivelist(workgenresdict):
		wkgenre = str()

	if auloc and auloc not in returnactivelist(authorlocationdict):
		auloc = str()

	if wkprov and wkprov not in returnactivelist(workprovenancedict):
		wkprov = str()

	# you have validated the input, now do something with it...
	# the branches below go from most to least specific: span > passage > work > author
	if uid and workid and locus and endpoint:
		# a span in an author: 3 verrine orations, e.g. [note that the selection is 'greedy': 1start - 3end]
		# http://127.0.0.1:5000/makeselection?auth=lt0474&work=005&locus=2|1&endpoint=2|3
		# convert this into a 'firstline' through 'lastline' format
		emptycursor = None
		workobject = None
		try:
			workobject = workdict['{a}w{b}'.format(a=uid, b=workid)]
		except KeyError:
			consolewarning('"/selection/make/" sent a bad workuniversalid: {a}w{b}'.format(a=uid, b=workid))
		# citations arrive most-significant-level first; the lookup wants the reverse
		start = locus.split('|')
		stop = endpoint.split('|')
		start.reverse()
		stop.reverse()
		if workobject:
			firstline = finddblinefromincompletelocus(workobject, start, emptycursor)
			lastline = finddblinefromincompletelocus(workobject, stop, emptycursor, findlastline=True)
			citationtemplate = '{a}w{b}_FROM_{c}_TO_{d}'
			if firstline['code'] == 'success' and lastline['code'] == 'success':
				fl = firstline['line']
				ll = lastline['line']
				loc = citationtemplate.format(a=uid, b=workid, c=fl, d=ll)
				# print('span selected:', loc)
				# span selected: lt0474w005_FROM_4501_TO_11915
				#   Cicero, In Verrem: 2.1.t.1
				#   Cicero, In Verrem: 2.3.228.15
				if ll > fl:
					session['psg' + suffix].append(loc)
					session['psg' + suffix] = tidyuplist(session['psg' + suffix])
				else:
					msg = '"makeselection/" sent a firstline greater than the lastine value: {a} > {b} [{c}; {d}]'
					consolewarning(msg.format(a=fl, b=ll, c=locus, d=endpoint))
				rationalizeselections(loc, suffix)
			else:
				msg = '"makeselection/" could not find first and last: {a}w{b} - {c} TO {d}'
				consolewarning(msg.format(a=uid, b=workid, c=locus, d=endpoint))
	elif uid and workid and locus:
		# a specific passage
		session['psg' + suffix].append(uid + 'w' + workid + '_AT_' + locus)
		session['psg' + suffix] = tidyuplist(session['psg' + suffix])
		rationalizeselections(uid + 'w' + workid + '_AT_' + locus, suffix)
	elif uid and workid:
		# a specific work
		session['wk' + suffix].append(uid + 'w' + workid)
		session['wk' + suffix] = tidyuplist(session['wk' + suffix])
		rationalizeselections(uid + 'w' + workid, suffix)
	elif uid and not workid:
		# a specific author
		session['au' + suffix].append(uid)
		session['au' + suffix] = tidyuplist(session['au' + suffix])
		rationalizeselections(uid, suffix)

	# if vs elif: allow multiple simultaneous instance
	if genre:
		# add to the +/- genre list and then subtract from the -/+ list
		session['agn' + suffix].append(genre)
		session['agn' + suffix] = tidyuplist(session['agn' + suffix])
		session['agn' + other] = dropdupes(session['agn' + other], session['agn' + suffix])

	if wkgenre:
		# add to the +/- genre list and then subtract from the -/+ list
		session['wkgn' + suffix].append(wkgenre)
		session['wkgn' + suffix] = tidyuplist(session['wkgn' + suffix])
		session['wkgn' + other] = dropdupes(session['wkgn' + other], session['wkgn' + suffix])

	if auloc:
		# add to the +/- locations list and then subtract from the -/+ list
		session['aloc' + suffix].append(auloc)
		session['aloc' + suffix] = tidyuplist(session['aloc' + suffix])
		session['aloc' + other] = dropdupes(session['aloc' + other], session['aloc' + suffix])

	if wkprov:
		# add to the +/- locations list and then subtract from the -/+ list
		session['wloc' + suffix].append(wkprov)
		session['wloc' + suffix] = tidyuplist(session['wloc' + suffix])
		session['wloc' + other] = dropdupes(session['wloc' + other], session['wloc' + suffix])

	# after the update to the session, you need to update the page html to reflect the changes
	# print('session["psgselections"]=', session['psgselections'])
	# print('session["psgexclusions"]=', session['psgexclusions'])

	return getcurrentselections()