def linking(json_sheet, post_id=None):
    """Re-link a sheet's text using the <a href=…/sheets…> anchors found on the
    already-posted copy of the sheet.

    Fetches the posted sheet `post_id` from `get_server`, finds every
    ``<a …/sheets…>text</a>`` anchor in its response, and replaces each bare
    occurrence of that anchor's text inside `json_sheet`'s repr with the full
    anchor, then parses the edited repr back into a dict.

    :param json_sheet: sheet dict whose text should receive the links.
    :param post_id: id of the posted sheet to read anchors from; defaults to
        ``json_sheet['id']``.
    :return: dict rebuilt from the edited string representation.
    """
    if not post_id:
        post_id = json_sheet['id']
    # Operate on the repr of the whole sheet so the replacement reaches every
    # nested field; doubled backslashes are stripped first.
    json_sheet_str = str(json_sheet).replace(u'\\\\', u'')
    url = get_server + "/api/sheets/{}".format(post_id)
    got = http_request(url, body={"apikey": API_KEY}, method="GET")
    # Renamed from `compile`, which shadowed the builtin of the same name.
    anchor_re = re.compile(u'(?P<aref><a.*?/sheets.*?>(?P<text>.*?)</a>)')
    for match in re.finditer(anchor_re, str(got)):
        # BUGFIX: the original used re.sub() here, which treated the matched
        # text as a regex pattern and the anchor as a regex replacement —
        # any metacharacter or backslash in either corrupted the result.
        # Both are literals, so a plain str.replace is the correct operation.
        json_sheet_str = json_sheet_str.replace(match.group('text'), match.group('aref'))
    # HACK: eval() reconstructs the dict from its (edited) repr.
    # ast.literal_eval would be safer; verify the repr stays literal-only
    # before swapping it in. Never feed untrusted data through this path.
    return eval(json_sheet_str)
def get_sheet(server, api_key, sheet_id=None, sheet_ssn=None): try: assert sheet_id or sheet_ssn except AssertionError: print " you must supply an id or ssn to get a sheet" return if sheet_ssn: sheet_id = mapping[int(sheet_ssn)] url = server + "/api/sheets/{}".format(sheet_id) got = http_request(url, body={"apikey": api_key}, method="GET") return got
def post_btween_sandboxes(ssn=None, map_ssn_url_post=None, map_ssn_url_get=None, id=None, id_post=None, new_post = False, pdf_action="", repost_to_local = False):
    """Copy a sheet from the GET sandbox/server to the POST one.

    Resolves the source sheet id (`get_sheet_id`) and destination id
    (`post_sheet_id`) either from an ssn via the two maps or directly from
    `id`/`id_post`, fetches the sheet, strips a fixed set of tags via
    `change_sheet`, and then either reposts it over the local mongo copy or
    posts it to POST_SERVER.

    :param ssn: key into map_ssn_url_get / map_ssn_url_post; mutually
        fallback with `id` (one of the two is required).
    :param map_ssn_url_post: ssn -> id on the post server.
    :param map_ssn_url_get: ssn -> id on the get server.
    :param id: source sheet id when no ssn is used.
    :param id_post: destination sheet id; defaults to `id`.
    :param new_post: post as a brand-new sheet (drop the 'id' field).
    :param pdf_action: forwarded to change_sheet.
    :param repost_to_local: write into the local `db.sheets` collection
        instead of posting to POST_SERVER.
    :return: None. Failures are printed / appended to nopost.html.
    """
    if not ssn and not id:
        print "missing either a ssn or an sheet_id of the sheet to be posted"
        return
    if ssn:
        try:
            assert map_ssn_url_get and map_ssn_url_post
        except AssertionError:
            # NOTE(review): this only prints and falls through — the map
            # lookups below will then fail with TypeError (None is not
            # subscriptable), which is NOT caught by the KeyError handler.
            print "if using ssn you must give the corresponding maps"
        try:
            if not new_post:
                # the latest sheet with that ssn number we want to repost over
                post_sheet_id = map_ssn_url_post[ssn]
            # can also be from a ssn mapping from the correct server the fixed
            # sheet that was created (sometimes from the old sheet itself),
            # need to know it
            get_sheet_id = map_ssn_url_get[ssn]
        except KeyError:
            # Silently skip ssns missing from either map.
            return
    else:  # id - because if it is neither we returns already :)
        get_sheet_id = id
        if id_post:
            post_sheet_id = id_post
        else:
            post_sheet_id = id
    url = get_server + "/api/sheets/{}".format(get_sheet_id)
    got = http_request(url, body={"apikey": API_KEY}, method="GET")
    # got = linking(got, post_sheet_id)
    # Strip workflow/QA tags from the copy before reposting.
    changed = change_sheet(got, add_tags=[], del_tags=["UI", 'int_tag', 'Hebrew Sheet', 'Bilingual', 'Edited', 'Linked', 'merged', 'QA', "Merged"], pdf_action=pdf_action)
    if new_post:
        # No 'id' means the post server will allocate a fresh sheet.
        del changed['id']
    else:
        changed['id'] = post_sheet_id
    # # delete sheet in post sheet number - note: when it is reposting the
    # sheet will only be in this script RAM so always!!! have a backup
    # delete_url = post_server + "/api/sheets/{}/delete".format(post_sheet_id)
    # response = http_request(delete_url, body={"apikey": API_KEY})
    # if re.search("Sheet {} not found.".format(post_sheet_id), response):
    #     del changed['id']
    if repost_to_local:
        # Overwrite the local mongo document in place, keeping its _id.
        post_over_sheet = db.sheets.find_one({'id': post_sheet_id})
        changed['_id'] = post_over_sheet['_id']
        db.sheets.update({'_id': post_over_sheet["_id"]}, changed)
    else:
        # NOTE(review): indentation reconstructed from a collapsed source
        # line — the post + error-logging below are taken to belong to this
        # branch (the repost_to_local branch writes to mongo directly and
        # would not also post); confirm against the original file.
        del changed['_id']
        # post the copyed sheet in there
        response = post_sheet(changed, server=POST_SERVER)
        if isinstance(response, unicode) and re.search('error', response):
            # Keep a record of every failed post for later inspection.
            with open(u"nopost.html", 'a') as f:
                writer = f.write(response)
def find_bug(sheet_id, problems): sheet_id = sheet_id url = GET_SERVER + "/api/sheets/{}".format(sheet_id) got = http_request(url, body={"apikey": API_KEY}, method="GET") for s in got['sources']: if 'outsideBiText' in s.keys(): if re.search(compiled, s['outsideBiText']['he']): problems.append(sheet_id) break elif 'outsideText' in s.keys(): if re.search(compiled, s['outsideText']): problems.append(sheet_id) break print problems return problems
         'Mishneh_Torah,_Vessels_of_the_Sanctuary_and_Those_who_Serve_Therein',
         'Mishneh_Torah,_Things_Forbidden_on_the_Altar',
         'Mishneh_Torah,_Daily_Offerings_and_Additional_Offerings',
         'Mishneh_Torah,_Defilement_of_Foods',
         'Mishneh_Torah,_Murderer_and_the_Preservation_of_Life',
         'Mishneh_Torah,_Ownerless_Property_and_Gifts']
# NOTE(review): this chunk begins mid-list — the list opening and
# `hebrew_titles` are defined above the visible range.
# Map Hebrew book titles to their English counterparts, pairwise.
mapping = dict(zip(hebrew_titles, english_titles))
# Make a term
# Maaseh Rokeah
# (Hebrew: Maaseh Rokeach)
add_term("Maaseh Rokeach", u"מעשה רקח", "commentary_works")
# Make categories: first the top-level commentary category…
http_request(SEFARIA_SERVER + "/api/category", body={'apikey': API_KEY}, json_payload={"path":["Halakhah","Mishneh Torah","Commentary","Maaseh Rokeach"], "sharedTitle": "Maaseh Rokeach"}, method="POST")
# …then one sub-category per sefer, keyed by the sefer's primary English term.
for sefer in sefarim:
    t = Term().load({"titles.text": sefer})
    if not isinstance(t, Term):
        # Term lookup failed — flag loudly; the category POST below will
        # then fail on t.get_primary_title.
        print u"ARGGG! {}".format(sefer)
    http_request(SEFARIA_SERVER + "/api/category", body={'apikey': API_KEY}, json_payload={"path": ["Halakhah", "Mishneh Torah", "Commentary", "Maaseh Rokeach", t.get_primary_title("en")], "sharedTitle": t.get_primary_title("en")}, method="POST")
for name, data in processed.iteritems():
    # Try the Hebrew title as a Ref; fall back to the English title via
    # `mapping` when it does not resolve.
    full_name = u"משנה תורה, " + name
    if not Ref.is_ref(full_name):
        full_name = mapping[name]
        # NOTE(review): loop body is truncated here in the visible range;
        # it continues below/elsewhere in the original file.
# Second (later) variant of the same setup script, restricted to one title.
english_titles = ["Mishneh_Torah,_Rest_on_a_Holiday"]
# NOTE(review): `hebrew_titles` is defined above the visible range.
mapping = dict(zip(hebrew_titles, english_titles))
# Make a term
# Maaseh Rokeach
# (Hebrew: Maaseh Rokeach)
# add_term("Maaseh Rokeach", u"מעשה רקח", "commentary_works")
# Make categories: one sub-category per sefer under Maaseh Rokeach.
for sefer in sefarim:
    t = Term().load({"titles.text": sefer})
    if not isinstance(t, Term):
        # Term lookup failed — flag loudly; the POST below will then fail
        # on t.get_primary_title.
        print u"ARGGG! {}".format(sefer)
    http_request(SEFARIA_SERVER + "/api/category", body={'apikey': API_KEY}, json_payload={"path": ["Halakhah", "Mishneh Torah", "Commentary", "Maaseh Rokeach", t.get_primary_title("en")], "sharedTitle": t.get_primary_title("en")}, method="POST")
for name, data in processed.iteritems():
    # Resolve the Hebrew title to a canonical Ref, falling back to the
    # English title from `mapping` when the Hebrew form is not a valid ref.
    full_name = u"משנה תורה, " + name
    if not Ref.is_ref(full_name):
        full_name = mapping[name]
    r = Ref(full_name)
    print r.normal()
    # Build the commentary's titles from the base book's normal name.
    short_base_title = r.normal().replace("Mishneh Torah, ", "")
    en_title = "Maaseh Rokeach on " + short_base_title
    he_title = u"מעשה רקח על " + name
    j = JaggedArrayNode()
    # NOTE(review): loop body is truncated here in the visible range — the
    # JaggedArrayNode is configured past the end of this chunk.