def write_input_json(self, is_update=False): generated_input_fn = pathify( [self.extn_dir, 'content', 'js', 'generated_input.js'], file_extension=True) generated_input_f = open(generated_input_fn, 'w') generated_input_f.write(""" /** * Generated by xpi builder on the fly when user downloads extension. */ function generated_input() { return """) data = { "constants_PD_URL": DOMAIN, "constants_PD_API_URL": API_DOMAIN, "is_update": is_update } print "DATA: ", data private_key = "__no_private_key_for_update__" if not is_update: recip_pcts = [] if self.recipient: recip_pcts.append({ "recipient_slug": self.recipient.slug, "percent": 1.0 }) private_key = self.generate_private_key() data["private_key"] = private_key data["preselected_charities"] = recip_pcts json.dump([data], generated_input_f) generated_input_f.write(""" } """) generated_input_f.close() # now build generated_javascript.js print "generate_javascript..." extn_bin = pathify([self.extn_dir, "content", "bin"]) sys.path.append(extn_bin) import safe_globals input_file = pathify(['content', 'overlay_list.txt'], file_extension=True) output_file = pathify(['content', 'js', 'generated_javascript.js'], file_extension=True) safe_globals.concatenate(self.extn_dir, input_file, output_file) print "...done" return private_key
def write_input_json(self, is_update=False):
    """Write content/js/generated_input.js and rebuild generated_javascript.js.

    Emits a JS function whose return value is a one-element JSON array
    of configuration; for installs it adds a fresh private key and the
    preselected charity list.  Returns the private key, or
    "__no_private_key_for_update__" when is_update is True.
    """
    generated_input_fn = pathify([self.extn_dir, 'content', 'js', 'generated_input.js'], file_extension=True)
    # NOTE(review): handle is never closed on an exception path — consider `with`.
    generated_input_f = open(generated_input_fn, 'w')
    # JS prologue; the json.dump below becomes generated_input()'s return value.
    generated_input_f.write("""
/**
 * Generated by xpi builder on the fly when user downloads extension.
 */
function generated_input() {
    return """);
    data = {"constants_PD_URL": DOMAIN, "constants_PD_API_URL": API_DOMAIN, "is_update": is_update}
    print "DATA: ", data
    # Updates ship a sentinel instead of minting a new key.
    private_key = "__no_private_key_for_update__"
    if not is_update:
        recip_pcts = []
        if self.recipient:
            # Single recipient gets 100% of donations preselected.
            recip_pcts.append({"recipient_slug": self.recipient.slug, "percent": 1.0})
        private_key = self.generate_private_key()
        data["private_key"] = private_key
        data["preselected_charities"] = recip_pcts
    json.dump([data], generated_input_f)
    generated_input_f.write("""
}
""");
    generated_input_f.close()
    # now build generated_javascript.js
    print "generate_javascript..."
    extn_bin = pathify([self.extn_dir, "content", "bin"])
    # NOTE(review): appended on every call — sys.path grows unbounded.
    sys.path.append(extn_bin)
    import safe_globals
    input_file = pathify(['content', 'overlay_list.txt'], file_extension=True)
    output_file = pathify(['content', 'js', 'generated_javascript.js'], file_extension=True)
    safe_globals.concatenate(self.extn_dir, input_file, output_file)
    print "...done"
    return private_key
def generate_xpi(request, slug):
    """Build a fresh install XPI for `slug` (or a generic one).

    Respects the optional settings.MAX_USERS cap: over the cap, the
    caller is pointed at the wait list instead of an XPI.

    @param slug: recipient slug, or '__none__' for no preselection.
    @return: json_success with xpi_url/xpi_hash (or Nones + wait_list
             flags when the user cap is reached).
    """
    if not hasattr(settings, "MAX_USERS") or User.objects.count() < settings.MAX_USERS:
        # Conditional expression instead of the fragile `cond and x or y`
        # idiom (which breaks if x is ever falsy).
        recipient = Recipient.get_or_none(slug=slug) if slug != '__none__' else None
        xpi_builder = XpiBuilder(
            pathify([PROJECT_PATH, 'procrasdonate', 'ProcrasDonateFFExtn'],
                    file_extension=True),
            "%s%s" % (MEDIA_ROOT, 'xpi'),
            "%s%s" % (MEDIA_ROOT, 'rdf'),
            recipient)
        # Fresh install: mint a private key and register the user.
        private_key = xpi_builder.write_input_json(is_update=False)
        user = User.add(private_key)
        Log.Log("Built XPI for download", detail="usage", user=user)
        (xpi_url, xpi_hash) = xpi_builder.build_xpi(is_update=False)
        return json_success({'xpi_url': xpi_url,
                             'xpi_hash': xpi_hash,
                             'wait_list': False,
                             'wait_list_url': reverse('waitlist')})
    else:
        return json_success({'xpi_url': None,
                             'xpi_hash': None,
                             'wait_list': True,
                             'wait_list_url': reverse('waitlist')})
def get_update_info(self):
    """Read update.rdf and return the advertised update link and hash.

    @return: dict with keys 'update_link' and 'update_hash', taken
             from the em:updateLink / em:updateHash nodes of the first
             targetApplication Description in the update manifest.
    """
    rdf_fn = pathify([self.update_dir, 'update.rdf'], file_extension=True)
    # 'with' closes the handle even if parsing raises.
    with open(rdf_fn, 'r') as rdf_f:
        d = ConvertXmlToDict(rdf_f.read())
    # ElementTree-style fully qualified tag names.
    RDF = '{http://www.w3.org/1999/02/22-rdf-syntax-ns#}'
    EM = '{http://www.mozilla.org/2004/em-rdf#}'
    # Path: RDF > Description > em:updates > Seq > li > Description
    #       > em:targetApplication > Description
    entry = d[RDF + 'RDF'][RDF + 'Description'][EM + 'updates'] \
             [RDF + 'Seq'][RDF + 'li'][RDF + 'Description']
    target = entry[EM + 'targetApplication'][RDF + 'Description']
    return {'update_link': target[EM + 'updateLink'],
            'update_hash': target[EM + 'updateHash']}
def get_update_info(self):
    """Parse update.rdf and return its updateLink / updateHash pair."""
    manifest_path = pathify([self.update_dir, 'update.rdf'], file_extension=True)
    handle = open(manifest_path, 'r')
    xml_text = handle.read()
    handle.close()
    tree = ConvertXmlToDict(xml_text)
    rdf_ns = '{http://www.w3.org/1999/02/22-rdf-syntax-ns#}'
    em_ns = '{http://www.mozilla.org/2004/em-rdf#}'
    # Walk down to the targetApplication Description step by step.
    node = tree[rdf_ns + 'RDF'][rdf_ns + 'Description']
    node = node[em_ns + 'updates'][rdf_ns + 'Seq'][rdf_ns + 'li']
    node = node[rdf_ns + 'Description'][em_ns + 'targetApplication']
    node = node[rdf_ns + 'Description']
    return {'update_link': node[em_ns + 'updateLink'],
            'update_hash': node[em_ns + 'updateHash']}
def build_xpi(self, is_update=False): if is_update: name = "Update" elif self.recipient: name = self.recipient.slug else: name = "Generic" print "THE NAME", name, self.recipient xpi_nm = 'ProcrasDonate_%s_%s.xpi' % (name, self.get_version()) xpi_fn = pathify([self.xpi_dir, xpi_nm], file_extension=True) os.chdir(self.extn_dir) print print "XPI_FN", xpi_fn print "EXTN_DIR", self.extn_dir print os.popen('zip -r "%s" *' % xpi_fn) print xpi_url = "%s%s/%s" % (MEDIA_URL, 'xpi', xpi_nm) xpi_file = open(xpi_fn, 'rb') xpi_hash = "sha1:%s" % hashlib.sha1(xpi_file.read()).hexdigest() xpi_file.close() if is_update: self.update_updates_rdf(xpi_url, xpi_hash) return (xpi_url, xpi_hash)
def download_update(request):
    """
    Not called by anyone yet.

    Renders a download page carrying the latest update XPI's link and
    hash, read from update.rdf via XpiBuilder.get_update_info().
    """
    xpi_builder = XpiBuilder(settings.pathify([settings.PROJECT_PATH,
                                               'procrasdonate',
                                               'ProcrasDonateFFExtn'],
                                              file_extension=True),
                             "%s%s" % (settings.MEDIA_ROOT, 'xpi'),
                             "%s%s" % (settings.MEDIA_ROOT, 'rdf'))
    info = xpi_builder.get_update_info()
    # `link` and `hash` reach the template through locals(), so these
    # names are part of the template contract — renaming them (even
    # though `hash` shadows the builtin) would change the context keys.
    link = info['update_link']
    hash = info['update_hash']
    return render_response(request, 'procrasdonate/extension_pages/download_xpi.html', locals())
def update_updates_rdf(self, xpi_url, xpi_hash):
    """Rewrite update.rdf in place with the current version, URL and hash.

    Applies the module-level VERSION_RE / XPI_URL_RE / XPI_HASH_RE
    substitutions line by line, then writes the file back.
    """
    current_version = self.get_version()
    # NOTE(review): update_version is computed but never used below —
    # kept in case get_update_version() has side effects; confirm and drop.
    update_version = self.get_update_version()
    rdf_fn = pathify([self.update_dir, 'update.rdf'], file_extension=True)
    write_lines = []
    with open(rdf_fn, 'r') as read_rdf_f:
        for line in read_rdf_f:
            # Raw strings so the \g<N> group references are not
            # mangled by string escape processing.
            line = VERSION_RE.sub(r"\g<1>%s\g<3>" % current_version, line)
            line = XPI_URL_RE.sub(r"\g<1>%s%s\g<3>" % (DOMAIN, xpi_url), line)
            line = XPI_HASH_RE.sub(r"\g<1>%s\g<3>" % xpi_hash, line)
            write_lines.append(line)
    with open(rdf_fn, 'w') as write_rdf_f:
        write_rdf_f.writelines(write_lines)
def generate_xpi(request, slug):
    """Build an install XPI for `slug` unless the optional user cap is hit."""
    over_cap = hasattr(settings, "MAX_USERS") and User.objects.count() >= settings.MAX_USERS
    if over_cap:
        # Past MAX_USERS: no XPI, point the caller at the wait list.
        return json_success(
            {"xpi_url": None, "xpi_hash": None, "wait_list": True, "wait_list_url": reverse("waitlist")}
        )
    recipient = (Recipient.get_or_none(slug=slug) or None) if slug != "__none__" else None
    builder = XpiBuilder(
        pathify([PROJECT_PATH, "procrasdonate", "ProcrasDonateFFExtn"], file_extension=True),
        "%s%s" % (MEDIA_ROOT, "xpi"),
        "%s%s" % (MEDIA_ROOT, "rdf"),
        recipient,
    )
    # Fresh install: mint a key, register the user, then zip the XPI.
    private_key = builder.write_input_json(is_update=False)
    user = User.add(private_key)
    Log.Log("Built XPI for download", detail="usage", user=user)
    xpi_url, xpi_hash = builder.build_xpi(is_update=False)
    return json_success(
        {"xpi_url": xpi_url, "xpi_hash": xpi_hash, "wait_list": False, "wait_list_url": reverse("waitlist")}
    )
def _get_version(self, dir, name):
    """Extract the em:version from an RDF manifest under `dir`.

    For install.rdf the version hangs directly off the top-level
    Description; for an update manifest it is nested under
    em:updates > Seq > li > Description.

    @param dir: directory containing the manifest.
    @param name: manifest filename ('install.rdf' or an update rdf).
    @return: the version string.
    """
    rdf_fn = pathify([dir, name], file_extension=True)
    # 'with' closes the handle even if parsing raises.
    with open(rdf_fn, 'r') as rdf_f:
        d = ConvertXmlToDict(rdf_f.read())
    RDF = '{http://www.w3.org/1999/02/22-rdf-syntax-ns#}'
    EM = '{http://www.mozilla.org/2004/em-rdf#}'
    desc = d[RDF + 'RDF'][RDF + 'Description']
    if name == 'install.rdf':
        return desc[EM + 'version']
    # Debug prints removed from this accessor.
    entry = desc[EM + 'updates'][RDF + 'Seq'][RDF + 'li'][RDF + 'Description']
    return entry[EM + 'version']
def _get_version(self, dir, name): rdf_fn = pathify([dir, name], file_extension=True) rdf_f = open(rdf_fn, 'r') rdf = rdf_f.read() rdf_f.close() d = ConvertXmlToDict(rdf) a = '{http://www.w3.org/1999/02/22-rdf-syntax-ns#}RDF' b = '{http://www.w3.org/1999/02/22-rdf-syntax-ns#}Description' if name == 'install.rdf': c = "{http://www.mozilla.org/2004/em-rdf#}version" return d[a][b][c] else: u1 = '{http://www.mozilla.org/2004/em-rdf#}updates' u2 = '{http://www.w3.org/1999/02/22-rdf-syntax-ns#}Seq' u3 = '{http://www.w3.org/1999/02/22-rdf-syntax-ns#}li' u4 = '{http://www.w3.org/1999/02/22-rdf-syntax-ns#}Description' u5 = '{http://www.mozilla.org/2004/em-rdf#}version' print "="*30 print d[a][b][u1][u2][u3][u4].keys() return d[a][b][u1][u2][u3][u4][u5]
# URL configuration.  Order matters: Django matches patterns top-down,
# so the maintenance catch-all and per-app prefixes are registered
# before the start page and admin entries below.

# Serve media through Django itself only on the dev server / in DEBUG.
if settings.DEBUG or settings.DJANGO_SERVER:
    urlpatterns += patterns('',
        (r'^media/(?P<path>.*)$', 'django.views.static.serve',
         {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
    )

urlpatterns += patterns('',
    # if you have a static intro that still works when database is offline
    #(r'^', include('app.views.main_urls')),
)

# Catch-all maintenance page short-circuits every other route.
if settings.DOWN_FOR_MAINTENANCE:
    urlpatterns += patterns('',
        (r'^.*', 'django.views.generic.simple.direct_to_template',
         {'template': 'data_munger/down_for_maintenance.html'}),
    )

# Mount each installed app that ships a views package at /<app>/.
for app in settings.APPS:
    if os.path.exists(pathify([settings.PROJECT_PATH, app, 'views'])):
        urlpatterns += patterns('',
            (r'^%s/' % app, include('%s.views.root_urls' % app)),
        )

# start page
urlpatterns += patterns('',
    (r'^$', include('twitter.views.root_urls')),
)

urlpatterns += patterns('',
    (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    (r'^admin/(.*)', admin.site.root),
)
if __name__ == "__main__": is_update = True slug = '__none__' recipient = None for arg in sys.argv: if arg == '--install': is_update = False slug_idx = sys.argv.index(arg) + 1 if len(sys.argv) > slug_idx: slug = sys.argv[slug_idx] recipient = slug != '__none__' and Recipient.get_or_none( slug=slug) or None xpi_builder = XpiBuilder( pathify([PROJECT_PATH, 'procrasdonate', 'ProcrasDonateFFExtn']), "%s%s" % (MEDIA_ROOT, 'xpi'), "%s%s" % (MEDIA_ROOT, 'rdf'), recipient) private_key = xpi_builder.write_input_json(is_update=is_update) if not is_update: user = User.add(private_key) Log.Log("Built XPI for download", detail="usage", user=user) print "private_key", private_key (xpi_url, xpi_hash) = xpi_builder.build_xpi(is_update=is_update) print "xpi_url", xpi_url print "xpi_hash", xpi_hash if is_update: print "update info", xpi_builder.get_update_info()
write_rdf_f.write(line) write_rdf_f.close() if __name__ == "__main__": is_update = True slug = '__none__' recipient = None for arg in sys.argv: if arg == '--install': is_update = False slug_idx = sys.argv.index(arg) + 1 if len(sys.argv) > slug_idx: slug = sys.argv[slug_idx] recipient = slug != '__none__' and Recipient.get_or_none(slug=slug) or None xpi_builder = XpiBuilder(pathify([PROJECT_PATH, 'procrasdonate', 'ProcrasDonateFFExtn']), "%s%s" % (MEDIA_ROOT, 'xpi'), "%s%s" % (MEDIA_ROOT, 'rdf'), recipient) private_key = xpi_builder.write_input_json(is_update=is_update) if not is_update: user = User.add(private_key) Log.Log("Built XPI for download", detail="usage", user=user) print "private_key", private_key (xpi_url, xpi_hash) = xpi_builder.build_xpi(is_update=is_update) print "xpi_url", xpi_url print "xpi_hash", xpi_hash if is_update: print "update info", xpi_builder.get_update_info()
# Root URL configuration for the flashcards project.
from django.conf.urls.defaults import *
from django.contrib import admin

admin.autodiscover()

urlpatterns = patterns("")
# Admin docs and admin must precede the flashcards catch-all (r"^").
urlpatterns += patterns(
    "",
    (r"^admin/doc/", include("django.contrib.admindocs.urls")),
    url(r"^admin/(.*)", admin.site.root, name="admin"),
    (r"^", include("flashcards.views.urls")),
)

# Serve media through Django only when running the dev server.
if settings.DJANGO_SERVER:
    urlpatterns += patterns(
        "",
        (
            r"^%s(?P<path>.*)$" % settings.MEDIA_URL[1:],
            "django.views.static.serve",
            {"document_root": settings.MEDIA_ROOT, "show_indexes": True},
        ),
    )

# Apps with hand-written URL mounts above; everything else is mounted
# automatically at /<app>/ if it ships a views package.
# NOTE(review): `settings`, `os` and `pathify` are used here but not
# imported in this chunk — presumably imported earlier in the file; verify.
CUSTOM_URLS_APPS = ("flashcards",)
for app in settings.APPS:
    if app not in CUSTOM_URLS_APPS and os.path.exists(pathify([settings.PROJECT_PATH, app, "views"])):
        urlpatterns += patterns("", (r"^%s/" % app, include("%s.views.urls" % app)))
f = open(root+os.sep+input_file, 'r') for line in f.readlines(): line = line.strip() if not line or line.startswith("#"): continue js.write("\n\n/**************** %s *****************/\n" % line) shutil.copyfileobj(open(root+os.sep+line, 'r'), js) f.close() js.write("return myOverlay\n"); js.write("})();\n"); js.close() if __name__=="__main__": root = pathify([PROJECT_PATH, 'procrasdonate', 'ProcrasDonateFFExtn']) input_file = pathify(['content', 'overlay_list.txt'], file_extension=True) output_file = pathify(['content', 'js', 'generated_javascript.js'], file_extension=True) concatenate(root, input_file, output_file) """ # procrasdonate files input_file = pathify(['content', 'overlay_procrasdonate.txt'], file_extension=True) output_file = pathify(['content', 'js', 'generated_pd_javascript.js'], file_extension=True) concatenate(root, input_file, output_file) # externals input_file = pathify(['content', 'overlay_externals.txt'], file_extension=True) output_file = pathify(['content', 'js', 'generated_externals_javascript.js'], file_extension=True)
def return_data(request): """ sends back: * data for particular user * latest extension version """ if not settings.DJANGO_SERVER and not request.is_secure(): message = "must secure data via HTTPS: request=%s" % request Log.Error(message, "request_error") return json_failure(message) errors = [] expected_parameters = ["private_key", "since"] response = extract_parameters(request, "GET", expected_parameters) if not response['success']: return json_failure("Something went wrong extracting parameters: %s" % response['reason']) since = response["parameters"]["since"] print "----SINCE----------" print json.dumps(since, indent=2) since = decode_time(since) print since private_key = response["parameters"]["private_key"] print "----private_key----------" print json.dumps(private_key, indent=2) user = User.get_or_none(private_key=private_key) print "---- USER ----" print user if not user: message = "unknown user: %s, request=%s" % (private_key, request) Log.Error(message, "unknown_user") return json_failure(message) recipients = [] #for recipient in Recipient.objects.filter(fpsrecipient__timestamp__gte=since): for recipient in Recipient.objects.all(): recipients.append(recipient.deep_dict()) multiuse_auths = [] has_success = False for multiuse_auth in FPSMultiuseAuth.objects.filter(user=user): multiuse_auths.append(multiuse_auth.deep_dict()) if multiuse_auth.good_to_go(): has_success = True if not has_success: pass #@TODO if not has_success, then ask Amazon for token in case pipeline completed by didn't make it back to server yet. 
multiuse_pays = [] for multiuse_pay in FPSMultiusePay.objects.filter(user=user): multiuse_pays.append(multiuse_pay.deep_dict()) meta_reports = [] for meta_report in MetaReport.objects.filter(is_draft=False): meta_reports.append(meta_report.deep_dict()) #print '#.'*30; #print "RETURN DATA RETURNED" #print json.dumps({'recipients': recipients, # 'multiuse_auths': multiuse_auths}, indent=2) #print '#.'*30; xpi_builder = XpiBuilder( pathify([PROJECT_PATH, 'procrasdonate', 'ProcrasDonateFFExtn'], file_extension=True), "%s%s" % (MEDIA_ROOT, 'xpi'), "%s%s" % (MEDIA_ROOT, 'rdf')) info = xpi_builder.get_update_info() return json_success({ 'recipients': recipients, 'multiuse_auths': multiuse_auths, 'multiuse_pays': multiuse_pays, 'meta_reports': meta_reports, 'latest_update_version': xpi_builder.get_update_version(), 'update_link': info['update_link'], 'update_hash': info['update_hash'] })
f = open(root + os.sep + input_file, 'r') for line in f.readlines(): line = line.strip() if not line or line.startswith("#"): continue js.write("\n\n/**************** %s *****************/\n" % line) shutil.copyfileobj(open(root + os.sep + line, 'r'), js) f.close() js.write("return myOverlay\n") js.write("})();\n") js.close() if __name__ == "__main__": root = pathify([PROJECT_PATH, 'procrasdonate', 'ProcrasDonateFFExtn']) input_file = pathify(['content', 'overlay_list.txt'], file_extension=True) output_file = pathify(['content', 'js', 'generated_javascript.js'], file_extension=True) concatenate(root, input_file, output_file) """ # procrasdonate files input_file = pathify(['content', 'overlay_procrasdonate.txt'], file_extension=True) output_file = pathify(['content', 'js', 'generated_pd_javascript.js'], file_extension=True) concatenate(root, input_file, output_file) # externals input_file = pathify(['content', 'overlay_externals.txt'], file_extension=True) output_file = pathify(['content', 'js', 'generated_externals_javascript.js'], file_extension=True)
def return_data(request):
    """
    sends back:
    * data for particular user
    * latest extension version

    Looks the user up by private_key, then returns recipients, the
    user's FPS multiuse auths/pays, published meta reports, and the
    latest update XPI's version/link/hash.
    """
    # Refuse non-HTTPS access outside the dev server: the private key
    # travels with the request.
    if not settings.DJANGO_SERVER and not request.is_secure():
        message = "must secure data via HTTPS: request=%s" % request
        Log.Error(message, "request_error")
        return json_failure(message)

    # NOTE(review): `errors` is never used below.
    errors = []
    expected_parameters = ["private_key", "since"]
    response = extract_parameters(request, "GET", expected_parameters)
    if not response["success"]:
        return json_failure("Something went wrong extracting parameters: %s" % response["reason"])

    since = response["parameters"]["since"]
    print "----SINCE----------"
    print json.dumps(since, indent=2)
    since = decode_time(since)
    print since
    # NOTE(review): `since` is decoded but no query below filters on it
    # (the filtered recipient query is commented out) — confirm intent.

    private_key = response["parameters"]["private_key"]
    print "----private_key----------"
    print json.dumps(private_key, indent=2)

    user = User.get_or_none(private_key=private_key)
    print "---- USER ----"
    print user
    if not user:
        message = "unknown user: %s, request=%s" % (private_key, request)
        Log.Error(message, "unknown_user")
        return json_failure(message)

    recipients = []
    # for recipient in Recipient.objects.filter(fpsrecipient__timestamp__gte=since):
    for recipient in Recipient.objects.all():
        recipients.append(recipient.deep_dict())

    multiuse_auths = []
    has_success = False
    for multiuse_auth in FPSMultiuseAuth.objects.filter(user=user):
        multiuse_auths.append(multiuse_auth.deep_dict())
        if multiuse_auth.good_to_go():
            has_success = True
    if not has_success:
        pass
        # @TODO if not has_success, then ask Amazon for token in case pipeline completed by didn't make it back to server yet.

    multiuse_pays = []
    for multiuse_pay in FPSMultiusePay.objects.filter(user=user):
        multiuse_pays.append(multiuse_pay.deep_dict())

    # Only published (non-draft) reports are shipped to the extension.
    meta_reports = []
    for meta_report in MetaReport.objects.filter(is_draft=False):
        meta_reports.append(meta_report.deep_dict())

    xpi_builder = XpiBuilder(
        pathify([PROJECT_PATH, "procrasdonate", "ProcrasDonateFFExtn"], file_extension=True),
        "%s%s" % (MEDIA_ROOT, "xpi"),
        "%s%s" % (MEDIA_ROOT, "rdf"),
    )
    info = xpi_builder.get_update_info()
    return json_success(
        {
            "recipients": recipients,
            "multiuse_auths": multiuse_auths,
            "multiuse_pays": multiuse_pays,
            "meta_reports": meta_reports,
            "latest_update_version": xpi_builder.get_update_version(),
            "update_link": info["update_link"],
            "update_hash": info["update_hash"],
        }
    )