def customize_query(request, query):
    """Restrict *query* to the artist named by the request, if any.

    Mutates the datastore query in place; does nothing when
    request.artist_id is empty/falsy.
    """
    if not request.artist_id:
        return
    query.filter('artist', db.Key(request.artist_id))
def post(self):
    """Handles POST requests, when a reviewer submits a review.

    Flow: validate session and XSRF token, resolve the unit and review
    step, verify the student owns the review step, reject submissions
    past the review due date, then persist the review payload.  Any
    validation failure results in a 404 or an error page.
    """
    student = self.personalize_page_and_get_enrolled()
    if not student:
        return

    if not self.assert_xsrf_token_or_fail(self.request, 'review-post'):
        return

    course = self.get_course()
    rp = course.get_reviews_processor()
    unit_id = self.request.get('unit_id')
    unit = self.find_unit_by_id(unit_id)
    # Only peer-reviewed (human-graded) units accept reviews.
    if not unit or not course.needs_human_grader(unit):
        self.error(404)
        return

    review_step_key = self.request.get('key')
    if not review_step_key:
        self.error(404)
        return

    try:
        review_step_key = db.Key(encoded=review_step_key)
        review_step = rp.get_review_steps_by_keys(
            unit.unit_id, [review_step_key])[0]
    except Exception:  # pylint: disable=broad-except
        # A malformed/stale key or a missing step is treated as not found.
        self.error(404)
        return

    # Check that the student is allowed to review this submission.
    if not student.has_same_key_as(review_step.reviewer_key):
        self.error(404)
        return

    self.template_value['navbar'] = {'course': True}
    self.template_value['unit_id'] = unit.unit_id

    # Check that the review due date has not passed.
    time_now = datetime.datetime.now()
    review_due_date = unit.workflow.get_review_due_date()
    if time_now > review_due_date:
        self.template_value['time_now'] = time_now.strftime(
            utils.HUMAN_READABLE_DATETIME_FORMAT)
        self.template_value['review_due_date'] = (
            review_due_date.strftime(utils.HUMAN_READABLE_DATETIME_FORMAT))
        self.template_value['error_code'] = 'review_deadline_exceeded'
        self.render('error.html')
        return

    # 'is_draft' == 'false' means the reviewer is submitting for good,
    # not just saving a draft.
    mark_completed = (self.request.get('is_draft') == 'false')
    self.template_value['is_draft'] = (not mark_completed)

    review_payload = self.request.get('answers')
    review_payload = transforms.loads(
        review_payload) if review_payload else []
    try:
        rp.write_review(
            unit.unit_id, review_step_key, review_payload, mark_completed)
        course.update_final_grades(student)
    except domain.TransitionError:
        # The review step was already completed; re-submission is rejected.
        self.template_value['error_code'] = 'review_already_submitted'
        self.render('error.html')
        return

    self.render('review_confirmation.html')
def post(self):
    """Logs billable time for a project and optionally mirrors it to a
    Google Calendar event.

    Reads service, project, date, note and hours from the submitting
    form, stores a ``Time`` entity, marks the project status 'normal',
    and — when gcalendar support is enabled in settings — inserts a
    matching event on the project's calendar.
    """
    # Philosophically, posting new time assumes that you are logging
    # your time as you finish the task at hand. You can override this
    # behavior by providing a date/time, but that timestamp is
    # still assumed to be the end point of the task, and the amount
    # of time entered is subtracted from it.
    #
    # I'm not sure why I have to import this here, but it throws errors
    # if I do it at the top;
    # TODO get someone to explain why.
    from datetime import datetime, timedelta

    # Calculate dates and times.
    # The submitting form has service, project, date, note, and
    # multiplier (hours) fields. It is assumed that you log your time
    # when you *finish* a task and subtract the amount of time (multiplier)
    # that you just spent on it. You can, of course override "now" and
    # provide the end time manually, but the base assumption is that you
    # take the given time and *subtract* the multiplier (hours).
    hours = float(self.request.get('hours'))

    # First, we check to see if we're overriding our "now" assumption
    # if we are, we have to append :00 onto the time (as the datetime html5
    # input doesn't seem to support the full this) to conform to
    # the gdata API; otherwise, we take GMT and format it accoringly so
    # that we can do the math
    if self.request.get('date'):
        # end_time = self.request.get('date') + ":00"
        end_time = self.request.get('date')
    else:
        end_time = time.strftime('%Y-%m-%dT%H:%M:%S', time.gmtime())
    the_end = datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S')

    # Adjust for timezone.
    # TODO refactor to support Eastern timezones as this breaks if you
    # use a positive GMT offset.
    ender = the_end - timedelta(hours=settings.CAL["GMToffset"])
    # subtract the hours spent
    the_start = ender - timedelta(hours=hours)
    #format things for calendar API (add the T)
    event_end = str(ender).replace(" ", "T")
    event_start = str(the_start).replace(" ", "T")

    # Get project details from projects model
    projectid = db.Key(self.request.get('pid'))
    project_query = Projects.all()
    project_query.filter('__key__ =', projectid)
    projects = project_query.fetch(1)
    # Get the calendar ID so that we can post
    # to it and add this event to the appropriate calendar
    calendar_uri = projects[0].calendarid

    # Update the project status to reflect that it now has billable time.
    # If we don't do this, we won't be able to select the project in the UI
    # as we test for project status; if it's "empty" the checkbox is
    # disabled.
    projects[0].status = 'normal'
    projects[0].put()

    # put together time details to go into time model
    # 'service' arrives as "name || rate || unit"; split into its parts.
    rateservice = str(self.request.get('service')).split(" || ")
    linetotal = int(rateservice[1]) * float(self.request.get('hours'))
    linetotal = "%.2f" % linetotal
    entry = Time()
    entry.client = db.Key(self.request.get('clientkey'))
    entry.project = projectid
    entry.date = the_end
    entry.hours = float(self.request.get('hours'))
    entry.service = rateservice[0]
    entry.rate = int(rateservice[1])
    entry.rateunit = rateservice[2]
    entry.total = str(linetotal)
    entry.worker = self.request.get('worker')
    entry.note = self.request.get('note')
    entry.status = 'logged'
    entry.put()

    # if gcalendar is enabled, use the gdata API to add event to
    # this project's calendar
    if (settings.APP["gcalenabled"]):
        gconnect = Gconnection()
        gd_client = gconnect.calendar_connect()
        location = settings.APP["title"]
        title = rateservice[0]
        desc = self.request.get('note')
        desc += "\nWorker: " + self.request.get('worker')
        desc += "\nHours: " + self.request.get('hours')
        event_entry = gdata.calendar.CalendarEventEntry()
        event_entry.title = atom.Title(text=title)
        event_entry.content = atom.Content(text=desc)
        event_entry.timezone = gdata.calendar.Timezone(
            value='America/Los_Angeles')
        event_entry.when.append(
            gdata.calendar.When(start_time=event_start, end_time=event_end))
        event_entry.where.append(
            gdata.calendar.Where(value_string=location))
        try:
            cal_event = gd_client.InsertEvent(event_entry, calendar_uri)
        except gdata.service.RequestError, request_exception:
            # Auth failures are reported to the client; anything else
            # propagates.
            request_error = request_exception[0]
            if request_error['status'] == 401 or request_error[
                    'status'] == 403:
                self.response.out.write("401 or 403")
            else:
                raise
def key_from_str(key_str):
    """Decode *key_str* into a db.Key; return None for a falsy string."""
    return db.Key(key_str) if key_str else None
# Pull the optional fields out of the request body; each is simply
# skipped when absent.
# NOTE(review): if body['amount'] raises here and `amount` was never
# assigned earlier (not visible in this chunk), the `if amount or
# description` test below would raise NameError — confirm the caller
# initializes these. TODO confirm.
try:
    amount = body['amount']
except Exception, e:
    #no amount to have
    pass
try:
    description = body['description']
except Exception, e:
    #no description to have
    pass
#Did they submit something worthwhile?
if amount or description:
    try:
        # Load the entity addressed by the url-safe encoded key and
        # apply only the fields that were supplied.
        item = db.get(db.Key(encoded=str(key)))
        if amount:
            item.amount = float(amount)
        if description:
            item.description = description
        item.put()
        self.response.set_status(200,json.dumps({'key' : key}))
        self.response.write(key)
    except Exception, e:
        # Any failure (bad key, bad float, datastore error) becomes a 500.
        logging.info(e)
        self.response.set_status(500,json.dumps({'error' : str(e)}))
        self.response.write('Could not complete update')
def process_comment_submission(handler, article):
    """Validate and persist a submitted blog comment on *article*.

    Verifies the captcha for non-admins, derives a comment thread string
    (either supplied or computed from the parent comment / article),
    bumps the article's comment count, stores the Comment, optionally
    e-mails the blog author, and writes the rendered comment back to the
    client.
    """
    sanitize_comment = get_sanitizer_func(handler,
                                          allow_attributes=['href', 'src'],
                                          blacklist_tags=['img'])
    property_hash = restful.get_sent_properties(
        handler.request.get,
        [
            'name', 'email', 'homepage', 'title',
            ('body', sanitize_comment),
            'key',
            'thread',  # If it's given, use it. Else generate it.
            'captcha',
            ('published', get_datetime)
        ])

    # If we aren't administrator, abort if bad captcha
    if not users.is_current_user_admin():
        if property_hash.get('captcha', None) != get_captcha(article.key()):
            logging.info("Received captcha (%s) != %s",
                         property_hash.get('captcha', None),
                         get_captcha(article.key()))
            handler.error(401)  # Unauthorized
            return

    # At least one of 'key' (page anchor of the parent) or 'thread'
    # must be present to place the comment.
    if 'key' not in property_hash and 'thread' not in property_hash:
        handler.error(401)
        return

    # Generate a thread string.
    # NOTE(review): when 'thread' IS supplied, a supplied 'key' is left
    # in property_hash and passed to the Comment constructor below —
    # confirm the model tolerates that.
    if 'thread' not in property_hash:
        matchobj = re.match(r'[^#]+#comment-(?P<key>\w+)',
                            property_hash['key'])
        if matchobj:
            logging.debug("Comment has parent: %s", matchobj.group('key'))
            comment_key = matchobj.group('key')
            # TODO -- Think about GQL injection security issue since
            # it can be submitted by public
            parent = models.blog.Comment.get(db.Key(comment_key))
            thread_string = parent.next_child_thread_string()
        else:
            logging.debug("Comment is off main article")
            comment_key = None
            thread_string = article.next_comment_thread_string()
        if not thread_string:
            handler.error(400)
            return
        property_hash['thread'] = thread_string
        del property_hash['key']

    # Get and store some pieces of information from parent article.
    # TODO: See if this overhead can be avoided
    if not article.num_comments:
        article.num_comments = 1
    else:
        article.num_comments += 1
    property_hash['article'] = article.put()

    try:
        comment = models.blog.Comment(**property_hash)
        comment.put()
    except:
        # NOTE(review): bare except — swallows everything including
        # KeyboardInterrupt; deliberate "bad input -> 400" behavior kept.
        logging.debug("Bad comment: %s", property_hash)
        handler.error(400)
        return

    # Notify the author of a new comment (from matteocrippa.it)
    if config.BLOG['send_comment_notification']:
        recipient = "%s <%s>" % (
            config.BLOG['author'],
            config.BLOG['email'],
        )
        body = ("A new comment has just been posted on %s/%s by %s." %
                (config.BLOG['root_url'], article.permalink, comment.name))
        mail.send_mail(sender=config.BLOG['email'],
                       to=recipient,
                       subject="New comment by %s" % (comment.name, ),
                       body=body)

    # Render just this comment and send it to client
    view_path = view.find_file(view.templates, "bloog/blog/comment.html")
    response = template.render(os.path.join("views", view_path), {
        'comment': comment,
        "use_gravatars": config.BLOG["use_gravatars"]
    }, debug=config.DEBUG)
    handler.response.out.write(response)
    view.invalidate_cache()
def tx(key, login):
    """Set the `login` property of the Person stored under the encoded *key*."""
    entity = Person.get(db.Key(encoded=key))
    entity.login = login
    entity.put()
def get(self):
    """Render (or serve from cache) the match-box page for one match.

    Builds the team table, per-team player tables and the injury table,
    renders 'match_box.html' with `locals()` as the context — so the
    template depends on the local variable names in this method — then
    caches and emits the result.
    """
    # check for a cached version
    #--------------------------------------------------------------------#
    match_key = db.Key(self.request.get("match_key"))
    if self.emit(self.response.out, match_key):
        return

    # not cached or evicted from cache; regenerate
    #--------------------------------------------------------------------#
    match = Match.get(match_key)
    if not match:
        return
    home_record, away_record = match.get_team_records_query().fetch(2)
    match_data = MatchData(match)

    # team_match_table
    #--------------------------------------------------------------------#
    def team_name_getter(record):
        # Link to the team's box page.
        return "<a rel='match_teams' href='%s'>%s</a>" % (
            record.team.get_box_href(), record.team.key().name())

    def race_getter(record):
        return "<img src='%s' />" % record.team.race.get_image_src(
            thumb=True)

    def glicko_getter(record):
        return "%4.0f" % record.glicko_r

    team_match_table = Table(
        columns=[
            Column(" ", "Race", race_getter, center=True),
            Column("Team", "Team name", team_name_getter),
            Column("Rating", "Team rating", glicko_getter),
            Column("TV", "Team value", attrgetter("tv_for")),
            Column("TD", "Touchdowns", attrgetter("tds_for")),
            Column("P", "Pass completions", attrgetter("passes_for")),
            Column("YP", "Yards passing", attrgetter("pyards_for")),
            Column("R", "Pass receptions", attrgetter("rec_for")),
            Column("YR", "Yards rushing", attrgetter("ryards_for")),
            Column("K", "Kills", attrgetter("kills_for")),
            Column("C", "Casualties", attrgetter("cas_for")),
            Column("KO", "Knock outs", attrgetter("ko_for")),
            Column("S", "Stuns", attrgetter("stun_for")),
            Column("T", "Tackles", attrgetter("tckl_for")),
            Column("I", "Interceptions", attrgetter("int_for")),
        ],
        query=match.get_team_records_query(),
        cls="tablesorter team_match_table",
    )

    # player_match_tables
    #--------------------------------------------------------------------#
    def number_getter(record):
        return record.player.number

    def player_name_getter(record):
        return record.player.get_box_anchor("match_players")

    def position_getter(record):
        position = record.player.position
        if position.starplayer:
            return "<b>Star Player</b>"
        else:
            return position.key().name()

    # Only players that actually took the field.
    player_records = match.get_player_records_query().filter("played =", 1)
    team_player_records = {}
    for record in player_records:
        # Group player records by team name.
        team_player_records.setdefault(
            record.team_record.team.key().name(), []).append(record)
    player_match_tables = []
    for team_name, record_set in team_player_records.items():
        table = Table(
            columns=[
                Column("#", "Player number", number_getter, center=True),
                Column("Player", "Player name", player_name_getter),
                Column("Position", "Player position", position_getter),
                Column("TD", "Touchdowns", attrgetter("tds_for")),
                Column("MVP", "Most Valuable Player Awards",
                       attrgetter("mvps")),
                Column("P", "Pass completions", attrgetter("passes_for")),
                Column("YP", "Yards passing", attrgetter("pyards_for")),
                Column("R", "Pass receptions", attrgetter("rec_for")),
                Column("YR", "Yards rushing", attrgetter("ryards_for")),
                Column("K+", "Kills for", attrgetter("kills_for")),
                Column("C+", "Casualties for", attrgetter("cas_for")),
                Column("KO+", "Knock outs for", attrgetter("ko_for")),
                Column("S+", "Stuns for", attrgetter("stun_for")),
                Column("T+", "Tackles for", attrgetter("tckl_for")),
                Column("I+", "Interceptions for", attrgetter("int_for")),
                Column("K-", "Kills against", attrgetter("kills_against")),
                Column("C-", "Casualties against",
                       attrgetter("cas_against")),
                Column("KO-", "Knock outs against", attrgetter("ko_against")),
                Column("S-", "Stuns against", attrgetter("stun_against")),
                Column("T-", "Tackles against", attrgetter("tckl_against")),
                Column("I-", "Interceptions against",
                       attrgetter("int_against")),
            ],
            query=record_set,
            cls="tablesorter player_match_table",
        )
        player_match_tables.append((team_name, table))

    # injury_table
    #--------------------------------------------------------------------#
    # NOTE: this redefinition shadows the earlier team_name_getter; the
    # team table above was already built, so only the injury table uses it.
    def team_name_getter(record):
        return record.team_record.team.key().name()

    def injury_name_getter(record):
        # Prefer the first permanent injury; otherwise the last one seen.
        for injury_key in record.match_injuries:
            injury = Injury.get(injury_key)
            if injury.permanent:
                break
        return injury.key().name()

    def injury_effect_getter(record):
        for injury_key in record.match_injuries:
            injury = Injury.get(injury_key)
            if injury.permanent:
                break
        return injury.effect

    injured_player_records = [
        r for r in player_records if r.match_injuries
    ]
    if injured_player_records:
        injury_table = Table(
            columns=[
                Column("#", "Player number", number_getter, center=True),
                Column("Team", "Team name", team_name_getter),
                Column("Player", "Player name", player_name_getter),
                Column("Position", "Player position", position_getter),
                Column("SPP", "Star player points", attrgetter("spp")),
                Column("Level", "Level", attrgetter("level")),
                Column("Injury", "Injury name", injury_name_getter),
                Column("Effect", "Injury effect", injury_effect_getter),
            ],
            query=injured_player_records,
            cls="tablesorter injury_table",
        )
    else:
        injury_table = None

    # render and update
    #--------------------------------------------------------------------#
    match_box = misc.render('match_box.html', locals())
    self.update(match_box, match_key)
    self.response.out.write(match_box)
def get(self):
    """Run the configured search and return matching SearchResult models.

    Combines up to three filters set on self: a free-text (optionally
    level-ranged) search string, an owner/viewer restriction, and a
    campaign restriction; then filters by type and prunes results whose
    backing models no longer exist.

    NOTE(review): the block structure below was reconstructed from a
    whitespace-mangled source; the dedup/sort section is placed inside
    the `if self.__user:` branch (elsewhere `ret` could be None and
    `.sort()` would fail) — confirm against version control.
    """
    ret = None
    # We have a string or an owner or both. isViable() said so, right?
    if self.__string:
        # If they didn't enter something like "1-10", we just search for what they typed.
        levelRangeMatch = self.LEVEL_RANGE_REGEX.match(self.__string)
        if not levelRangeMatch:
            searchStrings = [self.__string]
        else:
            lowLevel, highLevel = [
                int(g) for g in levelRangeMatch.groups()
            ]
            baseSearch = self.__string.replace(
                '%d-%d' % (lowLevel, highLevel), '')
            # But for level-ranged searches, we actually perform multiple searches.
            lowAndHighLevel = [min(lowLevel, 30), min(highLevel, 30)]
            lowAndHighLevel.sort(
            )  # in case they specified 20-11 (high first)
            lowLevel, highLevel = lowAndHighLevel
            searchStrings = [
                '%s %d' % (baseSearch, l)
                for l in range(lowLevel, highLevel + 1)
            ]
        #logging.debug('searchStrings: %(searchStrings)r' % locals())
        # Spread the overall 200-result budget over the per-level searches.
        resultsPerLevel = 200 / len(searchStrings)
        ret = []
        [ ret.extend([r for r in SearchResult.search(ss, limit=resultsPerLevel) if r.isPublic]) \
          for ss in searchStrings \
        ]
    if self.__user:
        userId = self.__user.user_id()
        # Limit by owner at either the query or result level
        if ret is not None:
            ret = [
                r for r in ret
                if userId in (r.viewerIds or ()) or self.__user in r.viewers
                or self.__user == r.owner
            ]
        else:
            # XXX Hopefully, this is temporary code. It handles the fact that
            # earlier versions of v31 didn't have the viewers field.
            badResults = \
                [ sr for sr in SearchResult.gql('WHERE owner = :1', self.__user) \
                  if self.__user not in sr.viewers \
                ]
            db.delete(badResults)
            [br.model.putSearchResult() for br in badResults]
            # Collect by key so viewers/viewerIds matches don't duplicate.
            ret = {}
            for propName, matchValue in \
                ( ('viewers', self.__user),
                  ('viewerIds', userId),
                ):
                [ ret.update({str(sr.key()): sr}) \
                  for sr in SearchResult.gql('WHERE %s = :1' % propName, matchValue) \
                ]
            ret = ret.values()
        # We had a bug for a while that caused 2 SearchResult models for each character.
        # Clean up from that.
        newRet, dupResults = {}, []
        for thisResult in ret:
            firstRec = newRet.get(thisResult.modelKey, None)
            if firstRec is None:
                newRet[thisResult.modelKey] = thisResult
            else:
                dupResults.append(thisResult)
        dupResults and db.delete(dupResults)
        ret = newRet.values()
        ret.sort(lambda x, y: cmp(x.title, y.title))
    if self.__campaignKey:
        from IP4ELibs import models
        self.__theCampaign = models.Campaign.get(self.__campaignKey)
        characterKeys = [
            str(c.key()) for c in self.__theCampaign.characters
        ]
        # Campaign restriction replaces any earlier result set entirely.
        ret = characterKeys and SearchResult.gql(
            'WHERE modelKey IN :1 ORDER BY title', characterKeys) or []
    # Either way, we can limit by type
    ret = ret or []
    if self.__typeList is not None:
        ret = [r for r in ret if r.modelType in self.__typeList]
    # Finally, make sure the search results refer to models that still exist.
    # We'd like to remove this code eventually; it exists because we weren't
    # always deleting search results when models were deleted. Now we are.
    from IP4ELibs import models
    rawModelKeysAndTypes = [(r.modelKey, r.modelType) for r in ret]
    modelKeysAndTypes = [
        (db.Key(IP4DB.hrdKey(r.modelKey)), r.modelType) for r in ret
    ]
    #raise RuntimeError, 'RAW: %(rawModelKeysAndTypes)r, KEYED: %(modelKeysAndTypes)r' % locals()
    modelExistenceBooleans = \
        [ getattr(models, mType).all(keys_only=True).filter('__key__ =', mKey).count() \
          for mKey, mType in modelKeysAndTypes \
        ]
    # Drop (and delete) search results whose backing model is gone.
    db.delete([
        r for (r, mBool) in zip(ret, modelExistenceBooleans) if not mBool
    ])
    ret = [
        r for (r, mBool) in zip(ret, modelExistenceBooleans) if mBool
    ]
    return ret
def _7000_migrate_solution(job_key):
    """Phase 7000 of a service migration: move solution-level models from
    job.from_service_user to job.to_service_user, then advance the job to
    the next phase.
    """
    phase = MigrateServiceJob.PHASE_7000_MIGRATE_SOLUTION
    next_phase = MigrateServiceJob.PHASE_8000_FINISH_MODELS

    # Validate that the job still exists
    job = _get_job(job_key, phase)

    # Do the work
    _log_progress(job)

    if job.solution:
        logging.info('0/ Migrate the solution models without ancestor, but with service_user as key name.')

        def trans0():
            # Copy each keyed-by-service-user model to the new user and
            # delete the old one, all inside one XG transaction.
            new_models = list()
            old_models = list()
            for model_class in (SolutionMainBranding, SolutionLogo, SolutionAvatar):
                old_model = db.get(model_class.create_key(job.from_service_user))
                if old_model:
                    kwargs = copy_model_properties(old_model)
                    new_model = model_class(key=model_class.create_key(job.to_service_user), **kwargs)
                    new_models.append(new_model)
                    old_models.append(old_model)
            if new_models:
                put_and_invalidate_cache(*new_models)
            if old_models:
                db.delete(old_models)

        xg_on = db.create_transaction_options(xg=True)
        db.run_in_transaction_options(xg_on, trans0)

        old_sln_settings = get_solution_settings(job.from_service_user)
        # None stands for the default service identity.
        identities = [None]
        if old_sln_settings.identities:
            identities.extend(old_sln_settings.identities)

        logging.info('1/ Migrate RestaurantReservations')
        for service_identity in identities:
            from_si_user = create_service_identity_user_wo_default(job.from_service_user, service_identity)
            to_si_user = create_service_identity_user_wo_default(job.to_service_user, service_identity)
            # Re-point reservations in batches of 300 until none remain.
            while True:
                reservations = RestaurantReservation.all().filter('service_user', from_si_user).fetch(300)
                if not reservations:
                    break
                for r in reservations:
                    r.service_user = to_si_user
                _put_and_invalidate_cache_and_allocate_ids(*reservations)

        logging.info('2/ Migrate EventReminders')
        for service_identity in identities:
            # NOTE(review): unlike step 1, these calls ignore
            # `service_identity` and use create_service_identity_user
            # (with-default) on the bare service user — every loop
            # iteration computes the same pair. Looks like it should be
            # create_service_identity_user_wo_default(..., service_identity);
            # confirm before changing.
            from_si_user = create_service_identity_user(job.from_service_user)
            to_si_user = create_service_identity_user(job.to_service_user)
            while True:
                reminders = EventReminder.all().filter('service_identity_user', from_si_user).fetch(300)
                if not reminders:
                    break
                for r in reminders:
                    r.service_identity_user = to_si_user
                _put_and_invalidate_cache_and_allocate_ids(*reminders)

        logging.info('3/ Migrate RestaurantProfile.reserve_flow_part2')
        restaurant_profile = db.get(RestaurantProfile.create_key(job.from_service_user))
        if restaurant_profile and restaurant_profile.reserve_flow_part2:
            # Re-parent the stored flow key under the new service user.
            old_part2_key = db.Key(restaurant_profile.reserve_flow_part2)
            new_part2_key = db.Key.from_path(old_part2_key.kind(), old_part2_key.id_or_name(),
                                             parent=parent_key(job.to_service_user))
            logging.debug('New key: %r', new_part2_key)
            restaurant_profile.reserve_flow_part2 = str(new_part2_key)
            _put_and_invalidate_cache_and_allocate_ids(restaurant_profile)

        logging.info('4/ Delete all SolutionQRs. They will be recreated when provisioning.')
        for service_identity in identities:
            service_identity_user = create_service_identity_user_wo_default(job.from_service_user, service_identity)
            db.delete(SolutionQR.all(keys_only=True).ancestor(parent_key_unsafe(service_identity_user, job.solution)))

        logging.info('5/ Delete Loyalty QR. It will be recreated when provisioning.')
        if SolutionModule.LOYALTY in old_sln_settings.modules:
            for service_identity in identities:
                if is_default_service_identity(service_identity):
                    loyalty_i_settings = db.get(SolutionLoyaltySettings.create_key(job.from_service_user))
                else:
                    loyalty_i_settings = db.get(
                        SolutionLoyaltyIdentitySettings.create_key(job.from_service_user, service_identity))
                if loyalty_i_settings:
                    loyalty_i_settings.image_uri = None
                    loyalty_i_settings.content_uri = None
                    _put_and_invalidate_cache_and_allocate_ids(loyalty_i_settings)

        logging.info('6/ Change ancestor keys of solution models.')
        for solution in ['common', job.solution]:
            for service_identity in identities:
                if is_default_service_identity(service_identity):
                    _migrate_ancestor_models(job, parent_key(job.from_service_user, solution))
                else:
                    service_identity_user = create_service_identity_user_wo_default(
                        job.from_service_user, service_identity)
                    _migrate_ancestor_models(job, parent_key_unsafe(service_identity_user, solution))

        # Re-provision the identity for the destination user; always clear
        # the thread-local user afterwards.
        sln_settings = get_solution_settings(job.to_service_user)
        sln_main_branding = get_solution_main_branding(job.to_service_user)
        users.set_user(job.to_service_user)
        try:
            populate_identity(sln_settings, sln_main_branding.branding_key)
        finally:
            users.clear_user()

    # Set the next phase
    _set_job_in_next_phase(job_key, phase, next_phase)
def get(self):
    """Handles GET requests.

    Shows a review page for the review step named by the 'key' request
    parameter: the reviewed submission read-only, plus either a
    read-only review (completed or past due) or an editable review form.
    """
    student = self.personalize_page_and_get_enrolled()
    if not student:
        return

    course = self.get_course()
    rp = course.get_reviews_processor()
    unit, unused_lesson = extract_unit_and_lesson(self)
    if not course.needs_human_grader(unit):
        self.error(404)
        return

    review_step_key = self.request.get('key')
    if not unit or not review_step_key:
        self.error(404)
        return

    try:
        review_step_key = db.Key(encoded=review_step_key)
        review_step = rp.get_review_steps_by_keys(unit.unit_id,
                                                  [review_step_key])[0]
    except Exception:  # pylint: disable-msg=broad-except
        # Malformed or stale key -> not found.
        self.error(404)
        return

    if not review_step:
        self.error(404)
        return

    # Check that the student is allowed to review this submission.
    if not student.has_same_key_as(review_step.reviewer_key):
        self.error(404)
        return

    self.template_value['navbar'] = {'course': True}
    self.template_value['unit_id'] = unit.unit_id
    self.template_value['key'] = review_step_key

    submission_key = review_step.submission_key
    submission_contents = student_work.Submission.get_contents_by_key(
        submission_key)

    readonly_student_assessment = create_readonly_assessment_params(
        course.get_assessment_content(unit),
        StudentWorkUtils.get_answer_list(submission_contents))

    self.template_value['readonly_student_assessment'] = (
        readonly_student_assessment)

    review_due_date = unit.workflow.get_review_due_date()
    if review_due_date:
        self.template_value['review_due_date'] = review_due_date.strftime(
            HUMAN_READABLE_DATETIME_FORMAT)

    review_key = review_step.review_key
    rev = rp.get_reviews_by_keys(
        unit.unit_id, [review_key])[0] if review_key else None

    time_now = datetime.datetime.now()
    # NOTE(review): review_due_date may be None here; under Python 2
    # `datetime > None` evaluates True, so a unit without a due date is
    # always treated as past due — confirm that is intended.
    show_readonly_review = (
        review_step.state == domain.REVIEW_STATE_COMPLETED
        or time_now > review_due_date)
    self.template_value['due_date_exceeded'] = (time_now > review_due_date)

    if show_readonly_review:
        readonly_review_form = create_readonly_assessment_params(
            course.get_review_form_content(unit),
            StudentWorkUtils.get_answer_list(rev))
        self.template_value['readonly_review_form'] = readonly_review_form
    else:
        # Populate the review form,
        self.template_value['assessment_script_src'] = (
            self.get_course().get_review_form_filename(unit.unit_id))
        saved_answers = (StudentWorkUtils.get_answer_list(rev)
                         if rev else [])
        self.template_value['saved_answers'] = transforms.dumps(
            saved_answers)
        self.template_value[
            'record_events'] = CAN_PERSIST_ACTIVITY_EVENTS.value
        self.template_value['assessment_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('review-post'))
        self.template_value['event_xsrf_token'] = (
            XsrfTokenManager.create_xsrf_token('event-post'))

    self.render('review.html')
def _set_attr_id(self, id):
    # Decode the encoded key string *id* and store it on self under
    # `attribute_name`, which is bound in the enclosing scope (not
    # visible in this chunk) — presumably a property factory.
    setattr(self, attribute_name, db.Key(encoded=id))
def from_message(cls, message, *args):
    """Build a cls instance from *message*.

    Copies each plain attribute named in *args* straight off the
    message, then decodes every field listed in cls._ref_properties
    into a db.Key, and constructs the entity from the combined dict.
    """
    attrs = dict((name, getattr(message, name)) for name in args)
    for attr_name, msg_field in cls._ref_properties.items():
        attrs[attr_name] = db.Key(encoded=getattr(message, msg_field))
    return cls(**attrs)
def keyfromstr(keystr):
    """Decode an encoded datastore key string into a db.Key."""
    decoded_key = db.Key(encoded=keystr)
    return decoded_key
def get_one(self, encoded_key):
    """Fetch the single entity addressed by *encoded_key*."""
    key = db.Key(encoded=encoded_key)
    return db.get(key)
def DeleteProgramOrActivity(unused_request, config_key):
    """Delete a registered users entity like Activity or Program.

    Driven by a config entity holding "entity_key,requesting_email".
    While active registrations remain, unregisters them via the rule
    engine (re-queueing itself after the last one); once none remain,
    deletes the entity in a transaction.  Always returns an empty
    HttpResponse so the task queue treats the run as successful.
    """
    logging.info('Entering DeleteProgramOrActivity config_key:%s', config_key)
    config_entity = db.get(db.Key(config_key))
    if config_entity is None:
        # Entity has already been deleted, nothing to do
        logging.info('Config entity is None, exiting.')
        return http.HttpResponse()

    (entity_key, request_user_email) = config_entity.config_value.split(',')
    logging.info('Trying to delete entity_key:%s, initiated by %s',
                 entity_key, request_user_email)
    entity = utils.GetCachedOr404(entity_key, active_only=False)
    if entity.deleted:
        # Entity already deleted, nothing to do
        logging.info('Entity entity_key:%s already deleted exiting.',
                     entity_key)
        return http.HttpResponse()

    if isinstance(entity, models.Activity):
        registrations = models.UserRegistration.ActiveQuery(activity=entity)
    else:
        assert isinstance(entity, models.Program)
        registrations = models.UserRegistration.ActiveQuery(program=entity)

    models.UserRegistration.AddRegisterOrder(registrations)
    registration_list = [register for register in registrations]
    if registration_list:
        logging.info('Found %d registrations for entity, delegating task',
                     len(registration_list))
        last_registration = registration_list[-1]
        task_url = urlresolvers.reverse(DeleteProgramOrActivity,
                                        kwargs={'config_key': config_key})
        # Create a task config to be executed after the offline process completes
        # unregistered all users in the entity.
        processor = processors.TaskConfig({'url': task_url, 'method': 'GET'})
        for register in registration_list:
            context = rule_engine.EvalContext.CreateFromUserRegistration(
                register)
            # Since the entity is being deleted we want to make sure we can force a
            # registration.
            context.force_status = True
            if register == last_registration:
                # After the last registration is processed by the offline we can go
                # ahead and delete the program or activity that is being deleted. Hence
                # we keep a trigger that does that in the last registration that will be
                # processed by the rule engine.
                rule_engine.UnregisterOnline(context,
                                             post_process_tasks=[processor])
            else:
                rule_engine.UnregisterOnline(context)
    else:
        # No more registrations, we can delete the entitites.
        logging.info('No registrations found for entity, deleting entity now.')
        request_user = utils.GetAppEngineUser(request_user_email)
        db.run_in_transaction(_DeleteEntityUnsafe, entity,
                              config_entity.key(), request_user)
    return http.HttpResponse()
def post(self):
    """Export the selected function-point project as an Excel sheet.

    Requires a logged-in user and a 'project_key' form value; writes an
    xlwt workbook ("fp.xls") to the response containing project header
    data, the per-function FP rows, FP totals, and the adjustment-factor
    table.  Errors are returned as small inline HTML pages (messages are
    Japanese runtime strings and are preserved verbatim).
    """
    user = users.get_current_user()
    if user:
        project_key = self.request.get('project_key')
        if project_key.strip() == "":
            # No project selected.
            html = "<html><head></head><body><span style='color:red'>エラー! プロジェクトが選択されていません。</span></body></html>"
            return self.response.out.write(html)
        key = db.Key(project_key)
        q = FunctionPointProject.gql("WHERE __key__ =:1", key)
        project = None
        projects = q.fetch(1)
        # Take the first (only) fetched project, if any.
        for project in projects:
            break
        if project == None:
            # Selected project not found.
            html = "<html><head></head><body><span style='color:red'>エラー! 選択されたプロジェクトが見つかりません。</span></body></html>"
            return self.response.out.write(html)
        self.response.headers.add_header("Content-Disposition",
                                         'attachment; filename="fp.xls"')
        # Bold style for headings.
        font_title = xlwt.Font()
        font_title.bold = True
        style_title = xlwt.XFStyle()
        style_title.font = font_title
        # Numeric style: blue, two decimal places.
        font_num = xlwt.Font()
        font_num.colour_index = 4
        style_num = xlwt.XFStyle()
        style_num.num_format_str = '0.00'
        style_num.font = font_num
        # Hyperlink style: blue, double underline.
        font_link = xlwt.Font()
        font_link.colour_index = 4
        font_link.underline = xlwt.Font.UNDERLINE_DOUBLE
        style_link = xlwt.XFStyle()
        style_link.font = font_link
        # NOTE(review): this second font_link/style_link block is an
        # exact duplicate of the one above and appears redundant.
        font_link = xlwt.Font()
        font_link.colour_index = 4
        font_link.underline = xlwt.Font.UNDERLINE_DOUBLE
        style_link = xlwt.XFStyle()
        style_link.font = font_link
        wb = xlwt.Workbook(encoding='utf-8')
        ws = wb.add_sheet(u'FUNCTION POINT')
        # Layout: column widths and sheet title.
        base_col = 1
        r = 1
        col_w = (0x0500, 0x0500, 0x2800, 0x2400, 0x0a00, 0x0a00, 0x0a00,
                 0x0a00)
        for i, v in enumerate(col_w):
            ws.col(i).width = v
        ws.write(r, base_col, u'ファンクションポイント', style_title)
        # Hyperlink back to the hosted service.
        n = "HYPERLINK"
        ws.write(
            r, base_col + 2,
            xlwt.Formula(
                n + '("http://typea-service.appspot.com/fp";"function point")'
            ), style_link)
        # Project header rows (system / application / measurement type).
        r = 3
        ws.write(r, base_col + 0, u'システム', style_title)
        ws.write(r, base_col + 2, u'アプリケーション', style_title)
        ws.write(r, base_col + 3, u'計測タイプ', style_title)
        r = 4
        ws.write(r, base_col + 0, project.system_name)
        ws.write(r, base_col + 2, project.application_name)
        ws.write(r, base_col + 3, project.mesurement_type_name())
        # Function table header.
        r = 6
        ws.write(r, base_col + 0, u'No', style_title)
        ws.write(r, base_col + 1, u'要素処理名', style_title)
        ws.write(r, base_col + 2, u'区分', style_title)
        ws.write(r, base_col + 3, u'DET', style_title)
        ws.write(r, base_col + 4, u'RET/FTR', style_title)
        ws.write(r, base_col + 5, u'複雑度', style_title)
        ws.write(r, base_col + 6, u'FP', style_title)
        # One row per function, accumulating the unadjusted FP total.
        non_adjust_fp = 0.0
        q = FunctionEntity.gql("WHERE project_key=:1 ORDER BY sort_order",
                               project_key)
        functions = q.fetch(MAX_FUNCTION)
        for i, func in enumerate(functions):
            r += 1
            ws.write(r, base_col + 0, i + 1)
            ws.write(r, base_col + 1, func.function_name)
            ws.write(r, base_col + 2, func.function_category_name())
            ws.write(r, base_col + 3, func.measurement_index1, style_num)
            ws.write(r, base_col + 4, func.measurement_index2, style_num)
            ws.write(r, base_col + 5, func.complexity())
            ws.write(r, base_col + 6, to_float(func.function_point()),
                     style_num)
            non_adjust_fp = non_adjust_fp + to_float(func.function_point())
        # Totals: unadjusted FP, adjustment factor total, final FP value.
        r += 1
        ws.write(r, base_col + 3, u'未調整FP値 計', style_title)
        ws.write(r, base_col + 6, non_adjust_fp, style_num)
        r += 1
        ws.write(r, base_col + 3, u'調整係数 計', style_title)
        ws.write(r, base_col + 6, project.total_adjust_points(), style_num)
        r += 1
        ws.write(r, base_col + 3, u'FP 値', style_title)
        ws.write(r, base_col + 6,
                 non_adjust_fp + project.total_adjust_points(), style_num)
        r += 1
        # Adjustment-factor detail table (label, value) pairs.
        dat = (
            (u'データ通信(Data Communications)', project.data_communications),
            (u'分散処理(Distributed Data Processing)',
             project.distoributed_processing),
            (u'性能(Performance)', project.performance),
            (u'高負荷構成(Heavily Used Configuration)',
             project.heavily_used_configuration),
            (u'トランザクション量(Transaction Rate)', project.transaction_rate),
            (u'オンライン入力(Online Data Entry)', project.online_data_entry),
            (u'エンドユーザー効率(End-User Efficiency)',
             project.enduser_efficiency),
            (u'オンライン更新(Online Update)', project.online_update),
            (u'複雑な処理(Complex Processing)', project.complex_processing),
            (u'再利用可能性(Reusability)', project.reusability),
            (u'インストール容易性(Installation Ease)',
             project.installation_ease),
            (u'運用性(Operational Ease)', project.operational_ease),
            (u'複数サイト(Multiple Sites)', project.multiple_sites),
            (u'変更容易性(Facilitate Change)', project.facilitate_change),
        )
        r = r + 2
        ws.write(r, base_col + 2, u'調整係数', style_title)
        for (k, d) in enumerate(dat):
            r = r + 1
            ws.write(r, base_col + 1, k + 1)
            ws.write(r, base_col + 2, d[0])
            ws.write(r, base_col + 3, d[1], style_num)
        wb.save(self.response.out)
        return None
    else:
        # Not logged in.
        html = "<html><head></head><body><span style='color:red'>エラー! ログインされていません。ログインしてください。</span></body></html>"
        return self.response.out.write(html)
    # NOTE(review): unreachable — both branches above return.
    err = {'error': 'Unknown Error'}
    return self.response.out.write(json.write(err))
def post(self):
    """Handle POST to a playlist resource, tunnelling PUT/DELETE via '_method'.

    PUT updates position/filters/tracks/name/collaborative flags subject to
    the caller's rights; DELETE removes the caller's library link and deletes
    the playlist itself once no user references it.
    """
    method = self.request.get("_method")
    key = utils.url_to_entity_key(self.request.uri)
    if key:
        playlist = db.get(db.Key(key))
        if playlist:
            current_user = utils.get_current_user()
            # Get corresponding link between this user and the playlist.
            library_item = playlist.library_item_for_user(current_user)
            if method == "PUT":
                need_version_control = False
                playlist_changed = False
                if (self.request.get('position')):
                    # Rights: Can always update this -- position only affects
                    # the caller's own library ordering.
                    current_user.re_sort_playlists(
                        library_item, int(self.request.get('position')))
                if (playlist.collaborative or library_item.is_owner):
                    # Rights: Owner or collaborators can update this.
                    if playlist.smart:
                        utils.parse_smart_filters(playlist, self.request)
                        playlist_changed = True
                    if self.request.get('tracks'):
                        playlist.tracks = self.request.get('tracks')
                        playlist_changed = True
                        # Track edits on a collaborative playlist must go
                        # through optimistic version checking below.
                        need_version_control = playlist.collaborative
                if library_item.is_owner:
                    # Rights: Only owner can update this.
                    if (self.request.get('name')
                            and len(self.request.get('name')) > 0):
                        playlist.name = utils.strip_html(
                            self.request.get('name'))
                        playlist_changed = True
                    if (self.request.get('collaborative')):
                        playlist.collaborative = (
                            utils.convert_javascript_bool_to_python(
                                self.request.get('collaborative')))
                        playlist_changed = True
                if playlist_changed:
                    if need_version_control:
                        # Accept the write only if the client's version is
                        # current; otherwise return the fresh serialized
                        # state so the client can re-sync.
                        if self.request.get('version') and not int(
                                self.request.get(
                                    'version')) < playlist.version:
                            playlist.version += 1
                            playlist.put()
                            self.response.out.write(
                                utils.status_code_json(200))
                        else:
                            self.response.out.write(
                                library_item.serialize())
                    else:
                        playlist.put()
                        self.response.out.write(
                            utils.status_code_json(200))
                else:
                    # Nothing changed; still report success.
                    self.response.out.write(utils.status_code_json(200))
            elif method == "DELETE":
                # Remove this user's link; delete the playlist entity itself
                # only when no other user still references it.
                library_item.delete()
                current_user.re_index_playlists()
                if not playlist.has_user():
                    playlist.delete()
                self.response.out.write(utils.status_code_json(200))
def get(self):
    """Return the requested Person entity serialized as JSON.

    The entity is looked up by the 'key' request parameter, an encoded
    datastore key.

    Fix: the original read ``self.request["key"]`` -- the webapp Request
    object is not subscriptable, so this raised at runtime. Use
    ``self.request.get("key")``, matching the sibling ``destroy`` handler.
    """
    person = Person.get(db.Key(encoded=self.request.get("key")))
    self.respond_with(person.to_json())
def post(self):
    """Put the given result data into the database.

    Two modes, selected by the presence of 'key' in the request data:
    - No 'key': create a new PageData record for a test run, creating or
      updating the owning test suite / browser records as needed, and
      respond with the new entity key and the expected number of layout
      pieces.
    - 'key' present: append one piece of layout-table data to the
      previously created PageData record.
    """
    data = self._GetRequestData()
    # Touch the machine instance if the instance id is provided.
    if 'instance_id' in data:
        client_machine.SetMachineStatus(data['instance_id'],
                                        enum.MACHINE_STATUS.RUNNING)
    if 'key' not in data:
        update_suite_info = False
        suite_data = simplejson.loads(data['suiteInfo'])
        suite = test_suite.GetOrInsertSuite(
            suite_data['date'], suite_data['refBrowser'],
            suite_data['refBrowserChannel'])
        channel = None
        if 'channel' in data:
            channel = data['channel']
        # Following code is to take care of the scenario where test browser
        # comes up first with the data. Hence, at that time we don't have
        # sufficient data to put in ref browser.
        # If reference browser information is incomplete then let's mark
        # it for update.
        if suite.ref_browser.os is None:
            parser = useragent_parser.UAParser(data['userAgent'])
            if (parser.GetBrowserVersion() == suite.ref_browser.version and
                    channel == suite_data['refBrowserChannel']):
                # If flag is present anywhere then it has to match.
                # NOTE(review): if 'flag' is absent from data but
                # suite.ref_browser.flag is set, data['flag'] raises
                # KeyError here -- presumably clients always send 'flag'
                # when the suite has one; confirm against callers.
                if 'flag' in data or suite.ref_browser.flag:
                    if suite.ref_browser.flag == data['flag']:
                        update_suite_info = True
                else:
                    update_suite_info = True
        test_data = page_data.PageData()
        if 'screenshot' in data:
            test_data.screenshot = screenshot.AddScreenshot(
                data['screenshot'])
        test_data.test_suite = suite
        flag = None
        if 'flag' in data:
            flag = data['flag']
        test_data.browser = browser.GetOrInsertBrowser(
            data['userAgent'], channel=channel, flag=flag)
        if update_suite_info:
            logging.info('Updating Reference Browser in Test Suite. '
                         'old_ref: %s, new_ref: %s',
                         suite.ref_browser.key(), test_data.browser.key())
            test_suite.UpdateRefBrowser(suite, test_data.browser,
                                        delete_old_ref=True)
        # Let's get run log key from suite_data.
        if 'key' not in suite_data:
            raise PutDataError('The run log "key" is a required parameter.')
        my_run_log = db.get(db.Key(suite_data['key']))
        url_config_key = my_run_log.config.key()
        test_data.site = site.GetOrInsertSiteFromUrl(data['url'],
                                                     url_config_key)
        test_data.nodes_table = data['nodesTable']
        test_data.dynamic_content_table = data['dynamicContentTable']
        test_data.layout_table = data_list.CreateEmptyDataList()
        test_data.width = int(data['width'])
        test_data.height = int(data['height'])
        if 'metaData' in data:
            test_data.metadata = data['metaData']
        # If browser key matches with test_suite's ref browser then
        # let's mark the page_data as reference.
        if str(test_data.browser.key()) == str(suite.ref_browser.key()):
            test_data.is_reference = True
        else:
            test_data.is_reference = False
        test_data.put()
        if not test_data.is_reference:
            suite.AddTestBrowser(test_data.browser)
        response = {
            'key': str(test_data.key()),
            'nPieces': data_list.NUM_ENTRIES}
        self.response.out.write(simplejson.dumps(response))
    else:
        # Follow-up request: attach one layout-table piece to the entity
        # created by the initial request.
        test_data = db.get(db.Key(data['key']))
        layout_table = simplejson.loads(data['layoutTable'])
        test_data.layout_table.AddEntry(int(data['i']), layout_table)
        self.response.out.write('received')
def destroy(self):
    """Delete the Person identified by the encoded 'key' request parameter.

    A lookup miss is silently ignored (nothing to delete).
    """
    encoded_key = self.request.get("key")
    target = Person.get(db.Key(encoded=encoded_key))
    if target is not None:
        target.delete()
def get(self):
    """Create or update an Exercise from the submitted form fields.

    Reads the exercise name plus up to 1000 numbered prereq-N / cover-N /
    video-N form parameters, rebuilds the prerequisite, covers and related
    ExerciseVideo records, recomputes the playlist-based ordering of the
    exercise's videos, and redirects back to the editor.
    """
    user = models.UserData.current().user
    exercise_name = self.request.get('name')
    if not exercise_name:
        self.response.out.write(
            "No exercise submitted, please resubmit if you just logged in."
        )
        return
    query = models.Exercise.all()
    query.filter('name =', exercise_name)
    exercise = query.get()
    if not exercise:
        # First save of a brand-new exercise.
        exercise = models.Exercise(name=exercise_name)
        exercise.prerequisites = []
        exercise.covers = []
        exercise.author = user
        exercise.summative = self.request_bool("summative", default=False)
    v_position = self.request.get('v_position')
    h_position = self.request.get('h_position')
    short_display_name = self.request.get('short_display_name')
    # Rebuild prerequisites/covers from scratch out of the numbered
    # form fields; missing indices come back as "" and are skipped.
    exercise.prerequisites = []
    for c_check_prereq in range(0, 1000):
        prereq_append = self.request_string("prereq-%s" % c_check_prereq,
                                            default="")
        if prereq_append and not prereq_append in exercise.prerequisites:
            exercise.prerequisites.append(prereq_append)
    exercise.covers = []
    for c_check_cover in range(0, 1000):
        cover_append = self.request_string("cover-%s" % c_check_cover,
                                           default="")
        if cover_append and not cover_append in exercise.covers:
            exercise.covers.append(cover_append)
    if v_position:
        exercise.v_position = int(v_position)
    if h_position:
        exercise.h_position = int(h_position)
    if short_display_name:
        exercise.short_display_name = short_display_name
    exercise.live = self.request_bool("live", default=False)
    if not exercise.is_saved():
        # Exercise needs to be saved before checking related videos.
        exercise.put()
    # Collect the desired set of video keys (as strings) from the form,
    # accepting either readable ids or raw keys.
    video_keys = []
    for c_check_video in range(0, 1000):
        video_name_append = self.request_string(
            "video-%s-readable" % c_check_video, default="")
        if video_name_append:
            video = models.Video.get_for_readable_id(video_name_append)
            if not video.key() in video_keys:
                video_keys.append(str(video.key()))
        video_append = self.request_string("video-%s" % c_check_video,
                                           default="")
        if video_append and not video_append in video_keys:
            video_keys.append(video_append)
    query = models.ExerciseVideo.all()
    query.filter('exercise =', exercise.key())
    existing_exercise_videos = query.fetch(1000)
    existing_video_keys = []
    for exercise_video in existing_exercise_videos:
        # NOTE(review): existing_video_keys collects db.Key objects while
        # video_keys holds strings, so the membership tests below compare
        # Key vs str and never match -- existing links appear to always be
        # deleted and re-created. Confirm whether that churn is intended.
        existing_video_keys.append(exercise_video.video.key())
        if not exercise_video.video.key() in video_keys:
            exercise_video.delete()
    for video_key in video_keys:
        if not video_key in existing_video_keys:
            exercise_video = models.ExerciseVideo()
            exercise_video.exercise = exercise
            exercise_video.video = db.Key(video_key)
            exercise_video.exercise_order = models.VideoPlaylist.all(
            ).filter('video =', exercise_video.video).get().video_position
            exercise_video.put()
    exercise.put()
    # Start ordering: sort this exercise's videos by the playlists they
    # belong to (most common playlist titles first), then by each video's
    # position within its playlist.
    ExerciseVideos = models.ExerciseVideo.all().filter(
        'exercise =', exercise.key()).fetch(1000)
    playlists = []
    for exercise_video in ExerciseVideos:
        playlists.append(
            models.VideoPlaylist.get_cached_playlists_for_video(
                exercise_video.video))
    if playlists:
        playlists = list(itertools.chain(*playlists))
        titles = map(lambda pl: pl.title, playlists)
        playlist_sorted = []
        for p in playlists:
            playlist_sorted.append([p, titles.count(p.title)])
        playlist_sorted.sort(key=lambda p: p[1])
        playlist_sorted.reverse()
        playlists = []
        for p in playlist_sorted:
            playlists.append(p[0])
        playlist_dict = {}
        exercise_list = []
        playlists = list(set(playlists))
        for p in playlists:
            playlist_dict[p.title] = []
            for exercise_video in ExerciseVideos:
                if p.title in map(
                        lambda pl: pl.title,
                        models.VideoPlaylist.get_cached_playlists_for_video(
                            exercise_video.video)):
                    playlist_dict[p.title].append(exercise_video)
                    # ExerciseVideos.remove(exercise_video)
            if playlist_dict[p.title]:
                playlist_dict[p.title].sort(
                    key=lambda e: models.VideoPlaylist.all().filter(
                        'video =', e.video).filter('playlist =', p).get(
                        ).video_position)
                exercise_list.append(playlist_dict[p.title])
        if exercise_list:
            exercise_list = list(itertools.chain(*exercise_list))
            for e in exercise_list:
                e.exercise_order = exercise_list.index(e)
                e.put()
    self.redirect('/editexercise?saved=1&name=' + exercise_name)
def get(self, action=""):
    """Drive the OAuth 1.0 dance for this service.

    action == "request": create a placeholder Account, obtain a request
    token, stash the account key in a cookie, and redirect the user to the
    provider's authorization page.

    action == "callback": exchange the verifier for an access token, fill
    in the account's profile information, cache it, remove duplicate
    accounts, and render the window-closing callback page. Any failure
    deletes the placeholder account so no garbage is left behind.

    Anything else answers 400.

    Fixes vs. original: a missing 'oauth_key' cookie (cookies.get returns
    None) previously slipped past the `== ""` check and crashed in
    db.Key(None) -- now any falsy value yields 400; the except block used
    `raise sys.exc_info()` (raises a tuple, losing the traceback) -- now a
    bare `raise` re-raises the original exception; `== None` comparisons
    replaced with `is None`.
    """
    user = self.session.get_user()
    if action == "callback":
        # Use the key from the cookie (for providers like Yammer that
        # cannot do a real callback).
        oauth_key = self.request.cookies.get('oauth_key', None)
        if not oauth_key:
            # Covers both a missing cookie (None) and an empty value.
            self.error(400)
            return
        account_key = db.Key(oauth_key)
        account = Account.get(account_key)
        try:
            token = oauth.Token(account.request_token, account.secret)
            oauth_verifier = self.request.get("oauth_verifier")
            client = self.newOAuthClient(token)
            resp, content = client.request(
                self.access_token_url, "POST",
                body="oauth_verifier=%s" % oauth_verifier)
            if resp['status'] != '200':
                raise Exception('Invalid response %s.' % resp['status'])
            access_token = dict(parse_qsl(content))
            account.access_token = access_token['oauth_token']
            account.secret = access_token['oauth_token_secret']
            account.put()
            self.account = account
            account_name, display_name, group_image_url, account_info = (
                self.get_account_info())
            account.account_name = account_name
            account.display_name = display_name
            account.group_image_url = group_image_url
            account.account_info = account_info
            if account.account_name is None or account.account_info is None:
                raise Exception('Cannot get account information.')
            account.put()
            # Cache the access token for 24 hours.
            key = ("oauth_token/" + user.user_id + "/" + self.service +
                   "/" + account.account_name)
            memcache.set(key, account, 24 * 60 * 60)
            # If the same account is already registered, delete the old one.
            saved_accounts = Account.gql(
                "WHERE service = :1 and user_ref = :2 and account_name = :3",
                self.service, user.key(), account.account_name)
            for saved_account in saved_accounts:
                if saved_account.key() != account.key():
                    saved_account.delete()
        except:
            # On any failure, delete the placeholder account so no garbage
            # is left behind, then re-raise the original exception with its
            # traceback intact.
            account.delete()
            logging.exception(sys.exc_info())
            raise
        # Render the page that closes the popup window.
        tmpl = os.path.join(os.path.dirname(__file__),
                            "../view/oauth_callback.html")
        return self.response.out.write(
            template.render(tmpl, {'account': account}))
    if action == "request":
        client = self.newOAuthClient()
        account_key = Account(user_ref=user, service=self.service).put()
        params = {'oauth_callback': self.request.relative_url('callback')}
        resp, content = client.request(self.request_token_url, 'POST',
                                       body=urllib.urlencode(params, True))
        if resp['status'] != '200':
            raise Exception('Invalid response %s. : %s' %
                            (resp['status'], content))
        request_token = dict(parse_qsl(content))
        account = Account.get(account_key)
        account.request_token = request_token['oauth_token']
        account.secret = request_token['oauth_token_secret']
        account.put()
        # Remember which Account this flow belongs to for the callback leg.
        self.response.headers.add_header(
            'Set-Cookie', 'oauth_key=%s;' % str(account_key))
        self.redirect(self.authorize_url + "?oauth_token=" +
                      request_token['oauth_token'])
        return
    self.error(400)
def process(org_key):
    """Processes a single organization.

    Organization status is updated to ACCEPTED or REJECTED if the current
    status has been set to PRE_ACCEPTED or PRE_REJECTED, respectively,
    by program administrators.

    Args:
      org_key: Organization key (old-style db key; converted to ndb below).

    Raises:
      ValueError: If the program key's kind is neither GSoCProgram nor
        GCIProgram.
    """
    # The program key is passed through the mapreduce mapper params.
    context = mapreduce_context.get()
    program_key = db.Key(context.mapreduce_spec.mapper.params['program_key'])
    # Pick the URL-name set matching the program family.
    if program_key.kind() == 'GSoCProgram':
        url_names = soc_urls.UrlNames
    elif program_key.kind() == 'GCIProgram':
        url_names = ci_urls.UrlNames
    else:
        raise ValueError('Invalid program type %s' % program_key.kind())
    program = db.get(program_key)
    site = site_logic.singleton()
    org_key = ndb.Key.from_old_key(org_key)
    org_admins = profile_logic.getOrgAdmins(org_key)
    # We are "prefetching" the ProgramMessages entity here instead of fetching
    # it where it is required i.e. when the message templates are required
    # to build the email message body. We do this because we perform the
    # operation of fetching the ProgramMessages entity if it exists or create
    # it if it doesn't in a Appengine regular "db" transation whereas rest
    # of the updating of organization entities happen within an ndb transaction
    # because Organization model is an ndb model and such cross API nested
    # transactions are incompatible in Appengine.
    program_messages = program.getProgramMessages()

    @ndb.transactional
    def updateOrganizationStatus():
        """Transactionally updates organization status."""
        # only organizations defined for the specified program should be
        # processed
        organization = org_key.get()
        if organization.program.to_old_key() == program_key:
            if organization.status == org_model.Status.PRE_ACCEPTED:
                org_logic.setStatus(organization, program, site,
                                    program_messages,
                                    org_model.Status.ACCEPTED,
                                    links.ABSOLUTE_LINKER, url_names,
                                    org_admins=org_admins)
            elif organization.status == org_model.Status.PRE_REJECTED:
                org_logic.setStatus(organization, program, site,
                                    program_messages,
                                    org_model.Status.REJECTED,
                                    links.ABSOLUTE_LINKER, url_names,
                                    org_admins=org_admins)

    updateOrganizationStatus()
def get(self):
    """Handles GET requests.

    Renders the review page for one review step: validates that the unit
    needs human grading and that the current student is the assigned
    reviewer, then shows either a read-only view (review completed or past
    the due date) or the active review form.
    """
    student = self.personalize_page_and_get_enrolled()
    if not student:
        return
    course = self.get_course()
    rp = course.get_reviews_processor()
    unit, unused_lesson = extract_unit_and_lesson(self)
    if not course.needs_human_grader(unit):
        self.error(404)
        return
    review_step_key = self.request.get('key')
    if not unit or not review_step_key:
        self.error(404)
        return
    try:
        review_step_key = db.Key(encoded=review_step_key)
        review_step = rp.get_review_steps_by_keys(
            unit.unit_id, [review_step_key])[0]
    except Exception:  # pylint: disable=broad-except
        # Malformed key or lookup failure -> treat as not found.
        self.error(404)
        return
    if not review_step:
        self.error(404)
        return
    # Check that the student is allowed to review this submission.
    if not student.has_same_key_as(review_step.reviewer_key):
        self.error(404)
        return
    # Select the view-configuration callables matching the assessment
    # model version of this unit.
    model_version = course.get_assessment_model_version(unit)
    assert model_version in courses.SUPPORTED_ASSESSMENT_MODEL_VERSIONS
    self.template_value['model_version'] = model_version
    if model_version == courses.ASSESSMENT_MODEL_VERSION_1_4:
        configure_assessment_view = self.configure_assessment_view_1_4
        configure_readonly_review = self.configure_readonly_review_1_4
        configure_active_review = self.configure_active_review_1_4
    elif model_version == courses.ASSESSMENT_MODEL_VERSION_1_5:
        configure_assessment_view = self.configure_assessment_view_1_5
        configure_readonly_review = self.configure_readonly_review_1_5
        configure_active_review = self.configure_active_review_1_5
    else:
        raise ValueError('Bad assessment model version: %s' % model_version)
    self.template_value['navbar'] = {'course': True}
    self.template_value['unit_id'] = unit.unit_id
    self.template_value['key'] = review_step_key
    submission_key = review_step.submission_key
    submission_contents = student_work.Submission.get_contents_by_key(
        submission_key)
    configure_assessment_view(unit, submission_contents)
    review_due_date = unit.workflow.get_review_due_date()
    if review_due_date:
        self.template_value['review_due_date'] = review_due_date.strftime(
            utils.HUMAN_READABLE_DATETIME_FORMAT)
    review_key = review_step.review_key
    rev = rp.get_reviews_by_keys(
        unit.unit_id, [review_key])[0] if review_key else None
    time_now = datetime.datetime.now()
    # NOTE(review): review_due_date may be None here (only guarded above),
    # so these comparisons rely on Python 2's ordering of datetime vs None;
    # confirm get_review_due_date() always returns a date in practice.
    show_readonly_review = (
        review_step.state == domain.REVIEW_STATE_COMPLETED
        or time_now > review_due_date)
    self.template_value['due_date_exceeded'] = (time_now > review_due_date)
    if show_readonly_review:
        configure_readonly_review(unit, rev)
    else:
        # Populate the review form,
        configure_active_review(unit, rev)
    self.template_value['assessment_xsrf_token'] = (
        crypto.XsrfTokenManager.create_xsrf_token('review-post'))
    self.template_value['event_xsrf_token'] = (
        crypto.XsrfTokenManager.create_xsrf_token('event-post'))
    # pylint: disable=protected-access
    self.render('review.html', additional_dirs=[assessments._TEMPLATES_DIR])
def from_path(model_name, path):
    """Build a datastore Key for kind *model_name* with string name *path*.

    Fix: the original called ``db.Key('%s/%s' % (model_name, path))``, but
    the ``db.Key`` constructor accepts only a base64-encoded key string, so
    a plain 'Kind/name' string always raised BadKeyError. The documented
    way to build a key from a (kind, name) pair is ``db.Key.from_path``.
    """
    return db.Key.from_path(model_name, path)
def get(self):
    """Delete the service named by 'sid' when action=delete, then redirect
    back to the client's project list."""
    is_delete = self.request.get('action') == "delete"
    if is_delete:
        target_key = db.Key(self.request.get('sid'))
        db.delete(target_key)
    redirect_url = '/projects?clientkey=' + self.request.get('clientkey')
    self.redirect(redirect_url)
def get(self):
    """Render the recipe page for the entity named by the 'id' parameter."""
    recipe_key = db.Key(self.request.get('id'))
    target_recipe = Recipe.get(recipe_key)
    page = render_template('view.html', recipe=target_recipe)
    self.response.out.write(page)
def get(self):
    """Invoice page handler: optionally perform an action, then render.

    Supported 'action' values: delete (soft- or hard-delete an invoice),
    finalize (assign invoice number, checksum and QR codes), send (convert
    to PDF and mark sent), statusupdate (set an arbitrary status). After
    any action the invoice view is rendered with totals computed from the
    associated Time records.
    """
    message = ''
    if (self.request.get('action') == "delete"):
        # At the moment, we don't want to actually delete real numbered
        # invoices from the datastore, as it will adversly effect how
        # I've implemented the invoice numbering; we can't handle
        # real sequential numbering without resorting to sharding
        # (as per stackoverflow on this topic); for the time being
        # we're just running an all.count() + 1 on the invoices model,
        # filtering out drafts.
        # If we actually delete stuff, we could get an invoice issued
        # after the last count, but with a smaller invoice number
        # TODO perhaps zero out the other fields to avoid confusion?
        i = db.Key(self.request.get('iid'))
        deleteit = db.get(i)
        # First, let's check to see if this invoice has had an inum
        # assigned to it; if it does, then we just assign the status
        # as deleted; if it doesn't have an inum, then it's safe
        # to remove the entry from the datastore entirely.
        if deleteit.inum:
            deleteit.status = 'deleted'
            deleteit.put()
        else:
            deleteit.delete()
        # TODO Revert the statuses in the associated Time model records too
        # IDEA: Maybe write a garbage collector routine that runs daily
        # or whatever
        # itime = InvoiceTime.all()
        # itime.filter("invoice", i)
        # itime.delete()
        action = '/invoices?clientkey=' + self.request.get('cid')
        self.redirect(action)
    if (self.request.get('action') == "finalize"):
        # When we first create an invoice, we leave a few things blank,
        # including the checksum and real invoice number.
        # With this action, we complete the process by generating
        # what's missing and thus preparing the invoice to be sent.
        # First, get invoice details.
        invoiceid = db.Key(self.request.get('iid'))
        invoice_query = Invoices.all()
        invoice_query.filter("__key__", invoiceid)
        invoice = invoice_query.fetch(1)
        invoicekey = str(invoiceid)
        invoicetotal = str(invoice[0].totalbill)
        # Get client information
        ck = db.Key(self.request.get('cid'))
        ck_query = Clients.all()
        ck_query.filter('__key__ =', ck)
        client = ck_query.fetch(1)
        clientname = client[0].business
        clientemail = client[0].email
        # Get a keys-only (for speed) query of all the invoices
        # in the datastore that have an inum issued to them.
        # Take that count and add 1 to it to get the new inum.
        invcount = Invoices.all(keys_only=True)
        invcount.filter("inum >", 0)
        count = invcount.count()
        inumber = count + 1
        invoice[0].inum = inumber
        # Calculate a checksum for the invoice.
        # This should be more-better (sic) thought out, but the idea
        # is that we need a way to veryify with the client that the invoice
        # they're viewing (when they view it) is the original invoice that
        # was sent to them.
        # Hash the clients name, the date,
        # and the total amount of the invoice.
        # TODO fill the hash with complete invoice details _somehow_
        checkthatsum = clientname + invoice[0].date + invoicetotal
        invoicechecksum = hashlib.md5(checkthatsum).hexdigest()
        invoice[0].checksum = invoicechecksum
        invoice[0].status = "invoiced"
        try:
            # Use the Google Chart API to generate the QR codes.
            # TODO Investigate ways to do this that might work offline:
            # https://github.com/bernii/PyQRNativeGAE
            chartapi = "http://chart.apis.google.com/chart"
            chartapi += "?cht=qr&chs=300x300&chld=H|0&chl="
            invoiceurl = settings.APP["URL"]
            invoiceurl += "invoice-gen?ichecksum="
            invoiceurl += invoicechecksum
            qrurl = chartapi + invoiceurl
            qrchecksum = chartapi + invoicechecksum
            qrattach = urlfetch.fetch(qrurl)
            qrcheck = urlfetch.fetch(qrchecksum)
            qlink = qrattach.content
            qchk = qrcheck.content
        except:
            # Chart API unreachable; store sentinel values instead.
            qlink = "offline"
            qchk = "offline"
        invoice[0].qrlink = qlink
        invoice[0].qrchecksum = qchk
        invoice[0].put()
        #message = 'Finalized.'
        message = 'Finalized. <a href="/invoice?iid=' + self.request.get(
            'iid') + '"><span class="icon-remove-sign"><span></a>'
    if (self.request.get('action') == "send"):
        # Get client information
        ck = db.Key(self.request.get('cid'))
        ck_query = Clients.all()
        ck_query.filter('__key__ =', ck)
        client = ck_query.fetch(1)
        clientname = client[0].business
        clientemail = client[0].email
        invoiceid = db.Key(self.request.get('iid'))
        invoice_query = Invoices.all()
        invoice_query.filter("__key__", invoiceid)
        invoice = invoice_query.fetch(1)
        invoice[0].status = "sent"
        invoice[0].put()
        invoicekey = str(invoiceid)
        invoicetotal = str(invoice[0].totalbill)
        invoicechecksum = invoice[0].checksum
        invoiceurl = settings.APP["URL"]
        invoiceurl += "invoice-gen?ichecksum="
        invoiceurl += invoicechecksum
        import urllib2
        response = urllib2.urlopen('http://hitchless.com/')
        # self.response.out.write(response.read())
        # Convert the fetched HTML to a PDF via the conversion service.
        asset = conversion.Asset("text/html", response.read(),
                                 "invoice.html")
        conversion_obj = conversion.Conversion(asset, "application/pdf")
        rpc = conversion.create_rpc()
        conversion.make_convert_call(rpc, conversion_obj)
        result = rpc.get_result()
        if result.assets:
            # Note: in most cases, we will return data all in one asset.
            # Except that we return multiple assets for multiple pages image.
            for asset in result.assets:
                invoiceattachment = asset.data
                self.response.headers['Content-Type'] = "application/pdf"
                self.response.out.write(asset.data)
        # attachname = settings.COMPANY["name"] + "-invoice#" + str(invoice[0].inum) + ".pdf"
        # emailsubject = "Invoice #" + str(invoice[0].inum) + " for $"
        # emailsubject += invoicetotal
        # emailsender = settings.COMPANY["name"]
        # emailsender += "<" + settings.COMPANY["email"] + ">"
        # message = mail.EmailMessage(sender=emailsender, subject=emailsubject)
        # message.to = "Allan Kenneth <*****@*****.**>"
        # message.attachments = [(attachname,invoiceattachment)]
        # message.body = "Dear " + clientname + ",\n\n"
        # message.body += "To view your invoice, please click the link below:\n\n"
        # message.body += invoiceurl
        # message.send()
        # else:
        # NOTE(review): the `else:` above is commented out, so this error
        # assignment runs unconditionally and, on success, error_code may
        # be None -- confirm whether it was meant to stay in the else
        # branch of `if result.assets:`.
        message = result.error_code + " - " + result.error_text
        # TODO a template or something? It's UI decision time!
        #message = 'Sent.'
        message = 'Sent. <a href="/invoice?iid=' + self.request.get(
            'iid') + '"><span class="icon-remove-sign"><span></a>'
    if self.request.get('action') == 'statusupdate':
        i = db.Key(self.request.get('iid'))
        update = db.get(i)
        update.status = self.request.get('status')
        update.put()
        message = 'Updated as ' + self.request.get(
            'status') + '. <a href="/invoice?iid=' + self.request.get(
            'iid') + '"><span class="icon-remove-sign"><span></a>'
    # After all of the different actions to be taken,
    # we finally just print out the invoice with the appropriate message
    # at the top.
    statuses = ["draft", "invoiced", "paid", "sent", "deleted"]
    i = db.Key(self.request.get('iid'))
    times_query = Time.all()
    times_query.filter('invoice =', i)
    times = times_query.fetch(100)
    totalhours = 0
    subtotal = 0
    for hours in times:
        totalhours = totalhours + float(hours.hours)
        subtotal = subtotal + float(hours.total)
    # TODO figure out how we want to serve the images from the datastore
    # qrq = Invoices.all()
    # qrq.filter("__key__", i)
    # qr = qrq.fetch(1)
    # qrlink = qr[0].qrlink
    taxrate = settings.APP["taxrate"]
    tax = subtotal * taxrate
    totalinvoice = subtotal + tax
    # format the values to two decimal places before sending to template
    # TODO remove this because it shouldn't be necessary; they should
    # come out the datastore formatted correctly!
    #
    # times[0].invoice.totalbill
    #
    subtotal = "%.2f" % subtotal
    tax = "%.2f" % tax
    totalinvoice = "%.2f" % totalinvoice
    logopath = settings.COMPANY['logopath']
    template_values = {
        'message': message,
        'statuses': statuses,
        'times': times,
        'totalhours': totalhours,
        'subtotal': subtotal,
        'tax': tax,
        'totalinvoice': totalinvoice,
        'logopath': logopath,
        'companyname': settings.COMPANY['name'],
        'companyaddress': settings.COMPANY['street'],
        'companycity': settings.COMPANY['city'],
        'companyprovince': settings.COMPANY['province'],
        'companycode': settings.COMPANY['code']
    }
    path = os.path.join(os.path.dirname(__file__), 'views/invoice.html')
    self.response.out.write(template.render(path, template_values))
def guestbook_key(guestbook_name=DEFAULT_GUESTBOOK_NAME):
    """Constructs a Datastore key for a Guestbook entity with guestbook_name.

    Fix: ``db.Key('Guestbook', guestbook_name)`` raised TypeError because
    the ``db.Key`` constructor takes a single base64-encoded key string,
    not a (kind, name) pair; ``db.Key.from_path`` is the documented
    constructor for building a key from kind and name.
    """
    return db.Key.from_path('Guestbook', guestbook_name)