def sync_outdated_events(configuration, condition):
    """Regenerate future slave rows for master rows matching *condition*.

    For each outdated master row, only slave rows with a 'datetime slug'
    not already present are inserted; existing slave rows are kept.
    The master row is then stamped 'public' with a fresh sync date.

    :param configuration: customer configuration dict; must contain
        'id', 'master table' and 'slave table' keys.
    :param condition: Fusion Tables WHERE clause selecting the outdated
        master rows.
    """
    outdated = fusion_tables.select(configuration['master table'], condition=condition)
    logging.info("Syncing %d outdated public rows in %s master %s" % (len(outdated), configuration['id'], configuration['master table']))
    for row in outdated:
        # select old slave rows
        condition = "'event slug' = '%s'" % row['event slug']
        slaves = fusion_tables.select(configuration['slave table'], cols=['datetime slug'], condition=condition, filter_obsolete_rows=False)
        # create slave dicts
        datetime_slugs = [slave['datetime slug'] for slave in slaves]
        (new_slaves, final_date) = fusion_tables.master_to_slave(row)
        # store slave dicts; count is approximate because only rows with a
        # new 'datetime slug' are actually inserted below
        logging.info("Inserting approx. %d future rows in %s slave %s" % (len(new_slaves) - len(slaves), configuration['id'], configuration['slave table']))
        for new_slave in new_slaves:
            if new_slave['datetime slug'] not in datetime_slugs:
                # insert_hold presumably buffers rows until insert_go flushes
                # them in one batch — TODO confirm against fusion_tables
                fusion_tables.insert_hold(configuration['slave table'], new_slave)
        # set master event state to 'public'
        update = {
            'rowid': row['rowid'],
            'state': 'public',
            'sync date': datetime.today().strftime(FUSION_TABLE_DATE_TIME_FORMAT),
            'final date': final_date
        }
        # NOTE(review): this logs before update_with_implicit_rowid runs
        logging.info("Updated sync timestamp and final date in regenerated row in %s master %s" % (configuration['id'], configuration['master table']))
        fusion_tables.update_with_implicit_rowid(configuration['master table'], update)
        # abort gracefully if the task is approaching its deadline
        running_too_long(don_t_run_too_long=True)
    # flush the buffered slave inserts
    fusion_tables.insert_go(configuration['slave table'])
    logging.info("Done syncing outdated public rows in %s master %s" % (configuration['id'], configuration['master table']))
def sync_old_version_of_updated_events(configuration, condition):
    """Delete superseded slave rows for updated events and unflag the master.

    For each master row matching *condition*, slave rows with a lower
    'sequence' number are deleted, then the master row's
    'update after sync' flag is reset to 'false'.

    :param configuration: customer configuration dict ('id',
        'master table', 'slave table').
    :param condition: Fusion Tables WHERE clause selecting updated
        master rows.
    """
    updated_master = fusion_tables.select(configuration['master table'], condition=condition)
    logging.info(
        "Deleting old slave rows for %d updated events in %s master %s" % (len(updated_master), configuration['id'], configuration['master table']))
    for updated_master_row in updated_master:
        # find the old slave row(s): same event, lower sequence number
        condition = "'event slug' = '%s' AND 'sequence' < %s" % (
            updated_master_row['event slug'], updated_master_row['sequence'])
        old_slave = fusion_tables.select(configuration['slave table'], condition=condition, filter_obsolete_rows=False)
        # delete the old row(s)
        logging.info("Deleting %d old event rows in %s slave %s" % (len(old_slave), configuration['id'], configuration['slave table']))
        delete_slaves(configuration['slave table'], old_slave)
        logging.info("Deleted %d old rows in %s slave %s" % (len(old_slave), configuration['id'], configuration['slave table']))
        # unflag the updated master row so it is not processed again
        unflagged_row = {}
        unflagged_row['rowid'] = updated_master_row['rowid']
        unflagged_row['update after sync'] = 'false'
        fusion_tables.update_with_implicit_rowid(configuration['master table'], unflagged_row)
        logging.info("Unflagged updated row %s in %s master %s" % (updated_master_row['rowid'], configuration['id'], configuration['master table']))
        # abort gracefully if the task is approaching its deadline
        running_too_long(don_t_run_too_long=True)
    logging.info("Done deleting old slave rows in %s slave %s" % (configuration['id'], configuration['slave table']))
def sync_cancelled_events(configuration, condition):
    """Remove slave rows for cancelled events and mark masters 'cancellation'.

    :param configuration: customer configuration dict ('id',
        'master table', 'slave table').
    :param condition: Fusion Tables WHERE clause selecting cancelled
        master rows.
    """
    cancelled = fusion_tables.select(configuration['master table'], condition=condition)
    logging.info(
        "Syncing %d cancelled rows in %s master %s" % (len(cancelled), configuration['id'], configuration['master table']))
    for row in cancelled:
        # delete cancelled slave rows belonging to this event
        condition = "'event slug' = '%s'" % row['event slug']
        slaves = fusion_tables.select(configuration['slave table'], cols=['rowid'], condition=condition, filter_obsolete_rows=False)
        logging.info(
            "Deleting %d cancelled rows in %s slave %s" % (len(slaves), configuration['id'], configuration['slave table']))
        delete_slaves(configuration['slave table'], slaves)
        # set master event state to 'cancellation' and stamp the sync date
        update = {
            'rowid': row['rowid'],
            'state': 'cancellation',
            'sync date': datetime.today().strftime(FUSION_TABLE_DATE_TIME_FORMAT)
        }
        fusion_tables.update_with_implicit_rowid(configuration['master table'], update)
        # abort gracefully if the task is approaching its deadline
        running_too_long(don_t_run_too_long=True)
    logging.info("Done syncing cancelled rows in %s master %s" % (configuration['id'], configuration['master table']))
def sync_updated_events(configuration, condition, don_t_run_too_long=False):
    """Insert slave rows for updated events and replace the old master row.

    Old slave rows are not deleted here; new rows are added with an
    incremented sequence number. The previous 'public' master row (the
    updated row is a copy) is deleted, then the copy is stamped 'public'.

    :param configuration: customer configuration dict ('id',
        'master table', 'slave table').
    :param condition: Fusion Tables WHERE clause selecting updated rows.
    :param don_t_run_too_long: forwarded to running_too_long().
    """
    updated = fusion_tables.select(configuration['master table'], condition=condition)
    logging.info("Syncing %d updated rows in %s master %s" % (len(updated), configuration['id'], configuration['master table']))
    for row in updated:
        # old rows are not deleted! New slave rows are just added with incremented sequence number
        # create slave dicts
        (slaves, final_date) = fusion_tables.master_to_slave(row)
        # store slave dicts (buffered until insert_go below — TODO confirm)
        for slave in slaves:
            fusion_tables.insert_hold(configuration['slave table'], slave)
        # delete the old master row (the updated row was a copy!)
        condition = "'event slug' = '%s' and 'state' = 'public'" % row['event slug']
        old = fusion_tables.select(configuration['master table'], cols=['rowid'], condition=condition)
        for old_row in old:  # should be only a single row!!
            fusion_tables.delete_with_implicit_rowid(configuration['master table'], old_row)
        # set master event state to 'public'
        update = {
            'rowid': row['rowid'],
            'state': 'public',
            'sync date': datetime.today().strftime(FUSION_TABLE_DATE_TIME_FORMAT),
            'final date': final_date
        }
        fusion_tables.update_with_implicit_rowid(configuration['master table'], update)
        running_too_long(don_t_run_too_long)
    # flush the buffered slave inserts
    fusion_tables.insert_go(configuration['slave table'])
    logging.info("Done syncing updated rows in %s master %s" % (configuration['id'], configuration['master table']))
def sync_events_with_final_date_passed(configuration, condition):
    """Delete master and slave rows of events whose final date has passed.

    :param configuration: customer configuration dict ('id',
        'master table', 'slave table').
    :param condition: Fusion Tables WHERE clause selecting finally past
        master rows.
    """
    outdated = fusion_tables.select(configuration['master table'], condition=condition)
    logging.info("Deleting %d finally past events in %s master (and slave) %s" % (len(outdated), configuration['id'], configuration['master table']))
    for row in outdated:
        # delete old slave rows for this event
        condition = "'event slug' = '%s'" % row['event slug']
        slaves = fusion_tables.select(configuration['slave table'], cols=['rowid'], condition=condition, filter_obsolete_rows=False)
        logging.info("Deleting %d finally past events in %s slave %s" % (len(slaves), configuration['id'], configuration['slave table']))
        delete_slaves(configuration['slave table'], slaves)
        # delete cancellation master rows
        fusion_tables.delete_with_implicit_rowid(configuration['master table'], row)
        # abort gracefully if the task is approaching its deadline
        running_too_long(don_t_run_too_long=True)
    logging.info("Done deleting finally past events in %s master (and slave) %s" % (configuration['id'], configuration['master table']))
def sync_new_events(configuration, condition, don_t_run_too_long=False):
    """Create slave rows for new master rows and mark them 'public'.

    :param configuration: customer configuration dict ('id',
        'master table', 'slave table').
    :param condition: Fusion Tables WHERE clause selecting new rows.
    :param don_t_run_too_long: forwarded to running_too_long().
    """
    new = fusion_tables.select(configuration['master table'], condition=condition)
    logging.info(
        "Syncing %d new rows in %s master %s" % (len(new), configuration['id'], configuration['master table']))
    for row in new:
        # create slave dicts
        (slaves, final_date) = fusion_tables.master_to_slave(row)
        # store slave dicts (buffered until insert_go below — TODO confirm)
        for slave in slaves:
            fusion_tables.insert_hold(configuration['slave table'], slave)
        # set master event state to 'public' and stamp sync/final dates
        update = {
            'rowid': row['rowid'],
            'state': 'public',
            'sync date': datetime.today().strftime(FUSION_TABLE_DATE_TIME_FORMAT),
            'final date': final_date
        }
        fusion_tables.update_with_implicit_rowid(configuration['master table'], update)
        running_too_long(don_t_run_too_long)
    # flush the buffered slave inserts
    fusion_tables.insert_go(configuration['slave table'])
    logging.info("Done syncing new rows in %s master %s" % (configuration['id'], configuration['master table']))
def get(self, event_slug=None, datetime_slug=None):
    """Render the detail page ('event.html') for one event occurrence.

    :param event_slug: slug identifying the event.
    :param datetime_slug: optional slug narrowing to one occurrence.

    Writes the rendered page to the response. Shows a localized
    no-results message when nothing matches.
    """
    configuration = customer_configuration.get_configuration(self.request)
    # detect language and use configuration as default
    language = get_language(self.request, configuration)
    localization = get_localization()
    # query on event
    condition = "'event slug' = '%s'" % event_slug
    if datetime_slug:
        condition += " AND "
        condition += "'datetime slug' = '%s'" % datetime_slug
    data = fusion_tables.select(configuration['slave table'], condition=condition)
    no_results_message = ''
    if not data:
        no_results_message = localization[configuration['language']]['no-results']
    data = data[0] if data else {}
    # if data has no address, fetch it; guard against the no-results {}
    # case, which previously raised KeyError on data['address']
    if data and not data.get('address'):
        data['address'] = address(data['latitude'], data['longitude'], language)
    template = jinja_environment.get_template('event.html')
    content = template.render(
        configuration=configuration,
        data=data,
        date_time_reformat=date_time_reformat,
        date_time_reformat_iso=date_time_reformat_iso,
        no_results_message=no_results_message,
        localization=localization[language]
    )
    # return the web-page content
    self.response.out.write(content)
    return
def get(self):
    """Serve the XML sitemap index grouped by location slug."""
    config = customer_configuration.get_configuration(self.request)
    # NOTE(review): count is fetched but never rendered — kept for the
    # side effect / parity with the original
    count = fusion_tables.count(config['slave table'])
    tpl = jinja_environment.get_template('sitemapindexbylocation.xml')
    # build the query: all real occurrences, one row per location
    clauses = ["'sequence' > 0"]
    commercial_limit = customer_configuration.get_limit(self.request)
    if commercial_limit:
        clauses.append("'start' < '%s'" % commercial_limit)
    condition = " AND ".join(clauses) + " GROUP BY 'location slug'"
    no_results_message = ''
    locations = fusion_tables.select(config['slave table'],
                                     condition=condition,
                                     cols=['location slug'])
    if not locations:
        no_results_message = '# No results'
    content = tpl.render(configuration=config, locations=locations)
    # emit the XML document
    self.response.headers['Content-Type'] = "application/xml"
    self.response.out.write(content)
    return
def get(self):
    """Serve a plain-text sitemap of slave rows, paged via offset/batch."""
    config = customer_configuration.get_configuration(self.request)
    offset_param = self.request.get("offset")
    batch_param = self.request.get("batch")
    query = "'sequence' > 0"
    # apply commercial limit
    commercial_limit = customer_configuration.get_limit(self.request)
    if commercial_limit:
        query += " AND 'start' < '%s'" % commercial_limit
    if offset_param:
        query += " OFFSET %s" % offset_param
    # at least for debugging, limit the number of results
    if batch_param:
        query += " LIMIT %s" % batch_param
    rows = fusion_tables.select(
        config['slave table'], condition=query,
        cols=['event slug', 'datetime slug', 'sequence', 'start'])
    message = '' if rows else '# No results'
    tpl = jinja_environment.get_template('sitemap.txt')
    content = tpl.render(configuration=config, data=rows,
                         no_results_message=message)
    # emit the plain-text document
    self.response.headers['Content-Type'] = "text/plain"
    self.response.out.write(content)
    return
def get(self):
    """Serve a plain-text sitemap for a single location's occurrences."""
    config = customer_configuration.get_configuration(self.request)
    location_slug = self.request.get("location")
    query = "'sequence' > 0"
    # apply commercial limit
    commercial_limit = customer_configuration.get_limit(self.request)
    if commercial_limit:
        query += " AND 'start' < '%s'" % commercial_limit
    if location_slug:
        query += " AND 'location slug' = '%s'" % location_slug
    query += " ORDER BY 'datetime slug'"
    rows = fusion_tables.select(
        config['slave table'], condition=query,
        cols=['event slug', 'datetime slug', 'sequence', 'start'])
    message = '' if rows else '# No results'
    tpl = jinja_environment.get_template('sitemap.txt')
    content = tpl.render(configuration=config, data=rows,
                         no_results_message=message)
    # emit the plain-text document
    self.response.headers['Content-Type'] = "text/plain"
    self.response.out.write(content)
    return
def get(self):
    """Return currently-running occurrences as a GeoJSON FeatureCollection."""
    now = self.request.get("now")
    if not now:
        # fallback to server time
        now = datetime.datetime.strftime(datetime.datetime.now(), DATE_TIME_FORMAT)
    condition = "'previous start' <= '%s' and 'end' >= '%s'" % (now, now)
    cols = ['datetime slug', 'event slug', 'sequence', 'latitude',
            'longitude', 'location slug', 'tags', 'hashtags']
    config = customer_configuration.get_configuration(self.request)
    # query on event occurrences active right now
    rows = fusion_tables.select(config['slave table'], condition=condition, cols=cols)
    # convert the result rows to GeoJSON features
    features = [
        {
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": [row['longitude'], row['latitude']]
            },
            "properties": {
                "datetime slug": row['datetime slug'],
                "event slug": row['event slug'],
                "sequence": row['sequence'],
                "location slug": row['location slug'],
                "tags": row['tags'],
                "hashtags": row['hashtags']
            }
        }
        for row in rows
    ]
    collection = {"type": "FeatureCollection", "features": features}
    # emit the JSON document
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(json.dumps(collection))
    return
def get(self):
    """Serve a plain-text sitemap filtered to one location."""
    config = customer_configuration.get_configuration(self.request)
    location_slug = self.request.get("location")
    query = "'sequence' > 0"
    # apply commercial limit
    commercial_limit = customer_configuration.get_limit(self.request)
    if commercial_limit:
        query += " AND 'start' < '%s'" % commercial_limit
    if location_slug:
        query += " AND 'location slug' = '%s'" % location_slug
    query += " ORDER BY 'datetime slug'"
    rows = fusion_tables.select(
        config['slave table'], condition=query,
        cols=['event slug', 'datetime slug', 'sequence', 'start'])
    message = '' if rows else '# No results'
    tpl = jinja_environment.get_template('sitemap.txt')
    content = tpl.render(configuration=config, data=rows,
                         no_results_message=message)
    # emit the plain-text document
    self.response.headers['Content-Type'] = "text/plain"
    self.response.out.write(content)
    return
def get(self):
    """Serve a plain-text sitemap page selected by offset/batch parameters."""
    config = customer_configuration.get_configuration(self.request)
    offset_param = self.request.get("offset")
    batch_param = self.request.get("batch")
    query = "'sequence' > 0"
    # apply commercial limit
    commercial_limit = customer_configuration.get_limit(self.request)
    if commercial_limit:
        query += " AND 'start' < '%s'" % commercial_limit
    if offset_param:
        query += " OFFSET %s" % offset_param
    # at least for debugging, limit the number of results
    if batch_param:
        query += " LIMIT %s" % batch_param
    rows = fusion_tables.select(
        config['slave table'], condition=query,
        cols=['event slug', 'datetime slug', 'sequence', 'start'])
    message = '' if rows else '# No results'
    tpl = jinja_environment.get_template('sitemap.txt')
    content = tpl.render(configuration=config, data=rows,
                         no_results_message=message)
    # emit the plain-text document
    self.response.headers['Content-Type'] = "text/plain"
    self.response.out.write(content)
    return
def get(self, event_slug=None, datetime_slug=None):
    """Render the detail page ('event.html') for one event occurrence.

    :param event_slug: slug identifying the event.
    :param datetime_slug: optional slug narrowing to one occurrence.

    Writes the rendered page to the response. Shows a localized
    no-results message when nothing matches.
    """
    configuration = customer_configuration.get_configuration(self.request)
    # detect language and use configuration as default
    language = get_language(self.request, configuration)
    localization = get_localization()
    # query on event
    condition = "'event slug' = '%s'" % event_slug
    if datetime_slug:
        condition += " AND "
        condition += "'datetime slug' = '%s'" % datetime_slug
    data = fusion_tables.select(configuration['slave table'], condition=condition)
    no_results_message = ''
    if not data:
        no_results_message = localization[
            configuration['language']]['no-results']
    data = data[0] if data else {}
    # if data has no address, fetch it; guard against the no-results {}
    # case, which previously raised KeyError on data['address']
    if data and not data.get('address'):
        data['address'] = address(data['latitude'], data['longitude'], language)
    template = jinja_environment.get_template('event.html')
    content = template.render(
        configuration=configuration,
        data=data,
        date_time_reformat=date_time_reformat,
        date_time_reformat_iso=date_time_reformat_iso,
        no_results_message=no_results_message,
        localization=localization[language])
    # return the web-page content
    self.response.out.write(content)
    return
def sync_one_month_old_cancellations(configuration, condition):
    """Purge cancellation master rows matching *condition*.

    :param configuration: customer configuration dict ('id',
        'master table').
    :param condition: Fusion Tables WHERE clause selecting old
        cancellation rows (presumably one month old — see caller).
    """
    cancellation = fusion_tables.select(configuration['master table'], condition=condition)
    logging.info("Syncing %d cancellation rows in %s master %s" % (len(cancellation), configuration['id'], configuration['master table']))
    for row in cancellation:
        # delete cancellation master rows
        fusion_tables.delete_with_implicit_rowid(configuration['master table'], row)
    logging.info("Done syncing cancellation rows in %s master %s" % (configuration['id'], configuration['master table']))
    # NOTE(review): unlike the sibling sync functions this deadline check
    # runs after the loop, not per row — confirm this is intentional
    running_too_long(don_t_run_too_long=True)
def sync_cancelled_events(configuration, condition):
    """Remove slave rows for cancelled events and mark masters 'cancellation'.

    :param configuration: customer configuration dict ('id',
        'master table', 'slave table').
    :param condition: Fusion Tables WHERE clause selecting cancelled
        master rows.
    """
    cancelled = fusion_tables.select(configuration['master table'], condition=condition)
    logging.info("Syncing %d cancelled rows in %s master %s" % (len(cancelled), configuration['id'], configuration['master table']))
    for row in cancelled:
        # delete cancelled slave rows belonging to this event
        condition = "'event slug' = '%s'" % row['event slug']
        slaves = fusion_tables.select(configuration['slave table'], cols=['rowid'], condition=condition, filter_obsolete_rows=False)
        logging.info("Deleting %d cancelled rows in %s slave %s" % (len(slaves), configuration['id'], configuration['slave table']))
        delete_slaves(configuration['slave table'], slaves)
        # set master event state to 'cancellation' and stamp the sync date
        update = {
            'rowid': row['rowid'],
            'state': 'cancellation',
            'sync date': datetime.today().strftime(FUSION_TABLE_DATE_TIME_FORMAT)
        }
        fusion_tables.update_with_implicit_rowid(configuration['master table'], update)
        # abort gracefully if the task is approaching its deadline
        running_too_long(don_t_run_too_long=True)
    logging.info("Done syncing cancelled rows in %s master %s" % (configuration['id'], configuration['master table']))
def sync_old_version_of_updated_events(configuration, condition):
    """Delete superseded slave rows for updated events and unflag the master.

    For each master row matching *condition*, slave rows with a lower
    'sequence' number are deleted, then the master row's
    'update after sync' flag is reset to 'false'.

    :param configuration: customer configuration dict ('id',
        'master table', 'slave table').
    :param condition: Fusion Tables WHERE clause selecting updated
        master rows.
    """
    updated_master = fusion_tables.select(configuration['master table'], condition=condition)
    logging.info("Deleting old slave rows for %d updated events in %s master %s" % (len(updated_master), configuration['id'], configuration['master table']))
    for updated_master_row in updated_master:
        # find the old slave row(s): same event, lower sequence number
        condition = "'event slug' = '%s' AND 'sequence' < %s" % (updated_master_row['event slug'], updated_master_row['sequence'])
        old_slave = fusion_tables.select(configuration['slave table'], condition=condition, filter_obsolete_rows=False)
        # delete the old row(s)
        logging.info("Deleting %d old event rows in %s slave %s" % (len(old_slave), configuration['id'], configuration['slave table']))
        delete_slaves(configuration['slave table'], old_slave)
        logging.info("Deleted %d old rows in %s slave %s" % (len(old_slave), configuration['id'], configuration['slave table']))
        # unflag the updated master row so it is not processed again
        unflagged_row = {}
        unflagged_row['rowid'] = updated_master_row['rowid']
        unflagged_row['update after sync'] = 'false'
        fusion_tables.update_with_implicit_rowid(configuration['master table'], unflagged_row)
        logging.info("Unflagged updated row %s in %s master %s" % (updated_master_row['rowid'], configuration['id'], configuration['master table']))
        # abort gracefully if the task is approaching its deadline
        running_too_long(don_t_run_too_long=True)
    logging.info("Done deleting old slave rows in %s slave %s" % (configuration['id'], configuration['slave table']))
def sync_passed_events(configuration, condition):
    """Delete slave rows whose occurrences have passed.

    :param configuration: customer configuration dict ('id',
        'slave table').
    :param condition: Fusion Tables WHERE clause selecting past slave
        rows.
    """
    outdated = fusion_tables.select(configuration['slave table'], condition=condition, filter_obsolete_rows=False)
    logging.info(
        "Deleting %d past events in %s slave %s" % (len(outdated), configuration['id'], configuration['slave table']))
    delete_slaves(configuration['slave table'], outdated)
    # bug fix: this message said "slave" but logged the master table name;
    # log the slave table that was actually purged
    logging.info("Done deleting past events in %s slave %s" % (configuration['id'], configuration['slave table']))
    running_too_long(don_t_run_too_long=True)
def sync_outdated_events(configuration, condition):
    """Regenerate future slave rows for master rows matching *condition*.

    For each outdated master row, only slave rows with a 'datetime slug'
    not already present are inserted; existing slave rows are kept.
    The master row is then stamped 'public' with a fresh sync date.

    :param configuration: customer configuration dict; must contain
        'id', 'master table' and 'slave table' keys.
    :param condition: Fusion Tables WHERE clause selecting the outdated
        master rows.
    """
    outdated = fusion_tables.select(configuration['master table'], condition=condition)
    logging.info(
        "Syncing %d outdated public rows in %s master %s" % (len(outdated), configuration['id'], configuration['master table']))
    for row in outdated:
        # select old slave rows
        condition = "'event slug' = '%s'" % row['event slug']
        slaves = fusion_tables.select(configuration['slave table'], cols=['datetime slug'], condition=condition, filter_obsolete_rows=False)
        # create slave dicts
        datetime_slugs = [slave['datetime slug'] for slave in slaves]
        (new_slaves, final_date) = fusion_tables.master_to_slave(row)
        # store slave dicts; count is approximate because only rows with a
        # new 'datetime slug' are actually inserted below
        logging.info("Inserting approx. %d future rows in %s slave %s" % (len(new_slaves) - len(slaves), configuration['id'], configuration['slave table']))
        for new_slave in new_slaves:
            if new_slave['datetime slug'] not in datetime_slugs:
                # insert_hold presumably buffers rows until insert_go flushes
                # them in one batch — TODO confirm against fusion_tables
                fusion_tables.insert_hold(configuration['slave table'], new_slave)
        # set master event state to 'public'
        update = {
            'rowid': row['rowid'],
            'state': 'public',
            'sync date': datetime.today().strftime(FUSION_TABLE_DATE_TIME_FORMAT),
            'final date': final_date
        }
        # NOTE(review): this logs before update_with_implicit_rowid runs
        logging.info(
            "Updated sync timestamp and final date in regenerated row in %s master %s" % (configuration['id'], configuration['master table']))
        fusion_tables.update_with_implicit_rowid(configuration['master table'], update)
        # abort gracefully if the task is approaching its deadline
        running_too_long(don_t_run_too_long=True)
    # flush the buffered slave inserts
    fusion_tables.insert_go(configuration['slave table'])
    logging.info("Done syncing outdated public rows in %s master %s" % (configuration['id'], configuration['master table']))
def get_configuration(request):
    """Return the customer configuration row for the id in *request*.

    Results are cached in the module-global _configuration dict keyed by
    customer id; a cache miss triggers one Fusion Tables lookup.
    Aborts the request with 404 when the id is unknown.

    :param request: the incoming webapp2 request, used to derive the id.
    :returns: the configuration row (dict) for this customer.
    """
    # TODO as long as the instance keeps running, the global variable won't be refreshed
    # from the configuration table, so after adding or modifying a configuration,
    # there must be made a call to some kind of force_get_configuration() cal
    global _configuration
    id = get_id(request)
    if id not in _configuration:
        condition = "'id' = '%s'" % id
        configurations = fusion_tables.select(CONFIGURATION_TABLE_ID, condition=condition)
        if not configurations:
            # webapp2.abort raises; the extra `raise` is harmless
            raise webapp2.abort(404)
        _configuration[id] = configurations[0]
    return _configuration[id]
def get(self):
    """List unique event locations from the master table, 100 per page."""
    config = customer_configuration.get_configuration(self.request)
    # detect language and use configuration as default
    language = get_language(self.request, config)
    localization = get_localization()
    offset = self.request.get("offset")
    condition = "'state' = 'public'"
    # apply commercial limit
    commercial_limit = customer_configuration.get_limit(self.request)
    if commercial_limit:
        condition += " AND 'start' < '%s'" % commercial_limit
    if offset:
        condition += " OFFSET %s" % offset
    # at least for debugging, limit to 100 results
    condition += " LIMIT 100"
    rows = fusion_tables.select(config['master table'], condition=condition)
    no_results_message = '' if rows else localization[config['language']]['no-results']
    # keep only the first row seen per location slug, preserving order
    seen = set()
    unique_data = []
    for row in rows:
        slug = row['location slug']
        if slug not in seen:
            seen.add(slug)
            unique_data.append(row)
    next_offset = int(offset if offset else 0) + 100
    next_url = "%s?offset=%s" % (self.request.path_url, next_offset)
    # for debugging, the id must be added to an url as parameter
    id_appendix = ""
    debug_id = self.request.get("id")
    if debug_id:
        id_appendix = "?id=%s" % debug_id
        next_url += "&id=%s" % debug_id
    tpl = jinja_environment.get_template('locations.html')
    content = tpl.render(configuration=config,
                         data=unique_data,
                         date_time_reformat=date_time_reformat,
                         no_results_message=no_results_message,
                         localization=localization[language],
                         id_appendix=id_appendix,
                         offset=offset,
                         next_url=next_url)
    # return the web-page content
    self.response.out.write(content)
    return
def get(self):
    """Render the paged list of distinct event locations."""
    config = customer_configuration.get_configuration(self.request)
    # detect language and use configuration as default
    language = get_language(self.request, config)
    localization = get_localization()
    offset = self.request.get("offset")
    condition = "'state' = 'public'"
    # apply commercial limit
    commercial_limit = customer_configuration.get_limit(self.request)
    if commercial_limit:
        condition += " AND 'start' < '%s'" % commercial_limit
    if offset:
        condition += " OFFSET %s" % offset
    # at least for debugging, limit to 100 results
    condition += " LIMIT 100"
    rows = fusion_tables.select(config['master table'], condition=condition)
    no_results_message = '' if rows else localization[config['language']]['no-results']
    # keep only the first row seen per location slug, preserving order
    seen = set()
    unique_data = []
    for row in rows:
        slug = row['location slug']
        if slug not in seen:
            seen.add(slug)
            unique_data.append(row)
    next_offset = int(offset if offset else 0) + 100
    next_url = "%s?offset=%s" % (self.request.path_url, next_offset)
    # for debugging, the id must be added to an url as parameter
    id_appendix = ""
    debug_id = self.request.get("id")
    if debug_id:
        id_appendix = "?id=%s" % debug_id
        next_url += "&id=%s" % debug_id
    tpl = jinja_environment.get_template('locations.html')
    content = tpl.render(
        configuration=config,
        data=unique_data,
        date_time_reformat=date_time_reformat,
        no_results_message=no_results_message,
        localization=localization[language],
        id_appendix=id_appendix,
        offset=offset,
        next_url=next_url
    )
    # return the web-page content
    self.response.out.write(content)
    return
def sync_updated_events(configuration, condition, don_t_run_too_long=False):
    """Insert slave rows for updated events and replace the old master row.

    Old slave rows are not deleted here; new rows are added with an
    incremented sequence number. The previous 'public' master row (the
    updated row is a copy) is deleted, then the copy is stamped 'public'.

    :param configuration: customer configuration dict ('id',
        'master table', 'slave table').
    :param condition: Fusion Tables WHERE clause selecting updated rows.
    :param don_t_run_too_long: forwarded to running_too_long().
    """
    updated = fusion_tables.select(configuration['master table'], condition=condition)
    logging.info(
        "Syncing %d updated rows in %s master %s" % (len(updated), configuration['id'], configuration['master table']))
    for row in updated:
        # old rows are not deleted! New slave rows are just added with incremented sequence number
        # create slave dicts
        (slaves, final_date) = fusion_tables.master_to_slave(row)
        # store slave dicts (buffered until insert_go below — TODO confirm)
        for slave in slaves:
            fusion_tables.insert_hold(configuration['slave table'], slave)
        # delete the old master row (the updated row was a copy!)
        condition = "'event slug' = '%s' and 'state' = 'public'" % row[
            'event slug']
        old = fusion_tables.select(configuration['master table'], cols=['rowid'], condition=condition)
        for old_row in old:  # should be only a single row!!
            fusion_tables.delete_with_implicit_rowid(
                configuration['master table'], old_row)
        # set master event state to 'public'
        update = {
            'rowid': row['rowid'],
            'state': 'public',
            'sync date': datetime.today().strftime(FUSION_TABLE_DATE_TIME_FORMAT),
            'final date': final_date
        }
        fusion_tables.update_with_implicit_rowid(configuration['master table'], update)
        running_too_long(don_t_run_too_long)
    # flush the buffered slave inserts
    fusion_tables.insert_go(configuration['slave table'])
    logging.info("Done syncing updated rows in %s master %s" % (configuration['id'], configuration['master table']))
def sync_events_with_final_date_passed(configuration, condition):
    """Delete master and slave rows of events whose final date has passed.

    :param configuration: customer configuration dict ('id',
        'master table', 'slave table').
    :param condition: Fusion Tables WHERE clause selecting finally past
        master rows.
    """
    outdated = fusion_tables.select(configuration['master table'], condition=condition)
    logging.info(
        "Deleting %d finally past events in %s master (and slave) %s" % (len(outdated), configuration['id'], configuration['master table']))
    for row in outdated:
        # delete old slave rows for this event
        condition = "'event slug' = '%s'" % row['event slug']
        slaves = fusion_tables.select(configuration['slave table'], cols=['rowid'], condition=condition, filter_obsolete_rows=False)
        logging.info(
            "Deleting %d finally past events in %s slave %s" % (len(slaves), configuration['id'], configuration['slave table']))
        delete_slaves(configuration['slave table'], slaves)
        # delete cancellation master rows
        fusion_tables.delete_with_implicit_rowid(configuration['master table'], row)
        # abort gracefully if the task is approaching its deadline
        running_too_long(don_t_run_too_long=True)
    logging.info(
        "Done deleting finally past events in %s master (and slave) %s" % (configuration['id'], configuration['master table']))
def get(self):
    """Emit active event occurrences as a GeoJSON FeatureCollection."""
    now = self.request.get("now")
    if not now:
        # fallback to server time
        now = datetime.datetime.strftime(datetime.datetime.now(), DATE_TIME_FORMAT)
    condition = "'previous start' <= '%s' and 'end' >= '%s'" % (now, now)
    cols = ['datetime slug', 'event slug', 'sequence', 'latitude',
            'longitude', 'location slug', 'tags', 'hashtags']
    config = customer_configuration.get_configuration(self.request)
    # query on event occurrences active right now
    rows = fusion_tables.select(config['slave table'], condition=condition, cols=cols)
    # convert the result rows to GeoJSON features
    features = [
        {
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": [row['longitude'], row['latitude']]
            },
            "properties": {
                "datetime slug": row['datetime slug'],
                "event slug": row['event slug'],
                "sequence": row['sequence'],
                "location slug": row['location slug'],
                "tags": row['tags'],
                "hashtags": row['hashtags']
            }
        }
        for row in rows
    ]
    collection = {"type": "FeatureCollection", "features": features}
    # emit the JSON document
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(json.dumps(collection))
    return
def sync_new_events(configuration, condition, don_t_run_too_long=False):
    """Create slave rows for new master rows and mark them 'public'.

    :param configuration: customer configuration dict ('id',
        'master table', 'slave table').
    :param condition: Fusion Tables WHERE clause selecting new rows.
    :param don_t_run_too_long: forwarded to running_too_long().
    """
    new = fusion_tables.select(configuration['master table'], condition=condition)
    logging.info("Syncing %d new rows in %s master %s" % (len(new), configuration['id'], configuration['master table']))
    for row in new:
        # create slave dicts
        (slaves, final_date) = fusion_tables.master_to_slave(row)
        # store slave dicts (buffered until insert_go below — TODO confirm)
        for slave in slaves:
            fusion_tables.insert_hold(configuration['slave table'], slave)
        # set master event state to 'public' and stamp sync/final dates
        update = {
            'rowid': row['rowid'],
            'state': 'public',
            'sync date': datetime.today().strftime(FUSION_TABLE_DATE_TIME_FORMAT),
            'final date': final_date
        }
        fusion_tables.update_with_implicit_rowid(configuration['master table'], update)
        running_too_long(don_t_run_too_long)
    # flush the buffered slave inserts
    fusion_tables.insert_go(configuration['slave table'])
    logging.info("Done syncing new rows in %s master %s" % (configuration['id'], configuration['master table']))
def get(self, now=None, location_slug=None):
    """Render the QR landing page ('qr.html') for a location.

    :param now: timestamp string (DATE_TIME_FORMAT); defaults to the
        current server time.
    :param location_slug: slug identifying the location.

    Bug fix: the original default for *now* was
    ``datetime.datetime.strftime(datetime.datetime.now(), DATE_TIME_FORMAT)``
    evaluated once at module import, so every request without an explicit
    *now* reused the process start timestamp. A ``None`` sentinel computes
    the current time per request instead.
    """
    if now is None:
        now = datetime.datetime.strftime(datetime.datetime.now(), DATE_TIME_FORMAT)
    configuration = customer_configuration.get_configuration(self.request)
    # detect language and use configuration as default
    language = get_language(self.request, configuration)
    localization = get_localization()
    condition = "start >= '%s'" % now
    # apply commercial limit
    limit = customer_configuration.get_limit(self.request)
    if limit:
        condition += " AND 'start' < '%s'" % limit
    # query on location
    condition += " AND 'location slug' = '%s'" % location_slug
    # sort by datetime slug
    condition += " ORDER BY 'datetime slug'"
    no_results_message = ''
    data = fusion_tables.select(configuration['slave table'], condition=condition)
    if not data:
        no_results_message = 'Geen activiteiten voldoen aan de zoekopdracht.'
        condition = "'location slug' = '%s'" % location_slug  # search without time filter
        data = fusion_tables.select_first(configuration['slave table'], condition)
        if not data:
            # TODO what if the location's events have been deleted?
            logging.error("No events found for location (%s)" % condition)
            raise webapp2.abort(404)
    qr_url = self.request.url
    url = qr_url.replace('/qr/location/', '/all/location/')
    template = jinja_environment.get_template('qr.html')
    content = template.render(
        configuration=configuration,
        data=data,
        date_time_reformat=date_time_reformat,
        no_results_message=no_results_message,
        url=url,
        localization=localization[language]
    )
    # return the web-page content
    self.response.out.write(content)
    return
def get(self):
    """Emit the XML sitemap index, one entry per location slug."""
    config = customer_configuration.get_configuration(self.request)
    # NOTE(review): count is fetched but never rendered — kept for the
    # side effect / parity with the original
    count = fusion_tables.count(config['slave table'])
    tpl = jinja_environment.get_template('sitemapindexbylocation.xml')
    # build the query: all real occurrences, one row per location
    clauses = ["'sequence' > 0"]
    commercial_limit = customer_configuration.get_limit(self.request)
    if commercial_limit:
        clauses.append("'start' < '%s'" % commercial_limit)
    condition = " AND ".join(clauses) + " GROUP BY 'location slug'"
    no_results_message = ''
    locations = fusion_tables.select(config['slave table'],
                                     condition=condition,
                                     cols=['location slug'])
    if not locations:
        no_results_message = '# No results'
    content = tpl.render(configuration=config, locations=locations)
    # emit the XML document
    self.response.headers['Content-Type'] = "application/xml"
    self.response.out.write(content)
    return
def get_configurations():
    """Return every customer configuration row, loading the table once.

    The result is cached in the module-global _configurations; later
    calls return the cached list without querying again.
    """
    global _configurations
    if _configurations:
        return _configurations
    _configurations = fusion_tables.select(CONFIGURATION_TABLE_ID)
    return _configurations
def sync_passed_events(configuration, condition):
    """Delete slave rows whose occurrences have passed.

    :param configuration: customer configuration dict ('id',
        'slave table').
    :param condition: Fusion Tables WHERE clause selecting past slave
        rows.
    """
    outdated = fusion_tables.select(configuration['slave table'], condition=condition, filter_obsolete_rows=False)
    logging.info("Deleting %d past events in %s slave %s" % (len(outdated), configuration['id'], configuration['slave table']))
    delete_slaves(configuration['slave table'], outdated)
    # bug fix: this message said "slave" but logged the master table name;
    # log the slave table that was actually purged
    logging.info("Done deleting past events in %s slave %s" % (configuration['id'], configuration['slave table']))
    running_too_long(don_t_run_too_long=True)
def get(self, location_slug=None, timeframe=None, tags=None, hashtags=None):
    """Render the location page: events at one location, filtered.

    Args:
        location_slug: slug of the location whose events are listed.
        timeframe: 'now', 'today', 'tomorrow', 'week', or anything else
            (treated as 'all' — every event that has not yet ended).
        tags: comma-separated tag list; all must match.
        hashtags: comma-separated hashtag list; all must match.

    `now` may be supplied as a request parameter (assumed to be in
    DATE_TIME_FORMAT — TODO confirm callers guarantee this); otherwise
    server time is used.
    """
    now = self.request.get("now")
    if not now:
        now = datetime.datetime.strftime(datetime.datetime.now(), DATE_TIME_FORMAT)  # fallback to server time
    configuration = customer_configuration.get_configuration(self.request)
    localization = get_localization()
    # detect language and use configuration as default
    language = get_language(self.request, configuration)
    # calculate midnight, midnight1 and midnight 7 based on now
    now_p = datetime.datetime.strptime(now, DATE_TIME_FORMAT)
    midnight_p = datetime.datetime.combine(now_p + datetime.timedelta(days=1), datetime.time.min)
    midnight1_p = datetime.datetime.combine(now_p + datetime.timedelta(days=2), datetime.time.min)
    midnight7_p = datetime.datetime.combine(now_p + datetime.timedelta(days=8), datetime.time.min)
    midnight = datetime.datetime.strftime(midnight_p, DATE_TIME_FORMAT)
    midnight1 = datetime.datetime.strftime(midnight1_p, DATE_TIME_FORMAT)
    midnight7 = datetime.datetime.strftime(midnight7_p, DATE_TIME_FORMAT)
    # query on timeframe
    if timeframe == 'now':
        # start < now and end > now
        condition = "start <= '" + now + "' and end >= '" + now + "'"
    elif timeframe == 'today':
        # end > now and start < midnight
        condition = "end >= '" + now + "' and start <= '" + midnight + "'"
    elif timeframe == 'tomorrow':
        # end > midnight and start < midnight + 1 day
        condition = "end >= '" + midnight + "' and start <= '" + midnight1 + "'"
    elif timeframe == 'week':
        # end > now and start < midnight + 7 days
        condition = "end >= '" + now + "' and start <= '" + midnight7 + "'"
    else:
        # 'all' and other timeframes are interpreted as 'all'
        # end > now
        condition = "end >= '" + now + "'"
    # apply commercial limit
    limit = customer_configuration.get_limit(self.request)
    if limit:
        condition += " AND 'start' < '%s'" % limit
    # query on tags
    if tags:
        tags_p = tags.split(',')
        for tag in tags_p:
            condition += " AND tags CONTAINS '#" + tag + "#'"
            # tags in the fusion table are surrounded by hash characters to avoid
            # confusion if one tag would be a substring of another tag
    # query on hashtags
    if hashtags:
        hashtags_p = hashtags.split(',')
        for hashtag in hashtags_p:
            condition += " AND hashtags CONTAINS '#" + hashtag + "#'"
    # query on location
    # NOTE(review): assumes location_slug is a non-None string here;
    # a None value would raise TypeError on concatenation — confirm routing
    condition += " AND 'location slug' = '" + location_slug + "'"
    # sort by datetime slug
    condition += " ORDER BY 'datetime slug'"
    no_results_message = ''
    data = fusion_tables.select(configuration['slave table'], condition=condition)
    if not data:
        no_results_message = localization[configuration['language']]['no-results']
        condition = "'location slug' = '" + location_slug + "'"
        # search without timeframe or tags filter
        data = fusion_tables.select_first(configuration['slave table'], condition=condition)
        if not data:
            # TODO what if the location's events have been deleted?
            # is foreseen: fallback to query on event_slug only
            logging.error("No events found for location (%s)" % condition)
            raise webapp2.abort(404)
    template = jinja_environment.get_template('location.html')
    content = template.render(
        configuration=configuration,
        data=data,
        date_time_reformat=date_time_reformat,
        no_results_message=no_results_message,
        localization=localization[language]
    )
    # return the web-page content
    self.response.out.write(content)
    return
def get(self, location_slug=None, timeframe=None, tags=None, hashtags=None):
    """Render the location page: events at one location, filtered.

    Args:
        location_slug: slug of the location whose events are listed.
        timeframe: 'now', 'today', 'tomorrow', 'week'; any other value
            means 'all' (every event that has not yet ended).
        tags: comma-separated tag list; every tag must match.
        hashtags: comma-separated hashtag list; every hashtag must match.
    """
    # client may pin "now" via a request parameter; fall back to server time
    now = self.request.get("now") or datetime.datetime.strftime(
        datetime.datetime.now(), DATE_TIME_FORMAT)
    configuration = customer_configuration.get_configuration(self.request)
    localization = get_localization()
    # detect language, defaulting to the customer configuration's language
    language = get_language(self.request, configuration)

    base = datetime.datetime.strptime(now, DATE_TIME_FORMAT)

    def day_start(offset):
        # midnight at the start of the day `offset` days after `now`
        boundary = datetime.datetime.combine(
            base + datetime.timedelta(days=offset), datetime.time.min)
        return datetime.datetime.strftime(boundary, DATE_TIME_FORMAT)

    midnight, midnight1, midnight7 = day_start(1), day_start(2), day_start(8)

    # translate the timeframe into a datetime window over the event rows
    if timeframe == 'now':
        condition = "start <= '" + now + "' and end >= '" + now + "'"
    elif timeframe == 'today':
        condition = "end >= '" + now + "' and start <= '" + midnight + "'"
    elif timeframe == 'tomorrow':
        condition = "end >= '" + midnight + "' and start <= '" + midnight1 + "'"
    elif timeframe == 'week':
        condition = "end >= '" + now + "' and start <= '" + midnight7 + "'"
    else:
        # 'all' and any unknown timeframe: everything that has not ended
        condition = "end >= '" + now + "'"

    # commercial limit caps how far into the future events are shown
    limit = customer_configuration.get_limit(self.request)
    if limit:
        condition += " AND 'start' < '%s'" % limit

    # stored tags/hashtags are wrapped in '#' so one tag can never be a
    # substring match of another
    if tags:
        condition += "".join(
            " AND tags CONTAINS '#" + tag + "#'" for tag in tags.split(','))
    if hashtags:
        condition += "".join(
            " AND hashtags CONTAINS '#" + hashtag + "#'"
            for hashtag in hashtags.split(','))

    # restrict to the requested location and order chronologically
    condition += " AND 'location slug' = '" + location_slug + "'"
    condition += " ORDER BY 'datetime slug'"

    no_results_message = ''
    data = fusion_tables.select(configuration['slave table'], condition=condition)
    if not data:
        # nothing matched the filters: retry on location alone so the page
        # can still render with a "no results" message
        no_results_message = localization[configuration['language']]['no-results']
        condition = "'location slug' = '" + location_slug + "'"
        data = fusion_tables.select_first(configuration['slave table'],
                                          condition=condition)
        if not data:
            # TODO what if the location's events have been deleted?
            # is foreseen: fallback to query on event_slug only
            logging.error("No events found for location (%s)" % condition)
            raise webapp2.abort(404)

    template = jinja_environment.get_template('location.html')
    self.response.out.write(template.render(
        configuration=configuration,
        data=data,
        date_time_reformat=date_time_reformat,
        no_results_message=no_results_message,
        localization=localization[language]))
    return