def post(self):
    """Handle an inline-edit POST that updates a Team's category.

    Request params:
        pk:    id of the Team entity to update.
        value: the new category string.

    Returns nothing; an empty response body stands in for an empty JSON
    object, as the sibling handlers note.
    """
    logging.info(self.request.body)
    # Renamed from `id` so the builtin is not shadowed.
    team_id = self.request.get("pk")
    team = Team.get_by_id(long(team_id))
    team.category = utils.stringify(self.request.get("value"))
    # Lazy %-args: formatting is skipped when the log level is disabled.
    logger.info('Id: %s Value: %s', team_id, team.category)
    team.put()
    logger.debug('Team Data: %s', team)
    return
def post(self):
    """Handle an inline-edit POST that updates a User's display name.

    Request params:
        pk:    id of the User entity to update.
        value: the new name string.

    Returns nothing; the original comment notes the empty body is read as
    an empty JSON object ("{}") by the client.
    """
    logging.info(self.request.body)
    # Renamed from `id` so the builtin is not shadowed.
    user_id = self.request.get("pk")
    user = User.get_by_id(long(user_id))
    user.name = utils.stringify(self.request.get("value"))
    # Lazy %-args: formatting is skipped when the log level is disabled.
    logger.info('Id: %s Value: %s', user_id, user.name)
    user.put()
    logger.debug('User Data: %s', user)
    return
def post(self):
    """Handle an inline-edit POST that updates a Team's name and alias.

    The alias is the slugified form of the same submitted value, kept in
    sync with the name.

    Request params:
        pk:    id of the Team entity to update.
        value: the new name string.

    Returns nothing; the original comment notes the empty body is read as
    an empty JSON object ("{}") by the client.
    """
    logging.info(self.request.body)
    # Renamed from `id` so the builtin is not shadowed.
    team_id = self.request.get("pk")
    team = Team.get_by_id(long(team_id))
    value = self.request.get("value")
    team.name = utils.stringify(value)
    team.alias = utils.slugify(value)
    # Lazy %-args: formatting is skipped when the log level is disabled.
    logger.info('Id: %s Value: %s', team_id, team.name)
    team.put()
    logger.debug('Team Data: %s', team)
    return
def post(self):
    """Handle an inline-edit POST that updates a User's locality.

    Looks the submitted locality name up (title-cased) in the places
    table and, when found, also stores the matching place_id on the user.

    Request params:
        pk:    id of the User entity to update.
        value: the new locality string.

    Returns nothing; the original comment notes the empty body is read as
    an empty JSON object ("{}") by the client.
    """
    logging.info(self.request.body)
    # Renamed from `id` so the builtin is not shadowed.
    user_id = self.request.get("pk")
    user = User.get_by_id(long(user_id))
    user.locality = utils.stringify(self.request.get("value"))
    # Link to a known place record when the locality name matches one.
    locality_exist = self.importDao.query_by_place_name(user.locality.title())
    if locality_exist:
        user.locality_id = locality_exist.place_id
    # Lazy %-args: formatting is skipped when the log level is disabled.
    logger.info('Id: %s Value: %s', user_id, user.locality)
    user.put()
    logger.debug('User Data: %s', user)
    return
def process_csv(self, blob_info, start_record):
    """Import ground/club/event records from an uploaded CSV blob.

    Each row's first column selects the entity type ('ground', 'club' or
    'event'); the row is mapped into a DAO object, enriched with a Places
    API locality lookup, and created or updated via the matching DAO.

    Args:
        blob_info: blobstore blob holding the CSV (header in row 1).
        start_record: 1-based row number of the header; rows at or below
            it are skipped.

    Returns:
        int: number of entities created or updated.

    NOTE(review): `update` is reused across rows without being cleared,
    so fields from a previous row can leak into the next — confirm
    intended. NOTE(review): `key` is only assigned on the persist paths;
    the `if key is not None` check can hit an unbound (or stale) `key`,
    silently absorbed by the broad `except StandardError`.
    """
    update = {}
    upload_count = 0
    row_count = 0
    blob_reader = blobstore.BlobReader(blob_info.key())
    datareader = csv.reader(blob_reader)
    for row in datareader:
        row_count += 1
        if row_count >= (start_record + 1):  #to skip the header row
            logger.info('Starting to parse %s, %s' % (row_count, row[1]))
            entity_type = row[0].lower()
            update['name'] = utils.stringify(row[1])
            # Name is mandatory for all entities
            if update['name'] == '':
                logger.error('Name is empty. Skipping this record')
                continue
            update['locality'] = utils.stringify(row[5]).lower()
            update['city'] = utils.stringify(row[8]).lower()
            #Locality and city mandatory for playground and trainingcentre
            if entity_type != 'event':
                if update['locality'] == '' or update['city'] == '':
                    logger.error(
                        'Locality or city is empty. Skipping this record')
                    continue
            alias_name = utils.slugify(update['name'].lower())
            try:
                # Optional descriptive / contact / address columns.
                update['description'] = utils.stringify(row[2])
                update['sport'] = utils.stringify(row[3]).lower()
                update['person_name'] = utils.stringify(row[10])
                update['phone'] = utils.stringify(row[11])
                update['email'] = utils.stringify(row[12])
                update['website'] = utils.stringify(row[13])
                update['facebook'] = utils.stringify(row[18])
                update['twitter'] = utils.stringify(row[19])
                update['youtube'] = utils.stringify(row[20])
                update['gplus'] = utils.stringify(row[21])
                update['line1'] = utils.stringify(row[6])
                update['line2'] = utils.stringify(row[7])
                # PIN code is numeric when present; a bad value raises and
                # the record is skipped by the except below.
                update['pin'] = int(
                    row[9].strip()) if row[9] != '' else None
                #update['start_datetime'] = row[22]
                #update['end_datetime'] = row[23]
                logger.debug('Constructed Structure for upload ' +
                             str(update))
                logger.info('Entity type to be created, ' + entity_type)
                # Map the raw row dict into the entity-specific DAO form.
                if entity_type == 'ground':
                    import_data = self.form_to_dao_ground(
                        alias_name, **update)
                elif entity_type == 'club':
                    import_data = self.form_to_dao_center(
                        alias_name, **update)
                elif entity_type == 'event':
                    import_data = self.form_to_dao_event(
                        alias_name, **update)
                # for add locality table
                if import_data.address.locality != '':
                    place_name = import_data.address.locality
                    logger.info('Place: %s' % place_name)
                    # Resolve the locality via the Places autocomplete API
                    # (regions only) to get a canonical place_id.
                    newfeed_url = 'https://maps.googleapis.com/maps/api/place/autocomplete/xml?types=(regions)&input=' + urllib.quote(
                        place_name) + '&key=' + constants.PLACES_API_KEY
                    logging.info('newfeed url %s' % newfeed_url)
                    newroot = self.parse(newfeed_url)
                    auto_status = newroot.getElementsByTagName(
                        'status')[0].firstChild.data
                    logger.info('Auto Status: %s ' % auto_status)
                    if auto_status == 'OK':
                        # Take the first prediction only.
                        items = newroot.getElementsByTagName(
                            'prediction')[0]
                        place_id = items.getElementsByTagName(
                            'place_id')[0].firstChild.data
                        place_name = items.getElementsByTagName(
                            'value')[0].firstChild.data  #description
                        logger.info('Place Name: %s Place Id: %s ' %
                                    (place_name, place_id))
                        import_data.address.locality_id = place_id
                        logger.info('Locality Id: %s ' %
                                    import_data.address.locality_id)
                        # Ensure the locality exists in the locality table.
                        locality_add = self.process_locality(
                            place_name, place_id, constants.PLACES_API_KEY)
                        #if import_data.address.latlong == '':
                        #locality = self.importDao.query_by_place_id(place_id)
                        #import_data.address.latlong = locality.latlong
                        #logger.info('Geo Location New: %s ' % import_data.address.latlong)
                    else:
                        logger.error('Error: %s' % auto_status)
                logger.debug('Populated File Data ' + str(import_data))
                # Every entity hangs off a business record keyed by alias.
                business_key = self.create_or_update_business(
                    alias_name, import_data)
                import_data.business_id = business_key
                # Create-or-update per entity type: existing records are
                # refreshed in place, otherwise a new one is persisted.
                if entity_type == 'ground':
                    ground = self.playgroundDao.query_by_alias(alias_name)
                    if ground is not None:
                        self.playgroundDao.copy_playground_model(
                            ground, import_data)
                        key = self.playgroundDao.persist(
                            ground, self.user_info)
                        upload_count += 1
                        logger.info(
                            'Playground updated for %s with key %s' %
                            (alias_name, key))
                    else:
                        key = self.playgroundDao.persist(
                            import_data, self.user_info)
                        upload_count += 1
                        logger.info('New playground created for %s' %
                                    (update['name']))
                elif entity_type == 'club':
                    tc = self.trainingCentreDao.query_by_alias(alias_name)
                    if tc is not None:
                        self.trainingCentreDao.copy_trainingCentre_model(
                            tc, import_data)
                        key = self.trainingCentreDao.persist(
                            tc, self.user_info)
                        upload_count += 1
                        logger.info(
                            'TrainingCentre updated for %s with key %s' %
                            (alias_name, key))
                    else:
                        key = self.trainingCentreDao.persist(
                            import_data, self.user_info)
                        upload_count += 1
                        logger.info('New training centre created for %s' %
                                    (update['name']))
                elif entity_type == 'event':
                    event = self.eventDao.query_by_alias(alias_name)
                    if event is not None:
                        self.eventDao.copy_event_model(event, import_data)
                        key = self.eventDao.persist(event, self.user_info)
                        upload_count += 1
                        logger.info('Event updated for %s with key %s' %
                                    (alias_name, key))
                    else:
                        key = self.eventDao.persist(
                            import_data, self.user_info)
                        upload_count += 1
                        logger.info('Event created for %s' %
                                    (update['name']))
                if key is not None:
                    logger.info(
                        str(entity_type) + ' succesfully created/updated')
                else:
                    logger.error('Already Exist of %s:%s' %
                                 (entity_type, update['name']))
            except StandardError as e:
                #skipping to next record
                logger.error('Error occured, %s, for %s' %
                             (str(e), alias_name))
        else:
            logger.info("skipping record number, %s " % row_count)
    return upload_count
def process_csv(self, blob_info, start_record, event_id=None):
    """Import match/team/player records from an uploaded CSV blob.

    Each row's first column selects the entity type ('match', 'team' or
    'player'). Matches attach to an event (either the `event_id` argument
    or an alias given in the row); players attach to a team, which is
    auto-created when missing.

    Args:
        blob_info: blobstore blob holding the CSV (header in row 1).
        start_record: 1-based row number of the header; rows at or below
            it are skipped.
        event_id: optional event record id; when set (and longer than one
            character), its alias becomes the default event for matches.

    Returns:
        int: number of entities created or updated.

    NOTE(review): `update` is reused across rows without being cleared,
    so fields from a previous row can leak into the next — confirm
    intended. NOTE(review): `key` is only assigned on the persist paths;
    the `if key is not None` check can hit an unbound (or stale) `key`,
    silently absorbed by the broad `except StandardError`.
    """
    update = {}
    upload_count = 0
    row_count = 0
    blob_reader = blobstore.BlobReader(blob_info.key())
    datareader = csv.reader(blob_reader)
    for row in datareader:
        row_count += 1
        if row_count >= (start_record + 1):  #to skip the header row
            logger.info('Starting to parse %s, %s' % (row_count, row[1]))
            entity_type = row[0].lower()
            update['name'] = utils.stringify(row[1])
            # Name is mandatory for all entities
            if update['name'] == '':
                logger.error('Name is empty. Skipping this record')
                continue
            alias_name = utils.slugify(update['name'].lower())
            # Event Id check for matches
            if event_id is not None and len(event_id) > 1:
                event_data = self.eventDao.get_record(event_id)
                event_alias_name = event_data.alias
            else:
                event_alias_name = ''
            try:
                update['sport'] = utils.stringify(row[2]).lower()
                if entity_type == 'match':
                    # Dates are 'DD-MM-YYYY H:MMam/pm'; a bad value raises
                    # and the record is skipped by the except below.
                    update['start_datetime'] = datetime.strptime(
                        row[3], '%d-%m-%Y %I:%M%p')
                    update['end_datetime'] = datetime.strptime(
                        row[4], '%d-%m-%Y %I:%M%p')
                    update['result'] = utils.stringify(row[5])
                    # A per-row event alias overrides the default one.
                    event_alias_name = utils.slugify(row[6].lower(
                    )) if row[6] != '' else event_alias_name
                    update['participant_type'] = utils.stringify(
                        row[7]).lower()
                elif entity_type == 'player':
                    update['email'] = utils.stringify(row[3])
                    update['phone'] = utils.stringify(row[4])
                    update['teamName'] = utils.stringify(row[5])
                    team_alias_name = utils.slugify(
                        update['teamName'].lower())
                logger.debug('Constructed Structure for upload ' +
                             str(update))
                logger.info('Entity type to be created, ' + entity_type)
                # Map the raw row dict into the entity-specific DAO form.
                if entity_type == 'match':
                    import_data = self.form_to_dao_match(
                        alias_name, **update)
                elif entity_type == 'team':
                    import_data = self.form_to_dao_team(
                        alias_name, **update)
                elif entity_type == 'player':
                    import_data = self.form_to_dao_player(
                        alias_name, **update)
                logger.debug('Populated File Data ' + str(import_data))
                if entity_type == 'match':
                    # Matches require an existing parent event.
                    event = self.eventDao.query_by_alias(event_alias_name)
                    if event is not None:
                        match_exist = self.matchDao.query_by_alias(
                            alias_name, event.key, update['sport'])
                        if match_exist is None:
                            import_data.event_id = event.key
                            key = self.matchDao.persist(
                                import_data, self.user_info)
                            upload_count += 1
                            logger.info(
                                'New Match Created for %s with key %s' %
                                (alias_name, key))
                        else:
                            logger.error('Already Exist of %s:%s' %
                                         (entity_type, update['name']))
                    else:
                        logger.error('Event Name %s doesnot exist' %
                                     (event_alias_name))
                elif entity_type == 'team':
                    team_exist = self.teamDao.query_by_alias(
                        alias_name, update['sport'])
                    logger.info('Team Exist Data: ' + str(team_exist))
                    if team_exist is None:
                        key = self.teamDao.persist(import_data,
                                                   self.user_info)
                        upload_count += 1
                        logger.info('New Team Created for %s with key %s' %
                                    (alias_name, key))
                    else:
                        logger.error('Already Exist of %s:%s' %
                                     (entity_type, update['name']))
                elif entity_type == 'player':
                    # Players are deduped by email; their team is looked up
                    # by alias and auto-created when absent.
                    player_exist = self.playerDao.query_by_email(
                        update['email'])
                    logger.info('Player Exist Data: ' + str(player_exist))
                    if player_exist is None:
                        team_exist = self.teamDao.query_by_team_alias(
                            team_alias_name, self.user_info)
                        logger.info('Exist Team for player: ' +
                                    str(team_exist))
                        if team_exist is None:
                            team_import_data = self.form_to_dao_team_auto(
                                team_alias_name, **update)
                            team_key = self.teamDao.persist(
                                team_import_data, self.user_info)
                            logger.info(
                                'New Team Created for %s with key %s' %
                                (team_alias_name, team_key))
                            import_data.teams = team_key
                        else:
                            import_data.teams = team_exist.key
                        key = self.playerDao.persist(
                            import_data, self.user_info)
                        upload_count += 1
                        logger.info(
                            'New Player Created for %s with key %s' %
                            (alias_name, key))
                    else:
                        logger.error('Already Exist of %s:%s' %
                                     (entity_type, update['name']))
                if key is not None:
                    logger.info(
                        str(entity_type) + ' succesfully created/updated')
                else:
                    logger.error('Already Exist of %s:%s' %
                                 (entity_type, update['name']))
            except StandardError as e:
                #skipping to next record
                logger.error('Error occured, %s, for %s' %
                             (str(e), alias_name))
        else:
            logger.info("skipping record number, %s " % row_count)
    return upload_count