def test_child_record_parent_table_is_updated_when_sheet_is_renamed(self):
    survey = create_survey_from_xls(_logger_fixture_path(
        'childrens_survey_with_a_very_long_name.xls'))
    export_builder = ExportBuilder()
    export_builder.set_survey(survey)
    xls_file = NamedTemporaryFile(suffix='.xlsx')
    filename = xls_file.name
    export_builder.to_xls_export(filename, self.long_survey_data)
    xls_file.seek(0)
    wb = load_workbook(filename)

    # get the children's sheet
    ws1 = wb.get_sheet_by_name('childrens_survey_with_a_very_l1')

    # parent_table is in cell K2
    parent_table_name = ws1.cell('K2').value
    expected_parent_table_name = 'childrens_survey_with_a_very_lo'
    self.assertEqual(parent_table_name, expected_parent_table_name)

    # get cartoons sheet
    ws2 = wb.get_sheet_by_name('childrens_survey_with_a_very_l2')
    parent_table_name = ws2.cell('G2').value
    expected_parent_table_name = 'childrens_survey_with_a_very_l1'
    self.assertEqual(parent_table_name, expected_parent_table_name)
    xls_file.close()

def test_to_xls_export_respects_custom_field_delimiter(self):
    survey = self._create_childrens_survey()
    export_builder = ExportBuilder()
    export_builder.GROUP_DELIMITER = ExportBuilder.GROUP_DELIMITER_DOT
    export_builder.set_survey(survey)
    xls_file = NamedTemporaryFile(suffix='.xls')
    filename = xls_file.name
    export_builder.to_xls_export(filename, self.data)
    xls_file.seek(0)
    wb = load_workbook(filename)

    # check header columns
    main_sheet = wb.get_sheet_by_name('childrens_survey')
    expected_column_headers = [
        u'name', u'age', u'geo.geolocation', u'geo._geolocation_latitude',
        u'geo._geolocation_longitude', u'geo._geolocation_altitude',
        u'geo._geolocation_precision', u'tel.tel.office',
        u'tel.tel.mobile', u'_id', u'meta.instanceID', u'_uuid',
        u'_submission_time', u'_index', u'_parent_index',
        u'_parent_table_name', u'_tags', '_notes', '_version',
        '_duration', '_submitted_by']
    column_headers = [c[0].value for c in main_sheet.columns]
    self.assertEqual(sorted(column_headers),
                     sorted(expected_column_headers))
    xls_file.close()

def get_from_storage(target, alias):
    if target._meta.model_name == u'imagerevision':
        url = target.thumbnail(alias, sync=True)
    else:
        url = target.thumbnail(alias, '0', sync=True)

    if url is None or "placeholder" in url:
        raise ThumbnailNotReadyException

    if settings.MEDIA_URL not in url:
        media_url = settings.MEDIA_URL
        if media_url.endswith('/'):
            media_url = media_url[:-1]
        last_part_of_media = media_url.rsplit('/', 1)[-1]
        first_part_of_url = url.strip('/').split('/')[0]
        if last_part_of_media == first_part_of_url:
            # Drop the duplicated trailing path segment (str.strip would
            # strip characters, not the segment).
            media_url = media_url[:-len(last_part_of_media)].rstrip('/')
        url = media_url + url

    r = requests.get(url, verify=False, allow_redirects=True,
                     headers={'User-Agent': 'Mozilla/5.0'})
    img = NamedTemporaryFile(delete=True)
    img.write(r.content)
    img.flush()
    img.seek(0)
    return File(img)

def data_with_facebook(self, access_token):
    # TODO apply facebook v2 api.
    profile = get_object(
        'me',
        fields="id,name,bio,birthday,locale,timezone,gender,email,picture",
        access_token=access_token)
    fb_id = profile['id']
    bio = profile.get('bio', '')
    photo_url = "http://graph.facebook.com/%s/picture?width=400&height=400" % fb_id \
        if not profile['picture']['data']['is_silhouette'] else None
    if photo_url:
        img_temp = NamedTemporaryFile()
        img_temp.write(urlopen(photo_url).read())
        img_temp.flush()
        img_temp.seek(0)
        photo = File(img_temp)
    else:
        photo = None
    name = profile['name']
    email = profile.get('email', '#' + fb_id + '@inva.lid')
    timezone = profile.get('timezone', 0)
    dob = datetime.datetime.strptime(profile['birthday'], "%m/%d/%Y") \
        if 'birthday' in profile else None
    locale = profile.get('locale', 'en')[:2]
    gender = profile.get('gender', 'U')
    if gender == 'male':
        gender = 'M'
    elif gender == 'female':
        gender = 'F'
    data = {'name': name, 'dob': dob, 'locale': locale, 'email': email,
            'timezone_offset': timezone, 'gender': gender, 'bio': bio,
            'fb_id': fb_id}
    if photo:
        files = {'photo': photo}
    else:
        files = None
    return data, files

def download_thumbnail(url, instance, field_name):
    """
    Downloads a thumbnail and stores it in the given instance field.
    Returns final storage path.
    """
    url = iri_to_uri(url)
    response = requests.get(url, stream=True)
    if response.status_code != 200:
        raise Exception
    temp = NamedTemporaryFile()
    # May raise IOError.
    temp.write(response.raw.read())
    temp.seek(0)
    # May raise various Exceptions.
    im = Image.open(temp)
    im.verify()
    ext = os.path.splitext(urlparse.urlsplit(url).path)[1]
    args = (url, now().isoformat())
    filename = ''.join((make_hash(*args, step=2), ext))
    f = instance._meta.get_field(field_name)
    storage_path = f.generate_filename(instance, filename)
    return default_storage.save(storage_path, File(temp))

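# A minimal usage sketch for download_thumbnail above: assigning the
# returned storage path back to a model field. `Article` and its
# `thumbnail` FileField are hypothetical stand-ins, not part of the
# original code.
def refresh_article_thumbnail(article, url):
    # Assigning the storage name to a FileField attribute points the
    # field at the file download_thumbnail already saved.
    article.thumbnail = download_thumbnail(url, article, 'thumbnail')
    article.save(update_fields=['thumbnail'])
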
def getPGSQL(self, projection="EPSG:4326"):
    # Create temporary named file of GeoJSON output
    tmp_json_file = NamedTemporaryFile(mode='w+', delete=True,
                                       suffix='.geo.json')
    tmp_json_file.write(self.getGeoJSON(projection))
    tmp_json_file.seek(0)

    # Create temporary named file for new pgsql output.
    # We're using NamedTemporaryFile to create the unique filename...
    tmp_sql_file = NamedTemporaryFile(mode='w+', delete=True, suffix='.sql')
    tmp_sql_file_name = tmp_sql_file.name
    tmp_sql_file.close()
    # ... but we let ogr2ogr actually create the file and manually
    # delete it later.

    # call ogr2ogr to populate the .sql file
    os.system(
        'ogr2ogr -f "PGDUMP" %s %s -lco SRID=4326 -lco SCHEMA=public '
        '-lco EXTRACT_SCHEMA_FROM_LAYER_NAME="NO"'
        % (tmp_sql_file_name, tmp_json_file.name))

    # get sql string from the .sql file
    tmp_sql_file = open(tmp_sql_file_name)
    sql_str = tmp_sql_file.read()

    # close all temporary files before removing the .sql file
    tmp_json_file.close()
    tmp_sql_file.close()
    os.remove(tmp_sql_file_name)

    # return sql string
    return sql_str

def upload_fb(request):
    """Uploads the user's picture from Facebook."""
    if request.method == "POST":
        form = FacebookPictureForm(request.POST)
        if form.is_valid():
            # Need to download the image from the url and save it.
            photo_temp = NamedTemporaryFile(delete=True)
            fb_url = form.cleaned_data["facebook_photo"]
            photo_temp.write(urllib2.urlopen(fb_url).read())
            photo_temp.flush()
            photo_temp.seek(0)

            # Delete old avatars if they exist
            avatars = Avatar.objects.filter(user=request.user)
            for avatar in avatars:
                avatar.avatar.storage.delete(avatar.avatar.name)
                avatar.avatar.delete()
                avatar.delete()

            path = avatar_file_path(user=request.user,
                                    filename="fb_photo.jpg")
            avatar = Avatar(
                user=request.user,
                primary=True,
                avatar=path,
            )
            # print "saving facebook photo to " + path
            avatar.avatar.storage.save(path, File(photo_temp))
            avatar.save()
            return HttpResponseRedirect(
                reverse("profile_index") + "?changed_avatar=True")
    raise Http404

def get_from_storage(image, alias, revision_label):
    url = image.thumbnail(alias, {
        'sync': True,
        'revision_label': revision_label
    })

    if "placeholder" in url:
        raise ThumbnailNotReadyException

    if settings.MEDIA_URL not in url:
        media_url = settings.MEDIA_URL
        if media_url.endswith('/'):
            media_url = media_url[:-1]
        last_part_of_media = media_url.rsplit('/', 1)[-1]
        first_part_of_url = url.strip('/').split('/')[0]
        if last_part_of_media == first_part_of_url:
            # Drop the duplicated trailing path segment (str.strip would
            # strip characters, not the segment).
            media_url = media_url[:-len(last_part_of_media)].rstrip('/')
        url = media_url + url

    r = requests.get(url, verify=False, allow_redirects=True,
                     headers={'User-Agent': 'Mozilla/5.0'})
    img = NamedTemporaryFile(delete=True)
    img.write(r.content)
    img.flush()
    img.seek(0)
    return File(img)

def saveProject(self, args):
    print u'%s' % args
    try:
        obj = Project.objects.get(project_name=args['name'],
                                  url=args['url'])
        return None
    except Project.DoesNotExist:
        obj = Project(
            submitted_event=self.event,
            project_name=args['name'],
            short_description=args['desc'],
            url=args['url'],
            project_type=args['type']
        )
        img_temp = NamedTemporaryFile(delete=True)
        if args['image']:
            img_temp.write(urllib2.urlopen(args['image']).read())
            img_temp.flush()
            img_temp.seek(0)
            img_filepath = urlparse(args['image']).path.split('/')[-1]
            obj.image.save(img_filepath, File(img_temp))
            obj.save()
        else:
            obj.save()
        return obj

def post(self, request, *args, **kwargs):
    solution = get_object_or_404(Solution, pk=kwargs.pop('pk'))
    solver = Solver()
    status = solver.status(solution.submission_id)

    if status == Solver.SUCCESS:
        info = solver.info(solution.submission_id)
        solution.objects_in_field = ', '.join(info['objects_in_field'])
        solution.ra = "%.3f" % info['calibration']['ra']
        solution.dec = "%.3f" % info['calibration']['dec']
        solution.orientation = "%.3f" % info['calibration']['orientation']
        solution.radius = "%.3f" % info['calibration']['radius']
        solution.pixscale = "%.3f" % corrected_pixscale(
            solution, info['calibration']['pixscale'])

        try:
            target = solution.content_type.get_object_for_this_type(
                pk=solution.object_id)
        except solution.content_type.model_class().DoesNotExist:
            # Target image was deleted meanwhile
            context = {'status': Solver.FAILED}
            return HttpResponse(simplejson.dumps(context),
                                content_type='application/json')

        # Annotate image
        annotations_obj = solver.annotations(solution.submission_id)
        solution.annotations = simplejson.dumps(annotations_obj)
        annotator = Annotator(solution)
        try:
            annotated_image = annotator.annotate()
        except ThumbnailNotReadyException:
            solution.status = Solver.PENDING
            solution.save()
            context = {'status': solution.status}
            return HttpResponse(simplejson.dumps(context),
                                content_type='application/json')

        filename, ext = os.path.splitext(target.image_file.name)
        annotated_filename = "%s-%d%s" % (filename, int(time.time()), ext)
        if annotated_image:
            solution.image_file.save(annotated_filename, annotated_image)

        # Get sky plot image
        url = solver.sky_plot_zoom1_image_url(solution.submission_id)
        if url:
            img = NamedTemporaryFile(delete=True)
            img.write(urllib2.urlopen(url).read())
            img.flush()
            img.seek(0)
            f = File(img)
            try:
                solution.skyplot_zoom1.save(target.image_file.name, f)
            except IntegrityError:
                pass

    solution.status = status
    solution.save()

    context = {'status': solution.status}
    return HttpResponse(simplejson.dumps(context),
                        content_type='application/json')

def opentmp(self):
    tmpfile = NamedTemporaryFile(mode="w+b",
                                 suffix=os.path.splitext(self._name)[1])
    self.key.open()
    for chunk in self.key:
        tmpfile.write(chunk)
    tmpfile.seek(0)
    self.tmpfile = tmpfile
    self.file = tmpfile.file

def export_to_temp_file(self, encoding='utf-8'):
    """
    Export all shop inventory to a named temporary file and return
    the file handler of the temporary file created.
    """
    f = NamedTemporaryFile()
    self.export_to_stream(f, encoding)
    f.seek(0)
    return f

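# Hedged usage sketch for export_to_temp_file: streaming the returned temp
# file back to the client with the same FileWrapper + HttpResponse pattern
# used elsewhere in this collection. The view signature and the `shop`
# argument are hypothetical.
def export_inventory(request, shop):
    f = shop.export_to_temp_file(encoding='utf-8')
    response = HttpResponse(FileWrapper(f), content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename=inventory.csv'
    return response
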
def generate_osm_export(export_type, username, id_string, export_id=None,
                        options=None, xform=None):
    """
    Generates osm export for OpenStreetMap data

    param: export_type
    params: username: logged in username
    params: id_string: xform id_string
    params: export_id: ID of export object associated with the request
    param: options: additional parameters required for the lookup.
        ext: File extension of the generated export
    """
    extension = options.get("extension", export_type)

    if xform is None:
        xform = XForm.objects.get(user__username=username,
                                  id_string=id_string)

    osm_list = OsmData.objects.filter(instance__xform=xform,
                                      instance__deleted_at__isnull=True)
    content = get_combined_osm(osm_list)

    basename = "%s_%s" % (id_string,
                          datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
    filename = basename + "." + extension
    file_path = os.path.join(
        username, 'exports', id_string, export_type, filename)

    storage = get_storage_class()()
    temp_file = NamedTemporaryFile(suffix=extension)
    temp_file.write(content)
    temp_file.seek(0)
    export_filename = storage.save(file_path, File(temp_file, file_path))
    temp_file.close()

    dir_name, basename = os.path.split(export_filename)

    # get or create export object
    if export_id and Export.objects.filter(pk=export_id).exists():
        export = Export.objects.get(id=export_id)
    else:
        export_options = get_export_options(options)
        export = Export.objects.create(xform=xform,
                                       export_type=export_type,
                                       options=export_options)

    export.filedir = dir_name
    export.filename = basename
    export.internal_status = Export.SUCCESSFUL
    export.save()

    return export

def generate_export(export_type, extension, username, id_string,
                    export_id=None, filter_query=None, group_delimiter='/',
                    split_select_multiples=True):
    """
    Create appropriate export object given the export type
    """
    from odk_viewer.models import Export

    xform = XForm.objects.get(user__username=username, id_string=id_string)
    df_builder = _df_builder_for_export_type(
        export_type, username, id_string, group_delimiter,
        split_select_multiples, filter_query)
    if hasattr(df_builder, 'get_exceeds_xls_limits')\
            and df_builder.get_exceeds_xls_limits():
        extension = 'xlsx'

    temp_file = NamedTemporaryFile(suffix=("." + extension))
    df_builder.export_to(temp_file.name)
    basename = "%s_%s" % (id_string,
                          datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
    filename = basename + "." + extension

    # check filename is unique
    while not Export.is_filename_unique(xform, filename):
        filename = increment_index_in_filename(filename)

    file_path = os.path.join(
        username, 'exports', id_string, export_type, filename)

    # TODO: if s3 storage, make private - how will we protect local storage??
    storage = get_storage_class()()
    # seek to the beginning as required by storage classes
    temp_file.seek(0)
    export_filename = storage.save(file_path, File(temp_file, file_path))
    temp_file.close()

    dir_name, basename = os.path.split(export_filename)

    # get or create export object
    if export_id:
        export = Export.objects.get(id=export_id)
    else:
        export = Export(xform=xform, export_type=export_type)

    export.filedir = dir_name
    export.filename = basename
    export.internal_status = Export.SUCCESSFUL
    # don't persist exports that have a filter
    if filter_query is None:
        export.save()
    return export

def export_to_tmp(self, objects):
    """
    Export given set of objects to a temporary file and return
    a handle to that file.
    """
    f = NamedTemporaryFile()
    self.export_to_stream(objects, f)
    f.seek(0)
    return f

def generate_kml_export(export_type, username, id_string, export_id=None,
                        options=None, xform=None):
    """
    Generates kml export for geographical data

    param: export_type
    params: username: logged in username
    params: id_string: xform id_string
    params: export_id: ID of export object associated with the request
    param: options: additional parameters required for the lookup.
        ext: File extension of the generated export
    """
    extension = options.get("extension", export_type)

    user = User.objects.get(username=username)
    if xform is None:
        xform = XForm.objects.get(user__username=username,
                                  id_string=id_string)

    response = render_to_response(
        'survey.kml',
        {'data': kml_export_data(id_string, user, xform=xform)})

    basename = "%s_%s" % (id_string,
                          datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
    filename = basename + "." + extension
    file_path = os.path.join(
        username, 'exports', id_string, export_type, filename)

    storage = get_storage_class()()
    temp_file = NamedTemporaryFile(suffix=extension)
    temp_file.write(response.content)
    temp_file.seek(0)
    export_filename = storage.save(file_path, File(temp_file, file_path))
    temp_file.close()

    dir_name, basename = os.path.split(export_filename)

    # get or create export object
    if export_id and Export.objects.filter(pk=export_id).exists():
        export = Export.objects.get(id=export_id)
    else:
        export_options = get_export_options(options)
        export = Export.objects.create(xform=xform,
                                       export_type=export_type,
                                       options=export_options)

    export.filedir = dir_name
    export.filename = basename
    export.internal_status = Export.SUCCESSFUL
    export.save()
    return export

def _create_resized_images(self, raw_field, save):
    """Generate scaled down images for avatars."""
    if not self.avatar_image:
        return None
    # Derive base filename (strip out the relative directory).
    filename = os.path.split(self.avatar_image.name)[-1]
    ctype = guess_type(filename)[0]
    ext = os.path.splitext(filename)[1]
    if not ext:
        ext = '.jpg'
    t = None
    try:
        try:
            pth = self.avatar_image.path
        except NotImplementedError:
            from django.core.files.temp import NamedTemporaryFile
            t = NamedTemporaryFile(suffix=ext)
            ix = self.avatar_image
            for d in ix.chunks(4000000):
                t.write(d)
            t.flush()
            t.seek(0)
            pth = t

        # Generate avatar.
        remove_model_image(self, 'avatar')
        self.avatar = None
        avatar_contents = resize_in_memory(
            pth, settings.AVATAR_IMAGE_CROP,
            crop=settings.AVATAR_IMAGE_CROP, crop_before_resize=True)
        if avatar_contents:
            avatar_file = str_to_file(avatar_contents)
            avatar_field = InMemoryUploadedFile(
                avatar_file, None, None, ctype, len(avatar_contents), None)
            self.avatar.save(name='avatar-%s' % filename,
                             content=avatar_field, save=save)
            avatar_file.close()

        # Generate medium-sized avatar.
        remove_model_image(self, 'avatar_medium')
        self.avatar_medium = None
        if t:
            t.seek(0)
        avatar_contents = resize_in_memory(
            pth, settings.AVATAR_MEDIUM_IMAGE_CROP,
            crop=settings.AVATAR_MEDIUM_IMAGE_CROP, crop_before_resize=True)
        if avatar_contents:
            avatar_file = str_to_file(avatar_contents)
            avatar_field = InMemoryUploadedFile(
                avatar_file, None, None, ctype, len(avatar_contents), None)
            self.avatar_medium.save(name='avatar-med-%s' % filename,
                                    content=avatar_field, save=save)
            avatar_file.close()

        if t:
            t.close()
        if save:
            super(UserProfile, self).save()
    except Exception:
        raise
    finally:
        if t:
            t.close()

def download_avatar(self, url):
    """Download the avatar image at ``url`` and return it as a File."""
    r = requests.get(url)
    img_temp = NamedTemporaryFile(delete=True)
    img_temp.write(r.content)
    img_temp.flush()
    img_temp.seek(0)
    return File(img_temp)

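# Hedged usage sketch for download_avatar: the returned File wraps a
# still-open NamedTemporaryFile, so it can go straight into an
# ImageField's save(). The `profile.avatar` field is a hypothetical
# stand-in, and download_avatar is assumed to live on that profile model.
def set_avatar_from_url(profile, url):
    avatar_file = profile.download_avatar(url)
    profile.avatar.save('avatar.jpg', avatar_file, save=True)
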
def create_random_image(self, width=800, height=600):
    color = (
        random.randint(0, 255),
        random.randint(0, 255),
        random.randint(0, 255))
    image = PIL.Image.new('RGB', (width, height), color)
    temp = NamedTemporaryFile(suffix='.png')
    image.save(temp, 'png')
    temp.seek(0)
    return temp

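# Hedged usage sketch for create_random_image: re-wrapping the temp PNG in
# a SimpleUploadedFile so the test client can post it. The 'photo-upload'
# URL name, the `photo` form field, and the expected redirect are
# hypothetical.
def test_photo_upload(self):
    from django.core.files.uploadedfile import SimpleUploadedFile
    temp = self.create_random_image(width=64, height=48)
    upload = SimpleUploadedFile('random.png', temp.read(),
                                content_type='image/png')
    response = self.client.post(reverse('photo-upload'), {'photo': upload})
    self.assertEqual(response.status_code, 302)
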
def _sync_changed_files(sync_key, last_commit_hash, sync_url, project_dir):
    if not os.path.exists(COMMIT_CACHE_FILEPATH):
        open(COMMIT_CACHE_FILEPATH, 'w').close()
    commit_cache_file = open(COMMIT_CACHE_FILEPATH, 'r+')
    fd = commit_cache_file.fileno()
    temp_file = None
    try:
        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError as e:
        print "other process is already running the sync:\n%s" % repr(e)
    else:
        last_commit_hash_from_cache = commit_cache_file.read()
        if last_commit_hash_from_cache:
            last_commit_hash = last_commit_hash_from_cache
        temp_file = NamedTemporaryFile(prefix='sync_changed_files',
                                       suffix='.tar.gz')
        signer = URLSafeTimedSerializer(sync_key)
        signed_data = signer.dumps(last_commit_hash)
        data = {'last_commit_hash': signed_data}
        response = requests.post(sync_url, data=data, stream=True)
        if response.ok:
            data_signature = hmac.new(key=str(sync_key),
                                      digestmod=hashlib.sha1)
            for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
                data_signature.update(chunk)
                temp_file.write(chunk)
            temp_file.seek(0)
            data_signature = data_signature.hexdigest()
            header_signature = response.headers.get('aldryn-sync-signature')
            if not constant_time_compare(header_signature, data_signature):
                # TODO log failed attempt to corrupt the website's data
                raise RuntimeError(
                    'Sync signatures do not match:\ndata:\t%s\nheader:\t%s'
                    % (data_signature, header_signature))
            tarball = tarfile.open(mode='r:gz', fileobj=temp_file)
            for member in tarball.getmembers():
                path = member.name
                if path.startswith(('static/', 'templates/')):
                    full_path = os.path.join(project_dir, path)
                    directory = os.path.dirname(full_path)
                    if not os.path.exists(directory):
                        os.makedirs(directory)
                    tarball.extract(member, project_dir)
            tarball.close()
            # Successfully synced files, storing the newest commit hash
            current_commit_hash = response.headers.get(
                'aldryn-sync-current-commit', last_commit_hash)
            commit_cache_file.seek(0)
            commit_cache_file.truncate()
            commit_cache_file.write(current_commit_hash)
        else:
            response.raise_for_status()
    finally:
        commit_cache_file.close()
        if temp_file:
            temp_file.close()

def view(self, request):
    def filename(title):
        return re.sub('[^A-Za-z0-9_. ]+', '-', title)[:32]

    def write(output, image, index, title, prefix=None):
        if image:
            output.write(image, ('%s%s %s%s' % (
                os.path.join(prefix, '') if prefix else '',
                str(index + 1).zfill(4),
                filename(title),
                os.path.splitext(image)[1])
            ).encode('ascii', 'replace'))

    def metadata_file(tempfile, record):
        t = Template("{% load data %}{% metadata record %}")
        c = Context({'record': record, 'request': request})
        tempfile.write(t.render(c))
        tempfile.flush()
        return tempfile.name

    presentation = self.obj
    passwords = request.session.get('passwords', dict())

    items = presentation.items.filter(hidden=False)

    tempfile = NamedTemporaryFile(suffix='.zip')
    output = zipfile.ZipFile(tempfile, 'w')

    tempjsonfile = NamedTemporaryFile(suffix='.json')
    metadata = presentation_detail(request, presentation.id)
    tempjsonfile.write(metadata.content)
    tempjsonfile.flush()
    output.write(tempjsonfile.name,
                 os.path.join('metadata', 'metadata.json'))

    for index, item in enumerate(items):
        write(output,
              get_image_for_record(item.record, self.user,
                                   passwords=passwords),
              index, item.record.title)
        write(output,
              get_image_for_record(item.record, self.user, 100, 100,
                                   passwords),
              index, item.record.title, 'thumb')
        tempmetadatafile = NamedTemporaryFile(suffix='.html')
        write(output, metadata_file(tempmetadatafile, item.record),
              index, item.record.title, 'metadata')

    output.close()
    tempfile.flush()
    tempfile.seek(0)

    wrapper = FileWrapper(tempfile)
    response = HttpResponse(wrapper, mimetype='application/zip')
    response['Content-Disposition'] = \
        'attachment; filename=%s.zip' % filename(presentation.title)
    response['Content-Length'] = os.path.getsize(tempfile.name)
    return response

def init_db(self):
    files = DumpFile.get_last_of_type(DumpFile.TYPE_SQL, node=None)
    self.assertTrue(files)
    sql_dump_file = files[0]

    f = NamedTemporaryFile(delete=False)
    f.write(sql_dump_file.file.read())
    f.seek(0)
    f.close()

    proxy.initialize(peewee.SqliteDatabase(f.name))
    proxy.create_tables([Metadatos], safe=True)

def view(self, request):
    def filename(title):
        return re.sub('[^A-Za-z0-9_. ]+', '-', title)[:32]

    def write(output, image, index, title, prefix=None):
        if image:
            output.write(image, ('%s%s %s%s' % (
                os.path.join(prefix, '') if prefix else '',
                str(index + 1).zfill(4),
                filename(title or 'Slide %s' % (index + 1)),
                os.path.splitext(image)[1])
            ).encode('ascii', 'replace'))

    def metadata_file(tempfile, record):
        t = Template("{% load data %}{% metadata record %}")
        c = Context({'record': record, 'request': request})
        tempfile.write(smart_str(t.render(c)))
        tempfile.flush()
        return tempfile.name

    presentation = self.obj
    passwords = request.session.get('passwords', dict())

    items = presentation.items.filter(hidden=False)

    tempfile = NamedTemporaryFile(suffix='.zip')
    output = zipfile.ZipFile(tempfile, 'w')

    tempjsonfile = NamedTemporaryFile(suffix='.json')
    metadata = presentation_detail(request, presentation.id)
    tempjsonfile.write(metadata.content)
    tempjsonfile.flush()
    output.write(tempjsonfile.name,
                 os.path.join('metadata', 'metadata.json'))

    for index, item in enumerate(items):
        write(output,
              get_image_for_record(item.record, self.user,
                                   passwords=passwords),
              index, item.record.title)
        write(output,
              get_image_for_record(item.record, self.user, 100, 100,
                                   passwords),
              index, item.record.title, 'thumb')
        tempmetadatafile = NamedTemporaryFile(suffix='.html')
        write(output, metadata_file(tempmetadatafile, item.record),
              index, item.record.title, 'metadata')

    output.close()
    tempfile.flush()
    tempfile.seek(0)

    wrapper = FileWrapper(tempfile)
    response = HttpResponse(wrapper, content_type='application/zip')
    response['Content-Disposition'] = \
        'attachment; filename=%s.zip' % filename(presentation.title)
    response['Content-Length'] = os.path.getsize(tempfile.name)
    return response

def test_zipped_csv_export_works_with_unicode(self):
    """
    The csv writer doesn't handle unicode, so we have to encode to utf-8.
    """
    survey = create_survey_from_xls(_logger_fixture_path(
        'childrens_survey_unicode.xls'))
    export_builder = ExportBuilder()
    export_builder.set_survey(survey)
    temp_zip_file = NamedTemporaryFile(suffix='.zip')
    export_builder.to_zipped_csv(temp_zip_file.name, self.data_utf8)
    temp_zip_file.seek(0)
    temp_dir = tempfile.mkdtemp()
    zip_file = zipfile.ZipFile(temp_zip_file.name, "r")
    zip_file.extractall(temp_dir)
    zip_file.close()
    temp_zip_file.close()
    # check that the children's file (which has the unicode header) exists
    self.assertTrue(
        os.path.exists(
            os.path.join(temp_dir, "children.info.csv")))
    # check file's contents
    with open(os.path.join(temp_dir, "children.info.csv")) as csv_file:
        reader = csv.reader(csv_file)
        expected_headers = ['children.info/name.first',
                            'children.info/age',
                            'children.info/fav_colors',
                            u'children.info/fav_colors/red\u2019s',
                            u'children.info/fav_colors/blue\u2019s',
                            u'children.info/fav_colors/pink\u2019s',
                            'children.info/ice_creams',
                            'children.info/ice_creams/vanilla',
                            'children.info/ice_creams/strawberry',
                            'children.info/ice_creams/chocolate',
                            '_id', '_uuid', '_submission_time', '_index',
                            '_parent_table_name', '_parent_index',
                            u'_tags', '_notes', '_version', '_duration',
                            '_submitted_by']
        rows = [row for row in reader]
        actual_headers = [h.decode('utf-8') for h in rows[0]]
        self.assertEqual(sorted(actual_headers), sorted(expected_headers))
        data = dict(zip(rows[0], rows[1]))
        # check that red and blue are set to true
        self.assertEqual(
            data[u'children.info/fav_colors/red\u2019s'.encode('utf-8')],
            'True')
        self.assertEqual(
            data[u'children.info/fav_colors/blue\u2019s'.encode('utf-8')],
            'True')
        self.assertEqual(
            data[u'children.info/fav_colors/pink\u2019s'.encode('utf-8')],
            'False')
    shutil.rmtree(temp_dir)

def file_from_response(response):
    if response.status_code == 204:  # No content
        return None, None, None
    path = urlsplit(response.url).path
    name = urlunquote(os.path.basename(path))
    content_type = response.headers['content-type']
    # create temp file with the downloaded data
    temp_file = NamedTemporaryFile()
    temp_file.write(response.content)
    temp_file.size = len(response.content)
    temp_file.flush()
    temp_file.seek(0)
    return name, temp_file, content_type

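# Hedged usage sketch for file_from_response: attaching the downloaded
# temp file to a model's FileField. The `Document` model and its `file`
# field are hypothetical stand-ins.
def save_remote_document(url):
    name, temp_file, content_type = file_from_response(requests.get(url))
    if temp_file is None:  # 204 No Content
        return None
    document = Document()
    document.file.save(name, File(temp_file), save=True)
    return document
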
def _save_thumbnail(self, uuid, thumbnail):
    # TODO: does it need to be written to temp file?
    img_temp = NamedTemporaryFile(delete=True)
    img_temp.write(thumbnail)
    img_temp.seek(0)
    img_temp.flush()
    thumbnail_field = self.model._meta.get_field(self.thumbnail_field)
    filename = thumbnail_field.upload_to + uuid + '.png'
    thumbnail_field.storage.save(filename, File(img_temp))
    return thumbnail_field.storage.url(filename)

def _save_thumbnail(self, uuid, thumbnail):
    # TODO: does it need to be written to temp file?
    img_temp = NamedTemporaryFile(delete=True)
    img_temp.write(thumbnail)
    img_temp.seek(0)
    img_temp.flush()
    thumbnail_field = self.model._meta.get_field(self.thumbnail_field)
    filename = thumbnail_field.upload_to + uuid
    thumbnail_field.storage.save(filename, File(img_temp))
    return thumbnail_field.storage.url(filename)

def create_photo_versions(sender, instance, **kwargs):
    """Create ``PhotoVersion`` objects for the photo object defined by
    ``instance``.

    A version is created for a bounding box defined by each PhotoSize
    instance.
    """
    from photo.models import Photo, PhotoSize, PhotoVersion
    photo = instance
    ext = '.jpg'
    t = None
    try:
        pth = photo.image.path
    except NotImplementedError:
        from django.core.files.temp import NamedTemporaryFile
        t = NamedTemporaryFile(suffix=ext)
        ix = photo.image
        if ix.closed:
            # Reload from DB
            photo = Photo.objects.get(pk=photo.pk)
            ix = photo.image
        for d in ix.chunks(4000000):
            t.write(d)
        t.flush()
        t.seek(0)
        pth = t
    for size in PhotoSize.objects.all():
        # Create a suitable filename.
        filename = '%s-%s-%s%s' % (photo.pk, uuid4().hex[::7],
                                   slugify(size.name)[:10], ext)
        ctype = guess_type(filename)[0]
        temp_file = TemporaryUploadedFile(name=filename, content_type=ctype,
                                          size=0, charset=None)
        if t:
            t.seek(0)
        try:
            version = PhotoVersion.objects.get(photo=photo, size=size)
            remove_model_image(version, 'image')
            version.image = None
        except PhotoVersion.DoesNotExist:
            version = PhotoVersion(photo=photo, size=size)
        if size.do_crop:
            resize_to, crop_box, input_image = get_perfect_fit_resize_crop(
                size.bounding_box, (photo.width, photo.height))
        else:
            resize_to = size.bounding_box
            crop_box = None
        # Resize to a temporary location.
        resize(pth, resize_to, out_file_path=temp_file, crop=crop_box)
        # Save resized copy to `version` instance.
        temp_file.seek(0)  # Prepare file for a re-read.
        version.image.save(name=filename, content=temp_file, save=True)
        temp_file.close()
    if t:
        t.close()

def test_zipped_csv_export_works_with_unicode(self):
    """
    The csv writer doesn't handle unicode, so we have to encode to utf-8.
    """
    survey = create_survey_from_xls(_logger_fixture_path(
        'childrens_survey_unicode.xls'))
    export_builder = ExportBuilder()
    export_builder.set_survey(survey)
    temp_zip_file = NamedTemporaryFile(suffix='.zip')
    export_builder.to_zipped_csv(temp_zip_file.name, self.data_utf8)
    temp_zip_file.seek(0)
    temp_dir = tempfile.mkdtemp()
    zip_file = zipfile.ZipFile(temp_zip_file.name, "r")
    zip_file.extractall(temp_dir)
    zip_file.close()
    temp_zip_file.close()
    # check that the children's file (which has the unicode header) exists
    self.assertTrue(
        os.path.exists(
            os.path.join(temp_dir, "children.info.csv")))
    # check file's contents
    with open(os.path.join(temp_dir, "children.info.csv")) as csv_file:
        reader = csv.reader(csv_file)
        expected_headers = ['children.info/name.first',
                            'children.info/age',
                            'children.info/fav_colors',
                            u'children.info/fav_colors/red\u2019s',
                            u'children.info/fav_colors/blue\u2019s',
                            u'children.info/fav_colors/pink\u2019s',
                            'children.info/ice_creams',
                            'children.info/ice_creams/vanilla',
                            'children.info/ice_creams/strawberry',
                            'children.info/ice_creams/chocolate',
                            '_id', '_uuid', '_submission_time', '_index',
                            '_parent_table_name', '_parent_index',
                            u'_tags', '_notes']
        rows = [row for row in reader]
        actual_headers = [h.decode('utf-8') for h in rows[0]]
        self.assertEqual(sorted(actual_headers), sorted(expected_headers))
        data = dict(zip(rows[0], rows[1]))
        # check that red and blue are set to true
        self.assertEqual(
            data[u'children.info/fav_colors/red\u2019s'.encode('utf-8')],
            'True')
        self.assertEqual(
            data[u'children.info/fav_colors/blue\u2019s'.encode('utf-8')],
            'True')
        self.assertEqual(
            data[u'children.info/fav_colors/pink\u2019s'.encode('utf-8')],
            'False')
    shutil.rmtree(temp_dir)

def generate_file_sample(module, target_cols: list, version: float = 1.0,
                         number: int = 5):
    headers = [col['label'] for col in target_cols]
    data_rows = {}
    for target in target_cols:
        _label = target['label']
        _type = target['type']
        if _type in ['string']:
            _data = []
            for i in range(number):
                value = f'{generate_string(3)} {generate_string(3)}'
                _data.append(value)
            data_rows[_label] = _data
            continue
        if _type in ['integer']:
            _data = []
            for i in range(number):
                value = random.randint(0, 100)
                _data.append(value)
            data_rows[_label] = _data
            continue
        if _type in ['float', 'number']:
            _data = []
            for i in range(number):
                value = float("{0:.2f}".format(random.uniform(0000, 9999)))
                _data.append(value)
            data_rows[_label] = _data
            continue
        if _type in ['datetime']:
            _data = []
            for i in range(number):
                value = datetime.now() - timedelta(days=i)
                _data.append(value)
            data_rows[_label] = _data
            continue

    df = pd.DataFrame(data_rows, columns=headers)
    file_path = generate_file_path_sample(module, version)

    if PLAT_IMPORT_STORAGE == 'local':
        file_storage = os.path.join(MEDIA_ROOT, file_path)
        default_storage.save(file_storage, ContentFile(''))
        df.to_excel(file_storage, index=False, header=True,
                    sheet_name='Sheet1')

    if PLAT_IMPORT_STORAGE == 'google':
        f = NamedTemporaryFile(suffix='.xlsx')
        df.to_excel(f, index=False, header=True, sheet_name='Sheet1')
        f.seek(0)
        bucket = get_bucket_google_storage()
        blob = bucket.blob(file_path)
        __GS_UPLOAD_CONTENT_TYPE__ = \
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        blob.upload_from_file(f, content_type=__GS_UPLOAD_CONTENT_TYPE__)
        blob.make_public()

def test_to_sav_export(self):
    survey = self._create_childrens_survey()
    export_builder = ExportBuilder()
    export_builder.set_survey(survey)
    temp_zip_file = NamedTemporaryFile(suffix='.zip')
    filename = temp_zip_file.name
    export_builder.to_zipped_sav(filename, self.data)
    temp_zip_file.seek(0)
    temp_dir = tempfile.mkdtemp()
    zip_file = zipfile.ZipFile(temp_zip_file.name, "r")
    zip_file.extractall(temp_dir)
    zip_file.close()
    temp_zip_file.close()

    # generate data to compare with
    index = 1
    indices = {}
    survey_name = survey.name
    outputs = []
    for d in self.data:
        outputs.append(
            dict_to_joined_export(d, index, indices, survey_name))
        index += 1

    # check that each file exists
    self.assertTrue(
        os.path.exists(
            os.path.join(temp_dir, "{0}.sav".format(survey.name))))

    def _test_sav_file(section):
        with SavReader(
                os.path.join(temp_dir, "{0}.sav".format(section)),
                returnHeader=True) as reader:
            header = next(reader)
            rows = [r for r in reader]

            # open comparison file
            with SavReader(
                    _logger_fixture_path('spss', "{0}.sav".format(section)),
                    returnHeader=True) as fixture_reader:
                fixture_header = next(fixture_reader)
                self.assertEqual(header, fixture_header)
                expected_rows = [r for r in fixture_reader]
                self.assertEqual(rows, expected_rows)

    for section in export_builder.sections:
        section_name = section['name'].replace('/', '_')
        _test_sav_file(section_name)

def write_temp_file_to_path(suffix, content, file_path):
    """
    Write a temp file and return the name of the file.
    :param suffix: The file suffix
    :param content: The content to write
    :param file_path: The path to write the temp file to
    :return: The filename written to
    """
    temp_file = NamedTemporaryFile(suffix=suffix)
    temp_file.write(content)
    temp_file.seek(0)
    export_filename = default_storage.save(file_path,
                                           File(temp_file, file_path))
    temp_file.close()

    return export_filename

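# Hedged usage sketch for write_temp_file_to_path, following the
# username/exports/id_string/export_type layout used by the generate_*
# export helpers above; the path components and CSV content are
# illustrative.
def example_write_export():
    file_path = os.path.join('bob', 'exports', 'survey', 'csv',
                             'survey_2020_01_01.csv')
    return write_temp_file_to_path('.csv', b'name,age\nalice,30\n',
                                   file_path)
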
def getFromStorage(image, alias):
    url = image.thumbnail(alias)

    if "://" in url:
        url = url.split('://')[1]
    else:
        url = settings.BASE_URL + url
    url = 'http://' + urllib2.quote(url.encode('utf-8'))

    headers = {'User-Agent': 'Mozilla/5.0'}
    req = urllib2.Request(url, None, headers)
    img = NamedTemporaryFile(delete=True)
    img.write(urllib2.urlopen(req).read())
    img.flush()
    img.seek(0)
    return File(img)

def test_xls_export_works_with_unicode(self):
    survey = create_survey_from_xls(
        _logger_fixture_path("childrens_survey_unicode.xls"))
    export_builder = ExportBuilder()
    export_builder.set_survey(survey)
    temp_xls_file = NamedTemporaryFile(suffix=".xlsx")
    export_builder.to_xls_export(temp_xls_file.name, self.data_utf8)
    temp_xls_file.seek(0)
    # check that values for red\u2019s and blue\u2019s are set to true
    wb = load_workbook(temp_xls_file.name)
    children_sheet = wb.get_sheet_by_name("children.info")
    data = dict([(r[0].value, r[1].value) for r in children_sheet.columns])
    self.assertTrue(data[u"children.info/fav_colors/red\u2019s"])
    self.assertTrue(data[u"children.info/fav_colors/blue\u2019s"])
    self.assertFalse(data[u"children.info/fav_colors/pink\u2019s"])
    temp_xls_file.close()

def csv_export(request):
    logger.debug("csv export requested.")
    tmpfile = NamedTemporaryFile(suffix='.csv')
    csvwriter = csv.writer(tmpfile)
    # write csv headers
    csvwriter.writerow(['sensor_id', 'sensor_name', 'timestamp', 'value',
                        'value_name', 'value_units'])
    weather_data = WeatherData.objects.all()
    for d in weather_data:
        csvwriter.writerow([d.sensor.id, d.sensor.name, d.timestamp,
                            d.value, d.type.name, d.type.units])
    wrapper = FileWrapper(tmpfile)
    logger.debug("Serving CSV file name=%s length=%s",
                 tmpfile.name, tmpfile.tell())
    response = HttpResponse(wrapper, content_type='text/plain')
    response['Content-Disposition'] = 'attachment; filename=weather.csv'
    response['Content-Length'] = tmpfile.tell()
    tmpfile.seek(0)
    return response

def test_xls_export_works_with_unicode(self):
    survey = create_survey_from_xls(
        _logger_fixture_path('childrens_survey_unicode.xls'))
    export_builder = ExportBuilder()
    export_builder.set_survey(survey)
    temp_xls_file = NamedTemporaryFile(suffix='.xlsx')
    export_builder.to_xls_export(temp_xls_file.name, self.data_utf8)
    temp_xls_file.seek(0)
    # check that values for red\u2019s and blue\u2019s are set to true
    wb = load_workbook(temp_xls_file.name)
    children_sheet = wb.get_sheet_by_name("children.info")
    data = dict([(r[0].value, r[1].value) for r in children_sheet.columns])
    self.assertTrue(data['children.info/fav_colors/red\u2019s'])
    self.assertTrue(data['children.info/fav_colors/blue\u2019s'])
    self.assertFalse(data['children.info/fav_colors/pink\u2019s'])
    temp_xls_file.close()

def generate_csv_html_file_response(name, list_data, keys):
    # use of named temporary file here is to handle deletion of file
    # after we return the file; after the new file object is evicted
    # it gets deleted
    # http://stackoverflow.com/questions/3582414/removing-tmp-file-
    # after-return-httpresponse-in-django
    newfile = NamedTemporaryFile(suffix='.csv', delete=False)
    newfile.name = name
    dict_writer = csv.DictWriter(newfile, keys)
    dict_writer.writeheader()
    dict_writer.writerows(list_data)
    newfile.seek(0)
    wrapper = FileWrapper(newfile)
    httpresponse = HttpResponse(wrapper, content_type="text/csv")
    httpresponse['Content-Disposition'] = 'attachment; filename=%s' \
        % newfile.name
    return httpresponse

def _save_thumbnail(self, thumbnail):
    if thumbnail:
        img_temp = NamedTemporaryFile(delete=True)
        img_temp.write(thumbnail)
        img_temp.seek(0)
        img_temp.flush()
        # TODO: save in original format?
        filename = '{0}.png'.format(img_temp.name.split('/')[-1])
        self.thumbnail = filename
        self.thumbnail.save(filename, File(img_temp), save=True)
        super(CrocoModel, self).save()

def generate_osm_export(export_type, extension, username, id_string,
                        export_id=None, filter_query=None):
    # TODO resolve circular import
    from onadata.apps.viewer.models.export import Export

    xform = XForm.objects.get(user__username=username, id_string=id_string)
    attachments = Attachment.objects.filter(
        extension=Attachment.OSM,
        instance__xform=xform
    )
    content = get_combined_osm([a.media_file for a in attachments])

    basename = "%s_%s" % (id_string,
                          datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
    filename = basename + "." + extension
    file_path = os.path.join(
        username, 'exports', id_string, export_type, filename)

    storage = get_storage_class()()
    temp_file = NamedTemporaryFile(suffix=extension)
    temp_file.write(content)
    temp_file.seek(0)
    export_filename = storage.save(file_path, File(temp_file, file_path))
    temp_file.close()

    dir_name, basename = os.path.split(export_filename)

    # get or create export object
    if export_id:
        export = Export.objects.get(id=export_id)
    else:
        export = Export.objects.create(xform=xform, export_type=export_type)

    export.filedir = dir_name
    export.filename = basename
    export.internal_status = Export.SUCCESSFUL
    export.save()
    return export

def make_temp_file(student, assignment):
    feedback = student.feedbacktostudent_set.filter(
        assignment=assignment).order_by('pk')
    if len(feedback) > 0:
        tempFile = NamedTemporaryFile()
        head = "Assignment: %s \n" \
               "========================\n" \
               "Student ID: %s \n" \
               "Name: %s \n" \
               "========================\n\n" \
               "Feedback: \n" % (assignment.title, student.student_id,
                                 student.get_fullname())
        tempFile.write(head)
        for each_feedback in feedback:
            comment = each_feedback.comments.message
            tempFile.write(comment + '\n')
        tempFile.seek(0)
        return tempFile
    return None

def test_to_xls_export_generates_valid_sheet_names(self):
    survey = create_survey_from_xls(_logger_fixture_path(
        'childrens_survey_with_a_very_long_name.xls'))
    export_builder = ExportBuilder()
    export_builder.set_survey(survey)
    xls_file = NamedTemporaryFile(suffix='.xls')
    filename = xls_file.name
    export_builder.to_xls_export(filename, self.data)
    xls_file.seek(0)
    wb = load_workbook(filename)
    # check that we have childrens_survey, children, children_cartoons
    # and children_cartoons_characters sheets
    expected_sheet_names = ['childrens_survey_with_a_very_lo',
                            'childrens_survey_with_a_very_l1',
                            'childrens_survey_with_a_very_l2',
                            'childrens_survey_with_a_very_l3']
    self.assertEqual(wb.get_sheet_names(), expected_sheet_names)
    xls_file.close()

def start(self, image_url, **kwargs):
    self.login(settings.ASTROMETRY_NET_API_KEY)

    r = requests.get(image_url, verify=False, allow_redirects=True,
                     headers={'User-Agent': 'Mozilla/5.0'})
    f = NamedTemporaryFile(delete=True)
    f.write(r.content)
    f.flush()
    f.seek(0)

    upload = self.upload(File(f), **kwargs)
    if upload['status'] == 'success':
        return upload['subid']

    return 0

def generate_kml_export(export_type, extension, username, id_string,
                        export_id=None, filter_query=None):
    from odk_viewer.models import Export

    user = User.objects.get(username=username)
    xform = XForm.objects.get(user__username=username, id_string=id_string)
    response = render_to_response(
        'survey.kml', {'data': kml_export_data(id_string, user)})

    basename = "%s_%s" % (id_string,
                          datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
    filename = basename + "." + extension
    file_path = os.path.join(
        username, 'exports', id_string, export_type, filename)

    storage = get_storage_class()()
    temp_file = NamedTemporaryFile(suffix=extension)
    temp_file.write(response.content)
    temp_file.seek(0)
    export_filename = storage.save(file_path, File(temp_file, file_path))
    temp_file.close()

    dir_name, basename = os.path.split(export_filename)

    # get or create export object
    if export_id:
        export = Export.objects.get(id=export_id)
    else:
        export = Export.objects.create(xform=xform, export_type=export_type)

    export.filedir = dir_name
    export.filename = basename
    export.internal_status = Export.SUCCESSFUL
    export.save()
    return export

def setup_profile(request):
    """Display page 4 (profile) of the first login wizard."""
    # Fields with file uploads are not AJAX requests.
    if request.method == "POST":
        form = ProfileForm(request.POST, user=request.user)
        profile = request.user.profile
        if form.is_valid():
            profile.name = form.cleaned_data["display_name"].strip()
            if not profile.setup_profile:
                profile.setup_profile = True
                profile.add_points(score_mgr.setup_points(),
                                   datetime.datetime.today(),
                                   "Set up profile")
            profile.save()

            if form.cleaned_data["pic_method"] == 0:
                name = request.user
                for avatar in Avatar.objects.filter(user=name):
                    avatar.delete()
            elif form.cleaned_data["pic_method"] == 2 and \
                    form.cleaned_data["facebook_photo"]:
                # Need to download the image from the url and save it.
                photo_temp = NamedTemporaryFile(delete=True)
                fb_url = form.cleaned_data["facebook_photo"]
                photo_temp.write(urllib2.urlopen(fb_url).read())
                photo_temp.flush()
                photo_temp.seek(0)

                path = avatar_file_path(user=request.user,
                                        filename="fb_photo.jpg")
                avatar = Avatar(
                    user=request.user,
                    primary=True,
                    avatar=path,
                )
                avatar.avatar.storage.save(path, File(photo_temp))
                avatar.save()

            return HttpResponseRedirect(reverse("setup_activity"))

        return _get_profile_form(request, form=form, non_xhr=False)

    return _get_profile_form(request)

def setup_profile(request):
    """Display page 4 (profile) of the first login wizard."""
    # Fields with file uploads are not AJAX requests.
    if request.method == "POST":
        form = ProfileForm(request.POST, user=request.user)
        profile = request.user.get_profile()
        if form.is_valid():
            profile.name = form.cleaned_data["display_name"].strip()
            if not profile.setup_profile:
                profile.setup_profile = True
                profile.add_points(score_mgr.setup_points(),
                                   datetime.datetime.today(),
                                   "Set up profile")
            profile.save()

            if form.cleaned_data["pic_method"] == 0:
                name = request.user
                for avatar in Avatar.objects.filter(user=name):
                    avatar.delete()
            elif form.cleaned_data["pic_method"] == 2 and \
                    form.cleaned_data["facebook_photo"]:
                # Need to download the image from the url and save it.
                photo_temp = NamedTemporaryFile(delete=True)
                fb_url = form.cleaned_data["facebook_photo"]
                photo_temp.write(urllib2.urlopen(fb_url).read())
                photo_temp.flush()
                photo_temp.seek(0)

                path = avatar_file_path(user=request.user,
                                        filename="fb_photo.jpg")
                avatar = Avatar(
                    user=request.user,
                    primary=True,
                    avatar=path,
                )
                avatar.avatar.storage.save(path, File(photo_temp))
                avatar.save()

            return HttpResponseRedirect(reverse("setup_activity"))

        return _get_profile_form(request, form=form, non_xhr=False)

    return _get_profile_form(request)

def getFromStorage(image, alias):
    url = image.thumbnail(alias)

    if "placeholder" in url:
        raise ThumbnailNotReadyException

    if "://" in url:
        url = url.split('://')[1]
    else:
        url = settings.BASE_URL + url
    url = 'http://' + urllib2.quote(url.encode('utf-8'))

    headers = {'User-Agent': 'Mozilla/5.0'}
    req = urllib2.Request(url, None, headers)
    img = NamedTemporaryFile(delete=True)
    img.write(urllib2.urlopen(req).read())
    img.flush()
    img.seek(0)
    return File(img)

def generate_export(export_type, extension, username, id_string,
                    export_id=None, filter_query=None):
    """
    Create appropriate export object given the export type
    """
    from odk_viewer.models import Export

    xform = XForm.objects.get(user__username=username, id_string=id_string)
    df_builder = _df_builder_for_export_type(export_type, username,
                                             id_string, filter_query)
    if hasattr(df_builder, 'get_exceeds_xls_limits')\
            and df_builder.get_exceeds_xls_limits():
        extension = 'xlsx'

    temp_file = NamedTemporaryFile(suffix=("." + extension))
    df_builder.export_to(temp_file.name)
    basename = "%s_%s.%s" % (
        id_string, datetime.now().strftime("%Y_%m_%d_%H_%M_%S"), extension)
    file_path = os.path.join(
        username, 'exports', id_string, export_type, basename)

    # TODO: if s3 storage, make private - how will we protect local storage??
    storage = get_storage_class()()
    # seek to the beginning as required by storage classes
    temp_file.seek(0)
    export_filename = storage.save(file_path, File(temp_file, file_path))
    temp_file.close()

    # create export object
    export, is_new = Export.objects.get_or_create(id=export_id, xform=xform,
                                                  export_type=export_type)

    dir_name, basename = os.path.split(export_filename)
    export.filename = basename
    export.save()
    return export

def create_media(media):
    """Download media link"""
    if is_valid_url(media.data_value):
        filename = media.data_value.split('/')[-1]
        data_file = NamedTemporaryFile()
        content_type = mimetypes.guess_type(filename)
        with closing(requests.get(media.data_value, stream=True)) as r:
            for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
                if chunk:
                    data_file.write(chunk)
        # seek to the end to measure, then rewind for the upload wrapper
        data_file.seek(0, os.SEEK_END)
        size = os.path.getsize(data_file.name)
        data_file.seek(0)
        media.data_value = filename
        media.data_file = InMemoryUploadedFile(
            data_file, 'data_file', filename, content_type,
            size, charset=None)

        return media

    return None

def clean_document(self):
    try:
        uploaded_file = self.cleaned_data['document']
        temp_doc = NamedTemporaryFile(mode='w+', delete=True)
        temp_doc.write(uploaded_file.read())
        temp_doc.flush()
        temp_doc.seek(0)
        temp_doc._size = uploaded_file._size
        temp_doc._content_type = uploaded_file.content_type
        validate_file(temp_doc)
        temp_doc.close()
    except forms.ValidationError:
        logger.error(u'User tried to upload a file with the wrong format.')
        raise
    return uploaded_file

def data_with_google(self, access_token):
    try:
        ret = urlopen("https://www.googleapis.com/plus/v1/people/me?"
                      + urlencode({'access_token': access_token})).read()
        email_ret = urlopen("https://www.googleapis.com/oauth2/v3/userinfo?"
                            + urlencode({'access_token': access_token,
                                         'fields': 'email'})).read()
        email = json.loads(email_ret)['email']
        user_data = json.loads(ret)
        if not user_data['isPlusUser'] or not email:
            return None, None
        google_id = user_data['id']
        name = user_data['displayName']
        try:
            locale = user_data['language']
        except KeyError:
            locale = "en"
        gender = user_data.get('gender', 'unknown')
        if gender == 'male':
            gender = 'M'
        elif gender == 'female':
            gender = 'F'
        # location = user_data.get('currentLocation', '')
        photo_url = user_data.get('image', {'url': None}).get('url')
        if photo_url:
            img_temp = NamedTemporaryFile()
            img_temp.write(urlopen(photo_url).read())
            img_temp.flush()
            img_temp.seek(0)
            photo = File(img_temp)
        else:
            photo = None
        data = {'name': name, 'locale': locale, 'email': email,
                'gender': gender, 'google_id': google_id}
        if photo:
            files = {'photo': photo}
        else:
            files = None
        return data, files
    except HTTPError:
        return None, None