def from_native(self, base64_data):
    """Deserialize ``base64_data`` into a file object for the parent field.

    Accepts ``None`` (passed through unchanged), a base64-encoded string
    (decoded into a ``ContentFile`` with a random 12-char name), or an
    already-uploaded file object (extension-checked and rewound).

    Raises:
        serializers.ValidationError: if the string is not valid base64.
    """
    if base64_data is None:
        data = base64_data
    # Check if this is a base64 string
    elif isinstance(base64_data, basestring):
        # Try to decode the file. Return a validation error if it fails.
        # Fix: b64decode raises TypeError on Python 2 but binascii.Error
        # (a ValueError subclass) on Python 3 -- catch both so bad input
        # always becomes a ValidationError instead of a 500.
        try:
            decoded_file = base64.b64decode(base64_data)
        except (TypeError, ValueError):
            raise serializers.ValidationError(_(u"Please upload a valid image."))
        # Generate file name: 12 random characters are more than enough.
        file_name = str(uuid.uuid4())[:12]
        # Derive and validate the extension from the decoded content.
        file_extension = self.get_file_extension(file_name, decoded_file)
        self.check_file_extension(file_extension)
        complete_file_name = file_name + "." + file_extension
        data = ContentFile(decoded_file, name=complete_file_name)
    else:
        # Already a file-like upload: validate extension, then rewind so
        # downstream consumers read from the start.
        data = base64_data
        file_extension = self.get_file_extension(data.name, data.read())
        self.check_file_extension(file_extension)
        data.seek(0)
    return super(Base64ImageField, self).from_native(data)
def serve_tracks_as_zipfile(request, pk):
    """
    Build and return a Zip archive of every Track in a Project.

    Entries are laid out as ``<song folder>/<group folder>/<track file>``.
    Based on https://github.com/thibault/django-zipview/
    """
    project = get_object_or_404(Project, pk=pk)
    stamp = now().astimezone(TZ).strftime("%Y-%m-%d %H-%M-%S")
    archive_name = "%s %s.zip" % (to_folder_name(project.title), stamp)
    buffer_file = ContentFile(b(""), name=archive_name)
    with ZipFile(buffer_file, mode="w", compression=ZIP_DEFLATED) as archive:
        for track in Track.objects.filter(group__song__project=project):
            entry = "{}/{}/{}".format(
                to_folder_name(track.group.song.title),
                to_folder_name(track.group.title),
                track.file.name.split("/")[-1],
            )
            archive.writestr(entry, track.file.read())
    total_size = buffer_file.tell()
    buffer_file.seek(0)
    response = HttpResponse(buffer_file, content_type="application/zip")
    response["Content-Disposition"] = "attachment; filename=\"%s\"" % archive_name
    response["Content-Length"] = total_size
    return response
def test_domain_import_with_mx_check(self, mock_gethostbyname, mock_query):
    """Check domain import when MX check is enabled."""
    reseller = core_factories.UserFactory(
        username="******", groups=("Resellers", ))
    self.client.force_login(reseller)
    self.set_global_parameter("valid_mxs", "1.2.3.4")
    self.set_global_parameter("domains_must_have_authorized_mx", True)
    mock_query.return_value = [utils.FakeDNSAnswer("mail.ok.com")]
    # First attempt: the MX resolves to an unauthorized address.
    mock_gethostbyname.return_value = "1.2.3.5"
    source = ContentFile(b"domain; domain1.com; 100; 1; True", name="domains.csv")
    resp = self.client.post(
        reverse("admin:domain_import"), {"sourcefile": source})
    self.assertContains(resp, "No authorized MX record found for domain")
    # Second attempt: same file rewound, MX now resolves to an authorized IP.
    mock_gethostbyname.return_value = "1.2.3.4"
    source.seek(0)
    resp = self.client.post(
        reverse("admin:domain_import"), {"sourcefile": source})
    self.assertTrue(Domain.objects.filter(name="domain1.com").exists())
def create_metadata_file_from_output(self, incoming_obj):
    """Persist scan-worker output as a metadata file record keyed by sha256.

    Only PickleableFileSample outputs are currently supported; anything
    else raises CantConvertMetadataFileException.
    """
    content = None
    if isinstance(incoming_obj, PickleableFileSample):
        content = ContentFile(incoming_obj.all_content)
    # todo what else would it be? should we raise an exception..?
    if not content:
        raise CantConvertMetadataFileException
    # Rewind before hashing, or the digest would cover a partial stream.
    content.seek(0)
    digest = self.model.filename_hasher(content.read())
    try:
        record = self.get(sha256=digest)
    except ObjectDoesNotExist:
        record = self.model(sha256=digest)
    record.filename = incoming_obj.original_filename
    content.seek(0)
    record.file.save(digest, content)
    record.save()
    return record
def to_internal_value(self, base64_data):
    """Deserialize ``base64_data`` into a file object for the parent field.

    Accepts ``None`` (passed through unchanged), a base64-encoded string
    (decoded into a ``ContentFile`` with a random 12-char name), or an
    already-uploaded file object (extension-checked and rewound).

    Raises:
        serializers.ValidationError: if the string is not valid base64.
    """
    if base64_data is None:
        data = base64_data
    # Check if this is a base64 string
    elif isinstance(base64_data, basestring):
        # Try to decode the file. Return a validation error if it fails.
        # Fix: b64decode raises TypeError on Python 2 but binascii.Error
        # (a ValueError subclass) on Python 3 -- catch both so bad input
        # always becomes a ValidationError instead of a 500.
        try:
            decoded_file = base64.b64decode(base64_data)
        except (TypeError, ValueError):
            raise serializers.ValidationError(
                _(u"Please upload a valid image."))
        # Generate file name: 12 random characters are more than enough.
        file_name = str(uuid.uuid4())[:12]
        # Derive and validate the extension from the decoded content.
        file_extension = self.get_file_extension(file_name, decoded_file)
        self.check_file_extension(file_extension)
        complete_file_name = file_name + "." + file_extension
        data = ContentFile(decoded_file, name=complete_file_name)
    else:
        # Already a file-like upload: validate extension and rewind.
        data = base64_data
        file_extension = self.get_file_extension(data.name, data.read())
        self.check_file_extension(file_extension)
        data.seek(0)
    return super(Base64ImageField, self).to_internal_value(data)
def get(self, request, *args, **kwargs):
    """Stream every file from ``get_files()`` to the client as a tar archive.

    Bug fix: the original caught UnicodeDecodeError with a bare ``pass``,
    then used the never-assigned ``data``/``lol`` variables, raising
    NameError. Unreadable files are now skipped with ``continue``.
    """
    temp_file = ContentFile(b(""), name=self.tarfile_name)
    with tarfile.TarFile(fileobj=temp_file, mode='w', debug=3) as tar_file:
        for file_ in self.get_files():
            file_name = file_.name
            try:
                data = file_.read()
            except UnicodeDecodeError:
                # Unreadable file: skip it rather than crash below.
                continue
            file_.seek(0, os.SEEK_SET)
            size = len(data)
            # Normalize the payload to an in-memory bytes buffer.
            try:
                if isinstance(data, bytes):
                    payload = BytesIO(data)
                else:
                    payload = BytesIO(data.encode())
            except UnicodeDecodeError:
                # Content that cannot be encoded: skip this file too.
                continue
            try:
                info = tar_file.gettarinfo(fileobj=file_)
            except UnsupportedOperation:
                # Not backed by a real OS file -- build the header by hand.
                info = tarfile.TarInfo(name=file_name)
            info.size = size
            tar_file.addfile(tarinfo=info, fileobj=payload)
    file_size = temp_file.tell()
    temp_file.seek(0)
    response = HttpResponse(temp_file, content_type='application/x-tar')
    response[
        'Content-Disposition'] = 'attachment; filename=%s' % self.tarfile_name
    response['Content-Length'] = file_size
    return response
def make_image_file(size=(1000, 1000), image_type='png'):
    """Create an in-memory image upload of the given pixel size and format,
    rewound and ready to be passed as a file argument."""
    raw = BytesIO()
    Image.new('RGB', size).save(raw, image_type)
    upload = ContentFile(raw.getvalue(), 'image.' + image_type)
    upload.seek(0)
    return upload
def get(self, request, *args, **kwargs):
    """Return all files from ``get_files()`` bundled into a tar archive.

    Bug fix: the original swallowed UnicodeDecodeError with ``pass`` and
    then referenced the unassigned ``data``/``lol`` names (NameError).
    Files that cannot be read or encoded are now skipped.
    """
    temp_file = ContentFile(b(""), name=self.tarfile_name)
    with tarfile.TarFile(fileobj=temp_file, mode='w', debug=3) as tar_file:
        for file_ in self.get_files():
            file_name = file_.name
            try:
                data = file_.read()
            except UnicodeDecodeError:
                # Unreadable file: skip it rather than crash below.
                continue
            file_.seek(0, os.SEEK_SET)
            size = len(data)
            # Normalize the payload to an in-memory bytes buffer.
            try:
                if isinstance(data, bytes):
                    payload = BytesIO(data)
                else:
                    payload = BytesIO(data.encode())
            except UnicodeDecodeError:
                # Content that cannot be encoded: skip this file too.
                continue
            try:
                info = tar_file.gettarinfo(fileobj=file_)
            except UnsupportedOperation:
                # Not backed by a real OS file -- build the header by hand.
                info = tarfile.TarInfo(name=file_name)
            info.size = size
            tar_file.addfile(tarinfo=info, fileobj=payload)
    file_size = temp_file.tell()
    temp_file.seek(0)
    response = HttpResponse(temp_file, content_type='application/x-tar')
    response['Content-Disposition'] = 'attachment; filename=%s' % self.tarfile_name
    response['Content-Length'] = file_size
    return response
def create_metadata_file_from_output(self, incoming_obj):
    """Convert scan-worker output into a stored metadata file, reusing any
    existing record with the same sha256 digest.

    Raises CantConvertMetadataFileException for unsupported output types
    (only PickleableFileSample is handled today).
    """
    wrapped = None
    if isinstance(incoming_obj, PickleableFileSample):
        wrapped = ContentFile(incoming_obj.all_content)
    # todo what else would it be? should we raise an exception..?
    if not wrapped:
        raise CantConvertMetadataFileException
    # Hashing must start from byte 0 or the digest would be wrong.
    wrapped.seek(0)
    sha = self.model.filename_hasher(wrapped.read())
    try:
        entry = self.get(sha256=sha)
    except ObjectDoesNotExist:
        entry = self.model(sha256=sha)
    entry.filename = incoming_obj.original_filename
    wrapped.seek(0)
    entry.file.save(sha, wrapped)
    entry.save()
    return entry
def put(self):
    """Create or replace the stored activity state for this request.

    Coerces ``self.state`` into a ContentFile, looks up (or creates) the
    matching activity_state row, enforces ETag preconditions on updates,
    and saves the payload into file storage.
    """
    agent = self.__get_agent(create=True)
    # Normalize the incoming state to a ContentFile: file-like object
    # first, then the raw value, then its str() form.
    # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit.
    try:
        state = ContentFile(self.state.read())
    except:
        try:
            state = ContentFile(self.state)
        except:
            state = ContentFile(str(self.state))
    # registration_id participates in the lookup key only when supplied.
    if self.registrationId:
        p,created = models.activity_state.objects.get_or_create(state_id=self.stateId,agent=agent,activity=self.activity,registration_id=self.registrationId)
    else:
        p,created = models.activity_state.objects.get_or_create(state_id=self.stateId,agent=agent,activity=self.activity)
    if not created:
        # Replacing an existing state: honour ETag preconditions, then
        # drop the old file so storage does not accumulate orphans.
        etag.check_preconditions(self.req_dict,p)
        p.state.delete() # remove old state file
    p.content_type = self.content_type
    # The ETag is computed over the full payload; rewind afterwards so
    # the save below writes from the start of the stream.
    p.etag = etag.create_tag(state.read())
    if self.updated:
        p.updated = self.updated
    state.seek(0)
    if created:
        p.save()
    fn = "%s_%s_%s" % (p.agent_id,p.activity_id, self.req_dict.get('filename', p.id))
    p.state.save(fn, state)
def put(self):
    """Create or replace an activity state record and its stored payload.

    The incoming ``self.state`` may be a file-like object, raw content,
    or any other value; it is coerced into a ContentFile before storage.
    """
    agent = self.__get_agent(create=True)
    # Fallback chain: read() a file-like, else wrap the raw value, else
    # wrap its string form.
    # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit.
    try:
        state = ContentFile(self.state.read())
    except:
        try:
            state = ContentFile(self.state)
        except:
            state = ContentFile(str(self.state))
    # registration_id is only part of the key when the request carries one.
    if self.registrationId:
        p, created = models.activity_state.objects.get_or_create(
            state_id=self.stateId, agent=agent, activity=self.activity,
            registration_id=self.registrationId)
    else:
        p, created = models.activity_state.objects.get_or_create(
            state_id=self.stateId, agent=agent, activity=self.activity)
    if not created:
        # Updating: check If-Match/If-None-Match and delete the old file.
        etag.check_preconditions(self.req_dict, p)
        p.state.delete()  # remove old state file
    p.content_type = self.content_type
    # ETag covers the full payload; rewind afterwards so save() writes
    # from the beginning.
    p.etag = etag.create_tag(state.read())
    if self.updated:
        p.updated = self.updated
    state.seek(0)
    if created:
        p.save()
    fn = "%s_%s_%s" % (p.agent_id, p.activity_id,
                       self.req_dict.get('filename', p.id))
    p.state.save(fn, state)
def write_to_csv_by_semicolon(rows):
    """Serialize *rows* into an in-memory, semicolon-delimited CSV buffer,
    rewound to the start so callers can read or store it directly."""
    # tracker_emit(filename)
    csv_buffer = ContentFile('')
    # csv_buffer.write(codecs.BOM_UTF8)
    writer = csv.writer(csv_buffer, delimiter=';')
    writer.writerows(_get_utf8_encoded_rows(rows))
    csv_buffer.seek(0)
    return csv_buffer
def store_rows(self, course_id, filename, rows):
    """
    Write *rows* (each an iterable of strings) to the storage backend in
    CSV format under the given course_id and filename.
    """
    out = ContentFile('')
    writer = csv.writer(out)
    writer.writerows(self._get_utf8_encoded_rows(rows))
    out.seek(0)
    self.store(course_id, filename, out)
def test_image(format='PNG'):
    """Build a 50x50 solid-black test image as a ContentFile named
    ``test_image.<ext>``, rewound for use as an upload."""
    raw = StringIO()
    Image.new("RGB", (50, 50), (0, 0, 0)).save(raw, format=format.upper())
    upload = ContentFile(raw.getvalue())
    upload.name = 'test_image.%s' % format.lower()
    upload.seek(0)
    return upload
def store_rows(self, course_id, filename, rows):
    """
    Encode *rows* (iterables of strings) to UTF-8 CSV and hand the rewound
    buffer to the storage backend under course_id/filename.
    """
    buffer_ = ContentFile('')
    csv.writer(buffer_).writerows(self._get_utf8_encoded_rows(rows))
    buffer_.seek(0)
    self.store(course_id, filename, buffer_)
def load_image(self, img):
    """Convert the uploaded *img* to PNG and store it as the background.

    Any previously stored background file is deleted before saving the
    new one under a random hex name.
    """
    content = ContentFile(img.read())
    content.seek(0)
    source = Image.open(content)
    buf = StringIO.StringIO()
    name = hex(randint(1, 10**50)) + ".png"
    source.save(buf, "PNG")
    buf.seek(0)
    # Idiom fix: use normal truthiness instead of calling __nonzero__()
    # directly -- identical semantics, conventional style.
    if self.background:
        self.background.delete()
    self.background.save(name, ContentFile(buf.read()))
def load_illustration(self, img):
    """Fit the uploaded *img* to 497x300, convert to JPEG, and store it as
    the illustration, replacing any previous one.
    """
    content = ContentFile(img.read())
    content.seek(0)
    source = Image.open(content)
    source = ImageOps.fit(source, (497, 300), Image.ANTIALIAS)
    buf = StringIO.StringIO()
    name = hex(randint(1, 10**50)) + ".jpg"
    source.save(buf, "JPEG")
    buf.seek(0)
    # Idiom fix: use normal truthiness instead of calling __nonzero__()
    # directly -- identical semantics, conventional style.
    if self.illustration:
        self.illustration.delete()
    self.illustration.save(name, ContentFile(buf.read()))
def store_rows(self, course_id, filename, rows):
    """
    Write *rows* (each an iterable of strings) to the storage backend as
    CSV, prefixed with a UTF-8 BOM so MS Excel 2013 detects the encoding.
    """
    out = ContentFile('')
    # Unicode signature (BOM) keeps MS Excel 2013 happy with UTF-8 CSVs.
    out.write(codecs.BOM_UTF8)
    writer = csv.writer(out)
    writer.writerows(self._get_utf8_encoded_rows(rows))
    out.seek(0)
    self.store(course_id, filename, out)
def put_profile(self, request_dict):
    """Create or replace an activity profile for the request, with ETag
    precondition checks and per-step logging.

    Raises:
        IDNotFoundError: when no global activity matches ``activityId``.
    """
    #Parse out profile from request_dict
    # NOTE(review): bare-except fallback chain: file-like read() first,
    # then the raw value, then its str() form.
    try:
        profile = ContentFile(request_dict['profile'].read())
    except:
        try:
            profile = ContentFile(request_dict['profile'])
        except:
            profile = ContentFile(str(request_dict['profile']))
    #Check if activity exists
    try:
        # Always want global version
        activity = models.activity.objects.get(activity_id=request_dict['activityId'], global_representation=True)
    except models.activity.DoesNotExist:
        err_msg = 'There is no activity associated with the id: %s' % request_dict['activityId']
        log_message(self.log_dict, err_msg, __name__, self.put_profile.__name__, True)
        update_parent_log_status(self.log_dict, 404)
        raise IDNotFoundError(err_msg)
    user = get_user_from_auth(request_dict.get('auth', None))
    #Get the profile, or if not already created, create one
    p,created = models.activity_profile.objects.get_or_create(profileId=request_dict['profileId'],activity=activity, user=user)
    if created:
        log_message(self.log_dict, "Created Activity Profile", __name__, self.put_profile.__name__)
    else:
        #If it already exists delete it
        # Preconditions are required on update so a stale client cannot
        # silently overwrite a newer profile.
        etag.check_preconditions(request_dict,p, required=True)
        p.profile.delete()
        log_message(self.log_dict, "Retrieved Activity Profile", __name__, self.put_profile.__name__)
    #Save profile content type based on incoming content type header and create etag
    p.content_type = request_dict['CONTENT_TYPE']
    p.etag = etag.create_tag(profile.read())
    #Set updated
    if request_dict['updated']:
        p.updated = request_dict['updated']
    #Go to beginning of file
    profile.seek(0)
    #If it didn't exist, save it
    if created:
        p.save()
    #Set filename with the activityID and profileID and save
    fn = "%s_%s" % (p.activity_id,request_dict.get('filename', p.id))
    p.profile.save(fn, profile)
    log_message(self.log_dict, "Saved Activity Profile", __name__, self.put_profile.__name__)
def store_rows(self, course_id, filename, rows):
    """
    Serialize *rows* (iterables of strings) to UTF-8 CSV — BOM-prefixed
    for MS Excel 2013 compatibility — and store under course_id/filename.
    """
    csv_buffer = ContentFile('')
    # Excel 2013 needs the unicode signature (BOM) to sniff UTF-8.
    csv_buffer.write(codecs.BOM_UTF8)
    csv.writer(csv_buffer).writerows(self._get_utf8_encoded_rows(rows))
    csv_buffer.seek(0)
    self.store(course_id, filename, csv_buffer)
def _get_data(self, filename='playbook.zip'):
    """Build a minimal valid playbook zip and the POST payload around it."""
    archive = ContentFile(b'file content', name=filename)
    with ZipFile(archive, 'w') as bundle:
        bundle.writestr('main.yml', b'test')
    archive.seek(0)
    return {
        'name': 'test playbook',
        'archive': archive,
        'entrypoint': 'main.yml',
        'parameters': [{'name': 'parameter1'}, {'name': 'parameter2'}],
    }
def test_do_upload(self):
    """Posting a file to fb_do_upload stores it and preserves its bytes."""
    url = reverse("fb_do_upload")
    upload = ContentFile(b"Test File content", name="test-file-upload.txt")
    stored_path = os.path.join(self.upload_dir, upload.name)
    self.assertFalse(default_storage.exists(stored_path))
    response = self.client.post(url, data={"folder": "", "Filedata": upload})
    self.assertEqual(200, response.status_code)
    self.assertTrue(default_storage.exists(stored_path))
    # Round-trip check: stored bytes match the original upload.
    with default_storage.open(stored_path) as stored_file:
        upload.seek(0)
        self.assertEqual(stored_file.read(), upload.read())
    # Cleanup uploaded file
    default_storage.delete(stored_path)
def clean(self, value):
    """Validate that *value* is a URL pointing at a readable image and
    return the fetched image bytes as a rewound ContentFile.

    Empty values pass through when the field is optional.
    """
    value = forms.URLField.clean(self, value)
    if not self.required and value in ['', None]:
        return value
    value = quote_unicode_url(value)
    fetched = ContentFile(urllib.urlopen(value).read())
    try:
        Image.open(fetched)
    except IOError:
        raise forms.ValidationError('Not a valid image.')
    else:
        fetched.seek(0)
        return fetched
def test_save_non_rewound(self):
    """Save file with position not at the beginning"""
    captured = dict(orig=b"Hello world!")
    source = ContentFile(captured['orig'])
    source.seek(5)  # deliberately mid-stream; backend must rewind

    def fake_put_object(cls, url, token, container, name=None,
                        contents=None, *args, **kwargs):
        captured['saved'] = contents.read()

    with patch('tests.utils.FakeSwift.put_object',
               new=classmethod(fake_put_object)):
        self.backend.save('test.txt', source)
    self.assertEqual(captured['saved'], captured['orig'])
def clean(self, value):
    """Clean a URL field value by downloading it and verifying it is an
    image; returns the image content as a rewound ContentFile (or the
    empty value unchanged when the field is optional)."""
    value = forms.URLField.clean(self, value)
    if not self.required and value in ['', None]:
        return value
    value = quote_unicode_url(value)
    thumb = ContentFile(urllib.urlopen(value).read())
    try:
        Image.open(thumb)
    except IOError:
        raise forms.ValidationError('Not a valid image.')
    else:
        thumb.seek(0)
        return thumb
def downloadFolderAsTar(path):
    """Package the directory at *path* into an in-memory tar archive and
    return it as an attachment HttpResponse."""
    base = os.path.basename(path)
    tar_buffer = ContentFile(b(""), name=base + '.tar')
    with tarfile.TarFile(fileobj=tar_buffer, mode='w', debug=3) as archive:
        archive.add(path, arcname=base)
    size = tar_buffer.tell()
    tar_buffer.seek(0)
    response = HttpResponse(tar_buffer, content_type='application/x-tar')
    response['Content-Disposition'] = 'attachment; filename=\"' + base + '.tar\"'
    response['Content-Length'] = size
    return response
def to_internal_value(self, data):
    """Accept either a media URL (re-read from MEDIA_ROOT) or an in-memory
    upload; anything else yields None.
    """
    if is_url(data):
        try:
            # Map the URL back to a path under MEDIA_ROOT and reload it.
            path = data.split(settings.MEDIA_URL)[1]
            data_file = open('%s/%s' % (settings.MEDIA_ROOT, path), 'rb')
            data = ContentFile(data_file.read(), data_file.name)
        except Exception as e:
            # NOTE(review): on failure `data` stays the original URL string
            # and is handed to the parent field as-is -- confirm intended.
            pass
    elif isinstance(data, InMemoryUploadedFile):
        # Rewind so the parent field reads from the start.
        data.seek(0)
    else:
        return None
    return super(CustomImageField, self).to_internal_value(data)
def test_from_file(self):
    """Creating a blob twice from identical content dedupes to one record."""
    source = ContentFile('foo bar'.encode('utf-8'))
    first = FileBlob.from_file(source)
    assert first.path
    source.seek(0)
    second = FileBlob.from_file(source)
    # deep check: same content must resolve to the identical blob row
    assert first.id == second.id
    assert first.checksum == second.checksum
    assert first.path == second.path
def generate_image(self):
    """
    Generate QR image and get its binary data.

    Returns
    -------
    bytes
        Binary data of the png image file which can directly be returned
        to the user
    """
    qr_img = self._generate_image()
    png_buffer = ContentFile(b'', name='qr.png')
    qr_img.save(png_buffer, 'png')
    png_buffer.seek(0)
    return png_buffer.read()
def generate_image(self):
    """
    Render the QR code to PNG in memory and return its raw bytes.

    Returns
    -------
    bytes
        PNG file contents suitable for a direct HTTP response body.
    """
    rendered = self._generate_image()
    out = ContentFile(b'', name='qr.png')
    rendered.save(out, 'png')
    out.seek(0)
    return out.read()
def upload_pic(pic, code, up_type='ava'):
    """Upload an image/video file to the UpYun bucket matching *up_type*.

    Returns True on success (or when the pic is a 'default' placeholder),
    False on any failure.
    """
    #pdb.set_trace()
    try:
        ext_name = ''
        # Pick the bucket (and target filename suffix) per upload type.
        if up_type == 'ava':
            conn = UpYun('one9inava', 'one9', 'one9in1234')
        elif up_type == 'vid':
            conn = UpYun('vid19', 'one9', 'one9in1234')
            ext_name = '_cpp.mp4'
        elif up_type == 'avd':
            conn = UpYun('vid19', 'one9', 'one9in1234')
            ext_name = '_ava.mp4'
        else:
            conn = UpYun('cpp19', 'one9', 'one9in1234')
        if 'http' in pic.name:
            # Remote file: download it into memory before uploading.
            data = ContentFile(urllib2.urlopen(pic.name).read())
            data.seek(0)
        elif 'default' in pic.name:
            # Placeholder asset: nothing to upload.
            return True
        else:
            try:
                pic.seek(0)
                data = pic
            except:
                print 'no pic'
                return False
        #conn.setContentMD5(md5file(pic))
        #result = conn.writeFile('%d' % code, pic)
        #data.write(pic.read())
        #data.seek(0)
        conn.setContentMD5(md5file(data))
        #result = conn.deleteFile('%d' % code)
        result = conn.writeFile('%s%s' % (code, ext_name), data)
        print result
        data.close()
        if result:
            print 'ok!!'
            return True
        else:
            print 'nok'
            return False
    except Exception, ex:
        print Exception,":",ex
        return False
def upload_pic(pic, code, up_type='ava'):
    """Push *pic* to the UpYun bucket selected by *up_type*, keyed by *code*.

    'default' placeholder files are treated as already-uploaded (True);
    any error yields False.
    """
    #pdb.set_trace()
    try:
        ext_name = ''
        # Bucket and filename suffix depend on the upload type.
        if up_type == 'ava':
            conn = UpYun('one9inava', 'one9', 'one9in1234')
        elif up_type == 'vid':
            conn = UpYun('vid19', 'one9', 'one9in1234')
            ext_name = '_cpp.mp4'
        elif up_type == 'avd':
            conn = UpYun('vid19', 'one9', 'one9in1234')
            ext_name = '_ava.mp4'
        else:
            conn = UpYun('cpp19', 'one9', 'one9in1234')
        if 'http' in pic.name:
            # Remote source: fetch the bytes into memory first.
            data = ContentFile(urllib2.urlopen(pic.name).read())
            data.seek(0)
        elif 'default' in pic.name:
            # Placeholder asset: nothing to upload.
            return True
        else:
            try:
                pic.seek(0)
                data = pic
            except:
                print 'no pic'
                return False
        #conn.setContentMD5(md5file(pic))
        #result = conn.writeFile('%d' % code, pic)
        #data.write(pic.read())
        #data.seek(0)
        conn.setContentMD5(md5file(data))
        #result = conn.deleteFile('%d' % code)
        result = conn.writeFile('%s%s' % (code, ext_name), data)
        print result
        data.close()
        if result:
            print 'ok!!'
            return True
        else:
            print 'nok'
            return False
    except Exception, ex:
        print Exception, ":", ex
        return False
def test_submit_files(self):
    """
    Submitted files should be uploaded to the storage backend.
    """
    payload = self.valid_payload.copy()
    attachment = ContentFile("TESTING", name="test")
    attachment.seek(0)
    payload["upload"] = attachment
    response = self._submit(payload)
    self.assertEqual(response["return_code"], 0)  # success
    # The storage backend must now contain the hashed upload key.
    _, files = default_storage.listdir("tmp/")
    key = make_hashkey(payload["xqueue_header"] + "upload")
    self.assertIn(key, files)
def test_submit_many_files(self):
    '''
    Submitted files should be uploaded to the storage backend.
    '''
    payload = self.valid_payload.copy()
    attachment = ContentFile('TESTING', name='test')
    attachment.seek(0)
    payload['upload'] = attachment
    response = self._submit(payload)
    self.assertEqual(response['return_code'], 0)  # success
    # Verify the hashed upload key landed in the storage backend.
    _, files = default_storage.listdir('tmp/')
    key = make_hashkey(payload['xqueue_header'] + 'upload')
    self.assertIn(key, files)
def test_save_non_rewound(self):
    """Save file with position not at the beginning"""
    captured = dict(orig=b"Hello world!")
    source = ContentFile(captured['orig'])
    source.seek(5)  # deliberately mid-stream; backend must rewind

    def fake_put_object(cls, url, token, container, name=None,
                        contents=None, content_length=None, *args, **kwargs):
        captured['saved'] = contents.read()
        captured['size'] = content_length

    with patch('tests.utils.FakeSwift.put_object',
               new=classmethod(fake_put_object)):
        self.backend.save('test.txt', source)
    self.assertEqual(captured['saved'], captured['orig'])
    self.assertEqual(captured['size'], len(captured['orig']))
def put_profile(self, request_dict):
    """Create or replace an activity profile keyed by profileId+activity.

    Raises:
        IDNotFoundError: when no activity matches ``activityId``.
    """
    #Parse out profile from request_dict
    # NOTE(review): bare-except fallback chain: file-like read() first,
    # then the raw value, then its str() form.
    try:
        profile = ContentFile(request_dict['profile'].read())
    except:
        try:
            profile = ContentFile(request_dict['profile'])
        except:
            profile = ContentFile(str(request_dict['profile']))
    #Check if activity exists
    try:
        activity = models.activity.objects.get(
            activity_id=request_dict['activityId'])
    except models.activity.DoesNotExist:
        raise IDNotFoundError(
            'There is no activity associated with the id: %s' %
            request_dict['activityId'])
    #Get the profile, or if not already created, create one
    p, created = models.activity_profile.objects.get_or_create(
        profileId=request_dict['profileId'], activity=activity)
    #If it already exists delete it
    if not created:
        # Preconditions are required on update so a stale client cannot
        # silently overwrite a newer profile.
        etag.check_preconditions(request_dict, p, required=True)
        p.profile.delete()
    #Save profile content type based on incoming content type header and create etag
    p.content_type = request_dict['CONTENT_TYPE']
    p.etag = etag.create_tag(profile.read())
    #Set updated
    if request_dict['updated']:
        p.updated = request_dict['updated']
    #Go to beginning of file
    profile.seek(0)
    #If it didn't exist, save it
    if created:
        p.save()
    #Set filename with the activityID and profileID and save
    fn = "%s_%s" % (p.activity_id, request_dict.get('filename', p.id))
    p.profile.save(fn, profile)
def generate(_xmodule_instance_args, _entry_id, course_id, task_input, action_name):
    """
    For a given `course_id`, generate a CSV file containing all student
    answers to a given problem, and store using a `ReportStore`.
    """
    start_time = time()
    start_date = datetime.now(UTC)
    num_reports = 1
    task_progress = TaskProgress(action_name, num_reports, start_time)
    current_step = {'step': 'XblockCompletion - Calculating students answers to problem'}
    task_progress.update_task_state(extra_meta=current_step)
    data = task_input.get('data')
    # Only 'problem' xblocks are included in this report.
    filter_types = ['problem']
    students = XblockCompletionView().get_all_enrolled_users(data['course'])
    course_structure = get_data_course(data['course'])
    report_store = ReportStore.from_config('GRADES_DOWNLOAD')
    # data['format'] truthy selects the summarized variant of the report.
    csv_name = 'Reporte_de_Preguntas'
    if data['format']:
        csv_name = 'Reporte_de_Preguntas_Resumen'
    report_name = u"{course_prefix}_{csv_name}_{timestamp_str}.csv".format(
        course_prefix=course_filename_prefix_generator(course_id),
        csv_name=csv_name,
        timestamp_str=start_date.strftime("%Y-%m-%d-%H%M")
    )
    output_buffer = ContentFile('')
    if six.PY2:
        # BOM keeps MS Excel happy with UTF-8 content on Python 2.
        output_buffer.write(codecs.BOM_UTF8)
    csvwriter = csv.writer(output_buffer)
    student_states = XblockCompletionView().get_all_states(data['course'], filter_types)
    # _build_student_data writes the rows and returns the writer back.
    csvwriter = XblockCompletionView()._build_student_data(data, students, course_structure, student_states, filter_types, csvwriter)
    current_step = {'step': 'XblockCompletion - Uploading CSV'}
    task_progress.update_task_state(extra_meta=current_step)
    # Rewind before handing the buffer to the report store.
    output_buffer.seek(0)
    report_store.store(course_id, report_name, output_buffer)
    current_step = {
        'step': 'XblockCompletion - CSV uploaded',
        'report_name': report_name,
    }
    return task_progress.update_task_state(extra_meta=current_step)
def get(self, request, *args, **kwargs):
    """Bundle every file from ``get_files()`` into a zip archive and
    return it as an attachment HttpResponse."""
    zip_buffer = ContentFile(b(""), name=self.zipfile_name)
    with zipfile.ZipFile(zip_buffer, mode='w',
                         compression=zipfile.ZIP_DEFLATED) as archive:
        for entry in self.get_files():
            archive.writestr(entry.name, entry.read())
    length = zip_buffer.tell()
    zip_buffer.seek(0)
    response = HttpResponse(zip_buffer, content_type='application/zip')
    response['Content-Disposition'] = 'attachment; filename=%s' % self.zipfile_name
    response['Content-Length'] = length
    return response
def test_delete_does_not_remove_shared_blobs(self):
    """Deleting one File must keep blobs still referenced by another File."""
    shared = ContentFile(b"foo bar")
    baz_file = File.objects.create(name="baz-v1.js", type="default", size=7)
    baz_file.putfile(shared, 3)
    baz_id = baz_file.id
    # Rewind the file so we can use it again.
    shared.seek(0)
    raz_file = File.objects.create(name="baz-v2.js", type="default", size=7)
    raz_file.putfile(shared, 3)
    with self.tasks(), self.capture_on_commit_callbacks(execute=True):
        baz_file.delete()
    # The deleted file's blob indexes should be gone...
    assert FileBlobIndex.objects.filter(file_id=baz_id).count() == 0
    # ...while the surviving file's blob indexes remain intact.
    assert len(raz_file.blobs.all()) == 3
def store_spreadsheet(self, course_id, filename, ws1_title, ws1_rows, additional_sheets=None):
    """
    Create and store a new spreadsheet and store it in the selected storage.

    Args:
        course_id: used for storing purposes
        filename (str): name of the spreadsheet
        ws1_title (str): title of the first worksheet
        ws1_rows (list): rows for the first worksheet
        additional_sheets (list): a list of additional worksheets. Each item
            is a dictionary containing a name and a list of rows. May be
            None (no extra sheets).
    """
    log.info('Creating new spreadsheet')
    wb = Workbook()
    ws1 = wb.active
    ws1.title = ws1_title
    for row in ws1_rows:
        ws1.append(row)
    # Bug fix: the default additional_sheets=None previously crashed with
    # "TypeError: 'NoneType' object is not iterable".
    for additional_sheet in (additional_sheets or []):
        additional_ws = wb.create_sheet(additional_sheet.get('title'))
        for row in additional_sheet.get('rows'):
            additional_ws.append(row)
    output_buffer = ContentFile('')
    archive = ZipFile(output_buffer, 'w', ZIP_DEFLATED, allowZip64=True)
    writer = ExcelWriter(wb, archive)
    try:
        writer.write_data()
    finally:
        # Always close the archive so the zip central directory is written.
        archive.close()
    output_buffer.seek(0)
    log.info('Saving spreadsheet {}'.format(filename))
    self.store(course_id, filename, output_buffer)
def put_profile(self, request_dict):
    """Create or replace an activity profile keyed by profileId+activity.

    Raises:
        IDNotFoundError: when no activity matches ``activityId``.
    """
    # Coerce the incoming profile to a ContentFile: file-like read()
    # first, then the raw value, then its str() form.
    try:
        profile = ContentFile(request_dict['profile'].read())
    except:
        try:
            profile = ContentFile(request_dict['profile'])
        except:
            profile = ContentFile(str(request_dict['profile']))
    #Check if activity exists
    try:
        activity = models.activity.objects.get(activity_id=request_dict['activityId'])
    except models.activity.DoesNotExist:
        raise IDNotFoundError('There is no activity associated with the id: %s' % request_dict['activityId'])
    #Get the profile, or if not already created, create one
    p,created = models.activity_profile.objects.get_or_create(profileId=request_dict['profileId'],activity=activity)
    #If it already exists delete it
    if not created:
        # Preconditions are required on update so a stale client cannot
        # silently overwrite a newer profile.
        etag.check_preconditions(request_dict,p, required=True)
        p.profile.delete()
    #Save profile content type based on incoming content type header and create etag
    p.content_type = request_dict['CONTENT_TYPE']
    p.etag = etag.create_tag(profile.read())
    #Set updated
    if request_dict['updated']:
        p.updated = request_dict['updated']
    #Go to beginning of file
    profile.seek(0)
    #If it didn't exist, save it
    if created:
        p.save()
    #Set filename with the activityID and profileID and save
    fn = "%s_%s" % (p.activity_id,request_dict.get('filename', p.id))
    p.profile.save(fn, profile)
def _make_images(self):
    """Derive the small and large JPEG thumbnails from original_image and
    attach them to their fields (without saving the model)."""
    source = Image.open(self.original_image)

    small_buf = StringIO.StringIO()
    small_copy = source.copy()
    small_copy.thumbnail(SMALL_PHOTO_SIZE, Image.ANTIALIAS)
    small_copy.save(small_buf, format="JPEG")
    small_file = ContentFile(small_buf.getvalue())
    small_file.seek(0)
    self.small_image.save(_small_image_file_name(self, ""), small_file,
                          save=False)

    large_buf = StringIO.StringIO()
    large_copy = source.copy()
    large_copy.thumbnail(LARGE_PHOTO_SIZE, Image.ANTIALIAS)
    large_copy.save(large_buf, format="JPEG")
    large_file = ContentFile(large_buf.getvalue())
    large_file.seek(0)
    self.large_image.save(_large_image_file_name(self, ""), large_file,
                          save=False)

    # Leave the source field rewound for any later reads.
    self.original_image.seek(0)
def load_shot(self, img):
    """Build and store two JPEG renditions of *img*: a 100x70 thumbnail in
    ``shot_small`` and a 500x350 version in ``shot_big``.

    Existing files in either field are deleted before saving.
    """
    content = ContentFile(img.read())
    content.seek(0)
    # Small rendition.
    source = Image.open(content)
    source = ImageOps.fit(source, (100, 70), Image.ANTIALIAS)
    buf = StringIO.StringIO()
    name = hex(randint(1, 10**50)) + ".jpg"
    source.save(buf, "JPEG")
    buf.seek(0)
    # Idiom fix (both fields): rely on normal truthiness instead of
    # calling __nonzero__() directly -- identical semantics.
    if self.shot_small:
        self.shot_small.delete()
    self.shot_small.save(name, ContentFile(buf.read()))
    # Large rendition, re-read from the rewound original bytes.
    content.seek(0)
    source = Image.open(content)
    source = ImageOps.fit(source, (500, 350), Image.ANTIALIAS)
    buf = StringIO.StringIO()
    name = hex(randint(1, 10**50)) + ".jpg"
    source.save(buf, "JPEG")
    buf.seek(0)
    if self.shot_big:
        self.shot_big.delete()
    self.shot_big.save(name, ContentFile(buf.read()))
def put_profile(self, request_dict):
    """Create or replace an agent profile keyed by profileId+agent.

    The incoming profile may be file-like, raw content, or any value
    coercible via str(); it is wrapped in a ContentFile before storage.
    """
    # Fallback chain: read() a file-like, else wrap the raw value, else
    # wrap its string form. NOTE(review): bare except is very broad.
    try:
        profile = ContentFile(request_dict['profile'].read())
    except:
        try:
            profile = ContentFile(request_dict['profile'])
        except:
            profile = ContentFile(str(request_dict['profile']))
    p,created = agent_profile.objects.get_or_create(profileId=request_dict['profileId'],agent=self.agent)
    if not created:
        # Updating: check ETag preconditions and drop the old file so
        # storage does not accumulate orphans.
        etag.check_preconditions(request_dict,p, required=True)
        p.profile.delete()
    p.content_type = request_dict['CONTENT_TYPE']
    # ETag covers the full payload; rewind afterwards so save() writes
    # from the beginning.
    p.etag = etag.create_tag(profile.read())
    if request_dict['updated']:
        p.updated = request_dict['updated']
    profile.seek(0)
    if created:
        p.save()
    fn = "%s_%s" % (p.agent_id,request_dict.get('filename', p.id))
    p.profile.save(fn, profile)
def put(self):
    """Create or replace an activity state record, logging each outcome.

    Clean-ups: the redundant ``elif not created`` after ``if created`` is
    now a plain ``else``; the dead commented-out duplicate of the
    precondition check is removed; bare ``except:`` is narrowed to
    ``except Exception:`` so KeyboardInterrupt/SystemExit propagate.
    """
    agent = self.__get_agent(create=True)
    # Normalize the incoming state to a ContentFile: file-like read()
    # first, then the raw value, then its str() form.
    try:
        state = ContentFile(self.state.read())
    except Exception:
        try:
            state = ContentFile(self.state)
        except Exception:
            state = ContentFile(str(self.state))
    # registration_id participates in the lookup key only when supplied.
    if self.registrationId:
        p, created = models.activity_state.objects.get_or_create(
            state_id=self.stateId, agent=agent, activity=self.activity,
            registration_id=self.registrationId, user=self.user)
    else:
        p, created = models.activity_state.objects.get_or_create(
            state_id=self.stateId, agent=agent, activity=self.activity,
            user=self.user)
    if created:
        log_message(self.log_dict, "Created Activity State", __name__, self.put.__name__)
    else:
        # Replacing: honour ETag preconditions, then drop the old file.
        etag.check_preconditions(self.req_dict, p)
        p.state.delete()  # remove old state file
        log_message(self.log_dict, "Retrieved Activity State", __name__, self.put.__name__)
    p.content_type = self.content_type
    # ETag covers the full payload; rewind afterwards so save() writes
    # from the start of the stream.
    p.etag = etag.create_tag(state.read())
    if self.updated:
        p.updated = self.updated
    state.seek(0)
    if created:
        p.save()
    fn = "%s_%s_%s" % (p.agent_id, p.activity_id, self.req_dict.get('filename', p.id))
    p.state.save(fn, state)
    log_message(self.log_dict, "Saved Activity State", __name__, self.put.__name__)
def handle_noargs(self, **options):
    """Re-upload the avatar of user id 301 to the UpYun 'ava19' bucket.

    Remote ('http') avatars are downloaded first; 'default' placeholders
    are skipped; unreadable local avatars are reported and skipped.
    """
    profiles = Profile.objects.filter(user__id=301)
    for profile in profiles:
        conn = UpYun('ava19', 'one9', 'one9in1234')
        data = None
        if 'http' in profile.avatar.name:
            # Remote avatar: download it into an in-memory file first.
            #pdb.set_trace()
            #data_img = cStringIO.StringIO(urllib2.urlopen(profile.avatar.name).read())
            data = ContentFile(urllib2.urlopen(profile.avatar.name).read())
            #data = tempfile.TemporaryFile()
            #data.write(urllib2.urlopen(profile.avatar.name).read())
            data.seek(0)
        elif 'default' in profile.avatar.name:
            # Placeholder avatar: nothing to upload.
            continue
        else:
            try:
                profile.avatar.seek(0)
                data = profile.avatar
            except:
                print 'Warning -- Profile : %d avatar lost...' % profile.user.id
                continue
        conn.setContentMD5(md5file(data))
        result = conn.writeFile('%d' % profile.user.id, data)
        # Only close buffers we created ourselves (the downloaded copy).
        if 'http' in profile.avatar.name:
            data.close()
        if result:
            print '-- Profile : %d success...' % profile.user.id
        else:
            print 'Error -- Profile : %d ...' % profile.user.id
    print 'Finish.'
def put_profile(self, request_dict):
    """Create or replace the agent-profile document named by ``profileId``.

    Coerces ``request_dict['profile']`` into a :class:`ContentFile`
    (file-like object, raw payload, or ``str()`` fallback), then
    gets-or-creates the ``agent_profile`` row for this agent.  When an
    existing profile is replaced, ETag preconditions are required and the
    old file is deleted first.

    Side effects: writes a file through the model's storage backend.
    """
    # Accept a file-like object first, then a raw payload, and finally
    # fall back to the str() representation.  Narrowed from bare
    # ``except:`` so KeyboardInterrupt/SystemExit are not swallowed.
    try:
        profile = ContentFile(request_dict['profile'].read())
    except Exception:
        try:
            profile = ContentFile(request_dict['profile'])
        except Exception:
            profile = ContentFile(str(request_dict['profile']))
    p, created = agent_profile.objects.get_or_create(
        profileId=request_dict['profileId'], agent=self.agent)
    if not created:
        # Replacing an existing profile: ETag preconditions are mandatory
        # here (required=True), and the old file is removed first.
        etag.check_preconditions(request_dict, p, required=True)
        p.profile.delete()
    p.content_type = request_dict['CONTENT_TYPE']
    p.etag = etag.create_tag(profile.read())
    if request_dict['updated']:
        p.updated = request_dict['updated']
    # create_tag() read to EOF; rewind before persisting the content.
    profile.seek(0)
    if created:
        p.save()
    fn = "%s_%s" % (p.agent_id, request_dict.get('filename', p.id))
    p.profile.save(fn, profile)
    # NOTE(review): fragment — the enclosing ``def`` and the ``try:`` that
    # this ``except`` belongs to are outside the visible chunk.
    logger.debug("Saved as %s" % ssht)
except Exception, e:
    # Image save failed: log it, record a ScreenshotLog row, and discard
    # the half-created screenshot object before bailing out.
    logger.error("Image save failed.")
    logger.error(str(e))
    ScreenshotLog.objects.create(update=update, site=site, message_type="error", message="Image save failed: %s" % e)
    ssht.delete()
    return False
# Success path: mark the screenshot as having an image and persist it.
ssht.has_image = True
ssht.timestamp = timestamp
ssht.save()
# Reopen image as PIL object
jpg_obj.seek(0)
image = Image.open(jpg_obj)
# Crop it to 1000px tall, starting from the top.
# PIL crop box is (left, upper, right, lower); ``y_offset`` lets a site
# scroll down before cropping (defaults to 0 when unset).
crop = image.crop((
    0,
    # Unless we provide an offset to scroll down before cropping
    getattr(ssht.site, "y_offset", 0),
    ssht.image.width,
    1000))
# Prep for db: write the crop to the repo dir, then reopen it as a
# Django File for the database save.
# NOTE(review): text-mode 'w'/'r' for JPEG bytes — works on Py2/POSIX
# only; should be 'wb'/'rb'.  Left unchanged in this doc-only pass.
crop_name = ssht.get_crop_name()
crop_path = os.path.join(settings.REPO_DIR, crop_name)
crop.save(open(crop_path, 'w'), 'JPEG')
crop_data = File(open(crop_path, 'r'))
def post(self, request):
    """Ingest a zip of photos: build an album, group photos into Day rows,
    and upload full/decent/thumbnail renditions of each image to S3.

    The uploaded file's name (minus ``.zip``) becomes the album name.
    For every image in the archive: EXIF data determines orientation and
    capture date; three renditions are produced (full, half-size
    "decent", square thumbnail) and pushed to the ``deepic`` S3 bucket
    under ``album/day/photo`` keys.  Photos are then attached to Day rows
    and the Days to the PhotoAlbum.  Returns the new album id as JSON.

    NOTE(review): Python 2 code — ``/`` below is integer division and
    ``iteritems`` is used; porting to Py3 changes the resize arithmetic.
    """
    f = zipfile.ZipFile(request.FILES['file'])
    album_name = str(request.FILES['file']).split('.zip')[0]
    # EXIF tag ids kept for reference:
    # ImageDescription: 0x010e
    # DateTimeOriginal: 0x9003
    # GPSLongitude: 0x0004
    # GPSLatitude: 0x0002
    # Orientation: 1 == Horizontal, 6 === Vertical
    # 4000x3000 , 2664 x 4000
    photo_album = PhotoAlbum(name=album_name)
    photo_album.save()
    photos = []
    # Maps Day.id -> list of Photo rows created for that day.
    day_photos_map = {}
    conn = S3Connection(self.S3_ACCESS_KEY_ID, self.S3_SECRET_ACCESS_KEY, host='s3-us-west-1.amazonaws.com')
    bucket = conn.get_bucket('deepic')
    k = Key(bucket)
    for filename in f.namelist():
        # Read the archive member fully into memory and open it with PIL.
        full_image_fp = ContentFile(f.read(filename))
        full_image = Image.open(full_image_fp)
        exif_data = get_exif_data(full_image)
        horizontal_image = exif_data['Orientation'] == 1
        # 2 tuple of (width, height)
        full_image_size = full_image.size
        full_image_width = full_image_size[0]
        full_image_height = full_image_size[1]
        # So this is something like 4000 x 3000, or 4000 x 2664
        # The decent resize is simply 1/4th of the full res
        # The thumbnail needs to be a square: based on the dimensions of
        # the original, crop a centered square then downscale it.
        # Decent resize: halve each dimension (1/4 of the original area).
        decent_width = full_image_size[0] / 2
        decent_height = full_image_size[1] / 2
        smaller_dimension = min(full_image_size[0], full_image_size[1])
        # NOTE(review): assumes landscape input (width >= height) — for a
        # portrait image left_crop goes negative; confirm inputs.
        left_crop = (full_image_width - full_image_height) / 2
        # L, U, R, Lower
        crop_box = (left_crop, 0, full_image_width - left_crop, full_image_height)
        crop_resize = (smaller_dimension / 2, smaller_dimension / 2)
        decent_resize = (decent_width, decent_height)
        full_copy = full_image.copy()
        decent_copy = full_copy.resize(decent_resize)
        cropped_image = full_copy.crop(crop_box)
        thumbnail_copy = cropped_image.resize(crop_resize)
        if not horizontal_image:
            # Non-horizontal EXIF orientation: rotate the derived images.
            thumbnail_copy = thumbnail_copy.rotate(270)
            decent_copy = decent_copy.rotate(270)
        # Serialize both renditions to in-memory JPEG buffers.
        thumbnail_io = StringIO.StringIO()
        thumbnail_copy.save(thumbnail_io, format='JPEG')
        thumbnail_image_fp = ContentFile(thumbnail_io.getvalue())
        decent_io = StringIO.StringIO()
        decent_copy.save(decent_io, format='JPEG')
        decent_image_fp = ContentFile(decent_io.getvalue())
        # Capture timestamp from EXIF drives the Day grouping below.
        datetime_str = exif_data['DateTimeOriginal']
        photo_datetime = datetime.datetime.strptime(datetime_str, '%Y:%m:%d %H:%M:%S')
        photo_date = photo_datetime.date()
        day_results = Day.objects.filter(date=photo_date)
        if day_results:
            if len(day_results) == 1:
                d = day_results.get()
                if d.photoalbum_set.all():
                    # If the photoalbum_set exists, it's part of a different album, so create a new Date
                    day = Day(date=photo_date)
                    day.save()
                else:
                    day = d
            else:
                # Several Day rows for this date: reuse an unattached one
                # or one already belonging to this album; else make a new one.
                found = False
                for d in day_results:
                    if not d.photoalbum_set.all():
                        day = d
                        found = True
                        break
                    elif d.photoalbum_set.all()[0].id == photo_album.id:
                        day = d
                        found = True
                        break
                if not found:
                    day = Day(date=photo_date)
                    day.save()
        else:
            day = Day(date=photo_date)
            day.save()
        if day.id not in day_photos_map:
            day_photos_map[day.id] = []
        photo = Photo(date=photo_datetime)
        photo.save()
        # 3 to save: the thumbnail, the decent, and the full
        k.key = '{0}/{1}/{2}.full.JPG'.format(photo_album.id, day.id, photo.id)
        # PIL reads advanced the stream; rewind before uploading.
        full_image_fp.seek(0)
        k.set_contents_from_file(full_image_fp, cb=self.percent_cb, num_cb=10)
        full_image_fp.close()
        k.key = '{0}/{1}/{2}.decent.JPG'.format(photo_album.id, day.id, photo.id)
        k.set_contents_from_file(decent_image_fp, cb=self.percent_cb, num_cb=10)
        # decent_image_fp.close()
        k.key = '{0}/{1}/{2}.thumbnail.JPG'.format(photo_album.id, day.id, photo.id)
        k.set_contents_from_file(thumbnail_image_fp, cb=self.percent_cb, num_cb=10)
        thumbnail_image_fp.close()
        logger.info('Uploaded photos w/ID: \"{0}\"'.format(photo.id))
        photos.append(photo)
        day_photos_map[day.id].append(photo)
    # Now map photos to days, and days to an Album
    days = []
    for day_id, photos in day_photos_map.iteritems():
        day = Day.objects.get(id=day_id)
        day.photos.add(*photos)
        day.save()
        days.append(day)
    photo_album.days.add(*days)
    photo_album.save()
    return HttpResponse(json.dumps({'photo_album': photo_album.id}), content_type='application/json')
class TestUploadForm(TestCase):
    """Tests for the /upload view: rendering, successful upload,
    validation errors, the per-user file quota, and duplicate-file
    messages.  Two users are created: ``vasya`` (logged in for every
    test) and ``petya`` (owner of a pre-existing duplicate file)."""

    @classmethod
    def setUpTestData(cls):
        # Shared fixture users, created once per class.
        cls.vasya = User.objects.create_user(username='******', password='******')
        cls.petya = User.objects.create_user(username='******', password='******')

    def setUp(self):
        # Fresh in-memory upload payload per test; name feeds FileMetaData.filename.
        self.testcontent = ContentFile('Hello, World!', name='test.txt')
        self.assertTrue(self.client.login(username='******', password='******'))

    def test_get(self):
        """GET renders the upload form template."""
        response = self.client.get('/upload')
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'filebox/filemetadata_form.html')

    def test_upload(self):
        """A valid POST redirects home and stores name + content intact."""
        response = self.client.post('/upload', { 'content': self.testcontent }, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertRedirects(response, '/')
        md = FileMetaData.objects.get(user=self.vasya)
        self.assertEqual(md.filename, self.testcontent.name)
        # The POST consumed the file; rewind before comparing bytes.
        self.testcontent.seek(0)
        self.assertEqual(md.content.content.read(), self.testcontent.read())

    def test_empty(self):
        """POST without a file re-renders the form with a required-field error."""
        response = self.client.post('/upload')
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'filebox/filemetadata_form.html')
        self.assertFormError(response, 'form', 'content', u'Обязательное поле.')

    def test_maxfiles(self):
        """Uploading past FILEBOX_MAX_FILES_PER_USER yields a form-level error."""
        filecontent = FileContent.objects.create(content=self.testcontent)
        # Pre-fill the user's storage right up to the quota.
        FileMetaData.objects.bulk_create(
            [FileMetaData(user=self.vasya, filename='test.txt', content=filecontent)] * settings.FILEBOX_MAX_FILES_PER_USER
        )
        self.testcontent.seek(0)
        response = self.client.post('/upload', { 'content': self.testcontent })
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'filebox/filemetadata_form.html')
        self.assertFormError(response, 'form', None, u'Вы не можете загрузить более {0} файлов'.format(settings.FILEBOX_MAX_FILES_PER_USER))

    def test_duplicate_message(self):
        """Uploading content another user already has shows an info message;
        re-uploading one's own copy adds an own-storage message too."""
        FileMetaData.objects.create_with_content(user=self.petya, filename='test.txt', contentfile=self.testcontent)
        self.testcontent.seek(0)
        # First upload by vasya: duplicate of petya's file.
        response = self.client.post('/upload', { 'content': self.testcontent },
                                    follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertRedirects(response, '/')
        self.assertIn(Message(INFO, u'Этот файл уже есть у пользователя petya'), response.context['messages'])
        self.testcontent.seek(0)
        # Second upload: now also a duplicate within vasya's own storage.
        response = self.client.post('/upload', { 'content': self.testcontent }, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertRedirects(response, '/')
        self.assertIn(Message(INFO, u'Этот файл уже есть у пользователя petya'), response.context['messages'])
        self.assertIn(Message(INFO, u'Этот файл уже есть в вашем хранилище'), response.context['messages'])
# Save the image data to the object target = ssht.get_image_name() logger.debug("Saving %s" % target) try: ssht.image.save(target, file_obj) except Exception, e: logger.error("Image save failed.") ssht.delete() raise e ssht.has_image = True ssht.timestamp = timestamp ssht.save() logger.debug("Screenshot saved for %s" % site.url) # Reopen image as PIL object file_obj.seek(0) image = Image.open(file_obj) # Crop it to 1000px tall, starting from the top crop = image.crop( ( 0, # Unless we provide an offset to scroll down before cropping getattr(ssht.site, "y_offset", 0), ssht.image.width, 1000 ) ) # Prep for db crop_name = ssht.get_crop_name() crop_data = prep_pil_for_db(crop, crop_name) # Save to the database
# NOTE(review): fragment — the lines down to ``return False`` are the
# tail of an ``except`` handler whose ``try`` lies outside this chunk;
# the enclosing function is also not visible.
logger.error("Image save failed.")
logger.error(str(e))
# Record the failure against the update/site before discarding the row.
ScreenshotLog.objects.create(
    update=update,
    site=site,
    message_type="error",
    message="Image save failed: %s" % e
)
ssht.delete()
return False
# Success path resumes here in the original function.
ssht.has_image = True
ssht.timestamp = timestamp
ssht.save()
# Reopen image as PIL object
jpg_obj.seek(0)
image = Image.open(jpg_obj)
# Crop it to 1000px tall, starting from the top.
# PIL crop box is (left, upper, right, lower); the optional per-site
# y_offset scrolls the crop window down before cropping.
crop = image.crop(
    (
        0,
        # Unless we provide an offset to scroll down before cropping
        getattr(ssht.site, "y_offset", 0),
        ssht.image.width,
        1000
    )
)
# Prep for db
crop_name = ssht.get_crop_name()