def setUp(self):
    """Create a user, an order backed by a hash file, and related
    user-order / signature rows for each test.

    The original opened the hash file without ``with`` and juggled
    close()/open() manually, leaking the descriptor if any model
    creation raised; context managers now guarantee cleanup.
    """
    user_1 = User.objects.create(username='******')
    user_1.set_password('password-1')
    user_1.save()
    hash_path = MEDIA_ROOT + '/test_file_1'
    # Write the hash content first, then reopen read-only so the
    # FileField receives a readable handle.
    with open(hash_path, 'w') as raw:
        File(raw).write('Хеш-1')
    with open(hash_path, 'r') as raw:
        file_hash_1 = File(raw)
        # Order creation must happen while the file is still open.
        order_1 = Order.objects.create(title='Название-1',
                                       text='Текст-1',
                                       date=timezone.now(),
                                       author=user_1,
                                       order_hash=file_hash_1,
                                       is_closed=False)
    self.order_1 = order_1
    # NOTE(review): the collapsed original is ambiguous about whether
    # Signature creation was inside this loop; both creates are kept
    # together here -- confirm against revision history.
    for i in range(2):
        UserOrder.objects.create(user=user_1, order=order_1,
                                 is_accepted=False, is_completed=False)
        Signature.objects.create(signer=user_1, order=order_1,
                                 is_correct=False)
def test_code_body(self):
    """
    Tests when codefile was uploaded, read file and set as body.
    """
    category = Category.objects.create(label=u"ソースコード")
    # NOTE(review): the "r+w+t" mode string is unusual but preserved
    # as-is -- it is accepted by the Python 2 runtime this suite
    # targets; confirm before porting.
    f = File(tempfile.NamedTemporaryFile(
        mode="r+w+t", suffix=".py",
        dir=settings.TEST_TEMPORARY_FILE_DIR
    ))
    body = """
import this
print "this is a sample code file"
hoge = 1 + 1
"""
    f.write(body)
    material_file = MaterialFile.objects.create(file=f)
    code = Material.objects.create(
        _file=material_file,
        description="description",
        category=category
    )
    # Removed a leftover debug statement (``print code.body``) that
    # spammed test output and contributed nothing to the assertion.
    f.close()
    eq_(code.body, body)
    code.delete()
def save_text_to_file(filename=None, path=None, content=None):
    """Write ``content`` to ``path``/``filename`` and return nothing.

    Falls back to ``settings.MEDIA_ROOT`` when ``path`` is omitted.

    Raises:
        ValueError: if ``filename`` is not provided.
    """
    if path is None:
        path = settings.MEDIA_ROOT
    if filename is None:
        raise ValueError('Filename required.')
    full_path = os.path.join(path, filename)
    # ``with`` closes the descriptor even if the write fails; the
    # original leaked the raw handle on error and also shadowed the
    # ``file`` builtin.
    with open(full_path, 'w') as handle:
        File(handle).write(content)
def job_save(request):
    """Persist a new mailer job and emit its executable shell script.

    Writes ``$SCRIPTS_DIR/<job name>`` containing the job parameters
    and a ``source sys/mailer.sh`` call, marks it user-executable,
    then redirects to the job list.
    """
    if request.POST:
        form = add_job_form(request.POST)
        # NOTE(review): form.save() is normally called with commit=...,
        # not the POST data -- preserved as-is to avoid changing
        # behaviour. Local renamed from ``Job`` to avoid shadowing the
        # model of the same name.
        job = form.save(request.POST)
        script_path = '%s/%s' % (settings.SCRIPTS_DIR, job.name)
        content = '#!/bin/sh \nRecips=" %s " \nSubj=" %s " \nBody="%s" \nFromDir="%s" \n source sys/mailer.sh \n ' % (job.recips, job.subj, job.body, job.fromdir)
        # ``with`` replaces the redundant f.close()/myfile.close() pair
        # and releases the handle even when the write raises.
        with open(script_path, 'w') as script_file:
            File(script_file).write(content)
        os.chmod(script_path, stat.S_IRWXU)
    # Always return a response; the original returned None for
    # non-POST requests, which Django rejects.
    return redirect('/mailer/viewjob/')
def export_xml_by_source(request, dataset_id):
    """Call export API with this dataset_id, combine paginated responses.

    Returns the generated file name under MEDIA_ROOT, or None when no
    dataset id is given.
    """
    if not dataset_id:
        return
    base_url = request.build_absolute_uri(
        reverse('export:activity-export')
    ) + "?dataset={dataset_id}&format=xml&page_size=100&page={page}".format(  # NOQA: E501
        dataset_id=dataset_id, page="{page}"
    )

    def get_result(xml, page_num):
        # Follow the paginated "link: next" headers recursively,
        # accumulating activity elements into ``xml``.
        print('making request, page: ' + str(page_num))
        rf = RequestFactory()
        req = rf.get(base_url.format(page=page_num))
        view = IATIActivityList.as_view()(req).render()
        xml.extend(etree.fromstring(view.content).getchildren())
        link_header = view.get('link')
        if not link_header:
            return xml
        link = requests.utils.parse_header_links(link_header)
        has_next = reduce(
            lambda acc, x: acc or (x['rel'] == 'next'), link, False)
        if has_next:
            return get_result(xml, page_num + 1)
        return xml

    xml = E('iati-activities', version="2.02")
    final_xml = get_result(xml, 1)
    final_xml.attrib[
        'generated-datetime'
    ] = datetime.datetime.now().isoformat()

    from django.core.files.base import File
    from django.conf import settings
    import uuid
    file_name = "{}.xml".format(uuid.uuid4())
    path = "{}/{}".format(settings.MEDIA_ROOT, file_name)
    # etree.tostring() returns bytes, so the target must be opened in
    # binary mode -- text mode ('w') raises TypeError on Python 3.
    # ``with`` also guarantees the handle is closed on error.
    with open(path, mode='wb') as handle:
        File(handle).write(etree.tostring(final_xml, pretty_print=True))
    return file_name
def export_xml_by_source(request, dataset_id):
    """Call export API with this dataset_id, combine paginated responses.

    Returns the name of the XML file written under MEDIA_ROOT, or None
    when ``dataset_id`` is falsy.
    """
    if not dataset_id:
        return
    base_url = request.build_absolute_uri(
        reverse('export:activity-export')
    ) + "?dataset={dataset_id}&format=xml&page_size=100&page={page}".format(  # NOQA: E501
        dataset_id=dataset_id, page="{page}")

    def get_result(xml, page_num):
        # Fetch one page, append its activities, and recurse while the
        # response advertises a rel="next" link.
        print('making request, page: ' + str(page_num))
        rf = RequestFactory()
        req = rf.get(base_url.format(page=page_num))
        view = IATIActivityList.as_view()(req).render()
        xml.extend(etree.fromstring(view.content).getchildren())
        link_header = view.get('link')
        if not link_header:
            return xml
        link = requests.utils.parse_header_links(link_header)
        has_next = reduce(lambda acc, x: acc or (x['rel'] == 'next'),
                          link, False)
        if has_next:
            return get_result(xml, page_num + 1)
        else:
            return xml

    xml = E('iati-activities', version="2.02")
    final_xml = get_result(xml, 1)
    final_xml.attrib['generated-datetime'] = datetime.datetime.now(
    ).isoformat()

    from django.core.files.base import File
    from django.conf import settings
    import uuid
    file_name = "{}.xml".format(uuid.uuid4())
    path = "{}/{}".format(settings.MEDIA_ROOT, file_name)
    # Binary mode is required: etree.tostring() yields bytes and a
    # text-mode handle rejects them on Python 3. The context manager
    # additionally closes the file even if serialisation fails.
    with open(path, mode='wb') as out:
        File(out).write(etree.tostring(final_xml, pretty_print=True))
    return file_name
def job_save_e(request, job_id):
    """Update an existing mailer job and regenerate its shell script.

    Deletes the previous script, saves the edited form, rewrites the
    executable wrapper under ``settings.SCRIPTS_DIR``, and redirects
    to the job list.
    """
    if request.POST:
        a = Job.objects.get(id=job_id)
        # The original removed from a hard-coded '/scripts/...' path
        # while the script is written to settings.SCRIPTS_DIR below
        # (see job_save); unless SCRIPTS_DIR == '/scripts' that remove
        # would fail. Use the setting for both.
        os.remove('%s/%s' % (settings.SCRIPTS_DIR, a.name))
        form = add_job_form(request.POST, instance=a)
        # NOTE(review): form.save() normally takes commit=..., not the
        # POST data -- preserved as-is.
        job = form.save(request.POST)
        script_path = '%s/%s' % (settings.SCRIPTS_DIR, a.name)
        content = '#!/bin/sh \nRecips=" %s " \nSubj=" %s " \nBody="%s" \nFromDir="%s" \n source sys/mailer.sh \n ' % (a.recips, a.subj, a.body, a.fromdir)
        # ``with`` replaces the redundant f.close()/myfile.close() pair
        # and closes the handle even when the write raises.
        with open(script_path, 'w') as script_file:
            File(script_file).write(content)
        os.chmod(script_path, stat.S_IRWXU)
    # Always return a response; the original returned None on GET.
    return redirect('/mailer/viewjob/')
def test_auto_cast_material_type_other(self):
    """
    Tests cast material model to suitable type when other file
    was uploaded.
    """
    from apps.materials.models import Material
    # NOTE(review): mode="r+w+t" is an unusual mode string, preserved
    # as-is for the Python 2 runtime this suite targets.
    test_file = File(tempfile.NamedTemporaryFile(
        mode="r+w+t", suffix=".hoge",
        dir=settings.TEST_TEMPORARY_FILE_DIR
    ))
    try:
        test_file.write("hello!hello!")
        material_file = MaterialFile.objects.create(file=test_file)
        material = Material.objects.create(
            _file=material_file,
            description="description",
            category=Category.objects.get(pk=1)
        )
        ok_(isinstance(material, Material))
    finally:
        # Close even when the assertion fails; the original leaked the
        # temp-file handle on failure.
        test_file.close()
def setUpTestData(cls):
    """Create a user with profile, an order backed by a hash file, a
    user-order link, and a comment shared by all tests in the class.

    The original opened the hash file without ``with``, leaking the
    descriptor if any model creation raised; context managers now
    guarantee cleanup.
    """
    user = User.objects.create(username='******', first_name='Андрей',
                               last_name='Петров')
    user.set_password('password')
    user.save()
    user_profile = UserProfile.objects.create(user=user,
                                              middle_name='Сергеевич')
    user_profile.save()
    hash_path = MEDIA_ROOT + '/test_file_1'
    # Write the hash content first, then reopen read-only so the
    # FileField receives a readable handle.
    with open(hash_path, 'w') as raw:
        File(raw).write('Хеш')
    with open(hash_path, 'r') as raw:
        file_hash = File(raw)
        # Order creation must happen while the file is still open.
        order = Order.objects.create(title='Название', text='Текст',
                                     date=timezone.now(), author=user,
                                     order_hash=file_hash,
                                     is_closed=False)
    UserOrder.objects.create(user=user, order=order,
                             is_accepted=False, is_completed=False)
    Comment.objects.create(
        user=user, order=order, date=timezone.now(),
        text='Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod'
    )
def submissionHook(cls, uploaded_file):
    """Expect a tarball containing a Makefile

    Compile the submission and return the built bot as a ``File``.

    Raises:
        errors.BuildError: on an invalid archive, a failing ``make``,
            or a missing build product.
    """
    old_mask = os.umask(0)
    try:
        try:
            f = zipfile.ZipFile(uploaded_file)
        except zipfile.BadZipfile:
            raise errors.BuildError("Invalid zip file")
        try:
            # SECURITY(review): extracting an untrusted archive is
            # vulnerable to path traversal ("zip slip"); member names
            # should be validated before extraction.
            # Handle older versions of zipfile that lack 'extractall'.
            if hasattr(f, "extractall"):
                f.extractall(path=cls.BUILDNEST)
            else:
                for name in f.namelist():
                    path = os.path.join(cls.BUILDNEST, name)
                    with open(path, "wb") as member_out:
                        member_out.write(f.read(name))
        finally:
            # The original never closed the ZipFile handle.
            f.close()

        # make in the buildnest
        p = subprocess.Popen(["make"], stderr=subprocess.PIPE,
                             cwd=cls.BUILDNEST)
        output = p.communicate()[1]
        ret = p.wait()
        if ret != 0:
            raise errors.BuildError(output)

        output = os.path.join(cls.BUILDNEST, cls.BOT_OUTPUT)
        if not os.path.exists(output):
            raise errors.BuildError("No %s found after build" % (cls.BOT_OUTPUT))

        def read_in_chunks(file_object, chunk_size=1024):
            """Lazy function (generator) to read a file piece by piece.
            Default chunk size: 1k."""
            while True:
                data = file_object.read(chunk_size)
                if not data:
                    break
                yield data

        # Save the output
        botSo = open(output, "rb")  # TODO: Check compiled .so?
        try:
            uploaded_file = File(tempfile.NamedTemporaryFile())
            for piece in read_in_chunks(botSo):
                uploaded_file.write(piece)
            uploaded_file.size = os.stat(
                os.path.join(cls.BUILDNEST, cls.BOT_OUTPUT)).st_size
        finally:
            botSo.close()
    finally:
        # Always restore the process umask; the original skipped the
        # restore on the BadZipfile and missing-output error paths.
        os.umask(old_mask)

    # Clean up
    obliterate(cls.BUILDNEST)
    return uploaded_file
def _create_file(filename):
    """Create *filename* in the system temp dir, wrapped in ``File``.

    The file's own full path is written as its content; the returned
    wrapper stays open, positioned after the write.
    """
    temp_path = os.path.join(tempfile.gettempdir(), filename)
    handle = open(temp_path, 'w+')
    wrapped = File(handle)
    wrapped.write(temp_path)
    return wrapped
def _create_content_file(raw_data, filename):
    """Write ``raw_data`` to *filename* under MEDIA_ROOT.

    Returns the full path of the file that was written.
    """
    target = os.path.join(settings.MEDIA_ROOT, filename)
    with open(target, 'w') as stream:
        File(stream).write(raw_data)
    return target
def _create_file(filename):
    """Open a fresh file in the temp directory and return it as a ``File``.

    The full temp path doubles as the file's content; the handle is
    left open for the caller.
    """
    full_name = os.path.join(tempfile.gettempdir(), filename)
    result = File(open(full_name, "w+"))
    result.write(full_name)
    return result
def make_dummy_file(name='something.txt', content=None):
    """Build an in-memory ``File`` called *name*, optionally pre-filled.

    When *content* is given it is written into the underlying buffer
    and the stream is rewound so callers can read it from the start.
    """
    buffer_ = io.BytesIO()
    wrapper = File(buffer_, name=name)
    if content is not None:
        wrapper.write(content)
        wrapper.seek(0)
    return wrapper