def cache_pypi_package(request, package_name, version):
    """Fetch *package_name* (optionally pinned to *version*) from PyPI and
    register/upload it locally via ``register_or_upload``.

    Returns True on success, False on any failure (package not found, no
    downloadable distribution, network/parse errors).  Best-effort by
    design: callers only see a boolean, so errors are swallowed rather
    than raised.
    """
    def _json_url(name):
        # PyPI JSON API endpoint, with or without an explicit version pin.
        if version:
            return 'http://pypi.python.org/pypi/%s/%s/json' % (name, version)
        return 'http://pypi.python.org/pypi/%s/json' % (name,)

    try:
        req = requests.get(_json_url(package_name))
        if req.status_code != 200:
            if req.status_code != 404:
                return False
            # 404: retry with dashes replaced by underscores, a common
            # PyPI naming mismatch (e.g. "my-pkg" published as "my_pkg").
            package_name = package_name.replace('-', '_')
            req = requests.get(_json_url(package_name))
            if req.status_code != 200:
                return False

        pjson = req.json()
        data = pjson['info']

        # Prefer the source distribution; fall back to 'download_url'.
        sdist = None
        for pf in pjson['urls']:
            if pf['packagetype'] == 'sdist':
                sdist = pf
                data['md5_digest'] = sdist['md5_digest']
                packageurl = sdist['url']
                break
        if not sdist:
            if 'download_url' in data:
                packageurl = data['download_url']
            else:
                return False

        data['metadata_version'] = '1.0'
        data = QueryDict(urllib.urlencode(data), mutable=True)

        # Download the package and wrap it in an uploaded-file object so it
        # can be handed to the regular upload view.
        filename = urlsplit(packageurl).path.split('/')[-1]
        package_content = requests.get(packageurl).content
        tempfilehandler = TemporaryFileUploadHandler()
        tempfilehandler.new_file('content', filename, 'who/cares',
                                 len(package_content))
        tempfilehandler.receive_data_chunk(package_content, 0)
        tempfile = tempfilehandler.file_complete(len(package_content))

        class FakeRequest:
            # Minimal stand-in for an HttpRequest carrying the upload.
            POST = data
            FILES = {'content': tempfile}
            user = User.objects.get(id=1)  # NOTE(review): hard-coded user id 1 — presumably an admin; confirm
            META = request.META
            method = 'POST'

        register_or_upload(FakeRequest())
        return True
    except Exception:
        # Best-effort: any failure (network, JSON shape, DB) reports False.
        # (Fixed Python-2-only `except Exception, e` syntax; `e` was unused.)
        return False
def put(self, request, *args, **kwargs):
    """Handle a Git LFS object upload via HTTP PUT.

    Streams the request body into a temporary upload file in CHUNK_SIZE
    pieces while computing its SHA-256, verifies the digest against the
    ``oid`` URL kwarg, then persists an LfsObject linked to the current
    repository.  Returns 400 on digest mismatch or form validation
    failure, 200 on success.
    """
    self.ensure_write_allowed()

    upload = TemporaryFileUploadHandler(self.request)
    # -1: total content length is unknown up front (streamed upload).
    upload.new_file('lfs_upload.bin', 'lfs_upload.bin',
                    'application/octet-stream', -1)

    digest = hashlib.sha256()  # renamed from `hash` to avoid shadowing the builtin
    size = 0
    while True:
        chunk = request.read(self.CHUNK_SIZE)
        if not chunk:
            break  # EOF: do not feed the empty sentinel chunk to the handler
        upload.receive_data_chunk(chunk, size)
        digest.update(chunk)
        size += len(chunk)
    upload.file_complete(size)

    oid = self.kwargs.get('oid', '')
    if digest.hexdigest() != oid:
        return self.json_response(
            {'message': 'OID of request does not match file contents'},
            status=400)

    upload.file.name = '%s.bin' % oid
    form = LfsObjectForm(data={
        'oid': oid,
        'size': size,
    }, files={
        'file': upload.file,
    })
    if not form.is_valid():
        return self.json_response(
            {'message': 'Field Errors for: %s' % ', '.join(form.errors)},
            status=400)

    lfsobject = form.save(commit=False)
    lfsobject.uploader = self.access.user
    lfsobject.save()
    lfsobject.repositories.add(self.access.repository)
    return HttpResponse()  # Just return Status 200
def put(self, request, *args, **kwargs):
    """Receive a Git LFS object via HTTP PUT.

    The body is spooled chunk-by-chunk into a temporary upload file while
    a running SHA-256 is maintained; the hex digest must equal the ``oid``
    URL kwarg.  On success the object is stored, attributed to the
    current user and attached to the repository.
    """
    self.ensure_write_allowed()

    spool = TemporaryFileUploadHandler(self.request)
    spool.new_file('lfs_upload.bin', 'lfs_upload.bin',
                   'application/octet-stream', -1)

    sha = hashlib.sha256()
    total = 0
    while True:
        block = request.read(self.CHUNK_SIZE)
        spool.receive_data_chunk(block, total)
        sha.update(block)
        total += len(block)
        if not block:
            break
    spool.file_complete(total)

    oid = self.kwargs.get('oid', '')
    if sha.hexdigest() != oid:
        payload = {'message': 'OID of request does not match file contents'}
        return self.json_response(payload, status=400)

    spool.file.name = '%s.bin' % oid
    form = LfsObjectForm(
        data={'oid': oid, 'size': total},
        files={'file': spool.file},
    )
    if not form.is_valid():
        payload = {'message': 'Field Errors for: %s' % ', '.join(form.errors)}
        return self.json_response(payload, status=400)

    stored = form.save(commit=False)
    stored.uploader = self.access.user
    stored.save()
    stored.repositories.add(self.access.repository)
    return HttpResponse()  # empty body, plain status 200
def setUp(self):
    """Create the users, addresses, session, standard requirements, fee
    data and document-upload fixtures shared by the tests in this case.
    """
    # --- Users -------------------------------------------------------------
    self.superAdminUN = '*****@*****.**'
    self.adminUN = '*****@*****.**'
    self.nonAdminUN = '*****@*****.**'

    self.superadminUser = EmailUser.objects.create(
        email=self.superAdminUN, password="******",
        is_staff=True, is_superuser=True)
    self.superadminUser.set_password('pass')
    self.superadminUser.save()

    self.adminUser = EmailUser.objects.create(
        email=self.adminUN, password="******",
        is_staff=True, is_superuser=False)
    self.adminUser.set_password('pass')
    self.adminUser.save()

    self.customer = EmailUser.objects.create(
        email=self.nonAdminUN, password="******",
        is_staff=False, is_superuser=False)
    self.customer.set_password('pass')
    self.customer.save()

    # customer UserAddress (optional address fields left at defaults)
    user_address = UserAddress.objects.create(
        country_id='AU',
        line1='17 Dick Perry',
        postcode='6151',
        state='WA',
        user_id=self.customer.id)
    customer_address = Address.objects.create(
        user=self.customer, oscar_address=user_address)
    self.customer.residential_address = customer_address
    self.customer.save()

    self.externalUser1 = '*****@*****.**'
    self.customer1 = EmailUser.objects.create(
        email=self.externalUser1, password="******",
        is_staff=False, is_superuser=False)
    self.customer1.set_password('pass')
    self.customer1.save()
    # customer1 UserAddress
    user1_address = UserAddress.objects.create(
        country_id='AU', line1='17 Dick Perry', postcode='6151',
        state='WA', user_id=self.customer1.id)
    customer1_address = Address.objects.create(
        user=self.customer1, oscar_address=user1_address)
    self.customer1.residential_address = customer1_address
    self.customer1.save()

    self.externalUser2 = '*****@*****.**'
    self.customer2 = EmailUser.objects.create(
        email=self.externalUser2, password="******",
        is_staff=False, is_superuser=False)
    self.customer2.set_password('pass')
    self.customer2.save()
    # customer2 UserAddress
    user2_address = UserAddress.objects.create(
        country_id='AU', line1='17 Dick Perry', postcode='6151',
        state='WA', user_id=self.customer2.id)
    customer2_address = Address.objects.create(
        user=self.customer2, oscar_address=user2_address)
    self.customer2.residential_address = customer2_address
    self.customer2.save()

    # --- Session: file-backed so the test client can carry the cookie ------
    settings.SESSION_ENGINE = 'django.contrib.sessions.backends.file'
    engine = import_module(settings.SESSION_ENGINE)
    store = engine.SessionStore()
    store.save()
    self.session = store
    self.client.cookies[settings.SESSION_COOKIE_NAME] = store.session_key

    # --- ProposalStandardRequirements --------------------------------------
    ProposalStandardRequirement.objects.create(
        code='R1', text='Standard requirement 1')
    ProposalStandardRequirement.objects.create(
        code='R2', text='Standard requirement 2')
    ProposalStandardRequirement.objects.create(
        code='R3', text='Standard Apiary requirement 1')
    ProposalStandardRequirement.objects.create(
        code='R4', text='Standard Apiary requirement 2')

    # (Removed large blocks of commented-out proposal/site-transfer fixture
    # data and JSON-feature loading that were dead code.)

    # --- Dates --------------------------------------------------------------
    self.today = timezone.now().date()
    self.today_str = self.today.strftime('%d/%m/%Y')
    day_delta = timedelta(days=1)
    week_delta = timedelta(weeks=1)
    self.today_plus_1_day = self.today + day_delta
    # BUGFIX: these previously added *days* (1 and 26) despite their names;
    # use week_delta so the attributes match what their names claim.
    self.today_plus_1_week = self.today + week_delta
    self.today_plus_26_weeks = self.today + (week_delta * 26)
    self.today_plus_1_day_str = self.today_plus_1_day.strftime('%d/%m/%Y')
    self.today_plus_1_week_str = self.today_plus_1_week.strftime('%d/%m/%Y')
    self.today_plus_26_weeks_str = self.today_plus_26_weeks.strftime('%d/%m/%Y')
    self.today_minus_1_week = self.today - week_delta
    self.today_minus_4_weeks = self.today - (week_delta * 4)

    # --- Payment admin data -------------------------------------------------
    fee_season = FeeSeason.objects.create(name="2021/22")
    FeePeriod.objects.create(fee_season=fee_season, name="Period1",
                             start_date=self.today_minus_1_week)
    FeePeriod.objects.create(fee_season=fee_season, name="Period2",
                             start_date=self.today_minus_4_weeks)
    for application_type in ApplicationType.objects.all():
        for vessel_size_category_group in VesselSizeCategoryGroup.objects.all():
            FeeConstructor.objects.create(
                application_type=application_type,
                fee_season=fee_season,
                vessel_size_category_group=vessel_size_category_group)
    # Give every FeeItem a distinct, predictable amount (1, 2, 3, ...).
    # NOTE(review): source formatting was ambiguous about whether this ran
    # inside the loops above; the final DB state is the same either way.
    amount = 1
    for fee_item in FeeItem.objects.all():
        fee_item.amount = amount
        fee_item.save()
        amount += 1

    # --- Test document fixture ----------------------------------------------
    path = os.path.join(settings.BASE_DIR, 'mooringlicensing', 'tests',
                        'test_doc.png')
    with io.open(path, 'rb') as f:
        test_doc_bytes = f.read()
    test_doc_obj = TemporaryFileUploadHandler()
    test_doc_obj.new_file(
        file_name='17. External - Waiting List Amendment Application.png',
        field_name='_file',
        content_type='image/png',
        content_length=os.path.getsize(path))
    test_doc_obj.receive_data_chunk(raw_data=test_doc_bytes, start=0)
    test_doc = test_doc_obj.file_complete(file_size=os.path.getsize(path))

    self.rego_papers_data = {
        'action': ['save'],
        'input_name': ['vessel-registration-documents'],
        'filename': [
            '25. External - New Authorised User Application - Applicant.png'
        ],
        'csrfmiddlewaretoken': [settings.CSRF_MIDDLEWARE_TOKEN],
        '_file': [test_doc],
    }
    self.electoral_roll_doc_data = {
        'action': ['save'],
        'input_name': ['electoral-roll-documents'],
        'filename': [
            '25. External - New Authorised User Application - Applicant.png'
        ],
        'csrfmiddlewaretoken': [settings.CSRF_MIDDLEWARE_TOKEN],
        '_file': [test_doc],
    }

    # --- Mooring Bays --------------------------------------------------------
    #retrieve_marine_parks()
    MooringBay.objects.create(name='Bay1', mooring_bookings_id=1)
    MooringBay.objects.create(name='Bay2', mooring_bookings_id=2)

    # Global settings
    #ApiaryGlobalSettings.objects.create(key='oracle_code_apiary_site_annual_rental_fee', value='sample')

    # Load remaining default/reference data (constructor does the work).
    DefaultDataManager()
def upload(request):
    """Accept a file upload, either from ``request.FILES`` (multipart form)
    or by fetching a remote URL given in ``POST['url']``.

    Returns a JSON ``HttpResponse`` ``{'path': <file url>}`` on success,
    or an ``HttpResponseBadRequest`` with ``{'errors': ...}`` on failure.
    """
    url = request.POST.get("url", None)
    if not url:
        # Regular multipart upload.
        form = UploadedFileForm(data=request.POST, files=request.FILES)
        if not form.is_valid():
            return HttpResponseBadRequest(json.dumps({'errors': form.errors}))
        return _dedupe_and_respond(form.save())

    # Remote fetch: stream the URL's content through an upload handler.
    try:
        # We open the url of the distant file
        distant_file = urllib2.urlopen(url)

        # A missing Content-Length header defaults to "too large for
        # memory", forcing the temporary-file handler below.
        content_length = int(distant_file.headers.getheader(
            'content-length', settings.FILE_UPLOAD_MAX_MEMORY_SIZE + 1))
        max_upload_size = getattr(settings, 'AJAX_UPLOAD_MAX_FILESIZE',
                                  upload_settings.DEFAULT_MAX_FILESIZE)
        if 0 < max_upload_size < content_length:
            return HttpResponseBadRequest(
                json.dumps({'errors': "File too big"}))

        # Spool big payloads to disk; keep small ones in memory.
        if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
            handler = TemporaryFileUploadHandler()
        else:
            handler = MemoryFileUploadHandler()
        # Attribute activated needed because of the class implementation
        handler.activated = True

        # try/except needed because of the class implementation
        try:
            handler.new_file("url",
                             url.split('/')[-1].split('?')[0],
                             distant_file.headers.getheader('content-type'),
                             content_length)
        except StopFutureHandlers:
            pass

        # Stream the download into the handler chunk by chunk.
        size = 0
        while True:
            data = distant_file.read(handler.chunk_size)
            if not data:
                break
            handler.receive_data_chunk(data, None)
            size += len(data)

        # We end the handler and save the file to the model
        uploaded_file = UploadedFile()
        uploaded_file.file.save(handler.file_name,
                                handler.file_complete(size))
        uploaded_file.save()
        return _dedupe_and_respond(uploaded_file)
    except Exception:
        # Typo fix in user-facing message: "occured" -> "occurred".
        return HttpResponseBadRequest(
            json.dumps({'errors': "An error occurred"}))


def _dedupe_and_respond(uploaded_file):
    """Delete *uploaded_file* if an identical file already existed, then
    return the JSON success response carrying the stored file's URL.

    (Shared by both the multipart and the remote-fetch branches of
    ``upload``; previously duplicated inline.)
    """
    file_url = uploaded_file.file.url
    try:
        UploadedFile.objects.get(file=uploaded_file.file)
    except UploadedFile.MultipleObjectsReturned:
        # A copy already existed before this upload; drop the new duplicate
        # and keep returning the (shared) URL.
        uploaded_file.delete()
    return HttpResponse(json.dumps({'path': file_url}))