def upload_v1(conn, fileitem):
    """Import a tab-separated sales file into a new Upload record.

    Each row of the TSV creates a Merchant and a Sale linked to the Upload;
    the populated Upload is saved and returned.

    :param conn: database connection handed to the repository layer
    :param fileitem: uploaded form field; ``fileitem.file`` is the TSV stream
    :return: the persisted Upload with its sales attached
    """
    import csv
    import repositories
    from models import Merchant, Sale, Upload

    # Hoisted: the original constructed a new repository object on every
    # loop iteration (and UploadRepository three times overall). The
    # repositories only wrap ``conn``, so one instance each suffices.
    upload_repo = repositories.UploadRepository(conn)
    merchant_repo = repositories.MerchantRepository(conn)
    sale_repo = repositories.SaleRepository(conn)

    upload = upload_repo.create(Upload())
    for row in csv.DictReader(fileitem.file, delimiter="\t"):
        # .decode('utf-8'): rows arrive as bytes under Python 2's csv module.
        merchant = merchant_repo.create(Merchant(
            name=row['merchant name'].decode('utf-8'),
            address=row['merchant address'].decode('utf-8'),
        ))
        sale = sale_repo.create(Sale(
            upload=upload,
            merchant=merchant,
            purchaser_name=row['purchaser name'].decode('utf-8'),
            description=row['item description'].decode('utf-8'),
            unit_price=row['item price'],
            count=row['purchase count'],
        ))
        upload.add_sale(sale)
    upload_repo.save(upload)
    return upload
def upload_v1(conn, fileitem):
    """Parse a tab-separated sales file and persist it as a new Upload.

    Creates a Merchant and Sale per row, attaches each Sale to the Upload,
    then saves and returns the Upload.
    """
    import csv
    import repositories
    from models import Merchant, Sale, Upload

    new_upload = Upload()
    new_upload = repositories.UploadRepository(conn).create(new_upload)

    reader = csv.DictReader(fileitem.file, delimiter="\t")
    for record in reader:
        # Row values are bytes (Python 2 csv); decode before storing.
        new_merchant = Merchant(
            name=record['merchant name'].decode('utf-8'),
            address=record['merchant address'].decode('utf-8'),
        )
        new_merchant = repositories.MerchantRepository(conn).create(new_merchant)
        new_sale = Sale(
            upload=new_upload,
            merchant=new_merchant,
            purchaser_name=record['purchaser name'].decode('utf-8'),
            description=record['item description'].decode('utf-8'),
            unit_price=record['item price'],
            count=record['purchase count'],
        )
        new_sale = repositories.SaleRepository(conn).create(new_sale)
        new_upload.add_sale(new_sale)

    repositories.UploadRepository(conn).save(new_upload)
    return new_upload
def file_upload(request, template_name='oms_config/file_upload_form.html'):
    """Render the upload form; on a valid POST, store the file as an Upload.

    :param request: Django request; ``session['username']`` must be set
    :param template_name: template used to render the form
    :return: redirect to 'upload_list' on success, otherwise the form page
    """
    var4 = 'active'
    # BUG fix: the original read "s = .objects.all()" — the model name was
    # missing, which is a syntax error. The commented-out line below points
    # at the intended queryset.
    # NOTE(review): confirm Repository is the right model for the 's' choices.
    s = Repository.objects.all()
    # repository = Repository.objects.all()
    username = request.session['username']
    if request.method == 'POST':
        form = UploadForm(request.POST, request.FILES)
        if form.is_valid():
            new_file = Upload(
                doc_file=request.FILES['doc_file'],
                s_id=request.POST['s'],
                title=request.POST['title'],
            )
            new_file.save()
            # handle_uploaded_file(request.FILES['file'])
            return redirect('upload_list')
    else:
        form = UploadForm()
    return render(request, template_name, {
        'form': form,
        'var4': var4,
        'username': username,
        'highlight3': 'active',
        's': s,
    })
def upload_file(request):
    """Store a posted spreadsheet and import it via excel_table_byindex.

    Python 2 view: the trailing path segment of the URL is treated as the
    target ``pid``. Builds a ``resp`` dict describing success/failure.
    """
    resp = {}
    url = str(request.get_full_path())
    pid = url.split("/")[-1]
    try:
        if request.method == 'POST':
            form = UploadForm(request.POST, request.FILES)
            if form.is_valid():
                # Pull the validated file out of the form.
                xlsfile = form.cleaned_data['Filedata']
                filename = xlsfile.name
                # Write the upload record to the database.
                uf = Upload(Filedata=xlsfile, uptime=datetime.datetime.now())
                uf.save()
                filepath = uf.Filedata
                uipath = unicode(str(filepath), "utf8")
                uipath = os.path.join(settings.MEDIA_ROOT, uipath)
                excel_table_byindex(request, file=uipath, pid=pid)
                resp['success'] = True
        else:
            form = UploadForm()
    except Exception:
        # Fixed: "except Exception, e" is Python-2-only syntax and the bound
        # name was never used (the message comes from sys.exc_info()).
        info = "%s" % (sys.exc_info()[1])
        resp['success'] = False
        resp['message'] = info
    # NOTE(review): resp is built but never returned, so this view returns
    # None. Presumably it should return an HTTP/JSON response built from
    # resp — confirm against the caller before changing the interface.
def upload():
    """Save a posted photo and bounce to the gallery; otherwise show the form."""
    has_photo_post = request.method == 'POST' and 'photo' in request.files
    if not has_photo_post:
        return render_template('upload.html')
    saved_name = uploads.save(request.files['photo'])
    record = Upload(image_url=saved_name)
    record.save()
    flash("Photo saved.")
    return redirect(url_for('galeria'))
def SaveDoc(uploaded_file_url, dateup):
    """Persist an Upload row for a stored document.

    :param uploaded_file_url: value stored in the ``pic`` field
    :param dateup: value stored in the ``upload_date`` field
    :return: True on success, False on any failure
    """
    try:
        sv = Upload(
            pic=uploaded_file_url,
            upload_date=dateup,
        )
        sv.save()
        return True
    except Exception:
        # Fixed: was a bare "except:", which also swallows SystemExit and
        # KeyboardInterrupt; catch Exception instead, preserving the
        # best-effort False-on-failure contract.
        return False
def upload(request):
    """Handle a file upload and respond with the stored name and size in KB."""
    if request.method == 'POST':
        upload_form = FileUploadForm(request.POST, request.FILES)
        if upload_form.is_valid():
            record = Upload()
            record.filename = upload_form.cleaned_data['file']
            record.save()
            short_name = record.filename.name.split('/')[-1]
            size_kb = int(record.filename.size) / 1024
            # Response text reports "file {name} uploaded, size {n}KB"
            # (kept verbatim — it is user-facing output).
            return HttpResponse(u'文件 {0} 上传成功, 大小为{1}KB'.format(short_name, size_kb))
def test_with_different_counts(self):
    """Method should return the correct value even with diff population sizes"""
    upload = Upload()
    testField = AggregateField(*['test'])
    count = 100
    # Random chunk values paired with random per-chunk population counts.
    randomVals = [random.choice([i for i in range(count)]) for i in range(count)]
    mockedReturnVals = [
        ChunkWithMean(v, random.choice([i for i in range(count)]))
        for v in randomVals
    ]
    # Weighted mean computed independently of the code under test.
    totalWeight = sum(c.value * c.count for c in mockedReturnVals)
    totalCount = sum(c.count for c in mockedReturnVals)
    expectedMean = totalWeight / totalCount
    with patch('calls.aggregate.functions.aggregate_mean.chunkify_big_json'
               ) as chunkFnMock:
        chunkFnMock.return_value = mockedReturnVals
        result = get_aggregate_mean(upload, testField)
        self.assertEqual(result, expectedMean)
def upload():
    """Store an uploaded file for the session user and record it in the DB.

    GET (or a rejected POST) returns a bare HTML form listing the user's
    existing files; a successful POST redirects back here.
    """
    upload_folder = '/Users/developeraccount/Desktop/Roadbeam/roadbeam/src/static/accounts/%s' % session[
        'username']
    if request.method == 'POST':
        posted = request.files['file']
        if posted and allowed_file(posted.filename.lower()):
            safe_name = secure_filename(posted.filename)
            record = Upload(
                safe_name,
                'accounts/%s/%s' % (session['username'], safe_name),
                session['username'],
                request.form['title'],
                request.form['description'])
            db.session.add(record)
            db.session.commit()
            posted.save(os.path.join(upload_folder, safe_name))
            return redirect(url_for('upload'))
    return """
    <!doctype html>
    <title>Upload new File</title>
    <h1>Upload new File</h1>
    <form action="" method=post enctype=multipart/form-data>
      <p><input type=file name=file>
         <input type="text" name="title" placeholder="title">
         <input type="text" name="description" placeholder="description">
         <input type=submit value=Upload>
    </form>
    <p>%s</p>
    """ % "<br>".join(os.listdir(upload_folder))
def test_with_valid_file(self):
    """An uploadable file should be written out to disk"""
    testPath = '/test/path'
    validUpload = Upload(id=1, ts_uploaded=datetime.now())
    uploadableMockfileStorage = Mock(filename='test.zip')
    uploadFile = UploadFile(validUpload, uploadableMockfileStorage)
    writeBinaryMode = 'wb'
    # Replace the path manager so the uploader resolves to a stub path
    # instead of touching the real filesystem layout.
    with patch('calls.uploader.UploadPathManager') as pathManagerMock:
        pathStub = Mock(
            return_value=f'{testPath}/{uploadableMockfileStorage.filename}'
        )
        pathManagerMock.return_value.get_abs_path.return_value = pathStub
        # Patch builtins.open as seen by the uploader module so no file is
        # actually created; capture the context-manager entry mock to verify
        # the save target.
        with patch('calls.uploader.open') as openMock:
            enterMethodMock = Mock()
            openMock.return_value.__enter__ = enterMethodMock
            uploader = Uploader(testPath)
            uploader.upload(uploadFile)
            # Parent directory must be created (idempotently, recursively).
            pathStub.parent.mkdir.assert_called_with(exist_ok=True,
                                                     parents=True)
            # File must be opened at the resolved path in binary-write mode.
            openMock.assert_called_with(pathStub, writeBinaryMode)
            # The werkzeug-style storage saves into the opened handle.
            uploadFile.fileStorage.save.assert_called_with(
                enterMethodMock.return_value)
            # The upload record is annotated with where it was written.
            self.assertEqual(uploadFile.upload.filepath, str(pathStub))
            self.assertEqual(uploadFile.upload.source_filename,
                             uploadableMockfileStorage.filename)
def new_upload(file, file_hash_bin):
    """Store an uploaded file under its hash, thumbnail images, and record it.

    :param file: werkzeug-style file object (has .stream, .save, .mimetype,
                 .filename)
    :param file_hash_bin: binary digest of the file contents, used both as
                          the on-disk name (hex) and the DB key
    :return: the committed Upload row
    """
    file_hash_str = str(binascii.hexlify(file_hash_bin).decode('utf8'))
    abs_file = os.path.join(app.config['UPLOAD_FOLDER'], file_hash_str)
    if not os.path.exists(app.config['UPLOAD_FOLDER']):
        os.makedirs(app.config['UPLOAD_FOLDER'])
    # Rewind before saving: the stream may already have been read for hashing.
    file.stream.seek(0)
    file.save(abs_file)
    # Thumbnail generation for image uploads.
    # (Removed the unused "ratio = img.width / img.height" local — it was
    # computed and never read.)
    if 'image' in file.mimetype:
        with Image(filename=abs_file) as img:
            img.format = 'jpeg'
            img.transform(resize='125x125')
            img.save(filename='public/assets/thumbnails/' + file_hash_str +
                     '.thumb.jpg')
    # Generate a short id and append the original extension (if any).
    short_id = get_new_short_url()
    extension = get_extension(file.filename)
    if extension:
        full_id = short_id + '.' + extension
    else:
        full_id = short_id
    # Add upload in DB.
    upload = Upload(file_hash_bin, full_id, file.mimetype)
    db_session.add(upload)
    db_session.commit()
    return upload
def test_with_no_extension(self):
    """A file name with no extension should return an empty string"""
    target = UploadFile(Upload(), Mock(filename='test_file'))
    self.assertEqual(target.extension, '')
def test_without_ts(self):
    """Attempting to get a path without a timestamp should raise an error"""
    manager = UploadPathManager(TEST_BASE_PATH)
    tsLessUpload = Upload(id='test', ts_uploaded=None)
    target = UploadFile(tsLessUpload, Mock())
    with self.assertRaises(ValueError):
        manager.get_abs_path(target)
def test_without_id(self):
    """Attempting to get a path without an ID should raise an error"""
    manager = UploadPathManager(TEST_BASE_PATH)
    idLessUpload = Upload()
    target = UploadFile(idLessUpload, Mock())
    with self.assertRaises(ValueError):
        manager.get_abs_path(target)
def test_with_a_normal_name(self):
    """Given a 'normal' filename, the extension should be correct"""
    wantedExt = 'pdf'
    target = UploadFile(Upload(), Mock(filename=f'test_file.{wantedExt}'))
    self.assertEqual(target.extension, wantedExt)
def test_with_multiple_periods(self):
    """Regardless of the number of periods, the extension should be correct
    """
    wantedExt = 'pdf'
    oddName = f'test.file.with.a.weird.name.{wantedExt}'
    target = UploadFile(Upload(), Mock(filename=oddName))
    self.assertEqual(target.extension, wantedExt)
def test_with_unknown_operaton(self):
    """An unknown operation should raise an UnknownOperation error"""
    aggregator = UploadFieldAggregator([Upload() for _ in range(5)])
    # 'stdev' is deliberately not implemented by the aggregator.
    with self.assertRaises(UnknownOperation):
        aggregator.get_method_for_operation('stdev')
def test_with_known_operation(self):
    """An implemented operation should return the associated method"""
    aggregator = UploadFieldAggregator([Upload() for _ in range(5)])
    resolved = aggregator.get_method_for_operation('min')
    self.assertEqual(resolved, aggregator.get_min)
def uploadimg(x_s=0, y_s=0):
    """Accept a hero-image upload, store original + resized copies, and
    register the photo; returns JSON on POST, the upload page on GET."""
    if request.method == 'POST':
        userid = session.get('userid')
        x_s = request.form['x_s']
        y_s = request.form['y_s']
        rannew = request.form['rannew']
        hero = request.form['hero']
        photo = request.files['heroupload']
        alt = ''
        if photo:
            img_x_y = Upload.img_x_y(photo)
            # '_o_' variant: original bounded at 800; '_s_' variant: sized
            # by the form-supplied x_s/y_s.
            href = Upload.upload_image(photo, '_o_', 0, 800)
            src = Upload.upload_image(photo, '_s_', x_s, y_s)
            # NOTE(review): hero is a str from the form, so img_x_y * hero
            # is sequence repetition unless Upload.img_x_y returns something
            # that overloads * — confirm intended semantics.
            Photos.add_a_photo(href, src, alt, img_x_y * hero, userid, rannew)
            return json.dumps({'error': 0, 'url': src})
        else:
            # Flash + JSON error: "please upload a valid photo"
            # (user-facing strings kept verbatim).
            flash('请上传正确的照片')
            return json.dumps({'error': 1, 'info': '请上传正确的照片'})
    return render_template("uploadimg.html")
def test_with_no_extension(self):
    """A file object with no extension is not allowed"""
    uploader = Uploader('/test/path')
    target = UploadFile(Upload(), Mock(filename='test'))
    with self.assertRaises(InvalidExtension):
        uploader.upload(target)
def test_valid_upload(self):
    """Given a valid upload record, should return the expected filename"""
    ts = datetime.now()
    record = Upload(id=1, ts_uploaded=ts)
    ext = 'pdf'
    target = UploadFile(record, Mock(filename=f'test.{ext}'))
    # Expected layout: <base>/<year>/<month>/<day>/<formatted-ts>_<id>.<ext>
    expected = (
        f"{TEST_BASE_PATH}/{ts.year}/{ts.month}/{ts.day}/"
        f"{ts.strftime(UploadPathManager.FILENAME_FORMAT)}_{record.id}.{ext}"
    )
    result = UploadPathManager(TEST_BASE_PATH).get_abs_path(target)
    self.assertEqual(str(result), expected)
def upload_video(request):
    """Accept a POSTed video, persist an Upload row, and answer 202 + id."""
    if request.method != 'POST':
        return HttpResponseNotAllowed('Only POST allowed.')
    try:
        video = request.FILES['file']
        record = Upload(video_title=video.name,
                        video_file=video,
                        image_title='',
                        image_file=None,
                        # NOTE: the enum member name is misspelled upstream
                        # ('recieved'); it must be used as declared.
                        status=Status.video_recieved.value,
                        callback_url='')  # no callback support yet
        record.save()
        return HttpResponse(record.id, status=202)
    except Exception:
        return HttpResponseBadRequest('Malformed data.')
def upload_action(request):
    """Write each posted 'doc_file' to disk and record an Upload row per file.

    :param request: Django request carrying multi-file field 'doc_file' plus
                    POST fields 's' and 'title'
    :return: plain-text confirmation response
    """
    upload_path = '/data/deploy/OMS/media/Upload'
    # Hoisted out of the loop: the directory check and the nested helper
    # were re-evaluated/re-defined for every uploaded file in the original.
    if not os.path.exists(upload_path):
        makedir_p(upload_path)

    def handle_uploaded_file(f):
        # Stream the upload to disk in chunks to avoid loading it in memory.
        with open(upload_path + '/' + f.name, 'wb+') as destination:
            for chunk in f.chunks():
                destination.write(chunk)

    files = request.FILES.getlist('doc_file')
    for item in files:
        handle_uploaded_file(item)
        # One DB row per stored file; 's' and 'title' apply to the batch.
        uploads = Upload(
            doc_file=os.path.join(upload_path, item.name),
            s_id=request.POST['s'],
            title=request.POST['title'],
        )
        uploads.save()
    return HttpResponse("File(s) uploaded!")
def test_with_different_values(self):
    """Method should return the max value given a list of random ints"""
    record = Upload()
    field = AggregateField(*['test'])
    vals = [random.choice([i for i in range(5)]) for i in range(5)]
    chunks = [ChunkWithMax(v) for v in vals]
    with patch('calls.aggregate.functions.aggregate_max.chunkify_big_json'
               ) as chunkFnMock:
        chunkFnMock.return_value = chunks
        result = get_aggregate_max(record, field)
        self.assertEqual(result, max(vals))
def test_with_same_values(self):
    """Method should return the value even if the vals are all the same"""
    record = Upload()
    field = AggregateField(*['test'])
    chunkCount = 5
    repeatedVals = [10 for i in range(chunkCount)]
    chunks = [ChunkWithMean(v, chunkCount) for v in repeatedVals]
    with patch('calls.aggregate.functions.aggregate_mean.chunkify_big_json'
               ) as chunkFnMock:
        chunkFnMock.return_value = chunks
        result = get_aggregate_mean(record, field)
        self.assertEqual(result, statistics.mean(repeatedVals))
def poll_for_image(request):
    """Poll the processing status of an upload by id.

    :return: 200 when the image has been created, 202 while pending,
             400 on missing/invalid id, 405 for non-GET.
    """
    if request.method == 'GET':
        try:
            id = request.GET['id']
            # BUG fix: Upload(id=id) constructed a fresh, unsaved model
            # instance whose status is the field default, so this view could
            # never observe real progress. Fetch the persisted row instead.
            upload = Upload.objects.get(id=id)
            if upload.status == Status.image_created.value:
                return HttpResponse(status=200)  # Processing has completed
            return HttpResponse(status=202)  # Processing has not yet completed
        except Exception:
            return HttpResponseBadRequest('You must provide an id to poll')
    return HttpResponseNotAllowed('Only GET allowed.')
def create_upload():
    """Create a new Upload record and upload it appropriately"""
    incoming = request.files['file']
    uploader = Uploader(AppConfig.BASE_UPLOAD_PATH)
    record = Upload(source_filename=incoming.filename)
    candidate = UploadFile(record, incoming)
    if not uploader.can_upload(candidate):
        return jsonify({
            'error': 'Failed to upload, please ensure the file is uploadable'
        }), 500
    session.add(record)
    session.flush()  # give the uploader access to the generated id/ts
    uploader.upload(candidate)
    session.commit()
    return jsonify({})
def process_id(upload_id):
    """Promote a queued sample owned by the current user into a real Sample.

    On a valid form submission the queued file's metadata is committed as a
    Sample + Upload, the queue entry is deleted, and the file is moved from
    QueuedFiles to TestFiles. Returns a redirect on success, a template
    context dict otherwise.

    :raises QueuedSampleNotFoundException: unknown id or not the owner.
    """
    from run import config
    # Fetch upload id
    queued_sample = QueuedSample.query.filter(
        QueuedSample.id == upload_id).first()
    if queued_sample is not None:
        if queued_sample.user_id == g.user.id:
            # Allowed to process
            versions = CCExtractorVersion.query.all()
            form = FinishQueuedSampleForm(request.form)
            form.version.choices = [(v.id, v.version) for v in versions]
            if form.validate_on_submit():
                # Store in DB
                db_committed = False
                temp_path = os.path.join(config.get('SAMPLE_REPOSITORY', ''),
                                         'QueuedFiles', queued_sample.filename)
                final_path = os.path.join(config.get('SAMPLE_REPOSITORY', ''),
                                          'TestFiles', queued_sample.filename)
                try:
                    # Stored extension includes the leading dot; strip it.
                    extension = queued_sample.extension[1:] if len(
                        queued_sample.extension) > 0 else ""
                    sample = Sample(queued_sample.sha, extension,
                                    queued_sample.original_name)
                    g.db.add(sample)
                    # Flush so sample.id is available for the Upload row.
                    g.db.flush([sample])
                    uploaded = Upload(g.user.id, sample.id, form.version.data,
                                      Platform.from_string(form.platform.data),
                                      form.parameters.data, form.notes.data)
                    g.db.add(uploaded)
                    g.db.delete(queued_sample)
                    g.db.commit()
                    db_committed = True
                except Exception:
                    # Fixed: was a bare "except:", which also traps
                    # SystemExit/KeyboardInterrupt; log and roll back.
                    traceback.print_exc()
                    g.db.rollback()
                # Move file only after the DB state is durable.
                if db_committed:
                    os.rename(temp_path, final_path)
                    return redirect(
                        url_for('sample.sample_by_id', sample_id=sample.id))
            return {'form': form, 'queued_sample': queued_sample}
    # Raise error
    raise QueuedSampleNotFoundException()
def test_with_flushed(self):
    """A flushed and available upload should return its id and uploaded ts"""
    uploadId = 1
    uploadTs = datetime.now()
    name = 'test.zip'
    record = Upload(id=uploadId, ts_uploaded=uploadTs, source_filename=name)
    serializer = UploadJson(record)
    # downloadUrl is produced via flask's url_for; drop it so this unit test
    # doesn't depend on an application context being set up.
    data = serializer()
    data.pop('downloadUrl')
    self.assertEqual(data, {
        'id': uploadId,
        'ts': uploadTs.isoformat(),
        'filename': name,
    })
def manageReportUpload(request, cloudItem):
    """Decrypt and store an uploaded report for a cloud item.

    The uploaded JSON carries an RSA-encrypted AES key ('k') and a
    Fernet-encrypted ZIP payload ('enc'); the ZIP is decrypted, extracted,
    and recorded as a parsed Upload.

    :param request: request carrying the 'fileUp' file (name must end .enc)
    :param cloudItem: CloudItem primary key / folder name
    :raises Exception: if a report already exists or the filename is invalid
    """
    # check if an item is already in the DB
    impDb = Upload.objects.filter(
        cloudItemID=CloudItem.objects.get(id=cloudItem))
    if len(impDb) != 0:
        # Fixed: the original was `raise ("A report already exists.")`,
        # which raises a plain string — a TypeError at runtime. Raise a
        # real exception instead.
        raise Exception("A report already exists.")
    # add path for crypto
    cryptoPath = os.path.join(os.path.dirname(settings.BASE_DIR), "finder")
    if not cryptoPath in sys.path:
        sys.path.insert(1, cryptoPath)
    del cryptoPath
    import crypto
    fileUpload = request.FILES['fileUp']
    fileName = strip_tags(fileUpload.name)
    # create a folder for this cloud item if it does not exist
    path = os.path.join(settings.UPLOAD_DIR, cloudItem)
    if not os.path.isdir(path):
        os.mkdir(path)
    # upload name: hash of the base name plus the upload timestamp
    upTime = timezone.now()
    shaName = fileName
    uploadName = crypto.sha256(shaName[:-8] + crypto.HASH_SEPARATOR +
                               format(upTime, "U")).hexdigest()
    wholeUploadPath = os.path.join(path, uploadName)
    if not os.path.isdir(wholeUploadPath):
        os.mkdir(wholeUploadPath)
    # write the raw upload to disk in chunks
    with open(os.path.join(wholeUploadPath, fileName), 'wb+') as destination:
        for chunk in fileUpload.chunks():
            destination.write(chunk)
    # Fixed: file handles were opened without ever being closed; use
    # context managers throughout.
    with open(os.path.join(wholeUploadPath, fileName), "r") as fileCont:
        jsonParsed = json.load(fileCont)
    cont = jsonParsed['enc']
    k = jsonParsed['k']
    # decrypt AES key
    aes = crypto.decryptRSA(k)
    # decrypt ZIP - first write encrypted cont into a temp file, read it,
    # decrypt it and store the ZIP
    tempFileName = os.path.join(wholeUploadPath, fileName + ".tmp")
    with open(tempFileName, "w+b") as tempFile:
        tempFile.write(cont)
    # fernet wants "bytes" as token
    with open(tempFileName, "rb") as tempFile:
        fileBytes = crypto.decryptFernetFile(tempFile.read(), aes)
    if fileName.endswith(".enc"):
        name = fileName[:-4]
    else:
        raise Exception("Invalid filename.")
    # write decrypted file to disc
    decZipFile = os.path.join(wholeUploadPath, name)
    with open(decZipFile, "w+b") as decFile:
        decFile.write(fileBytes)
    # delete temp file
    os.remove(tempFileName)
    # drop the key material as soon as it is no longer needed
    aes = None
    del aes
    # unzip
    fileZip = zipfile.ZipFile(decZipFile)
    fileZip.extractall(wholeUploadPath)
    # set this report parsed
    newUpload = Upload(fileName=name[:-4], uploadDate=upTime,
                       uploadIP=request.META['REMOTE_ADDR'], parsed=True,
                       cloudItemID=CloudItem.objects.get(id=cloudItem))
    newUpload.save()
def manageReportUpload(request, cloudItem):
    """ Uncrypt and store the report """
    #check if an item is already in the DB
    impDb = Upload.objects.filter(cloudItemID=CloudItem.objects.get(
        id=cloudItem))
    if len(impDb) != 0:
        # NOTE(review): this raises a plain string, which is itself a
        # TypeError at runtime (string exceptions are not allowed); it
        # should be a real Exception subclass.
        raise ("A report already exists.")
    # add path for crypto
    cryptoPath = os.path.join(os.path.dirname(settings.BASE_DIR), "finder")
    if not cryptoPath in sys.path:
        sys.path.insert(1, cryptoPath)
    del cryptoPath
    import crypto
    fileUpload = request.FILES['fileUp']
    # strip any HTML from the client-supplied filename
    fileName = strip_tags(fileUpload.name)
    #create a folder for this cloud item if do not exists
    path = os.path.join(settings.UPLOAD_DIR, cloudItem)
    if not os.path.isdir(path):
        os.mkdir(path)
    #upload name: hash of the base name plus the upload timestamp
    upTime = timezone.now()
    shaName = fileName
    uploadName = crypto.sha256(shaName[:-8] + crypto.HASH_SEPARATOR +
                               format(upTime, "U")).hexdigest()
    wholeUploadPath = os.path.join(path, uploadName)
    if not os.path.isdir(wholeUploadPath):
        os.mkdir(wholeUploadPath)
    #write to disk (streamed in chunks)
    with open(os.path.join(wholeUploadPath, fileName), 'wb+') as destination:
        for chunk in fileUpload.chunks():
            destination.write(chunk)
    # NOTE(review): the handles opened below are never closed.
    # The uploaded JSON carries 'enc' (payload) and 'k' (encrypted AES key).
    fileCont = open(os.path.join(wholeUploadPath, fileName), "r")
    jsonParsed = json.load(fileCont)
    cont = jsonParsed['enc']
    k = jsonParsed['k']
    #decrypt AES key
    aes = crypto.decryptRSA(k)
    #decrypt ZIP - first write encrypted cont into a temp file, read it, decrypt it and store the ZIP
    tempFileName = os.path.join(wholeUploadPath, fileName + ".tmp")
    open(tempFileName, "w+b").write(cont)
    # fernet wants "bytes" as token
    fileBytes = crypto.decryptFernetFile(open(tempFileName, "rb").read(), aes)
    if fileName.endswith(".enc"):
        name = fileName[:-4]
    else:
        raise Exception("Invalid filename.")
    #write decrypted file to disc
    decZipFile = os.path.join(wholeUploadPath, name)
    open(decZipFile, "w+b").write(fileBytes)
    #delete temp file
    os.remove(tempFileName)
    # drop key material once the payload is decrypted
    aes = None
    del aes
    #unzip
    fileZip = zipfile.ZipFile(decZipFile)
    fileZip.extractall(wholeUploadPath)
    # set this report parsed
    newUpload = Upload(fileName=name[:-4],
                       uploadDate=upTime,
                       uploadIP=request.META['REMOTE_ADDR'],
                       parsed=True,
                       cloudItemID=CloudItem.objects.get(id=cloudItem))
    newUpload.save()
def user_bio(request):
    """
    'my_account' view used to handle avatar image upload and
    routing and templating of users account

    Args:
        None
    Returns:
        form for avatar upload
    """
    email = request.user.email
    if request.method == 'POST':
        imageupload = UploadForm(request.POST, request.FILES)
        if imageupload.is_valid():
            u = imageupload.save(commit=False)
            #image upload to google cloud storage
            uploaded_file = request.FILES['image']
            content_type = request.FILES['image'].content_type
            name = request.FILES['image'].name
            data = uploaded_file.read()
            # Write the raw bytes into the GCS bucket; the object is made
            # owner-controlled via the x-goog-acl option.
            gcs_file = gcs.open(
                ('/' + bucket_name + '/' + name),
                mode = 'w',
                content_type = 'image/jpeg',
                options = {
                    'x-goog-acl': 'bucket-owner-full-control'
                }
            )
            gcs_file.write(data)
            gcs_file.close()
            #get_serving_url from google and stats
            #avatar = gcs.open('/' + bucket_name + '/' + name)
            blob_key = blobstore.create_gs_key(
                '/gs/' + bucket_name + '/' + name
            )
            serving_url = get_serving_url(blob_key)
            #find existing avatar image
            try:
                u = Upload.objects.get(email = email, is_avatar = True)
            except Upload.DoesNotExist:
                u = False
            #set is_avatar to False but keep entry in db
            if u:
                u.is_avatar = False
                u.save()
            #insert the uploaded avatar
            u = Upload(url = serving_url, email = email, is_avatar = True)
            u.save()
            stats = gcs.stat('/' + bucket_name + '/' + name)
            return HttpResponseRedirect('/my_account/')
        # NOTE(review): an invalid POST falls through without an explicit
        # response — confirm intended behavior.
    else:
        form = UploadForm()
        gallery_form = GalleryForm()
        # Resolve the current avatar's serving URL, falling back to a
        # placeholder when the user has not uploaded one.
        try:
            u = Upload.objects.get(email = email, is_avatar = True)
            serving_url = u.url
        except Upload.DoesNotExist:
            serving_url = '/static/bio/blank.png'
        return render_to_response(
            'profile.html',
            {
                'form': form,
                'gallery_form': gallery_form,
                'serving_url': serving_url,
                'user': email,
            },
            context_instance = RequestContext(request)
        )
def edge_update(request):
    """Ingest a batch of edge-reported gas-price sightings and return the
    latest price per station. Python 2 view (print statement, StringIO)."""
    # or data[] if using jquery
    edge_data_list = json.loads(request.body)["data"]
    formatted_data = []
    # d = request.data
    for upload in edge_data_list:
        # Entries without a photo are ignored entirely.
        if "image" not in upload:
            continue
        upload_latitude = float(upload["latitude"])
        upload_longitude = float(upload["longitude"])
        company_in_db = Company.objects.filter(
            companyname=upload["companyname"])
        if not company_in_db.exists():
            # if it is a new company, update db
            company, _ = Company.objects.get_or_create(
                companyname=upload["companyname"])
        else:
            company = company_in_db[0]
        distance_range = 0.05  # a gas station in 50 meters
        min_latitude, max_latitude, min_longitude, max_longitude = get_bounding_box(
            upload_latitude, upload_longitude, distance_range)
        # Look for an existing station of the same company inside the box.
        stations_in_db = Station.objects.filter(
            company=company,
            latitude__range=(min_latitude, max_latitude),
            longitude__range=(min_longitude, max_longitude))
        if not stations_in_db.exists():
            # if there isn't a previously logged station within 50 meters of the same company, add new station
            station, _ = Station.objects.get_or_create(
                company=company,
                latitude=upload["latitude"],
                longitude=upload["longitude"])
        else:
            # there is a station within 50 meters
            station = stations_in_db[0]
        cleaned_timestamp = dateutil.parser.parse(upload["timestamp"])
        potential_new_upload = Upload(timestamp=cleaned_timestamp,
                                      latitude=upload["latitude"],
                                      longitude=upload["longitude"],
                                      station=station,
                                      price=upload["price"])
        # De-duplicate: only queue the row if no identical sighting exists.
        uploads_in_db = Upload.objects.filter(
            timestamp=potential_new_upload.timestamp,
            latitude=potential_new_upload.latitude,
            longitude=potential_new_upload.longitude,
            station=potential_new_upload.station,
            price=potential_new_upload.price)
        # print cleaned_timestamp
        # print Upload.objects.all()[0].timestamp
        # print len(Upload.objects.filter(timestamp=cleaned_timestamp))
        if not uploads_in_db.exists():
            # decode base64 image and store into db
            image_str = upload["image"]
            # print len(image_str)
            image_str_file = StringIO.StringIO()
            image_str_file.write(base64.decodestring(image_str))
            image = Image()
            image.imagefield.save('{}.jpg'.format(uuid.uuid4()),
                                  File(image_str_file))
            potential_new_upload.image = image
            formatted_data.append(potential_new_upload)
    # Insert all new sightings at once; compare counts to detect changes.
    old_count = Upload.objects.count()
    Upload.objects.bulk_create(formatted_data)
    new_count = Upload.objects.count()
    if old_count != new_count:
        message = "updated db"
    else:
        message = "db already had data"
    print message
    # Latest sighting per station (distinct-on requires PostgreSQL).
    objs_within = Upload.objects.order_by(
        'station',
        '-timestamp').distinct('station').select_related('station__company')
    resp = []
    for o in objs_within:
        d = {
            'latitude': o.latitude,
            'longitude': o.longitude,
            'price': o.price,
            'timestamp': o.timestamp,
            'companyname': o.station.company.companyname
        }
        resp.append(d)
    return JsonResponse({"data": resp})
def dashboard():
    """Render the signed-in user's dashboard; also accepts a file upload on
    POST. Python 2 Flask view (print statements)."""
    if 'username' not in session:
        return redirect(url_for('signin'))
    user = User.query.filter_by(username=session['username']).first()
    # Gate: unverified users and users without a portfolio are redirected.
    if user.verified == 0:
        print 'user is not verified'
        return redirect(url_for('verify'))
    if user.portfolioname == None or len(user.portfolioname.replace(' ', '')) < 1:
        print 'user does not have a portfolioname'
        return redirect(url_for('portfolioSetup'))
    user = User.query.filter_by(username=session['username']).first()
    uploads = Upload.query.filter_by(publisher=session['username'])
    # Recount who the current user follows and persist it.
    following = Follow.query.filter_by(follower_username=session['username'])
    following_count = []
    for i in following:
        following_count.append(i)
    amount_of_following = len(following_count)
    user.following = amount_of_following
    db.session.commit()
    # Recompute follower counts for every user.
    # NOTE(review): followers_count is never reset inside the loop, so the
    # count accumulates across users — likely a bug; left as-is here.
    followers_count = []
    for person in User.query.all():
        followers = Follow.query.filter_by(followed_username=person.username)
        if followers != None:
            for i in followers:
                followers_count.append(i)
            amount_of_followers = len(followers_count)
            User.query.filter_by(username=person.username).first(
            ).followers = amount_of_followers
            db.session.commit()
        else:
            amount_of_followers = 0
            User.query.filter_by(username=person.username).first(
            ).followers = amount_of_followers
            db.session.commit()
    # Two random user suggestions.
    random_people = []
    for i in User.query.order_by(func.rand()).limit(2).all():
        random_people.append(i)
    #work on filtering posts
    peopleFollowing = []
    for i in Follow.query.filter_by(follower_username=session['username']):
        peopleFollowing.append(i.followed_username)
    posts_query = Posts.query.all()
    # Feed shows the user's own posts plus posts by people they follow.
    postsFollowing = [session['username']]
    for i in posts_query:
        if i.poster_username in peopleFollowing:
            postsFollowing.append(i.poster_username)
    posts = Posts.query.filter(Posts.poster_username.in_(postsFollowing))
    upload_folder = '/Users/developeraccount/Desktop/Lens/lens/src/static/accounts/%s' % session[
        'username']
    if user is None:
        return redirect(url_for('signin'))
    else:
        firstname = user.firstname
        lastname = user.lastname
        username = user.username
        figure = user.figure
        location = user.location
        following = user.following
        followers = user.followers
        twitter = user.twitter
        appreciations = user.appreciations
        instagram = user.instagram
        github = user.github
        bio = user.bio
        location = user.location
        if request.method == 'POST':
            file = request.files['file']
            if file and allowed_file(file.filename.lower()):
                filename = secure_filename(file.filename)
                # newupload = Upload(filename, '/accounts/%s/%s' % (session['username'], filename), session['username'], request.form['title'], request.form['description'])
                newupload = Upload(
                    filename, 'accounts/%s/%s' % (session['username'], filename),
                    session['username'], "none", "none")
                db.session.add(newupload)
                db.session.commit()
                file.save(os.path.join(upload_folder, filename))
                return redirect(url_for('dashboard'))
        return render_template('dashboard.html',
                               User=User,
                               user=user,
                               peopleFollowing=peopleFollowing,
                               posts=posts,
                               random_people=random_people,
                               bio=bio,
                               uploads=uploads,
                               location=location,
                               github=github,
                               instagram=instagram,
                               username=username,
                               firstname=firstname,
                               lastname=lastname,
                               figure=figure,
                               following=following,
                               followers=followers,
                               twitter=twitter,
                               appreciations=appreciations)