Example #1
    def test_post(self):
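        """fetch_feed should return a user's own posts plus posts from the users they follow."""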
        u1 = User(username='******',
                  email='*****@*****.**',
                  password_hash=generate_password_hash('hello world'))
        db.session.add(u1)
        u2 = User(username='******',
                  email='*****@*****.**',
                  password_hash=generate_password_hash('hello world'))
        db.session.add(u2)

        db.session.commit()

        p1 = Upload(uploader=u1.id, description='test desc', image=0)
        db.session.add(p1)
        p2 = Upload(uploader=u2.id, description='test desc', image=0)
        db.session.add(p2)

        db.session.commit()

        ## No follower relationships
        feed = u1.fetch_feed(0)
        self.assertEqual(feed.count(), 1)
        self.assertEqual(feed[0].uploader, u1.id)
        self.assertEqual(feed[0].user, u1)

        ## u1 follows u2
        u1.toggle_follow(u2)
        feed1 = u1.fetch_feed(0)
        feed2 = u2.fetch_feed(0)

        self.assertEqual(feed1.count(), 2)
        self.assertEqual(feed2.count(), 1)
Example #2
    def post(self, request, *args, **kwargs):
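        """Store the uploaded files against the link and e-mail a notification to the link's creator."""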
        if not self.link.enabled:  # A user can still have the form opened even if the link is disabled
            return HttpResponseRedirect(reverse('link-disabled'))
        form_class = self.get_form_class()
        form = self.get_form(form_class)
        files = request.FILES.getlist('files')
        if form.is_valid():
            for file in files:
                upload = Upload(file=file, link=self.link)
                upload.save()
            domain, _ = split_domain_port(self.request.get_host())

            context = {
                'filenames': {file.name for file in files},
                'link_url': request.build_absolute_uri(
                    reverse('update-link', kwargs={'pk': self.link.pk})),
                'link_name': self.link.nice_name,
            }
            link_creator = self.link.created_by
            link_creator.email_user(
                subject='Data has arrived',
                message=f'Please go to {context["link_url"]}',
                html_message=render_to_string(
                    template_name='app/mail_notification.html',
                    context=context),
                from_email=f'datadropbot@{domain}',
                fail_silently=True,
            )
            return self.form_valid(form)
        return self.form_invalid(form)
Example #3
def save_assignment_file(file, assignment_id, user_id=False):
    original_filename = secure_filename(file.filename)
    random_filename = save_file(file)

    executor.submit(get_thumbnail, random_filename)

    # Update SQL after the file has been saved
    new_upload = Upload(original_filename=original_filename,
                        filename=random_filename,
                        assignment_id=assignment_id,
                        timestamp=datetime.now())

    if user_id:
        new_upload.user_id = user_id
    else:
        new_upload.user_id = current_user.id

    db.session.add(new_upload)
    db.session.commit()
Example #4
def create_post():
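    """Create an Upload from the submitted image and description, converting the image to a bounded JPEG."""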
    user: typing.Optional[User] = logged_in_as()

    if user is None:
        return abort(403)

    else:
        try:
            f = request.files['image']
            description = request.form['description']
        except KeyError:
            return abort(400)

        else:
            try:
                with Image(file=f.stream) as img:
                    img.format = 'jpeg'
                    img.transform('', '{0}x{0}>'.format(app.config['MAX_IMAGE_SIZE']))

                    upload = Upload(image=img.make_blob(), uploader=user.id, description=description)
                    db.session.add(upload)
                    db.session.commit()
            except Exception:
                # The image could not be processed; log it and fall through to the redirect.
                app.logger.warning('Invalid image')

            return redirect(url_for('index'))
Example #5
File: utils.py Project: joe513/m3u8
def load_remote_m3u8(link, playlist, remove_existed=False):
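    """Download a remote M3U8 playlist, archive it as an Upload, and create a Channel for each entry."""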
    from app.models import Channel, Upload

    r = requests.get(link)
    if not r.ok:
        return

    upload = Upload(user=playlist.user, info=link)
    upload.file.save('requests.m3u8', ContentFile(r.content))
    upload.save()

    if remove_existed:
        playlist.channels.all().delete()

    duration = title = group = path = None
    # decode_unicode=True makes iter_lines yield text, so the lines can be parsed directly
    for line in r.iter_lines(decode_unicode=True):

        if line == '#EXTM3U':
            continue

        if line.startswith('#EXTINF:'):
            duration, title = line[8:].split(',')
            continue

        if line.startswith('#EXTGRP:'):
            group = line[8:]
            continue

        if line.startswith('#'):
            logger.warning('Unsupported line skipped: {}'.format(line))
            continue

        if line:
            path = line

            Channel.objects.create(playlist=playlist,
                                   title=title,
                                   duration=duration,
                                   group=group,
                                   path=path)
Example #6
def post_backend_sync(request):
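    """Clear all stored images, ingest the posted uploads, and prune stale rows for each station."""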
    data = json.loads(request.body)
    uploads = data["uploads"]

    # Clear all of the stored images
    Image.objects.all().delete()
    if os.path.exists("media"):
        shutil.rmtree("media")
    os.makedirs("media")

    # Go through all of the uploads, collecting new rows in a separate list
    # so the input list being iterated is never modified
    new_uploads = []
    for u in uploads:
        latitude = float(u["latitude"])
        longitude = float(u["longitude"])
        timestamp = dateutil.parser.parse(u["timestamp"])
        companyname = u["companyname"]
        price = float(u["price"])

        # Get (or insert) company and station
        company, station = get_station_from_lat_long_companyname(
            latitude, longitude, constants.STATION_RADIUS, companyname)

        # Is this station supposed to contain historical data?
        if historic_cache.contains(station.stationid):
            # This is something we want to delete data that is too old for
            timestamp_threshold = datetime.datetime.now() - datetime.timedelta(
                days=constants.HISTORICAL_DAYS)
            Upload.objects.filter(station=station,
                                  timestamp__lt=timestamp_threshold).delete()
        else:
            # Clear all old data
            Upload.objects.filter(station=station).delete()

        # Create the upload
        new_uploads.append(
            Upload(latitude=latitude,
                   longitude=longitude,
                   timestamp=timestamp,
                   station=station,
                   price=price))

    Upload.objects.bulk_create(new_uploads)

    return JsonResponse({"message": "success"})
Example #7
def populate_map_with_density(num_stations):
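    """Create num_stations randomly placed stations, each with a single randomly priced upload."""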
    # delete all rows
    Company.objects.all().delete()
    Image.objects.all().delete()
    Station.objects.all().delete()
    Upload.objects.all().delete()

    if os.path.exists("media"):
        shutil.rmtree("media")

    # Used for location randomization
    latitude_min, latitude_max, longitude_min, longitude_max = get_bounding_box(LATITUDE, LONGITUDE, RANGE)

    uploads = []
    for _ in range(num_stations):
        companyname = random.choice(COMPANIES)
        latitude = random.uniform(latitude_min, latitude_max)
        longitude = random.uniform(longitude_min, longitude_max)
        price = round(random.uniform(PRICE_MIN, PRICE_MAX), 2)

        company, _ = Company.objects.get_or_create(
            companyname=companyname
        )

        station, _ = Station.objects.get_or_create(
            company=company,
            latitude=latitude,
            longitude=longitude
        )

        uploads.append(Upload(
            latitude=latitude,
            longitude=longitude,
            timestamp=timezone.now(),
            station=station,
            price=price
        ))

    # Bulk create is much faster for larger amounts of data
    Upload.objects.bulk_create(uploads)
Example #8
def manage_upload(uploaded_file, comment=None):
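    """Validate an uploaded image or PDF, save it under a random name, and record it as an Upload."""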
    filename = secure_filename(uploaded_file.filename)
    if filename != '':
        name, file_ext = os.path.splitext(filename)
        file_ext = file_ext.lower()
        if file_ext not in current_app.config['UPLOAD_EXTENSIONS']:
            print("Invalid File extension, valid extensions are "
                  ".jpg, .png, .gif, .pdf")
            abort(400)
        if file_ext != ".pdf":
            if file_ext != validate_image(uploaded_file.stream):
                print("Invalid Image")
                abort(400)
        else:
            try:
                doc = PdfFileReader(uploaded_file)
                print(doc.getNumPages())
            except PyPDF2.utils.PdfReadError:
                print("Invalid PDF")
                abort(400)
        uploaded_file.seek(0)
        s = uuid.uuid4().hex
        s += file_ext
        full_filename = os.path.join(current_app.config['UPLOAD_PATH'], s)
        uploaded_file.save(full_filename)
        if comment:
            comment_id = comment.id
        else:
            comment_id = None
        up_file = Upload(
            internal_filename=s,
            external_filename=(uuid.uuid4().hex + file_ext),
            user_filename=filename,
            uploader_id=current_user.get_id(),
            comment_id=comment_id,
        )
        db.session.add(up_file)
        db.session.commit()
        return up_file
Example #9
def populate_images(num_images):
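    """Create a single station and attach num_images uploads that all share the same sample image."""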
    # delete all rows
    Company.objects.all().delete()
    Image.objects.all().delete()
    Station.objects.all().delete()
    Upload.objects.all().delete()

    companyname = "exxon"
    latitude = LATITUDE
    longitude = LONGITUDE
    price = 1.73

    company, _ = Company.objects.get_or_create(
        companyname=companyname
    )

    station, _ = Station.objects.get_or_create(
        company=company,
        latitude=latitude,
        longitude=longitude
    )

    # Use the same modified image
    sample_image_path = os.path.join('prepopulate_data', 'prepopulate_sign.jpg')
    image = Image()
    with open(sample_image_path, 'rb') as sample_image:
        image.imagefield.save('modified.jpg', File(sample_image))

    uploads = []
    for _ in range(num_images):
        uploads.append(Upload(
            latitude=latitude,
            longitude=longitude,
            timestamp=timezone.now(),
            station=station,
            price=price,
            image=image
        ))
    Upload.objects.bulk_create(uploads)
Example #10
def request_historical(latitude, longitude, companyname):
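    """Return historical price uploads for a station, fetching them from the backend when not cached locally."""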
    # Make request to the /historical endpoint on the backend
    data = {
        "latitude": latitude,
        "longitude": longitude,
        "companyname": companyname
    }

    # Find the station matching the given location and company
    _, station = get_station_from_lat_long_companyname(
        latitude, longitude, constants.STATION_RADIUS, companyname)

    # Check whether the edge already holds the historical data for this station
    historical = []
    if station is None:
        return historical
    elif historic_cache.contains(station.stationid):
        for upload in Upload.objects.filter(station=station):
            historical.append({
                "latitude": str(upload.latitude),
                "longitude": str(upload.longitude),
                "timestamp": upload.timestamp.isoformat(),
                "price": upload.price,
                "companyname": upload.station.company.companyname
            })
    else:
        # requests encodes the query parameters itself, so the dict can be passed directly
        r = requests.get(url="{}/historical".format(constants.BACKEND_ENDPOINT),
                         params=data)
        historical = json.loads(r.content)["data"]

        uploads = []
        for u in historical:
            latitude = float(u["latitude"])
            longitude = float(u["longitude"])
            timestamp = dateutil.parser.parse(u["timestamp"])
            price = float(u["price"])

            # Don't add duplicates
            if not Upload.objects.filter(station=station,
                                         timestamp=timestamp,
                                         price=price).exists():
                # Create the upload
                uploads.append(
                    Upload(latitude=latitude,
                           longitude=longitude,
                           timestamp=timestamp,
                           station=station,
                           price=price))

        Upload.objects.bulk_create(uploads)

    # Sort it
    historical = list(sorted(historical, key=lambda x: x["timestamp"]))

    # Mark in cache and clear old cache data
    old = historic_cache.entry(station.stationid)
    if old is not None:
        old_station, _ = Station.objects.get_or_create(stationid=old)
        # Keep only the newest image-bearing upload for the evicted station
        stale = Upload.objects.filter(image__isnull=False,
                                      station=old_station).order_by("-timestamp")[1:]
        Upload.objects.filter(pk__in=stale).delete()

    return historical
Example #11
def populate_database(num_locations, distance_range):
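    """Seed the database with random stations, each carrying a short randomized price history."""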
    random.seed(42)

    # delete all rows
    Company.objects.all().delete()
    Image.objects.all().delete()
    Station.objects.all().delete()
    Upload.objects.all().delete()

    if os.path.exists("media"):
        shutil.rmtree("media")

    # Some constants
    LATITUDE = 42.442445
    LONGITUDE = -76.485146
    PRICE_MIN = 2.0
    PRICE_MAX = 2.5
    COMPANIES = [
        "exxon", "mobil", "shell", "sunoco", "kwikfill", "bp", "chevron",
        "speedway", "wawa"
    ]

    # So we can batch create
    uploads = []

    latitude_min, latitude_max, longitude_min, longitude_max = get_bounding_box(
        LATITUDE, LONGITUDE, distance_range)
    print(num_locations)
    for i in range(num_locations):
        if i % 10 == 0:
            print(i)

        companyname = random.choice(COMPANIES)
        latitude = random.uniform(latitude_min, latitude_max)
        longitude = random.uniform(longitude_min, longitude_max)
        price = round(random.uniform(PRICE_MIN, PRICE_MAX), 2)

        # pick random company
        company, _ = Company.objects.get_or_create(companyname=companyname)

        # use the same image for testing
        # sample_image_path = os.path.join('prepopulate_data', 'prepopulate_sign.jpg')
        # image = Image()
        # image.imagefield.save('prepopulate_sign.jpg', File(open(sample_image_path, 'rb')))

        # create random station location
        station, _ = Station.objects.get_or_create(company=company,
                                                   latitude=latitude,
                                                   longitude=longitude)

        num_points = 5
        # num_points = random.randint(2, 5)
        day_deltas = sorted(random.sample(range(-6, 0), num_points))

        price_deltas = np.round(
            np.random.uniform(low=-0.1, high=0.1, size=(num_points, )), 2)

        for j in range(num_points):
            price += price_deltas[j]
            price = round(price, 2)
            uploads.append(
                Upload(latitude=latitude,
                       longitude=longitude,
                       timestamp=timezone.now() +
                       datetime.timedelta(days=day_deltas[j]),
                       station=station,
                       price=price))

    # Batch create
    Upload.objects.bulk_create(uploads)