Example #1
def done(request, url):
    '''
        This method removes any transaction data for a given url
    '''
    in_db = url_tools.validate_url_in_db(url)
    in_media = url_tools.url_in_media(url)

    if in_db or in_media:
        # The url is present in at least one location, so clean up both
        if in_db:
            url_tools.expire_url(url)
        if in_media:
            media_tools.delete_dir(url)

        return response_ok({'removed':True})
    else:
        return response_ok("Invalid url")
Example #2
def cleanup(request):
    '''
        This method checks all active urls and expires any that have timed out.
    '''

    expired_count = 0

    for url_obj in ActiveURL.objects.all():
        if url_obj.is_expired():
            url_tools.expire_url(url_obj.get_url())
            expired_count += 1

    return response_ok({'message': "{0} successfully removed in cleanup".format(expired_count)})
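As a rough illustration of the expiry check, here is one way ActiveURL.is_expired() could work; the created timestamp field and the one-hour lifetime are assumptions, not shown in this listing.

from datetime import timedelta

from django.utils import timezone

# Assumed lifetime after which an active url counts as expired
URL_LIFETIME = timedelta(hours=1)


def is_expired(url_obj):
    # In the project this is a method on ActiveURL; shown here as a free
    # function over an object with a 'created' DateTimeField
    return timezone.now() - url_obj.created > URL_LIFETIME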
Example #3
def eta(request, url):
    '''
        This view returns the estimated or actual processing time for a
        given ActiveURL.
    '''
    urlobj = ActiveURL.objects.get(url=url)

    # Compare the file sizes and file types against the durations recorded for
    # expired urls and against the image data stores to predict the processing time

    # Add the predicted time for each image to its url item (on expiration, this gets added to the data store)

    # TODO add image dimensions to the url item, and subsequently to the image data stores

    # Placeholder response until the estimate described above is implemented
    return response_ok({'type':"estimate/actual", 'estimate':"in seconds"})
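One possible shape for the estimate described in the comments, assuming a hypothetical ImageDataStore model that records the duration (in seconds) and file_size of previously processed items, and that UrlItem rows are reachable via urlobj.urlitem_set; none of these names are confirmed by the listing.

def estimate_seconds(urlobj):
    # Average seconds-per-byte over previously recorded items
    history = ImageDataStore.objects.exclude(duration=None).exclude(file_size=0)
    if not history.exists():
        return None

    per_byte = sum(rec.duration / rec.file_size for rec in history) / history.count()

    # Apply that rate to the files attached to this url
    total_bytes = sum(item.file_size for item in urlobj.urlitem_set.all())
    return round(total_bytes * per_byte, 2)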
Example #4
File: hard.py Project: reritom/Tricle-Lite
def hard(request):
    '''
        This request expires all active urls and deletes all url temporary files.
        It can only be performed by a superuser.
    '''
    # Enforce the superuser restriction described in the docstring
    if not request.user.is_superuser:
        return response_ko("Superuser required")

    all_urls = ActiveURL.objects.all()

    for url_obj in all_urls:
        url = url_obj.get_url()
        url_tools.expire_url(url)

    media_tools.delete_temp()

    return response_ok({'hard': "OK"})
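media_tools.delete_temp() is not shown here; a minimal sketch, assuming each url gets its own directory directly under MEDIA_ROOT, might look like this.

import os
import shutil

from django.conf import settings


def delete_temp():
    # Remove every per-url temporary directory under the media root
    for name in os.listdir(settings.MEDIA_ROOT):
        path = os.path.join(settings.MEDIA_ROOT, name)
        if os.path.isdir(path):
            shutil.rmtree(path, ignore_errors=True)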
Example #5
def status(request, url):
    '''
        This method returns the status of the url,
        including whether it is still downloadable
    '''
    print("In status")

    urlobj = ActiveURL.objects.get(url=url)
    status = urlobj.get_status()

    print("Status is {0}".format(status))

    if status['processed'] and not status['downloadable']:
        # Processed, but can no longer be downloaded, so expire the url
        print("Url being expired, bad status")
        url_tools.expire_url(url)

    return response_ok(status)
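The view reads exactly two keys from get_status(). A sketch of what the method could return is below; 'processed' matches the flag set in the load view further down, while the download counters are assumptions.

def get_status(urlobj):
    # In the project this is a method on ActiveURL
    return {
        'processed': urlobj.processed,
        'downloadable': urlobj.download_count < urlobj.download_limit,
    }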
Example #6
def load(request, url):
    '''
        This method processes the files.
    '''
    print("In LOAD")

    url_obj = ActiveURL.objects.get(url=url)

    if url_obj.is_processed():
        return response_ko("Already loaded")

    # Process the files
    print("At Manager")
    url_obj.set_start()
    with ScramblerManager(url) as manager:
        manager.run()
    url_obj.set_end()

    url_obj.processed = True
    url_obj.save()

    return response_ok({"url": url})
Example #7
def post(request):
    '''
        This method accepts and stores the data for scrambling
    '''
    common_tools.show_request(request)

    form = ScrambleForm(request.POST, request.FILES)
    if not form.is_valid():
        return response_ko("Invalid form data")

    form = form.cleaned_data
    formdat = {
        'mode': form['mode'],
        'k1': form['key_one'],
        'k2': form['key_two'],
        'k3': form['key_three']
    }

    if formdat['mode'] not in ['Scramble', 'Unscramble']:
        return response_ko('Invalid mode')

    for key in ['k1', 'k2', 'k3']:
        if len(formdat[key]) < 3:
            return response_ko('Key too short')

    if not request.FILES.getlist('images'):
        return response_ko("No images submitted")

    # Create an ActiveURL
    this_url = ActiveURL.generate_url()
    urlobj = ActiveURL.objects.create(url=this_url)

    if formdat['mode'] == 'Unscramble':
        urlobj.mode = 'Unscramble'
    urlobj.set_token(form['retrieve_token'])
    urlobj.save()

    # Create ZipCode object
    if form.get('zipcode', False):
        zipobj = ZipLock.objects.create(active=urlobj)
        zipobj.set_zipcode(form['zipcode'])
        zipobj.save()

    # Create KeyChain object
    keyobj = KeyChain.objects.create(active=urlobj)
    keyobj.set_keys([formdat['k1'], formdat['k2'], formdat['k3']])
    keyobj.save()

    # Create the dir for storing the files
    media_path = media_tools.make_dir(this_url)

    # Store the files and create a URL item for each
    for f in request.FILES.getlist('images'):
        if f.name.lower().endswith(('.jpg', '.bmp', '.png', '.jpeg')):

            file_name, file_type = os.path.splitext(f.name.lower())
            file_path = os.path.join(media_path, f.name)

            image = Image.open(f)
            image.save(file_path, subsampling=0, quality=100)

            file_size = os.path.getsize(file_path)

            # Create the UrlItem
            url_item = UrlItem.objects.create(
                id=UrlItem.create_url_item_uuid(),
                active=urlobj,
                file_name=file_name,
                file_type=file_type[1:],
                file_size=file_size,
                file_path=file_path)
            urlobj.increment_count()

    # Return success to initiate the load
    return response_ok({"url": this_url})