Ejemplo n.º 1
0
    def generic_import_csv(self, request, import_info):
        """Render the CSV import form and process a submitted CSV upload.

        On GET, shows an empty form; on POST, validates the upload and,
        if processing succeeds, redirects back to the previous page.
        """
        log_object_change(
            request.user.id,
            f"Import CSV: '{import_info.form_title}'",
        )

        headers = import_info.header_list
        title = import_info.form_title

        # The files are small, so handle them in memory rather than via S3;
        # inserting Memory last leaves it first in the handler chain.
        for handler_cls in (TemporaryFileUploadHandler, MemoryFileUploadHandler):
            request.upload_handlers.insert(0, handler_cls(request))

        if request.method != "POST":
            form = CsvImportForm(headers, title)
        else:
            form = CsvImportForm(
                headers,
                title,
                request.POST,
                request.FILES,
            )
            if form.is_valid():
                success, message = self.process_csv(request, import_info)
                if success:
                    return redirect("..")
        return render(request, "admin/csv_form.html", {"form": form})
Ejemplo n.º 2
0
 def post(self, request, *args, **kwargs):
     """Install the encrypted/memory/temporary upload handler chain, then delegate."""
     handlers = [
         EncryptedFileUploadHandler(request=request),
         MemoryFileUploadHandler(request=request),
         TemporaryFileUploadHandler(request=request),
     ]
     request.upload_handlers = handlers
     return self._post(request)
Ejemplo n.º 3
0
def file_upload_interrupted_temporary_file(request):
    """Test view: parse an (interrupted) upload with a temporary-file handler.

    Returns the temp-file path of the first handler as JSON.
    """
    # Put the temporary-file handler first, then drop the handler that was
    # pushed to index 2 so it does not interfere.
    request.upload_handlers.insert(0, TemporaryFileUploadHandler())
    request.upload_handlers.pop(2)
    request.FILES  # Trigger file parsing.
    temp_path = request.upload_handlers[0].file.temporary_file_path()
    return JsonResponse({'temp_path': temp_path})
Ejemplo n.º 4
0
def cache_pypi_package(request, package_name, version):
    """Fetch a package's metadata and sdist from PyPI and register it locally.

    Returns True on success, False on any failure (network error, package
    not found, no downloadable artifact, ...). Best-effort by design.

    Fixes: Python-2-only `except Exception, e:` (a SyntaxError on Python 3)
    and `urllib.urlencode` (moved to `urllib.parse.urlencode` in Python 3).
    """
    from urllib.parse import urlencode  # py2 urllib.urlencode moved here in py3

    if version:
        jsonurl = 'http://pypi.python.org/pypi/%s/%s/json' % (package_name, version)
    else:
        jsonurl = 'http://pypi.python.org/pypi/%s/json' % (package_name)

    try:
        req = requests.get(jsonurl)
        if req.status_code != 200:
            if req.status_code == 404:
                # Retry with underscores: PyPI names sometimes differ only
                # in '-' vs '_'.
                package_name = package_name.replace('-', '_')
                if version:
                    jsonurl = 'http://pypi.python.org/pypi/%s/%s/json' % (package_name, version)
                else:
                    jsonurl = 'http://pypi.python.org/pypi/%s/json' % (package_name)
                req = requests.get(jsonurl)
                if req.status_code != 200:
                    return False
            else:
                return False

        pjson = req.json()
        data = pjson['info']

        # Prefer the source distribution; fall back to 'download_url'.
        sdist = None
        for pf in pjson['urls']:
            if pf['packagetype'] == 'sdist':
                sdist = pf
                data['md5_digest'] = sdist['md5_digest']
                packageurl = sdist['url']
                break
        if not sdist:
            if 'download_url' in data:
                packageurl = data['download_url']
            else:
                return False

        data['metadata_version'] = '1.0'
        data = QueryDict(urlencode(data), mutable=True)

        # Download the artifact and spool it through a temporary-file
        # upload handler so it looks like a regular uploaded file.
        filename = urlsplit(packageurl).path.split('/')[-1]
        package_content = requests.get(packageurl).content
        tempfilehandler = TemporaryFileUploadHandler()
        tempfilehandler.new_file('content', filename, 'who/cares', len(package_content))
        tempfilehandler.receive_data_chunk(package_content, 0)
        tempfile = tempfilehandler.file_complete(len(package_content))

        class FakeRequest:
            # Minimal stand-in for an upload POST request.
            POST = data
            FILES = {'content': tempfile}
            user = User.objects.get(id=1)
            META = request.META
            method = 'POST'

        request = FakeRequest()
        register_or_upload(request)
        return True
    except Exception:
        # Deliberately broad: any failure just means "could not cache".
        return False
 def test_parse_missing_filename_large_file(self):
     """
     Parse raw file upload when filename is missing with TemporaryFileUploadHandler.
     """
     parser = FileUploadParser()
     self.stream.seek(0)
     request = self.parser_context['request']
     request.upload_handlers = (TemporaryFileUploadHandler(),)
     request.META['HTTP_CONTENT_DISPOSITION'] = ''
     with pytest.raises(ParseError) as excinfo:
         parser.parse(self.stream, None, self.parser_context)
     expected = 'Missing filename. Request should include a Content-Disposition header with a filename parameter.'
     assert str(excinfo.value) == expected
Ejemplo n.º 6
0
def question_import(request):
    """Question importer: GET renders the importer page; POST streams the
    uploaded file to disk and hands it to the real import routine."""
    if request.method == "GET":
        return render(request=request,
                      template_name="course/Question_Importer.html",
                      context={
                          "suc_info": "hidden",
                          'fail_info': "hidden",
                          "course_html": get_lesson_list_html(request),
                          'course_desc': request.course_desc,
                          "qTypeList": exam_sys.q_type_list,
                      })
    elif request.method == "POST":
        # Force a disk-backed upload so the importer can read a real file.
        request.upload_handlers = [TemporaryFileUploadHandler(request)]
        return _question_import(request)
Ejemplo n.º 7
0
 def post(self, request, *args, **kwargs):
     """Infer a data-package descriptor from an uploaded file.

     Responds 200 with name/type/descriptor on success, or 400 with an
     error payload on any validation or processing failure.
     """
     try:
         # We want the uploaded file physically on disk (no InMemoryFileUploaded):
         # the builder below needs a real temporary_file_path() to read from.
         request.upload_handlers = [TemporaryFileUploadHandler(request)]
         uploaded = request.data.get('file')
         infer_dataset_type = to_bool(request.data.get('infer_dataset_type', True))
         if uploaded is None:
             return Response({'errors': 'Missing file'},
                             status=status.HTTP_400_BAD_REQUEST)
         file_format = FileReader.get_uploaded_file_format(uploaded)
         if file_format == FileReader.NOT_SUPPORTED_FORMAT:
             msg = "Wrong file type {}. Should be one of: {}".format(uploaded.content_type,
                                                                     FileReader.SUPPORTED_TYPES)
             return Response(msg, status=status.HTTP_400_BAD_REQUEST)
         dataset_name = path.splitext(uploaded.name)[0]
         builder = DataPackageBuilder.infer_from_file(
             uploaded.temporary_file_path(),
             name=dataset_name,
             format_=file_format,
             infer_dataset_type=infer_dataset_type
         )
         if not builder.valid:
             errors = [str(e) for e in builder.errors]
             return Response({'errors': errors},
                             status=status.HTTP_400_BAD_REQUEST)
         payload = {
             'name': builder.title,  # use the data-package title instead of name (name is a slug)
             'type': builder.dataset_type,
             'data_package': builder.descriptor
         }
         return Response(payload, status=status.HTTP_200_OK)
     except Exception as e:
         return Response({'errors': str(e)}, status=status.HTTP_400_BAD_REQUEST)
Ejemplo n.º 8
0
    def put(self, request, *args, **kwargs):
        """Stream a Git-LFS object upload to a temp file, verify its SHA-256
        against the ``oid`` URL kwarg, and persist it via LfsObjectForm.

        Returns 400 on hash mismatch or form errors, 200 on success.
        Fix: local variable ``hash`` shadowed the ``hash`` builtin; renamed.
        """
        self.ensure_write_allowed()
        upload = TemporaryFileUploadHandler(self.request)
        # Content length -1: size is unknown up front; we stream chunks below.
        upload.new_file('lfs_upload.bin', 'lfs_upload.bin',
                        'application/octet-stream', -1)
        digest = hashlib.sha256()

        chunk = True
        size = 0
        while chunk:
            chunk = request.read(self.CHUNK_SIZE)
            upload.receive_data_chunk(chunk, size)
            digest.update(chunk)
            size += len(chunk)
        upload.file_complete(size)

        oid = self.kwargs.get('oid', '')
        if digest.hexdigest() != oid:
            return self.json_response(
                {'message': 'OID of request does not match file contents'},
                status=400)

        upload.file.name = '%s.bin' % oid
        form = LfsObjectForm(data={
            'oid': oid,
            'size': size,
        },
                             files={
                                 'file': upload.file,
                             })
        if not form.is_valid():
            return self.json_response(
                {'message': 'Field Errors for: %s' % ', '.join(form.errors)},
                status=400)
        lfsobject = form.save(commit=False)
        lfsobject.uploader = self.access.user
        lfsobject.save()
        lfsobject.repositories.add(self.access.repository)
        return HttpResponse()  # Just return Status 200
Ejemplo n.º 9
0
    def put(self, request, *args, **kwargs):
        """Receive a Git-LFS object, check its SHA-256 against the ``oid``
        kwarg, and store it through LfsObjectForm (400 on mismatch/errors).

        Fix: local variable ``hash`` shadowed the ``hash`` builtin; renamed.
        """
        self.ensure_write_allowed()
        upload = TemporaryFileUploadHandler(self.request)
        # Content length -1: size unknown up front; chunks are streamed below.
        upload.new_file('lfs_upload.bin', 'lfs_upload.bin', 'application/octet-stream', -1)
        digest = hashlib.sha256()

        chunk = True
        size = 0
        while chunk:
            chunk = request.read(self.CHUNK_SIZE)
            upload.receive_data_chunk(chunk, size)
            digest.update(chunk)
            size += len(chunk)
        upload.file_complete(size)

        oid = self.kwargs.get('oid', '')
        if digest.hexdigest() != oid:
            return self.json_response({
                'message': 'OID of request does not match file contents'
            }, status=400)

        upload.file.name = '%s.bin' % oid
        form = LfsObjectForm(data={
            'oid': oid,
            'size': size,
        }, files= {
            'file': upload.file,
        })
        if not form.is_valid():
            return self.json_response({
                'message': 'Field Errors for: %s' % ', '.join(form.errors)
            }, status=400)
        lfsobject = form.save(commit=False)
        lfsobject.uploader = self.access.user
        lfsobject.save()
        lfsobject.repositories.add(self.access.repository)
        return HttpResponse()  # Just return Status 200
Ejemplo n.º 10
0
 def file_complete(self, file_size):
     """Finalize the upload: record the accumulated hash, then defer to the base handler."""
     self.hash = self._hash_func.hexdigest()
     # Fixed typo in the log message ("compelete" -> "complete").
     print("file upload complete: hash=%s" % self.hash)
     return TemporaryFileUploadHandler.file_complete(self, file_size)
Ejemplo n.º 11
0
    def receive_data_chunk(self, raw_data, start):
        """Write the chunk via the base handler, then fold it into the running hash."""
        TemporaryFileUploadHandler.receive_data_chunk(self, raw_data, start)
        self._hash_func.update(raw_data)
Ejemplo n.º 12
0
 def file_complete(self, file_size):
     """Record the final digest of the upload and delegate to the base handler."""
     self.hash = self._hash_func.hexdigest()
     # Fixed typo in the log message ("compelete" -> "complete").
     print ("file upload complete: hash=%s" % self.hash)
     return TemporaryFileUploadHandler.file_complete(self, file_size)
Ejemplo n.º 13
0
 def dispatch(self, request, *args, **kwargs):
     """Royalty processing works only with TemporaryFileUploadHandler."""
     # Replace the default handler chain before any request parsing happens.
     handler = TemporaryFileUploadHandler()
     request.upload_handlers = [handler]
     return super().dispatch(request, *args, **kwargs)
Ejemplo n.º 14
0
def upload(request):
    """Handle an AJAX upload from either a posted file or a remote URL.

    If no "url" is posted, the regular multipart upload is validated and
    saved through UploadedFileForm. Otherwise the remote file is fetched,
    size-checked, streamed through an upload handler, and saved.
    Responds with a JSON ``{'path': ...}`` on success, 400 + errors otherwise.

    NOTE(review): this is Python 2 code (``urllib2``, ``headers.getheader``);
    it will not run unmodified on Python 3 — confirm the target runtime.
    """
    url = request.POST.get("url", None)
    if not url:
        form = UploadedFileForm(data=request.POST, files=request.FILES)
        if form.is_valid():
            uploaded_file = form.save()
            file_url = uploaded_file.file.url
            try:
                # If the same file already has multiple records, drop this one.
                UploadedFile.objects.get(file=uploaded_file.file)
            except UploadedFile.MultipleObjectsReturned:
                uploaded_file.delete()

            data = {
                'path': file_url,
            }
            return HttpResponse(json.dumps(data))
        else:
            return HttpResponseBadRequest(json.dumps({'errors': form.errors}))
    else:
        try:
            # We open the url of the distant file
            distant_file = urllib2.urlopen(url)

            # We check the length of the content (size of the file)
            # Missing header defaults to "too big for memory" on purpose.
            content_length = int(distant_file.headers.getheader('content-length', settings.FILE_UPLOAD_MAX_MEMORY_SIZE + 1))

            # We get the maximum file upload size
            max_upload_size = getattr(settings, 'AJAX_UPLOAD_MAX_FILESIZE', upload_settings.DEFAULT_MAX_FILESIZE)

            # We check the length of the content
            if 0 < max_upload_size < content_length:
                return HttpResponseBadRequest(json.dumps({'errors': "File too big"}))

            # If it's too big, we store the file on the disk
            if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
                handler = TemporaryFileUploadHandler()
            # Else, we put it in memory
            else:
                handler = MemoryFileUploadHandler()
                # Attribute activated needed because of the class implementation
                handler.activated = True

            # try/except needed because of the class implementation
            try:
                # Init the file upload handler
                handler.new_file("url", url.split('/')[-1].split('?')[0],
                    distant_file.headers.getheader('content-type'),
                    content_length
                )
            except StopFutureHandlers:
                pass

            def read_in_chunks(file_object, chunk_size=1024):
                """Lazy function to read a file piece by piece."""
                while True:
                    data = file_object.read(chunk_size)
                    if not data:
                        break
                    yield len(data), data

            # We pass all chunks to the file upload handler
            size = 0
            for read, data in read_in_chunks(distant_file, handler.chunk_size):
                handler.receive_data_chunk(data, None)
                size += read

            # We end the handler and save the file to the model
            uploaded_file = UploadedFile()
            uploaded_file.file.save(handler.file_name, handler.file_complete(size))
            uploaded_file.save()
            file_url = uploaded_file.file.url
            try:
                # Same duplicate-record guard as the multipart branch above.
                UploadedFile.objects.get(file=uploaded_file.file)
            except UploadedFile.MultipleObjectsReturned:
                uploaded_file.delete()

            data = {
                'path': file_url,
            }
            return HttpResponse(json.dumps(data))
        except Exception:
            return HttpResponseBadRequest(json.dumps({'errors': "An error occured"}))
Ejemplo n.º 15
0
 def post(self, request):
     """Prepend a temporary-file handler, run the parent upload, and report success."""
     request.upload_handlers.insert(0, TemporaryFileUploadHandler(request))
     # NOTE(review): the parent's response is discarded and 200 is always
     # returned — presumably deliberate; confirm against callers.
     super().post(request)
     message = "Document uploaded successfully"
     return Response(message, status=status.HTTP_200_OK)
Ejemplo n.º 16
0
    def put(self, request, workspace_id, name, plugin_id):
        """Update a dtable plugin from an uploaded *.zip file.

        Steps:
            1. check params, perms and resources
            2. read new plugin file, read its info.json
            3. delete old asset file, replace with new asset file
            4. update database record

        Responds with the updated plugin record on success, or an API error
        (400/403/404/500) on any failure.

        permission: dtable admin
        """
        # Force a disk-backed upload so temporary_file_path() is available.
        request.upload_handlers = [TemporaryFileUploadHandler(request=request)]

        # 1. check params, perms and resources
        try:
            plugin_record = DTablePlugins.objects.get(pk=plugin_id)
        except DTablePlugins.DoesNotExist:
            error_msg = 'Plugin %s not found.' % plugin_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        workspace = Workspaces.objects.get_workspace_by_id(workspace_id)
        if not workspace:
            error_msg = 'Workspace %s not found.' % workspace_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # Group-owned workspaces: the owning group must still exist.
        if '@seafile_group' in workspace.owner:
            group_id = workspace.owner.split('@')[0]
            group = ccnet_api.get_group(int(group_id))
            if not group:
                error_msg = 'Group %s not found.' % group_id
                return api_error(status.HTTP_404_NOT_FOUND, error_msg)
        table_name = name
        dtable = DTables.objects.get_dtable(workspace, table_name)
        if not dtable:
            error_msg = 'DTable %s not found.' % table_name
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # permission check
        username = request.user.username
        permission = check_dtable_admin_permission(username, workspace.owner)
        if not permission:
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        repo_id = workspace.repo_id
        repo = seafile_api.get_repo(repo_id)
        if not repo:
            error_msg = 'Library %s not found.' % repo_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # The existing plugin's asset folder must exist before we replace it.
        plugin_file_path = '/asset/' + str(dtable.uuid) + '/plugins/' + plugin_record.name
        plugin_file_dir_id = seafile_api.get_dir_id_by_path(repo_id, plugin_file_path)
        if not plugin_file_dir_id:
            error_msg = 'Plugin %s not found.' % plugin_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        plugin_file = request.FILES.get('plugin', None)
        if not plugin_file:
            error_msg = 'plugin invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        # Reject uploads larger than 300 MiB (size >> 20 converts bytes to MiB).
        if plugin_file.size >> 20 > 300:
            error_msg = _('File is too large.')
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        uploaded_temp_path = plugin_file.temporary_file_path()
        if not is_zipfile(uploaded_temp_path):
            error_msg = _('A zip file is required.')
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        # 2. read from zip: info.json is required, main.js must be present.
        with ZipFile(uploaded_temp_path, 'r') as zip_file:
            folder_path = get_folder_path(zip_file.namelist())
            try:
                info_json_str = zip_file.read(os.path.join(folder_path, INFO_FILE_NAME))
                info = json.loads(info_json_str)
            except Exception:
                error_msg = _('"info.json" not found.')
                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

            try:
                zip_file.read(os.path.join(folder_path, MAINJS_FILE_NAME))
            except Exception:
                error_msg = _('"main.js" not found.')
                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
            new_plugin_name = info.get('name', '')
            zip_file.extractall(TMP_EXTRACTED_PATH)

        plugin_path = '/asset/' + str(dtable.uuid) + '/plugins/'

        # if new_plugin_name == old plugin name, no need to check name
        if new_plugin_name != plugin_record.name:
            if DTablePlugins.objects.filter(name=new_plugin_name, dtable=dtable).count() > 0:
                error_msg = _('Plugin with name %s is already in dtable %s.') % (new_plugin_name, dtable.name)
                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

            # check if duplicate plugin name within a dtable asset
            if seafile_api.get_dir_id_by_path(repo_id, os.path.join(plugin_path, new_plugin_name)):
                error_msg = _('Plugin with name %s is already in dtable %s.') % (new_plugin_name, dtable.name)
                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        # 3. delete old asset file
        try:
            delete_plugin_asset_folder(repo_id, username, plugin_file_path)
        except Exception as e:
            logger.error(e)
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')

        # create file in asset dir, and delete TMP_EXTRACTED_PATH
        try:
            create_plugin_asset_files(repo_id, username, new_plugin_name, plugin_path, folder_path)
        except Exception as e:
            logger.error(e)
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')

        shutil.rmtree(TMP_EXTRACTED_PATH)

        # 4. update record in database
        plugin_record.name = new_plugin_name
        plugin_record.info = info_json_str
        plugin_record.save()

        return Response(plugin_record.to_dict())
Ejemplo n.º 17
0
    def receive_data_chunk(self, raw_data, start):
        """Write the incoming chunk to disk and keep the running digest in sync."""
        # Delegate the actual write to the base temporary-file handler.
        TemporaryFileUploadHandler.receive_data_chunk(self, raw_data, start)
        # Hash everything written so far.
        self._hash_func.update(raw_data)
Ejemplo n.º 18
0
 def initialize_request(self, request: HttpRequest, *args, **kwargs):
     """Force disk-backed uploads before the framework wraps the request."""
     handlers = [TemporaryFileUploadHandler(request=request)]
     request.upload_handlers = handlers
     return super().initialize_request(request, *args, **kwargs)
Ejemplo n.º 19
0
 def dispatch(self, request, *args, **kwargs):
     """Route uploads through a temporary-file handler so xar files land on disk.

     See https://docs.djangoproject.com/en/3.1/topics/http/file-uploads/#modifying-upload-handlers-on-the-fly
     """
     handler = TemporaryFileUploadHandler(request)
     request.upload_handlers = [handler]
     return super().dispatch(request, *args, **kwargs)
Ejemplo n.º 20
0
 def post(self, request, *args, **kwargs):
     """Swap in a disk-backed upload handler, then delegate to the real handler."""
     handler = TemporaryFileUploadHandler(request)
     request.upload_handlers = [handler]
     return self._post(request, *args, **kwargs)
Ejemplo n.º 21
0
    def setUp(self):
        """Build the shared test fixture.

        Creates superadmin/admin/customer users (plus two external
        customers) with addresses, a file-backed session, standard
        requirements, date helpers, fee-admin data, a test document
        streamed through TemporaryFileUploadHandler, and mooring bays.
        """
        print("setup method")
        self.superAdminUN = '*****@*****.**'
        self.adminUN = '*****@*****.**'
        self.nonAdminUN = '*****@*****.**'
        superadminUser = None
        adminUser = None
        user = None
        eum = EmailUserManager()
        self.superadminUser = EmailUser.objects.create(email=self.superAdminUN,
                                                       password="******",
                                                       is_staff=True,
                                                       is_superuser=True)
        self.superadminUser.set_password('pass')
        self.superadminUser.save()
        self.adminUser = EmailUser.objects.create(email=self.adminUN,
                                                  password="******",
                                                  is_staff=True,
                                                  is_superuser=False)
        self.adminUser.set_password('pass')
        self.adminUser.save()

        self.customer = EmailUser.objects.create(email=self.nonAdminUN,
                                                 password="******",
                                                 is_staff=False,
                                                 is_superuser=False)
        self.customer.set_password('pass')
        self.customer.save()
        # customer UserAddress
        user_address = UserAddress.objects.create(
            country_id='AU',
            #is_default_for_billing= True,
            #is_default_for_shipping= True,
            line1='17 Dick Perry',
            #line2: '',
            #line3': u'',
            #line4': u'BENTLEY DELIVERY CENTRE',
            #notes': u'',
            #num_orders': 0,
            #phone_number': None,
            postcode='6151',
            #'search_text': u'',
            state='WA',
            #title': u'',
            user_id=self.customer.id)

        customer_address = Address.objects.create(user=self.customer,
                                                  oscar_address=user_address)
        self.customer.residential_address = customer_address
        self.customer.save()

        self.externalUser1 = '*****@*****.**'
        self.customer1 = EmailUser.objects.create(email=self.externalUser1,
                                                  password="******",
                                                  is_staff=False,
                                                  is_superuser=False)
        self.customer1.set_password('pass')
        self.customer1.save()
        # customer1 UserAddress
        user1_address = UserAddress.objects.create(country_id='AU',
                                                   line1='17 Dick Perry',
                                                   postcode='6151',
                                                   state='WA',
                                                   user_id=self.customer1.id)

        customer1_address = Address.objects.create(user=self.customer1,
                                                   oscar_address=user1_address)
        self.customer1.residential_address = customer1_address
        self.customer1.save()

        self.externalUser2 = '*****@*****.**'
        self.customer2 = EmailUser.objects.create(email=self.externalUser2,
                                                  password="******",
                                                  is_staff=False,
                                                  is_superuser=False)
        self.customer2.set_password('pass')
        self.customer2.save()
        # customer2 UserAddress
        user2_address = UserAddress.objects.create(country_id='AU',
                                                   line1='17 Dick Perry',
                                                   postcode='6151',
                                                   state='WA',
                                                   user_id=self.customer2.id)

        customer2_address = Address.objects.create(user=self.customer2,
                                                   oscar_address=user2_address)
        self.customer2.residential_address = customer2_address
        self.customer2.save()

        # File-backed session store so the test client carries a session cookie.
        settings.SESSION_ENGINE = 'django.contrib.sessions.backends.file'
        engine = import_module(settings.SESSION_ENGINE)
        store = engine.SessionStore()
        store.save()
        self.session = store
        self.client.cookies[settings.SESSION_COOKIE_NAME] = store.session_key

        # Create ProposalStandardRequirements
        ProposalStandardRequirement.objects.create(
            code='R1', text='Standard requirement 1')
        ProposalStandardRequirement.objects.create(
            code='R2', text='Standard requirement 2')
        ProposalStandardRequirement.objects.create(
            code='R3', text='Standard Apiary requirement 1')
        ProposalStandardRequirement.objects.create(
            code='R4', text='Standard Apiary requirement 2')
        ## create_proposal_data
        #self.apiary_application_type_id = ApplicationType.objects.get(name='Apiary').id
        #self.create_proposal_data = {
        #    u'profile': 132376,
        #    u'application': self.apiary_application_type_id,
        #    u'behalf_of': u'individual',
        #    }
        #self.create_proposal_data_customer1 = {
        #    u'profile': 132377,
        #    u'application': self.apiary_application_type_id,
        #    u'behalf_of': u'individual',
        #    }
        #self.create_proposal_data_customer2 = {
        #    u'profile': 132378,
        #    u'application': self.apiary_application_type_id,
        #    u'behalf_of': u'individual',
        #    }
        ## Site transfer applications
        #self.site_transfer_application_type_id = ApplicationType.objects.get(name='Site Transfer').id
        #self.create_site_transfer_proposal_data = {
        #    u'profile': 132377,
        #    u'application': self.site_transfer_application_type_id,
        #    u'behalf_of': u'individual',
        #    #u'selected_licence_holder': u'*****@*****.**',
        #    }
        ## submit_proposal_data
        #with open('disturbance/tests/all_the_features_1.json', 'r') as features_file_1:
        #    self.all_the_features_1 = json.load(features_file_1)
        #with open('disturbance/tests/all_the_features_2.json', 'r') as features_file_2:
        #    self.all_the_features_2 = json.load(features_file_2)

        # Dates
        # NOTE(review): today_plus_1_week adds day_delta, not week_delta —
        # looks like a copy/paste slip, but tests may rely on it; confirm
        # before changing.
        self.today = timezone.now().date()
        self.today_str = self.today.strftime('%d/%m/%Y')
        day_delta = timedelta(days=1)
        week_delta = timedelta(weeks=1)
        self.today_plus_1_day = self.today + day_delta
        self.today_plus_1_week = self.today + day_delta
        self.today_plus_26_weeks = self.today + (day_delta * 26)
        self.today_plus_1_day_str = self.today_plus_1_day.strftime('%d/%m/%Y')
        self.today_plus_1_week_str = self.today_plus_1_week.strftime(
            '%d/%m/%Y')
        self.today_plus_26_weeks_str = self.today_plus_26_weeks.strftime(
            '%d/%m/%Y')

        self.today_minus_1_week = self.today - week_delta
        self.today_minus_4_weeks = self.today - (week_delta * 4)

        ## Payment admin data
        fee_season = FeeSeason.objects.create(name="2021/22")
        fee_period_1 = FeePeriod.objects.create(
            fee_season=fee_season,
            name="Period1",
            start_date=self.today_minus_1_week)
        fee_period_2 = FeePeriod.objects.create(
            fee_season=fee_season,
            name="Period2",
            start_date=self.today_minus_4_weeks)

        # One fee constructor per (application type, vessel size group) pair.
        for application_type in ApplicationType.objects.all():
            for vessel_size_category_group in VesselSizeCategoryGroup.objects.all(
            ):
                fee_constructor = FeeConstructor.objects.create(
                    application_type=application_type,
                    fee_season=fee_season,
                    vessel_size_category_group=vessel_size_category_group)
        # Give every fee item a distinct, increasing amount.
        amount = 1
        for fee_item in FeeItem.objects.all():
            fee_item.amount = amount
            fee_item.save()
            amount += 1

        ## test doc
        path = os.path.join(settings.BASE_DIR, 'mooringlicensing', 'tests',
                            'test_doc.png')
        #with open('mooringlicensing/tests/test_doc.png', 'rb') as f:
        #test_doc_bytes = f.read()
        with io.open(path, 'rb') as f:
            test_doc_bytes = f.read()
            test_doc_stream = io.BytesIO(test_doc_bytes)
            # Stream the bytes through the upload handler so test_doc behaves
            # like a real uploaded file in the POST payloads below.
            test_doc_obj = TemporaryFileUploadHandler()
            test_doc_obj.new_file(
                file_name=
                '17. External - Waiting List Amendment Application.png',
                field_name='_file',
                content_type='image/png',
                content_length=os.path.getsize(path))
            test_doc_obj.receive_data_chunk(raw_data=test_doc_bytes, start=0)
            test_doc = test_doc_obj.file_complete(
                file_size=os.path.getsize(path))

            self.rego_papers_data = {
                'action': ['save'],
                'input_name': ['vessel-registration-documents'],
                'filename': [
                    '25. External - New Authorised User Application - Applicant.png'
                ],
                'csrfmiddlewaretoken': [settings.CSRF_MIDDLEWARE_TOKEN],
                '_file': [test_doc]
            }

            self.electoral_roll_doc_data = {
                'action': ['save'],
                'input_name': ['electoral-roll-documents'],
                'filename': [
                    '25. External - New Authorised User Application - Applicant.png'
                ],
                'csrfmiddlewaretoken': [settings.CSRF_MIDDLEWARE_TOKEN],
                '_file': [test_doc]
            }

        ## Mooring Bays
        #retrieve_marine_parks()
        MooringBay.objects.create(name='Bay1', mooring_bookings_id=1)
        MooringBay.objects.create(name='Bay2', mooring_bookings_id=2)

        # Global settings
        #ApiaryGlobalSettings.objects.create(key='oracle_code_apiary_site_annual_rental_fee', value='sample')

        # Get data ready
        temp = DefaultDataManager()
Ejemplo n.º 22
0
def set_temporary_file_upload_handler(request):
    """Force disk-backed uploads for this request.

    Replaces the request's upload handlers with a single
    TemporaryFileUploadHandler so the uploaded file is spooled to disk,
    from which metadata can later be read.  Must run before
    ``request.POST`` / ``request.FILES`` are first accessed.
    """
    disk_handler = TemporaryFileUploadHandler()
    request.upload_handlers = [disk_handler]
Ejemplo n.º 23
0
    def post(self, request, workspace_id, name):
        """Upload a plugin ``*.zip`` file into a dtable's asset directory.

        1. check params, perms and resources
        2. read info from zip file, and extract zip in TMP_EXTRACTED_PATH
        3. create file in asset dir, and delete TMP_EXTRACTED_PATH
        4. record in database

        There are two tmp locations in this api.
        First is the django upload tmp file, removed automatically.
        Second is the extracted folder 'TMP_EXTRACTED_PATH', removed manually
        (now guaranteed via try/finally even on error returns).

        permission: workspace owner or admin
        """
        # Function-scope import keeps this fix self-contained.
        import tempfile

        # use TemporaryFileUploadHandler, which contains TemporaryUploadedFile
        # TemporaryUploadedFile has temporary_file_path() method
        # in order to change upload_handlers, we must exempt csrf check
        request.upload_handlers = [TemporaryFileUploadHandler(request=request)]

        table_name = name
        from_market = request.data.get('from_market', 'false').lower()

        workspace = Workspaces.objects.get_workspace_by_id(workspace_id)
        if not workspace:
            error_msg = 'Workspace %s not found.' % workspace_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        if '@seafile_group' in workspace.owner:
            group_id = workspace.owner.split('@')[0]
            group = ccnet_api.get_group(int(group_id))
            if not group:
                error_msg = 'Group %s not found.' % group_id
                return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        dtable = DTables.objects.get_dtable(workspace, table_name)
        if not dtable:
            error_msg = 'DTable %s not found.' % table_name
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # permission check
        username = request.user.username
        permission = check_dtable_admin_permission(username, workspace.owner)
        if not permission:
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        repo_id = workspace.repo_id
        repo = seafile_api.get_repo(repo_id)
        if not repo:
            error_msg = 'Library %s not found.' % repo_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        if from_market not in ['true', 'false']:
            # from_market invalid
            error_msg = 'from_market invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        if from_market == 'true':
            # if we add plugin from market:
            # 1. get plugin_download_url from market by plugin_name
            # 2. download plugin zip by plugin_download_url
            # 3. extract zip in TMP_EXTRACTED_PATH
            # 4. create file in asset dir, and delete TMP_EXTRACTED_PATH
            # 5. record in database
            plugin_name = request.data.get('plugin_name', '')
            if not plugin_name:
                error_msg = 'plugin_name invalid.'
                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

            if DTablePlugins.objects.filter(name=plugin_name, dtable=dtable).count() > 0:
                error_msg = _('Plugin with name %s is already in dtable %s.') % (plugin_name, dtable.name)
                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

            # get plugin_download_url from market by plugin_name
            # download plugin zip by plugin_download_url
            seamarket_plugin_api_url = SEATABLE_MARKET_URL.rstrip('/') + '/api/plugins/' + plugin_name + '/'
            res = requests.get(seamarket_plugin_api_url)
            download_url = json.loads(res.content).get('download_url', '')

            if not download_url:
                error_msg = 'plugin %s not found.' % plugin_name
                return api_error(status.HTTP_404_NOT_FOUND, error_msg)

            plugin_zip_file_response = requests.get(download_url)

            # FIX: the original os.mkdir('/tmp/plugin_download_from_market')
            # raised FileExistsError on concurrent requests or stale leftovers,
            # and the directory leaked on every early error return.  Use a
            # unique temp dir and guarantee removal on all exit paths.
            download_dir = tempfile.mkdtemp(prefix='plugin_download_from_market')
            try:
                tmp_zip_path = os.path.join(download_dir, 'plugin_zip')
                with open(tmp_zip_path, 'wb') as f:
                    f.write(plugin_zip_file_response.content)

                # extract zip in TMP_EXTRACTED_PATH
                with ZipFile(tmp_zip_path, 'r') as zip_file:
                    folder_path = get_folder_path(zip_file.namelist())
                    try:
                        info_json_str = zip_file.read(os.path.join(folder_path, INFO_FILE_NAME))
                    except Exception:
                        error_msg = _('"info.json" not found.')
                        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
                    zip_file.extractall(TMP_EXTRACTED_PATH)
            finally:
                shutil.rmtree(download_dir, ignore_errors=True)

            try:
                # create file in asset dir, and delete TMP_EXTRACTED_PATH
                # if no plugins path, create it
                plugin_path = '/asset/' + str(dtable.uuid) + '/plugins/'
                plugin_path_id = seafile_api.get_dir_id_by_path(repo_id, plugin_path)
                if not plugin_path_id:
                    try:
                        seafile_api.mkdir_with_parents(repo_id, '/', plugin_path[1:], username)
                    except Exception as e:
                        logger.error(e)
                        error_msg = 'Internal Server Error'
                        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

                # if asset dir has plugin with same name, we replace old with new
                if seafile_api.get_dir_id_by_path(repo_id, os.path.join(plugin_path, plugin_name)):
                    delete_plugin_asset_folder(repo_id, username, os.path.join(plugin_path, plugin_name))

                # create path and file
                try:
                    create_plugin_asset_files(repo_id, username, plugin_name, plugin_path, folder_path)
                except Exception as e:
                    logger.error(e)
                    return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
            finally:
                # FIX: original leaked TMP_EXTRACTED_PATH on the error returns above
                shutil.rmtree(TMP_EXTRACTED_PATH, ignore_errors=True)

            # 4. record in database
            plugin_record = DTablePlugins.objects.create(
                dtable=dtable,
                added_by=username,
                added_time=datetime.now(),
                name=plugin_name,
                info=info_json_str
            )

            return Response(plugin_record.to_dict())

        # 1. check params
        plugin_file = request.FILES.get('plugin', None)
        if not plugin_file:
            error_msg = 'plugin invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        # reject uploads larger than 300 MiB (size >> 20 converts bytes to MiB)
        if plugin_file.size >> 20 > 300:
            error_msg = _('File is too large.')
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        # 2. read info from zip file, and extract zip in TMP_EXTRACTED_PATH
        uploaded_temp_path = plugin_file.temporary_file_path()
        if not is_zipfile(uploaded_temp_path):
            error_msg = _('A zip file is required.')
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        with ZipFile(uploaded_temp_path, 'r') as zip_file:
            folder_path = get_folder_path(zip_file.namelist())
            try:
                info_json_str = zip_file.read(os.path.join(folder_path, INFO_FILE_NAME))
                info = json.loads(info_json_str)
            except Exception:
                error_msg = _('"info.json" not found.')
                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

            try:
                zip_file.read(os.path.join(folder_path, MAINJS_FILE_NAME))
            except Exception:
                error_msg = _('"main.js" not found.')
                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
            plugin_name = info.get('name', '')
            zip_file.extractall(TMP_EXTRACTED_PATH)

        try:
            if DTablePlugins.objects.filter(name=plugin_name, dtable=dtable).count() > 0:
                error_msg = _('Plugin with name %s is already in dtable %s.') % (plugin_name, dtable.name)
                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

            # 3. create file in asset dir, and delete TMP_EXTRACTED_PATH
            # if no plugins path, create it
            plugin_path = '/asset/' + str(dtable.uuid) + '/plugins/'
            plugin_path_id = seafile_api.get_dir_id_by_path(repo_id, plugin_path)
            if not plugin_path_id:
                try:
                    seafile_api.mkdir_with_parents(repo_id, '/', plugin_path[1:], username)
                except Exception as e:
                    logger.error(e)
                    error_msg = 'Internal Server Error'
                    return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

            # if asset dir has plugin with same name, we replace old with new
            if seafile_api.get_dir_id_by_path(repo_id, os.path.join(plugin_path, plugin_name)):
                delete_plugin_asset_folder(repo_id, username, os.path.join(plugin_path, plugin_name))

            # create path and file
            try:
                create_plugin_asset_files(repo_id, username, plugin_name, plugin_path, folder_path)
            except Exception as e:
                logger.error(e)
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
        finally:
            # FIX: original leaked TMP_EXTRACTED_PATH on the error returns above
            shutil.rmtree(TMP_EXTRACTED_PATH, ignore_errors=True)

        # 4. record in database
        plugin_record = DTablePlugins.objects.create(
            dtable=dtable,
            added_by=username,
            added_time=datetime.now(),
            name=plugin_name,
            info=info_json_str
        )

        return Response(plugin_record.to_dict())
Ejemplo n.º 24
0
    def post(self, request, workspace_id):
        """Import a dtable from an ``xxx.dtable`` zip (or create one from a csv).

        :param request: DRF request carrying the uploaded file under key 'dtable'
        :param workspace_id: id of the target workspace
        :return: Response with the created table dict (plus a task_id for
                 async zip imports), or an api_error on failure
        """
        # use TemporaryFileUploadHandler, which contains TemporaryUploadedFile
        # TemporaryUploadedFile has temporary_file_path() method
        # in order to change upload_handlers, we must exempt csrf check
        request.upload_handlers = [TemporaryFileUploadHandler(request=request)]
        username = request.user.username

        imported_zip = request.FILES.get('dtable', None)
        if not imported_zip:
            error_msg = 'dtable invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        if not imported_zip.name.endswith(('.dtable', '.csv')):
            error_msg = 'dtable %s invalid.' % imported_zip.name
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        dtable_file_name = imported_zip.name   # xxx.dtable
        # FIX: split('.')[0] truncated names containing dots
        # ("my.v2.dtable" -> "my"); strip only the final extension.
        dtable_name = dtable_file_name.rsplit('.', 1)[0]

        if DTables.objects.filter(workspace_id=workspace_id, name=dtable_name).exists():
            error_msg = _('Table %s already exists.') % dtable_name
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        # resource check
        workspace = Workspaces.objects.get_workspace_by_id(workspace_id)
        if not workspace:
            error_msg = 'Workspace %s not found.' % workspace_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        repo_id = workspace.repo_id
        repo = seafile_api.get_repo(repo_id)
        if not repo:
            error_msg = 'Library %s not found.' % repo_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # post json file after dtable record is created
        # because we need to get dtable uuid
        try:
            dtable = DTables.objects.create_dtable(username, workspace, dtable_name)
        except Exception as e:
            logger.error(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

        # permission check
        permission = check_dtable_permission(request.user.username, workspace, dtable)
        if not permission:
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        # create dtable from csv
        if imported_zip.name.endswith('.csv'):
            try:
                seafile_api.post_empty_file(repo_id, '/', dtable_name + '.dtable', username)
            except Exception as e:
                logger.error(e)
                error_msg = 'Internal Server Error'
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

            payload = {
                'exp': int(time.time()) + 60,
                'dtable_uuid': dtable.uuid.hex,
                # NOTE(review): literal was scrubbed in this copy of the source;
                # presumably the real value is the acting username — confirm upstream.
                'username': '******',
                'permission': permission,
            }
            try:
                access_token = jwt.encode(
                    payload, DTABLE_PRIVATE_KEY, algorithm='HS256'
                )
            except Exception as e:
                logger.error(e)
                error_msg = 'Internal Server Error'
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

            # FIX: PyJWT >= 2.0 returns str from jwt.encode; calling .decode()
            # on it raised AttributeError.  Decode only when bytes.
            if isinstance(access_token, bytes):
                access_token = access_token.decode('utf-8')
            headers = {'Authorization': 'Token ' + access_token}
            url = DTABLE_SERVER_URL + 'api/v1/' + dtable.uuid.hex + '/import-csv/'
            data = {
                'table_name': dtable_name,
                'is_create_base': True,
            }
            files = {
                'csv_file': imported_zip
            }

            try:
                res = requests.post(url, headers=headers, data=data, files=files)
            except Exception as e:
                logger.error(e)
                error_msg = 'Internal Server Error'
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

            return Response({"table": dtable.to_dict()})

        uploaded_temp_path = imported_zip.temporary_file_path()
        if not is_zipfile(uploaded_temp_path):
            error_msg = _('A *.dtable file is required.')
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        # hand the heavy extraction/import work to the async io task queue
        params = {}
        params['username'] = request.user.username
        params['repo_id'] = repo_id
        params['workspace_id'] = workspace_id
        params['dtable_uuid'] = str(dtable.uuid)
        params['dtable_file_name'] = dtable_file_name
        params['uploaded_temp_path'] = uploaded_temp_path

        try:
            task_id = add_dtable_io_task(type='import', params=params)
        except Exception as e:
            logger.error(e)
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error.')

        return Response({'task_id': task_id, "table": dtable.to_dict()})
Ejemplo n.º 25
0
def upload(request, directory):
    """Receive uploaded files into *directory*.

    A TemporaryFileUploadHandler is installed first so uploads land on
    disk, where they can be read using PYME.IO.ImageStack.
    FIXME - this will not work for files with separate metadata
    """
    disk_handler = TemporaryFileUploadHandler(request)
    request.upload_handlers.insert(0, disk_handler)
    return upload_files(request, directory)
Ejemplo n.º 26
0
def people(request):
    """CSRF-exempt entry point that forces disk-based uploads.

    Upload handlers may only be swapped before request.POST or
    request.FILES is first read, and CsrfViewMiddleware reads
    request.POST — so CSRF is handled manually inside the separate
    _people view instead.
    """
    request.upload_handlers = [TemporaryFileUploadHandler()]
    return _people(request)