Example #1
def upload_from_irods(username, password, host, port, zone, irods_fnames, res_files):
    """
    use iget to transfer selected data object from irods zone to local as a NamedTemporaryFile
    :param username: iRODS login account username used to download irods data object for uploading
    :param password: iRODS login account password used to download irods data object for uploading
    :param host: iRODS login host used to download irods data object for uploading
    :param port: iRODS login port used to download irods data object for uploading
    :param zone: iRODS login zone used to download irods data object for uploading
    :param irods_fnames: the data object file name to download to local for uploading
    :param res_files: list of files for uploading to create resources
    :raises SessionException(proc.returncode, stdout, stderr) defined in django_irods/icommands.py
            to capture iRODS exceptions raised from iRODS icommand subprocess run triggered from
            any method calls from IrodsStorage() if an error or exception ever occurs
    :return: None, but the downloaded file from the iRODS will be appended to res_files list for
    uploading
    """
    irods_storage = IrodsStorage()
    irods_storage.set_user_session(username=username, password=password, host=host, port=port,
                                   zone=zone)
    ifnames = irods_fnames.split(',')
    for ifname in ifnames:
        size = irods_storage.size(ifname)
        tmpFile = irods_storage.download(ifname)
        fname = os.path.basename(ifname.rstrip(os.sep))
        fileobj = File(file=tmpFile, name=fname)
        fileobj.size = size
        res_files.append(fileobj)

    # delete the user session after iRODS file operations are done
    irods_storage.delete_user_session()
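
A minimal usage sketch (the view name and POST field names below are hypothetical, not taken from the snippet): upload_from_irods() is handed the connection details plus an empty list, and fills that list with django.core.files.File objects wrapping the downloaded temporary files.

def irods_upload_view(request):  # hypothetical caller
    res_files = []
    upload_from_irods(
        username=request.POST['irods_username'],    # assumed field names
        password=request.POST['irods_password'],
        host=request.POST['irods_host'],
        port=request.POST['irods_port'],
        zone=request.POST['irods_zone'],
        irods_fnames=request.POST['irods_fnames'],  # comma-separated data object paths
        res_files=res_files,
    )
    # res_files now holds File objects ready to be used for resource creation.
    return res_files
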
Example #3
def data_upload(request):
    if request.method == 'GET':
        return render(request, 'bulk_upload.html', {})

    data_file = request.FILES['file']
    images_zip = request.FILES['images']

    if not data_file.name.endswith('.csv') or not images_zip.name.endswith('.zip'):
        messages.error(request, 'Please upload a .csv data file and a .zip of images')
        return render(request, 'teacher/bulk_upload.html', {})

    data_set = data_file.read().decode('UTF-8')
    io_string = io.StringIO(data_set)
    next(io_string)

    zipped_files = ZipFile(images_zip)
    image_names = zipped_files.namelist()

    for column in csv.reader(io_string, delimiter=',', quotechar='"'):
        if column[3] != '':
            image_name = column[2]
            teacher, created = Teacher.objects.update_or_create(
                first_name=column[0],
                last_name=column[1],
                email_address=column[3],
                phone_number=column[4],
                room_number=column[5])

            if image_name != '':
                if image_name in image_names:
                    zip_img = zipped_files.read(image_name)
                    tmp_file = io.BytesIO(zip_img)
                    dummy_file = File(tmp_file)
                    dummy_file.name = image_name
                    dummy_file.size = len(zip_img)
                    dummy_file.file = tmp_file
                    teacher.profile_picture = dummy_file
                    teacher.save()

            subjects = column[6].split(',')
            subjects_taught_count = TeacherSubject.objects.filter(
                teacher=teacher).count()

            for subject in subjects:
                if subjects_taught_count > 5:
                    break

                subject = subject.strip().lower()
                subject_object, created = Subject.objects.update_or_create(
                    title=subject)
                TeacherSubject.objects.update_or_create(teacher=teacher,
                                                        subject=subject_object)
                subjects_taught_count += 1

    messages.success(request, 'Data has been uploaded')
    return render(request, 'teacher/bulk_upload.html', {})
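
The in-memory wrapping inside the loop can be isolated into a small helper; a sketch assuming an already-opened zip archive (the helper name is mine, the APIs are standard Django/stdlib):

import io

from django.core.files import File


def file_from_zip(zipped_files, image_name):
    """Wrap one member of an uploaded zip (zipped_files is a zipfile.ZipFile)
    as a django.core.files.File, ready to assign to an ImageField before save()."""
    zip_img = zipped_files.read(image_name)   # raw bytes of the archive member
    dummy_file = File(io.BytesIO(zip_img), name=image_name)
    dummy_file.size = len(zip_img)            # set explicitly, mirroring the view above
    return dummy_file
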
Example #4
 def _open(self, name, mode="rb"):
     """
     Return a File object.
     """
     attachment = Attachment.objects.using(self.using).get(attachment__exact=name)
     fname = File(StringIO(attachment.blob), attachment.filename)
     
     # Make sure the checksum matches before returning the file
     if md5buffer(fname) != attachment.checksum:
         raise IntegrityError("Checksum mismatch")
     
     fname.size = attachment.size
     fname.mode = mode
     return fname
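
md5buffer() is not defined in this snippet; a plausible chunked implementation, stated here as an assumption about its contract (hash the file-like object in blocks, rewind it, return the hex digest), could look like this:

import hashlib


def md5buffer(fileobj, chunk_size=64 * 1024):
    """Assumed contract: MD5-hash a file-like object in chunks and rewind it,
    so the digest can be compared against the stored checksum.
    Assumes the stream yields bytes (or Python 2 str)."""
    digest = hashlib.md5()
    fileobj.seek(0)
    while True:
        chunk = fileobj.read(chunk_size)
        if not chunk:
            break
        digest.update(chunk)
    fileobj.seek(0)  # leave the file positioned at the start for the caller
    return digest.hexdigest()
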
Example #5
 def _open(self, name, mode="rb"):
     """
     Read the file from the database, and return
     as a File instance.
     """
     attachment = Attachment.objects.using(self.using).get(attachment__exact=name)
     cursor = connections[self.using].cursor()
     lobject = cursor.db.connection.lobject(attachment.blob, "r")
     fname = File(StringIO(lobject.read()), attachment.filename)
     lobject.close()
     
     # Make sure the checksum matches before returning the file
     if md5buffer(fname) != attachment.checksum:
         raise IntegrityError("Checksum mismatch")
     
     fname.size = attachment.size
     fname.mode = mode
     
     return fname
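
Both _open() variants above are meant to live on a custom storage backend; a minimal skeleton of how such a class is usually wired into Django (the class name and method bodies here are illustrative placeholders, not taken from the snippets):

from django.core.files.storage import Storage


class DatabaseStorage(Storage):
    """Illustrative skeleton: Django's public Storage.open()/save() delegate
    to _open()/_save(), so the methods shown above slot in here."""

    def __init__(self, using="default"):
        self.using = using

    def _open(self, name, mode="rb"):
        # Replace with one of the _open() implementations shown above.
        raise NotImplementedError

    def _save(self, name, content):
        # Persist content.read() into the attachment table and return the stored name.
        raise NotImplementedError

    def exists(self, name):
        return False
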
Example #6
    def submissionHook(cls, uploaded_file):
        """Expect a tarball containing a Makefile
        Compile the submission"""

        old_mask = os.umask(0)

        try:
            f = zipfile.ZipFile(uploaded_file)
        except zipfile.BadZipfile:
            os.umask(old_mask)  # restore the umask before bailing out
            raise errors.BuildError("Invalid zip file")

        # Copy files to buildnest

        # Handle older versions of zipfile that do not contain 'extractall'
        if hasattr(f, "extractall"):
            f.extractall(path=cls.BUILDNEST)
        else:
            for name in f.namelist():
                path = os.path.join(cls.BUILDNEST, name)
                f_ = open(path, "wb")
                f_.write( f.read( name ) )
                f_.close()

        # make in the buildnest
        p = subprocess.Popen(["make"], stderr=subprocess.PIPE, cwd=cls.BUILDNEST)
        output = p.communicate()[1]
        ret = p.wait()

        # Report errors
        if ret != 0:
            os.umask(old_mask)
            raise errors.BuildError(output)

        output = os.path.join(cls.BUILDNEST, cls.BOT_OUTPUT)

        if not os.path.exists(output):
            os.umask(old_mask)  # restore the umask before bailing out
            raise errors.BuildError("No %s found after build" % cls.BOT_OUTPUT)

        # Save the output    
        botSo = open(output, "rb")

        # TODO: Check compiled .so?
        os.umask(old_mask)

        def read_in_chunks(file_object, chunk_size=1024):
            """Lazy function (generator) to read a file piece by piece.
            Default chunk size: 1k."""
            while True:
                data = file_object.read(chunk_size)
                if not data:
                    break
                yield data

        uploaded_file = File(tempfile.NamedTemporaryFile())
        for piece in read_in_chunks(botSo):
            uploaded_file.write(piece)
        uploaded_file.size = os.stat(os.path.join(cls.BUILDNEST, cls.BOT_OUTPUT)).st_size
        botSo.close()

        # Clean up 
        obliterate(cls.BUILDNEST)

        return uploaded_file
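
The hand-rolled read_in_chunks() loop at the end can equally be expressed with shutil.copyfileobj; a sketch assuming the same BUILDNEST/BOT_OUTPUT layout (the helper name is mine):

import os
import shutil
import tempfile

from django.core.files import File


def wrap_build_output(buildnest, bot_output):
    """Copy the built artifact into a NamedTemporaryFile and wrap it as a
    Django File, mirroring the tail of submissionHook() above."""
    src_path = os.path.join(buildnest, bot_output)
    tmp = tempfile.NamedTemporaryFile()      # removed automatically when closed
    with open(src_path, "rb") as src:
        shutil.copyfileobj(src, tmp)         # streams in fixed-size chunks internally
    tmp.flush()
    tmp.seek(0)
    wrapped = File(tmp, name=bot_output)
    wrapped.size = os.path.getsize(src_path)
    return wrapped
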
Example #7
 def save(self, storage, name, meta):
     """Saves this asset to the given storage."""
     method = self._method
     # Calculate sizes.
     display_size = meta["size"]
     image_data, original_size = self._image_data_and_size
     data_size = method.get_data_size(display_size,
                                      display_size.intersect(original_size))
     # Check whether we need to make a thumbnail.
     if data_size == original_size:
         super(ThumbnailAsset, self).save(storage, name, meta)
     else:
         # Use efficient image loading.
         image_data.draft(None, data_size)
         # Resize the image data.
         try:
             image_data = method.do_resize(image_data, original_size,
                                           display_size, data_size)
         except Exception as ex:  # HACK: PIL raises all sorts of Exceptions :(
             raise ThumbnailError(str(ex))
         # Parse the image format.
         _, extension = os.path.splitext(name)
         format = extension.lstrip(".").upper().replace("JPG",
                                                        "JPEG") or "PNG"
         # If we're saving to PNG, make sure we're not in CMYK.
         if image_data.mode == "CMYK" and format == "PNG":
             image_data = image_data.convert("RGB")
         # If the storage has a path, then save it efficiently.
         try:
             thumbnail_path = storage.path(name)
         except NotImplementedError:
             # No path for the storage, so save it in a memory buffer.
             buffer = StringIO()
             try:
                 image_data.save(buffer, format)
             except Exception as ex:  # HACK: PIL raises all sorts of Exceptions :(
                 raise ThumbnailError(str(ex))
             # Write the file.
             buffer.seek(0, os.SEEK_END)
             buffer_length = buffer.tell()
             buffer.seek(0)
             file = File(buffer)
             file.size = buffer_length
             storage.save(name, file)
         else:
             # We can do an efficient streaming save.
             try:
                 os.makedirs(os.path.dirname(thumbnail_path))
             except OSError:
                 pass
             try:
                 image_data.save(thumbnail_path, format)
             except Exception as ex:  # HACK: PIL raises all sorts of Exceptions :(
                 try:
                     raise ThumbnailError(str(ex))
                 finally:
                     # Remove an incomplete file, if present.
                     try:
                         os.unlink(thumbnail_path)
                 except OSError:
                         pass
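
The memory-buffer branch of the method can be reduced to a small helper; a sketch assuming a PIL image and any Django storage backend (the helper name is mine):

import os
from io import BytesIO

from django.core.files import File


def save_image_to_storage(image, storage, name, fmt="PNG"):
    """Serialize a PIL image into an in-memory buffer and hand it to
    storage.save(), as in the 'no storage.path()' branch above."""
    if image.mode == "CMYK" and fmt == "PNG":
        image = image.convert("RGB")         # PNG cannot store CMYK data
    buffer = BytesIO()
    image.save(buffer, fmt)
    buffer.seek(0, os.SEEK_END)
    wrapped = File(buffer)
    wrapped.size = buffer.tell()             # record the serialized length
    buffer.seek(0)
    return storage.save(name, wrapped)       # returns the name actually used
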
Example #8
def generate_part_doc_links(request, product, parent_ctrl, instances, doc3D, inbulk_cache):
    """
    :param product: :class:`.Product` that represents the arborescence
    :param parent_ctrl: :class:`.Part` from which the decomposition is performed
    :param instances: used to keep track of the items to update

    Parses the submitted forms and generates:

    - the BOM children of the Parts (relative to the **product**)

    - for every :class:`.ParentChildLink` generated above, all the related :class:`.Location_link` objects are attached

    - a :class:`.Document3D` is attached to every generated :class:`.Part` and set as the Part's PartDecompose attribute

    - the doc_id attribute of every node of the arborescence (**product**) is set to the id of the :class:`.DocumentFile` generated above

    - a new empty (locked) STP :class:`.DocumentFile` is added to every generated :class:`.Document3D` (see :meth:`.generateGhostDocumentFile`)

    - the doc_path attribute of every node of the arborescence (**product**) is set to the path of the STP :class:`.DocumentFile` generated above
    """

    to_delete = []
    user = parent_ctrl._user
    company = pmodels.User.objects.get(username=settings.COMPANY)
    other_files = list(doc3D.files.exclude(models.is_stp))
    for link in product.links:
        try:

            oq = forms.Order_Quantity_Form(request.POST, prefix=link.visited)
            oq.is_valid()
            options = oq.cleaned_data
            order = options["order"]
            quantity = options["quantity"]
            unit = options["unit"]

            if not link.product.part_to_decompose:

                part_ctype = forms.Doc_Part_type_Form(request.POST, prefix=link.product.visited)
                part_ctype.is_valid()
                options = part_ctype.cleaned_data
                cls = get_all_plmobjects()[options["type_part"]]
                part_form = pforms.get_creation_form(user, cls, request.POST,
                    inbulk_cache=inbulk_cache, prefix=str(link.product.visited)+"-part")

                part_ctrl = parent_ctrl.create_from_form(part_form, user, True, True)

                instances.append((part_ctrl.object._meta.app_label,
                    part_ctrl.object._meta.module_name, part_ctrl.object._get_pk_val()))

                c_link = parent_ctrl.add_child(part_ctrl.object, quantity, order, unit)

                models.generate_extra_location_links(link, c_link)

                doc_form = pforms.get_creation_form(user, models.Document3D, request.POST,
                    inbulk_cache=inbulk_cache, prefix=str(link.product.visited)+"-document")
                doc_ctrl = models.Document3DController.create_from_form(doc_form,
                        user, True, True)

                link.product.part_to_decompose = part_ctrl.object
                to_delete.append(generateGhostDocumentFile(link.product, doc_ctrl.object, company))

                instances.append((doc_ctrl.object._meta.app_label,
                    doc_ctrl.object._meta.module_name, doc_ctrl.object._get_pk_val()))
                part_ctrl.attach_to_document(doc_ctrl.object)
                new_Doc3D = doc_ctrl.object
                new_Doc3D.PartDecompose = part_ctrl.object
                new_Doc3D.no_index = True
                new_Doc3D.save()

                for doc_file in other_files:
                    filename, ext = os.path.splitext(doc_file.filename)
                    # add files with the same name (for example a .sldXXX
                    # or .CATXXX file)
                    if filename == link.product.name:
                        f = File(doc_file.file)
                        f.name = doc_file.filename
                        f.size = doc_file.size
                        df = doc_ctrl.add_file(f, False, False)
                        if doc_file.thumbnail:
                            doc_ctrl.add_thumbnail(df, File(doc_file.thumbnail))
                        instances.append((df._meta.app_label, df._meta.module_name, df.pk))
                        instances.append((doc_file._meta.app_label, doc_file._meta.module_name, doc_file.pk))
                        doc_file.no_index = True
                        doc_file.deprecated = True
                        doc_file.save()

                generate_part_doc_links(request, link.product, part_ctrl, instances, doc3D, inbulk_cache)

            else:

                c_link = parent_ctrl.add_child(link.product.part_to_decompose, quantity, order, unit)
                models.generate_extra_location_links(link, c_link)

        except Exception:
            raise models.Document_Generate_Bom_Error(to_delete, link.product.name)
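
The wrapping done in the inner loop (building a fresh File from an existing DocumentFile before handing it to add_file()) can be factored out; a sketch assuming doc_file exposes .file, .filename and .size as in the snippet (the helper name is mine):

from django.core.files import File


def clone_document_file(doc_file):
    """Re-wrap an existing DocumentFile's underlying file as a new
    django.core.files.File with an explicit name and size."""
    f = File(doc_file.file)
    f.name = doc_file.filename   # keep the original file name
    f.size = doc_file.size       # reuse the known size instead of re-stat'ing storage
    return f
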