Example #1
def create_file_object(path, file_group, lims_metadata, metadata, file_type):
    try:
        f = File.objects.create(file_name=os.path.basename(path),
                                path=path,
                                file_group=file_group,
                                file_type=file_type)

        fm = FileMetadata(file=f, metadata=metadata)
        fm.save()
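        # Queue a background job to calculate the checksum for the new file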
        Job.objects.create(
            run=TYPES["CALCULATE_CHECKSUM"],
            args={
                "file_id": str(f.id),
                "path": path
            },
            status=JobStatus.CREATED,
            max_retry=3,
            children=[],
        )
        # Keep a copy of the original LIMS metadata for the imported file
        ImportMetadata.objects.create(file=f, metadata=lims_metadata)
    except Exception as e:
        logger.error("Failed to create file %s. Error %s" % (path, str(e)))
        raise FailedToFetchSampleException(
            "Failed to create file %s. Error %s" % (path, str(e)))
Example #2
def _create_single_file(self, path, file_type, group_id, request_id,
                        sample_id):
    file_type_obj = FileType.objects.get(name=file_type)
    group_id_obj = FileGroup.objects.get(id=group_id)
    file = File(path=path,
                file_name=os.path.basename(path),
                file_type=file_type_obj,
                file_group=group_id_obj,
                size=1234)
    file.save()
    metadata = {"requestId": request_id, "igoSampleId": sample_id}
    file_metadata = FileMetadata(file=file, metadata=metadata)
    file_metadata.save()
    return file
Example #3
def _register_as_file(self, path, metadata):
    self.logger.info("Registering file %s" % path)
    try:
        file_group = FileGroup.objects.get(id=settings.NOTIFIER_FILE_GROUP)
    except FileGroup.DoesNotExist:
        self.logger.error("Notifier FileGroup %s does not exist" % settings.NOTIFIER_FILE_GROUP)
        return
    file_type_obj = FileType.objects.filter(name='json').first()
    try:
        f = File.objects.create(file_name=os.path.basename(path),
                                path=path,
                                file_group=file_group,
                                file_type=file_type_obj)
        fm = FileMetadata(file=f, metadata=metadata)
        fm.save()
    except Exception as e:
        self.logger.error("Failed to create file %s. Error %s" % (path, str(e)))
Example #4
def create(self, validated_data):
    request = self.context.get("request")
    user = request.user if request and hasattr(request, "user") else None
    validated_data['file_name'] = os.path.basename(
        validated_data.get('path'))
    metadata = validated_data.pop('metadata')
    file = File.objects.create(**validated_data)
    file_metadata = FileMetadata(file=file, metadata=metadata, user=user)
    file_metadata.save()
    # Queue a background job to calculate the checksum for the new file
    Job.objects.create(run=TYPES["CALCULATE_CHECKSUM"],
                       args={
                           'file_id': str(file.id),
                           'path': validated_data.get('path')
                       },
                       status=JobStatus.CREATED,
                       max_retry=3,
                       children=[])
    return file
Example #5
def create_file_obj(uri, size, checksum, group_id, metadata):
    file_path = FileProcessor.parse_path_from_uri(uri)
    basename = os.path.basename(file_path)
    file_type = FileProcessor.get_file_ext(basename)
    try:
        group_id_obj = FileGroup.objects.get(id=group_id)
    except FileGroup.DoesNotExist:
        raise FileHelperException("Invalid FileGroup id: %s" % group_id)
    # Refuse to register a path that is already in the database
    if File.objects.filter(path=file_path).first():
        raise FileConflictException("File with path %s already exists" % file_path)
    file_object = File.objects.create(
        path=file_path,
        file_name=basename,
        checksum=checksum,
        file_type=file_type,
        file_group=group_id_obj,
        size=size,
    )
    file_metadata = FileMetadata(file=file_object, metadata=metadata)
    file_metadata.save()
    return file_object
Example #6
    def update(self, instance, validated_data):
        request = self.context.get("request")
        user = request.user if request and hasattr(request, "user") else None
        if not user:
            try:
                user = User.objects.get(id=validated_data.get("user"))
            except User.DoesNotExist:
                pass
        instance.path = validated_data.get("path", instance.path)
        instance.file_name = os.path.basename(instance.path)
        instance.size = validated_data.get("size", instance.size)
        instance.file_group_id = validated_data.get("file_group_id",
                                                    instance.file_group_id)
        instance.file_type = validated_data.get("file_type",
                                                instance.file_type)

        if self.partial:
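            # Partial update: merge the incoming metadata into the latest
            # version and store the result as a new FileMetadata entry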
            old_metadata = instance.filemetadata_set.order_by(
                "-version").first().metadata
            old_metadata.update(validated_data.get("metadata", {}))
            metadata = FileMetadata(file=instance,
                                    metadata=old_metadata,
                                    user=user)
            metadata.save()
        else:
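            # Full update: record a new FileMetadata version only when the
            # incoming metadata differs from the latest stored version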
            ddiff = DeepDiff(
                validated_data.get("metadata"),
                instance.filemetadata_set.order_by(
                    "-created_date").first().metadata,
                ignore_order=True,
            )
            if ddiff:
                metadata = FileMetadata(
                    file=instance,
                    metadata=validated_data.get("metadata"),
                    user=user)
                metadata.save()
        instance.save()
        return instance
Example #7
def create_pooled_normal(filepath, file_group_id):
    """
    Parse the file path provided for a Pooled Normal sample into the metadata fields needed to
    create the File and FileMetadata entries in the database

    Parameters
    ----------
    filepath: str
        path to file for the sample
    file_group_id: UUID
        primary key for FileGroup to use for imported File entry

    Examples
    --------
        filepath = "/ifs/archive/GCL/hiseq/FASTQ/JAX_0397_BHCYYWBBXY/Project_POOLEDNORMALS/Sample_FFPEPOOLEDNORMAL_IGO_IMPACT468_GTGAAGTG/FFPEPOOLEDNORMAL_IGO_IMPACT468_GTGAAGTG_S5_R1_001.fastq.gz"
        file_group_id = settings.IMPORT_FILE_GROUP
        create_pooled_normal(filepath, file_group_id)

    Notes
    -----
    For a path component such as "JAX_0397_BHCYYWBBXY":
    - runId = JAX_0397
    - flowCellId = HCYYWBBXY
    - the leading [A|B] is likely the flowcell bay the flowcell is placed into
    """
    # Skip paths that have already been registered as Files
    if File.objects.filter(path=filepath).exists():
        logger.info("Pooled normal already created for %s" % filepath)
        return
    file_group_obj = FileGroup.objects.get(id=file_group_id)
    file_type_obj = FileType.objects.filter(name="fastq").first()
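    # Recipes known to the ETL configuration, used to recognize the recipe
    # embedded in the pooled normal folder name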
    assays = ETLConfiguration.objects.first()
    assay_list = assays.all_recipes
    run_id = None
    preservation_type = None
    recipe = None
    try:
        parts = filepath.split("/")
        path_shift = 0
        # path_shift needed for /ifs/archive/GCL/hiseq/ -> /igo/delivery/ transition
        if "igo" in parts[1]:
            path_shift = 2
        run_id = get_run_id_from_string(parts[6 - path_shift])
        pooled_normal_folder = parts[8 - path_shift]
        preservation_type = pooled_normal_folder
        preservation_type = preservation_type.split("Sample_")[1]
        preservation_type = preservation_type.split("POOLEDNORMAL")[0]
        potential_recipe = list(
            filter(lambda single_assay: single_assay in pooled_normal_folder,
                   assay_list))
        if potential_recipe:
            potential_recipe.sort(key=len, reverse=True)
            recipe = potential_recipe[0]
    except Exception as e:
        raise FailedToFetchPoolNormalException(
            "Failed to parse metadata for pooled normal file %s. Error %s" % (filepath, str(e)))
    if preservation_type not in ("FFPE", "FROZEN", "MOUSE"):
        logger.info("Invalid preservation type %s" % preservation_type)
        return
    if recipe in assays.disabled_recipes:
        logger.info("Recipe %s, is marked as disabled" % recipe)
        return
    if None in [run_id, preservation_type, recipe]:
        logger.info("Invalid metadata runId:%s preservation:%s recipe:%s" %
                    (run_id, preservation_type, recipe))
        return
    metadata = {
        "runId": run_id,
        "preservation": preservation_type,
        "recipe": recipe
    }
    try:
        new_path = CopyService.remap(recipe, filepath)
        if new_path != filepath:
            CopyService.copy(filepath, new_path)
    except Exception as e:
        logger.error("Failed to copy file %s." % (filepath, ))
        raise FailedToFetchPoolNormalException(
            "Failed to copy file %s. Error %s" % (filepath, str(e)))

    try:
        f = File.objects.create(file_name=os.path.basename(filepath),
                                path=filepath,
                                file_group=file_group_obj,
                                file_type=file_type_obj)
        fm = FileMetadata(file=f, metadata=metadata)
        fm.save()
    except Exception as e:
        logger.info("File already exists %s. Error %s" % (filepath, str(e)))