Example #1
    def do_copo_schemas(self):
        """Attaches a bundle of commonly used COPO schemas to the context."""
        copo_schemas = dict(
            ontology_schema=d_utils.get_copo_schema("ontology_annotation"),
            comment_schema=d_utils.get_copo_schema("comment"),
            characteristics_schema=d_utils.get_copo_schema("material_attribute_value"),
            source_schema=d_utils.get_copo_schema("source")
        )

        self.context["copo_schemas"] = copo_schemas
        return self.context
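All of these examples index schema fields by the same keys. Judging purely from the access patterns in Examples #3-#8 (an inferred sketch, not COPO's actual data), a schema returned by d_utils.get_copo_schema can be pictured as a list of field definitions:

# Hypothetical illustration inferred from the examples; not the real COPO schema.
example_schema = [
    {
        "id": "copo.comment.name",  # dotted field id; the code keys on the last segment
        "label": "Name",
        "show_in_table": True,      # treated as True when the key is absent
        "default_value": "",
    },
    {
        "id": "copo.comment.value",
        "label": "Value",
        "show_in_table": True,
        "default_value": "",
    },
]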
Example #2
    def do_form_control_schemas(self):
        """
        function returns object type control schemas used in building form controls
        :return:
        """

        copo_schemas = dict()
        for k, v in d_utils.object_type_control_map().items():
            copo_schemas[k] = d_utils.get_copo_schema(v)

        self.context["copo_schemas"] = copo_schemas
        return self.context
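d_utils.object_type_control_map isn't shown in these examples; from the schema names used in Example #1 it can be pictured as a plain mapping from control keys to schema names (an illustrative stand-in whose keys and values are guesses, not COPO's actual map):

# Hypothetical stand-in for d_utils.object_type_control_map(); entries are guesses.
def object_type_control_map():
    return {
        "comment_schema": "comment",
        "characteristics_schema": "material_attribute_value",
        "ontology_schema": "ontology_annotation",
    }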
Example #3
def resolve_phenotypic_characteristics_data(data, elem):
    schema = d_utils.get_copo_schema("phenotypic_variables")

    resolved_data = list()

    for f in schema:
        if f.get("show_in_table", True):
            key = f["id"].split(".")[-1]  # last segment of the dotted field id
            if key in data:
                resolved_data.append({key: resolve_ontology_term_data(data[key], elem)})

    return resolved_data
Example #4
def resolve_copo_characteristics_data(data, elem):
    schema = d_utils.get_copo_schema("material_attribute_value")

    resolved_data = list()

    for f in schema:
        if f.get("show_in_table", True):
            key = f["id"].split(".")[-1]  # last segment of the dotted field id
            if key in data:
                resolved_data.append({key: resolve_ontology_term_data(data[key], elem)})

    return resolved_data
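A minimal, runnable sketch of the resolver pattern shared by Examples #3 and #4, with stand-in stubs for the schema and for resolve_ontology_term_data (both are assumptions for illustration; the real ones live in COPO):

# Stand-in stubs; the real schema and resolver come from COPO's d_utils.
def _stub_resolver(value, elem):
    return value.get("annotationValue", "")

_stub_schema = [
    {"id": "copo.characteristics.category", "show_in_table": True},
    {"id": "copo.characteristics.unit", "show_in_table": True},
]

data = {"category": {"annotationValue": "height"},
        "unit": {"annotationValue": "cm"}}

resolved_data = []
for f in _stub_schema:
    key = f["id"].split(".")[-1]
    if f.get("show_in_table", True) and key in data:
        resolved_data.append({key: _stub_resolver(data[key], None)})

print(resolved_data)  # [{'category': 'height'}, {'unit': 'cm'}]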
Example #5
def resolve_copo_duration_data(data, elem):
    schema = d_utils.get_copo_schema("duration")

    resolved_data = list()

    for f in schema:
        if f.get("show_in_table", True):
            key = f["id"].split(".")[-1]
            if key in data:
                resolved_data.append(f["label"] + ": " + data[key])

    return resolved_data
Example #6
def resolve_copo_comment_data(data, elem):
    schema = d_utils.get_copo_schema("comment")

    resolved_data = list()

    for f in schema:
        if f.get("show_in_table", True):
            key = f["id"].split(".")[-1]
            if key in data:
                resolved_data.append({key: data[key]})

    if not resolved_data:
        resolved_data = str()
    elif len(resolved_data) == 1:
        resolved_data = resolved_data[0]
    return resolved_data
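Unlike the earlier resolvers, resolve_copo_comment_data normalizes its return type, so callers can receive three different shapes. The collapse logic at its tail, isolated as a runnable sketch:

def _collapse(resolved_data):
    # Mirrors the tail of resolve_copo_comment_data:
    # empty -> "", exactly one entry -> that dict, otherwise -> the list.
    if not resolved_data:
        return str()
    elif len(resolved_data) == 1:
        return resolved_data[0]
    return resolved_data

print(_collapse([]))                    # ''
print(_collapse([{"comment": "ok"}]))   # {'comment': 'ok'}
print(_collapse([{"a": 1}, {"b": 2}]))  # [{'a': 1}, {'b': 2}]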
Example #7
def inspect_file(request):
    # utility method to examine a file and return meta-data to the frontend
    output_dict = {'file_type': 'unknown', 'do_compress': False}

    # get reference to file
    file_id = request.GET['file_id']

    chunked_upload = ChunkedUpload.objects.get(id=int(file_id))
    file_name = os.path.join(settings.MEDIA_ROOT, chunked_upload.file.name)

    # size threshold to determine if a file should be compressed
    zip_threshold = 200_000_000  # size in bytes (200 MB)

    # check if file is compressed
    is_zipped = u.is_gzipped(file_name)

    if chunked_upload.offset >= zip_threshold and not is_zipped:
        output_dict['do_compress'] = True

    # check for file type
    if u.is_pdf_file(file_name):
        output_dict['file_type'] = 'pdf'
    else:
        try:
            if u.is_fastq_file(file_name):
                output_dict['file_type'] = 'fastq'
                if not is_zipped:
                    output_dict['do_compress'] = True
            elif u.is_sam_file(file_name):
                output_dict['file_type'] = 'sam'
                if not is_zipped:
                    output_dict['do_compress'] = True
            elif u.is_bam_file(file_name):
                output_dict['file_type'] = 'bam'
                if not is_zipped:
                    output_dict['do_compress'] = True

            else:  # make the file type the same as the file name extension
                # rsplit with maxsplit=1 handles names with several dots;
                # names without an extension raise IndexError and fall through
                output_dict['file_type'] = chunked_upload.filename.rsplit(
                    '.', 1)[1]
        except Exception:
            output_dict['file_type'] = 'unknown'

    # add datafile schema
    chunked_upload.type = output_dict['file_type']
    chunked_upload.save()

    # ...and obtain the inserted record
    profile_id = request.session['profile_id']
    component = "datafile"

    auto_fields = dict()
    auto_fields[DataFile().get_qualified_field("file_id")] = file_id
    auto_fields[DataFile().get_qualified_field(
        "file_type")] = output_dict['file_type']
    auto_fields[DataFile().get_qualified_field("file_location")] = file_name
    auto_fields[DataFile().get_qualified_field(
        "file_size")] = u.filesize_toString(chunked_upload.offset)
    auto_fields[DataFile().get_qualified_field(
        "name")] = chunked_upload.filename

    # get default type from schema (renamed to avoid shadowing the built-in 'type')
    type_field = [
        f for f in d_utils.get_copo_schema(component)
        if f.get("id").split(".")[-1] == "type"
    ]
    if type_field:
        auto_fields[DataFile().get_qualified_field(
            "type")] = type_field[0]["default_value"]

    df = BrokerDA(context=dict(),
                  profile_id=profile_id,
                  component=component,
                  auto_fields=auto_fields,
                  visualize="last_record").do_save_edit().get(
                      "record_object", dict())

    out = jsonpickle.encode(output_dict)
    return HttpResponse(out, content_type='application/json')
Example #8
def inspect_file(request):
    # utility method to examine a file and return meta-data to the frontend
    output_dict = {'file_type': 'unknown', 'do_compress': False}

    # get reference to file
    file_id = request.GET['file_id']

    chunked_upload = ChunkedUpload.objects.get(id=int(file_id))
    file_name = os.path.join(settings.MEDIA_ROOT, chunked_upload.file.name)

    # size threshold to determine if a file should be compressed
    zip_threshold = 200_000_000  # size in bytes (200 MB)

    # check if file is compressed
    is_zipped = u.is_gzipped(file_name)

    if chunked_upload.offset >= zip_threshold and not is_zipped:
        output_dict['do_compress'] = True

    # check for file type
    if u.is_pdf_file(file_name):
        output_dict['file_type'] = 'pdf'
    else:
        try:
            if u.is_fastq_file(file_name):
                output_dict['file_type'] = 'fastq'
                if not is_zipped:
                    output_dict['do_compress'] = True
            elif u.is_sam_file(file_name):
                output_dict['file_type'] = 'sam'
                if not is_zipped:
                    output_dict['do_compress'] = True
            elif u.is_bam_file(file_name):
                output_dict['file_type'] = 'bam'
                if not is_zipped:
                    output_dict['do_compress'] = True

            else:  # make the file type the same as the file name extension
                # rsplit with maxsplit=1 handles names with several dots;
                # names without an extension raise IndexError and fall through
                output_dict['file_type'] = chunked_upload.filename.rsplit('.', 1)[1]
        except Exception:
            output_dict['file_type'] = 'unknown'

    # add datafile schema
    chunked_upload.type = output_dict['file_type']
    chunked_upload.save()

    # ...and obtain the inserted record
    profile_id = request.session['profile_id']
    component = "datafile"

    auto_fields = dict()
    auto_fields[DataFile().get_qualified_field("file_id")] = file_id
    auto_fields[DataFile().get_qualified_field("file_type")] = output_dict['file_type']
    auto_fields[DataFile().get_qualified_field("file_location")] = file_name
    auto_fields[DataFile().get_qualified_field("file_size")] = u.filesize_toString(chunked_upload.offset)
    auto_fields[DataFile().get_qualified_field("name")] = chunked_upload.filename

    # get default type from schema (renamed to avoid shadowing the built-in 'type')
    type_field = [f for f in d_utils.get_copo_schema(component) if f.get("id").split(".")[-1] == "type"]
    if type_field:
        auto_fields[DataFile().get_qualified_field("type")] = type_field[0]["default_value"]

    df = BrokerDA(context=dict(),
                  profile_id=profile_id,
                  component=component,
                  auto_fields=auto_fields,
                  visualize="last_record"
                  ).do_save_edit().get("record_object", dict())

    # do visualise
    table_data = BrokerVisuals(
        profile_id=profile_id,
        context=output_dict,
        component=component,
        record_object=df
    ).do_row_data().get("table_data", dict())

    output_dict['table_data'] = table_data

    out = jsonpickle.encode(output_dict)
    return HttpResponse(out, content_type='application/json')
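A hedged sketch of exercising inspect_file with Django's test client; the URL path, the session value, and the file_id are assumptions for illustration, not taken from the COPO codebase:

# Hypothetical call via Django's test client; the URL and ids are made up.
from django.test import Client

client = Client()
session = client.session
session['profile_id'] = 'some-profile-id'  # inspect_file reads this key from the session
session.save()

response = client.get('/rest/inspect_file/', {'file_id': '42'})
payload = response.content  # JSON string produced by jsonpickle.encode, e.g.
# b'{"file_type": "fastq", "do_compress": true, "table_data": {...}}'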