def to_json(self):
    """Return a JSON-serialisable summary of this project's top-level fields."""
    ret = {
        'project_id': self.project_id,
        'open_date': format_date(self.open_date),
        'close_date': format_date(self.close_date),
        'project_status': self.status,
        'researcher_name': self.researcher_name,
        'nb_quoted_samples': self.nb_quoted_samples,
        'udfs': self.udfs
    }
    return ret
def to_json(self):
    """Return a JSON-serialisable summary of this sequencing run."""
    return {
        'created_date': format_date(self.created_date),
        'cst_date': format_date(self.cst_date),
        'run_id': self.udfs.get('RunID'),
        'run_status': self.udfs.get('Run Status'),
        'sample_ids': sorted(self.samples),
        'project_ids': sorted(self.projects),
        'lanes': list(self.lanes.values()),
        'instrument_id': self.udfs.get('InstrumentID'),
        'nb_reads': self.udfs.get('Read'),
        'nb_cycles': self.udfs.get('Cycle')
    }
def to_json(self):
    """Return a JSON-serialisable summary of this sample and its status history."""
    return {
        'sample_id': self.sample_name,
        'project_id': self.project_name,
        'statuses': self.all_statuses(),
        'current_status': self.status,
        'started_date': format_date(self.started_date),
        'finished_date': format_date(self.finished_date),
        'library_type': self.library_type,
        'species': self.species,
        'required_yield': self.required_yield,
        'required_coverage': self.coverage
    }
def to_json(self):
    """Return a JSON-serialisable summary of this project, including per-status sample counts."""
    sample_per_status, status = self.samples_per_status()
    ret = {
        'project_id': self.project_id,
        'nb_samples': len(self.samples),
        'library_type': self.library_types,
        'required_yield': self.required_yield,
        'required_coverage': self.coverage,
        'species': self.species,
        'open_date': format_date(self.open_date),
        'close_date': format_date(self.close_date),
        'project_status': self.status,
        'researcher_name': self.researcher_name,
        'nb_quoted_samples': self.nb_quoted_samples,
        'finished_date': format_date(self.finished_date),
        'started_date': format_date(self.started_date),
        'status': status
    }
    # Merge the per-status sample breakdown into the top-level document
    ret.update(sample_per_status)
    return ret
def to_flatten_json(self):
    """Return one flat record per artifact of the most recently run step."""
    most_recent_step = sorted(self.specific_steps.values(), key=lambda p: p.date_run, reverse=True)[0]
    data = []
    for artifact in most_recent_step.artifacts.values():
        element = {
            'id': self.id,
            'step_link': cfg['lims_url'] + '/clarity/work-complete/' + str(most_recent_step.id),
            'step_run': len(self.specific_steps),
            'date_completed': format_date(most_recent_step.date_run),
            'protocol': status_cfg.protocol_names.get(self.type, self.type),
            'project': artifact.project_id,
        }
        element.update(artifact.to_json())
        data.append(element)
    return data
def to_json(self):
    """Return a JSON-serialisable summary of the most recently run step and its samples."""
    most_recent_step = sorted(self.specific_steps.values(), key=lambda p: p.date_run, reverse=True)[0]
    projects = set()
    data = {
        'id': self.id,
        'step_link': cfg['lims_url'] + '/clarity/work-complete/' + str(most_recent_step.id),
        'step_run': len(self.specific_steps),
        'date_completed': format_date(most_recent_step.date_run),
        'samples': [],
        'protocol': status_cfg.protocol_names.get(self.type, self.type),
        'nsamples': len(most_recent_step.artifacts)
    }

    passing_samples = 0
    for k, v in sorted(most_recent_step.artifacts.items()):
        projects.add(v.project_id)
        data['samples'].append(v.to_json())
        if v.qc_flag == 'PASSED':
            passing_samples += 1

    # Percentage of artifacts in the most recent step flagged as PASSED
    data['pc_qc_flag_pass'] = (passing_samples / len(most_recent_step.artifacts)) * 100
    data['project_ids'] = sorted(projects)
    return data
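
# Illustrative sketch, not part of the original module: how a caller might serialise one
# of the dicts produced by these to_json() methods. The payload below is a made-up
# placeholder shaped like the sample-level to_json() above (assuming format_date()
# returns plain strings); json is the standard-library module.
if __name__ == '__main__':
    import json

    example_payload = {
        'sample_id': 'example_sample',
        'project_id': 'example_project',
        'statuses': [],
        'current_status': 'example_status',
        'started_date': '2024-01-01',
        'finished_date': None,
        'library_type': 'example_library_type',
        'species': 'Homo sapiens',
        'required_yield': 120,
        'required_coverage': 30
    }
    # All values here are JSON-native, so the dict serialises directly
    print(json.dumps(example_payload, indent=4))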