def save_record(self, auto_fields=None, **kwargs):
    """Validate and persist a record for this component.

    :param auto_fields: form-submitted values keyed by schema field id;
        decoupled against the component schema to build the record.
        Defaults to a fresh empty dict (the previous ``dict()`` default
        was a single object shared across calls).
    :param kwargs: recognised control keys:
        - target_id: id of an existing record to update; empty/absent
          selects insert mode
        - validate_only: if True, skip the database action and return
          the validated 'fields' dict
        Any remaining kwargs are treated as extra system field values.
    :return: the saved record, or the validated 'fields' dict when
        validate_only is True
    """
    # avoid the shared-mutable-default pitfall for dict arguments
    auto_fields = dict() if auto_fields is None else auto_fields

    fields = dict()

    # set auto fields
    if auto_fields:
        fields = DecoupleFormSubmission(auto_fields, self.get_schema().get("schema")).get_schema_fields_updated()

    # should have target_id for updates and return empty string for inserts
    target_id = kwargs.pop("target_id", str())

    # pop the control flag BEFORE kwargs is folded into system fields,
    # so 'validate_only' can never leak into the stored record
    validate_only = kwargs.pop("validate_only", False)

    # set system fields
    system_fields = dict(
        date_modified=data_utils.get_datetime(),
        deleted=data_utils.get_not_deleted_flag()
    )

    if not target_id:
        # insert mode: stamp creation metadata
        system_fields["date_created"] = data_utils.get_datetime()
        system_fields["profile_id"] = self.profile_id

    # extend system fields with remaining caller-supplied values
    for k, v in kwargs.items():
        system_fields[k] = v

    # add system fields to 'fields' and set default values - insert mode only
    for f in self.get_schema().get("schema"):
        f_id = f.id.split(".")[-1]

        if f_id in system_fields:
            fields[f_id] = system_fields.get(f_id)

        if not target_id and f_id not in fields:
            fields[f_id] = data_utils.default_jsontype(f.type)

    # prefer this identity test to guard against all sorts of values
    # the 'validate_only' flag can assume
    if validate_only is True:
        return fields

    if target_id:
        self.get_collection_handle().update(
            {"_id": ObjectId(target_id)}, {'$set': fields})
    else:
        doc = self.get_collection_handle().insert(fields)
        target_id = str(doc)

    # return saved record
    rec = self.get_record(target_id)

    return rec
def save_record(self, auto_fields=None, **kwargs):
    """Validate and persist a record against the component schema.

    :param auto_fields: form-submitted values keyed by schema field id;
        decoupled against the schema to build the record. Defaults to a
        fresh empty dict (the previous ``dict()`` default was a single
        object shared across calls).
    :param kwargs: recognised control keys:
        - schema: explicit schema (list of field dicts); falls back to
          self.get_component_schema() when absent or empty
        - target_id: id of an existing record to update; empty/absent
          selects insert mode
        - validate_only: if True, skip the database action and return
          the validated 'fields' dict
        Any remaining kwargs are treated as extra system field values.
    :return: the saved record, or the validated 'fields' dict when
        validate_only is True
    """
    # avoid the shared-mutable-default pitfall for dict arguments
    auto_fields = dict() if auto_fields is None else auto_fields

    fields = dict()

    # pop rather than get: the schema is a control argument and must not
    # be folded into the system fields with the remaining kwargs below
    schema = kwargs.pop("schema", list()) or self.get_component_schema()

    # set auto fields
    if auto_fields:
        fields = DecoupleFormSubmission(
            auto_fields, schema).get_schema_fields_updated_dict()

    # should have target_id for updates and return empty string for inserts
    target_id = kwargs.pop("target_id", str())

    # pop the control flag BEFORE kwargs is folded into system fields,
    # so 'validate_only' can never leak into the stored record
    validate_only = kwargs.pop("validate_only", False)

    # set system fields
    system_fields = dict(date_modified=data_utils.get_datetime(),
                         deleted=data_utils.get_not_deleted_flag())

    if not target_id:
        # insert mode: stamp creation metadata
        system_fields["date_created"] = data_utils.get_datetime()
        system_fields["profile_id"] = self.profile_id

    # extend system fields with remaining caller-supplied values
    for k, v in kwargs.items():
        system_fields[k] = v

    # add system fields to 'fields' and set default values - insert mode only
    for f in schema:
        f_id = f["id"].split(".")[-1]

        if f_id in system_fields:
            fields[f_id] = system_fields.get(f_id)

        if not target_id and f_id not in fields:
            fields[f_id] = data_utils.default_jsontype(f["type"])

    # identity test guards against all sorts of values the flag can assume
    if validate_only is True:
        return fields

    if target_id:
        self.get_collection_handle().update(
            {"_id": ObjectId(target_id)}, {'$set': fields})
    else:
        doc = self.get_collection_handle().insert(fields)
        target_id = str(doc)

    # return saved record
    rec = self.get_record(target_id)

    return rec
def save_record(self, auto_fields=None, **kwargs):
    """Persist a submission record, stamping submission bookkeeping
    defaults on insert before delegating to the base implementation.

    :param auto_fields: form-submitted values keyed by schema field id.
        Defaults to a fresh empty dict.
    :param kwargs: may include 'target_id' (update mode) and
        'repository' (consumed here in insert mode); everything else is
        forwarded to the parent save_record.
    :return: whatever the parent save_record returns (the saved record)
    """
    # a fresh dict per call: the original ``auto_fields=dict()`` default
    # was a single shared object that this method MUTATES below, so
    # defaults stamped for one insert leaked into every subsequent call
    auto_fields = dict() if auto_fields is None else auto_fields

    if not kwargs.get("target_id", str()):
        # insert mode: seed submission-specific bookkeeping fields
        repo = kwargs.pop("repository", str())
        insert_defaults = dict(
            repository=repo,
            status=False,
            complete='false',
            is_cg=str(repo == "cg_core"),
            user_id=data_utils.get_current_user().id,
            date_created=data_utils.get_datetime()
        )
        for k, v in insert_defaults.items():
            auto_fields[self.get_qualified_field(k)] = v

    return super(Submission, self).save_record(auto_fields, **kwargs)
def create_description(self, stages=None, attributes=None):
    """Create a new description record for the current user, first
    purging any existing ones.

    :param stages: list of description stages; defaults to a fresh
        empty list (the previous ``list()`` default was shared across
        calls and, being stored in the record, risked cross-call
        aliasing if a caller later mutated it).
    :param attributes: dict of description attributes; defaults to a
        fresh empty dict for the same reason.
    :return: the inserted record as returned by self.GET
    """
    # avoid shared mutable defaults
    stages = list() if stages is None else stages
    attributes = dict() if attributes is None else attributes

    # only one live description is kept: clear out previous ones first
    self.purge_descriptions()

    fields = dict(
        stages=stages,
        attributes=attributes,
        created_on=data_utils.get_datetime(),
        user_id=ThreadLocal.get_current_user().id
    )

    doc = self.DescriptionCollection.insert(fields)

    # return inserted record
    df = self.GET(str(doc))

    return df
def get_elapsed_time_dataframe(self):
    """Return a DataFrame of description record ids together with
    'diff_days' — the elapsed time, in days, between each record's
    'created_on' timestamp and now.
    """
    # milliseconds per day, used to convert the $subtract delta
    ms_per_day = 1000 * 60 * 60 * 24

    elapsed_days = {
        "$divide": [
            {"$subtract": [data_utils.get_datetime(), "$created_on"]},
            ms_per_day
        ]
    }

    cursor = self.DescriptionCollection.aggregate(
        [{"$project": {"_id": 1, "diff_days": elapsed_days}}])

    return pd.DataFrame(cursor_to_list(cursor))
def update_submission_status(status=str(), message=str(), submission_id=str()):
    """
    function updates status of submission
    :param status: the message type: 'info', 'error'
    :param message: status message
    :param submission_id: the target record id
    :return:
    """
    # nothing to do without a target record
    if not submission_id:
        return True

    handle = get_submission_handle()
    record = handle.find_one(
        {"_id": ObjectId(submission_id)},
        {"transcript": 1, "profile_id": 1})

    if not record:
        return True

    transcript = record.get("transcript", dict())

    if message:
        transcript['status'] = dict(type=status, message=message)
    else:
        # an empty message clears any previously reported status
        transcript.pop('status', '')

    record['transcript'] = transcript
    record['date_modified'] = d_utils.get_datetime()

    record_id = record.pop('_id')
    handle.update({"_id": ObjectId(str(record_id))}, {'$set': record})

    # notify client agent on status change
    try:
        notify_status_change(
            profile_id=record.get("profile_id", str()),
            submission_id=submission_id)
    except Exception as e:
        log_general_error(str(e))

    return True
def schedule_submission(submission_id=str(), submission_repo=str()):
    """
    function adds submission to a queue for processing
    :return:
    """
    from submission.helpers import generic_helper as ghlper
    import web.apps.web_copo.schemas.utils.data_utils as d_utils

    # guard: a submission id is required to queue anything
    if not submission_id:
        return dict(status=False, message='Submission identifier not found!')

    queue_handle = ghlper.get_submission_queue_handle()
    queued_entry = queue_handle.find_one({"submission_id": submission_id})

    context = dict(status=True, message='')

    if queued_entry:
        context['message'] = 'Submission is already in the processing queue. Status updates will be reported.'
    else:
        # submission not already in queue, add to queue
        queue_handle.insert(dict(
            submission_id=submission_id,
            date_modified=d_utils.get_datetime(),
            repository=submission_repo,
            processing_status='pending'))
        context['message'] = 'Submission has been added to the processing queue. Status update will be reported.'

    ghlper.update_submission_status(
        status='info',
        message=context['message'],
        submission_id=submission_id)
    ghlper.logging_info(context['message'], submission_id)

    return context
def create_description(self, stages=None, attributes=None, profile_id=str(),
                       component=str(), meta=dict(), name=str()):
    """Create a new description record for a profile/component pair.

    :param stages: list of description stages; defaults to a fresh
        empty list (the previous ``list()`` default was a single object
        shared across calls — a cross-call aliasing hazard once stored).
    :param attributes: dict of description attributes; fresh empty dict
        by default, for the same reason.
    :param profile_id: owning profile id
    :param component: component name; also recorded on self.component
    :param meta: dict of extra metadata; note the default is still the
        shared ``dict()`` — kept here because ``meta=dict()`` is part of
        the visible signature, but normalised below before use.
    :param name: display name for the description
    :return: the inserted record as returned by self.GET
    """
    # avoid shared mutable defaults
    stages = list() if stages is None else stages
    attributes = dict() if attributes is None else attributes
    meta = dict(meta)  # copy so the shared default is never stored/aliased

    self.component = component

    fields = dict(
        stages=stages,
        attributes=attributes,
        profile_id=profile_id,
        component=component,
        meta=meta,
        name=name,
        created_on=data_utils.get_datetime(),
    )

    doc = self.DescriptionCollection.insert(fields)

    # return inserted record
    df = self.GET(str(doc))

    return df