def start_from_file(self, fileref):
    """Seed the MachineLearning table for this group from a JSON or YAML source file.

    Idempotent: does nothing if training data for ``self.group_id`` already
    exists, if ``fileref`` cannot be resolved to an existing file, or if the
    file is neither JSON nor YAML.
    """
    # Skip if this group already has data (seeding must not duplicate rows).
    existing_entry = db.session.execute(select(MachineLearning).filter_by(group_id=self.group_id)).first()
    if existing_entry is not None:
        return
    file_info = get_info_from_file_reference(fileref, folder='sources')
    if file_info.get('fullpath') is None or not os.path.exists(file_info['fullpath']):
        return
    with open(file_info['fullpath'], 'r', encoding='utf-8') as fp:
        content = fp.read()
    # Parse by declared mimetype first, then fall back to file extension.
    if file_info.get('mimetype') == 'application/json':
        aref = json.loads(content)
    elif file_info.get('extension', '').lower() in ('yaml', 'yml'):
        # NOTE(review): FullLoader can construct rich Python objects; this is
        # acceptable only because 'sources' files are package-trusted input.
        aref = yaml.load(content, Loader=yaml.FullLoader)
    else:
        # BUG FIX: previously 'aref' was left unbound here, so a file that was
        # neither JSON nor YAML raised NameError below. Bail out cleanly.
        return
    if isinstance(aref, dict) and hasattr(self, 'group_id'):
        # A dict may map short group ids to entry lists; strip any
        # "package:" prefix from our group_id before looking it up.
        the_group_id = re.sub(r'.*:', '', self.group_id)
        if the_group_id in aref:
            aref = aref[the_group_id]
    if isinstance(aref, list):
        nowtime = datetime.datetime.utcnow()
        for entry in aref:
            # Entries without an 'independent' variable are not usable training data.
            if 'independent' in entry:
                new_entry = MachineLearning(
                    group_id=self.group_id,
                    independent=codecs.encode(pickle.dumps(entry['independent']), 'base64').decode(),
                    dependent=codecs.encode(pickle.dumps(entry.get('dependent', None)), 'base64').decode(),
                    modtime=nowtime,
                    create_time=nowtime,
                    active=True,
                    key=entry.get('key', None),
                    info=codecs.encode(pickle.dumps(entry['info']), 'base64').decode() if entry.get('info', None) is not None else None)
                db.session.add(new_entry)
        # Single commit after the loop so the seed is applied atomically.
        db.session.commit()
def start_from_file(self, fileref):
    """Seed the MachineLearning table for this group from a JSON or YAML source file.

    Idempotent: does nothing if training data for ``self.group_id`` already
    exists, if ``fileref`` cannot be resolved to an existing file, or if the
    file is neither JSON nor YAML.
    """
    # Skip if this group already has data (seeding must not duplicate rows).
    existing_entry = MachineLearning.query.filter_by(group_id=self.group_id).first()
    if existing_entry is not None:
        return
    file_info = get_info_from_file_reference(fileref, folder='sources')
    if file_info.get('fullpath') is None or not os.path.exists(file_info['fullpath']):
        return
    # BUG FIX: mode 'rU' was deprecated since Python 3.4 and removed in 3.11,
    # where open(..., 'rU') raises ValueError; plain 'r' already gives
    # universal-newline text mode.
    with open(file_info['fullpath'], 'r', encoding='utf-8') as fp:
        content = fp.read()
    # Parse by declared mimetype first, then fall back to file extension.
    if file_info.get('mimetype') == 'application/json':
        aref = json.loads(content)
    elif file_info.get('extension', '').lower() in ('yaml', 'yml'):
        # NOTE(review): FullLoader can construct rich Python objects; this is
        # acceptable only because 'sources' files are package-trusted input.
        aref = yaml.load(content, Loader=yaml.FullLoader)
    else:
        # BUG FIX: previously 'aref' was left unbound here, so a file that was
        # neither JSON nor YAML raised NameError below. Bail out cleanly.
        return
    if isinstance(aref, dict) and hasattr(self, 'group_id'):
        # A dict may map short group ids to entry lists; strip any
        # "package:" prefix from our group_id before looking it up.
        the_group_id = re.sub(r'.*:', '', self.group_id)
        if the_group_id in aref:
            aref = aref[the_group_id]
    if isinstance(aref, list):
        nowtime = datetime.datetime.utcnow()
        for entry in aref:
            # Entries without an 'independent' variable are not usable training data.
            if 'independent' in entry:
                new_entry = MachineLearning(
                    group_id=self.group_id,
                    independent=codecs.encode(pickle.dumps(entry['independent']), 'base64').decode(),
                    dependent=codecs.encode(pickle.dumps(entry.get('dependent', None)), 'base64').decode(),
                    modtime=nowtime,
                    create_time=nowtime,
                    active=True,
                    key=entry.get('key', None),
                    info=codecs.encode(pickle.dumps(entry['info']), 'base64').decode() if entry.get('info', None) is not None else None)
                db.session.add(new_entry)
        # Single commit after the loop so the seed is applied atomically.
        db.session.commit()