Example #1
 def start_from_file(self, fileref):
     #logmessage("Starting from file " + str(fileref))
     existing_entry = db.session.execute(select(MachineLearning).filter_by(group_id=self.group_id)).first()
     if existing_entry is not None:
         return
     file_info = get_info_from_file_reference(fileref, folder='sources')
     if 'fullpath' not in file_info or file_info['fullpath'] is None or not os.path.exists(file_info['fullpath']):
         return
         #raise Exception("File reference " + str(fileref) + " is invalid")
     with open(file_info['fullpath'], 'r', encoding='utf-8') as fp:
         content = fp.read()
     if 'mimetype' in file_info and file_info['mimetype'] == 'application/json':
         aref = json.loads(content)
     elif 'extension' in file_info and file_info['extension'].lower() in ['yaml', 'yml']:
         aref = yaml.load(content, Loader=yaml.FullLoader)
     else:
         # Neither JSON nor YAML; nothing to import (avoids referencing an unbound aref below).
         return
     if isinstance(aref, dict) and hasattr(self, 'group_id'):
         the_group_id = re.sub(r'.*:', '', self.group_id)
         if the_group_id in aref:
             aref = aref[the_group_id]
     if isinstance(aref, list):
         nowtime = datetime.datetime.utcnow()
         for entry in aref:
             if 'independent' in entry:
                 depend = entry.get('dependent', None)
                 if depend is not None:
                     new_entry = MachineLearning(
                         group_id=self.group_id,
                         independent=codecs.encode(pickle.dumps(entry['independent']), 'base64').decode(),
                         dependent=codecs.encode(pickle.dumps(depend), 'base64').decode(),
                         modtime=nowtime,
                         create_time=nowtime,
                         active=True,
                         key=entry.get('key', None),
                         info=codecs.encode(pickle.dumps(entry['info']), 'base64').decode() if entry.get('info', None) is not None else None)
                 else:
                     new_entry = MachineLearning(
                         group_id=self.group_id,
                         independent=codecs.encode(pickle.dumps(entry['independent']), 'base64').decode(),
                         modtime=nowtime,
                         create_time=nowtime,
                         active=False,
                         key=entry.get('key', None),
                         info=codecs.encode(pickle.dumps(entry['info']), 'base64').decode() if entry.get('info', None) is not None else None)
                 db.session.add(new_entry)
         db.session.commit()
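For reference, a minimal sketch of a sources file that start_from_file above could ingest, inferred from the loader: the top level may be a plain list of entries, or a mapping keyed by the group id suffix (the part after the colon in self.group_id); each entry needs an 'independent' value and may also carry 'dependent', 'key', and 'info'. The file name demo.yml and the group id suffix 'fruit' are made-up placeholders.

 # Sketch: write a YAML sources file in the shape the loader above accepts.
 # 'demo.yml' and the group id suffix 'fruit' are hypothetical placeholders.
 import yaml

 entries = [
     {'independent': 'I would like an apple', 'dependent': 'apple', 'key': 'user1'},
     {'independent': 'a banana please', 'dependent': 'banana'},
     {'independent': 'something round and red'},  # no dependent: imported as inactive
 ]

 # Either dump the list directly, or nest it under the group id suffix;
 # the loader accepts both layouts.
 with open('demo.yml', 'w', encoding='utf-8') as fp:
     yaml.dump({'fruit': entries}, fp, default_flow_style=False)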
Example #2
 def add_to_training_set(self, independent, dependent, key=None, info=None):
     self._initialize()
     nowtime = datetime.datetime.utcnow()
     if dependent is not None:
         new_entry = MachineLearning(
             group_id=self.group_id,
             independent=codecs.encode(pickle.dumps(independent), 'base64').decode(),
             dependent=codecs.encode(pickle.dumps(dependent), 'base64').decode(),
             info=codecs.encode(pickle.dumps(info), 'base64').decode() if info is not None else None,
             create_time=nowtime,
             modtime=nowtime,
             active=True,
             key=key)
     else:
         new_entry = MachineLearning(
             group_id=self.group_id,
             independent=codecs.encode(pickle.dumps(independent), 'base64').decode(),
             info=codecs.encode(pickle.dumps(info), 'base64').decode() if info is not None else None,
             create_time=nowtime,
             modtime=nowtime,
             active=False,
             key=key)
     db.session.add(new_entry)
     db.session.commit()
     return new_entry.id
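Every method in these examples stores the independent, dependent, and info values as base64-encoded pickles. Below is a minimal round-trip sketch of that encoding; the decoding direction is an assumption of the obvious inverse, since only the encoding side appears in the code above.

 import codecs
 import pickle

 value = 'I would like an apple'

 # Encode the way add_to_training_set does: pickle, base64-encode, then to str.
 encoded = codecs.encode(pickle.dumps(value), 'base64').decode()

 # Decode (assumed inverse): back to bytes, undo the base64, then unpickle.
 decoded = pickle.loads(codecs.decode(encoded.encode(), 'base64'))

 assert decoded == value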
Example #3
 def _save_entry(self, **kwargs):
     self._initialize()
     the_id = kwargs.get('id', None)
     need_to_reset = False
     if the_id is None:
         the_entry = MachineLearning(group_id=self.group_id)
         existing = False
     else:
         the_entry = db.session.execute(select(MachineLearning).filter_by(group_id=self.group_id, id=the_id)).scalar()
         existing = True
     if the_entry is None:
         raise Exception("There was no entry in the database for id " + str(the_id) + " with group id " + str(self.group_id))
     if 'dependent' in kwargs:
         if existing and the_entry.dependent is not None and the_entry.dependent != kwargs['dependent']:
             need_to_reset = True
         the_entry.dependent = codecs.encode(pickle.dumps(kwargs['dependent']), 'base64').decode()
         the_entry.active = True
     if 'independent' in kwargs:
         if existing and the_entry.independent is not None and the_entry.independent != kwargs['independent']:
             need_to_reset = True
         the_entry.independent = codecs.encode(pickle.dumps(kwargs['independent']), 'base64').decode()
     if 'key' in kwargs:
         the_entry.key = kwargs['key']
     if 'info' in kwargs:
         the_entry.info = codecs.encode(pickle.dumps(kwargs['info']), 'base64').decode()
     the_entry.modtime = datetime.datetime.utcnow()
     if not existing:
         db.session.add(the_entry)
     db.session.commit()
     if need_to_reset:
         self.reset()
Example #4
 def save_for_classification(self, indep, key=None, info=None):
     self._initialize()
     if key is None:
         existing_entry = db.session.execute(
             select(MachineLearning).filter_by(
                 group_id=self.group_id,
                 dependent=None,
                 independent=codecs.encode(pickle.dumps(indep),
                                           'base64').decode())).scalar()
     else:
         existing_entry = db.session.execute(
             select(MachineLearning).filter_by(
                 group_id=self.group_id,
                 key=key,
                 independent=codecs.encode(pickle.dumps(indep),
                                           'base64').decode())).scalar()
     if existing_entry is not None:
         #logmessage("entry is already there")
         return existing_entry.id
     new_entry = MachineLearning(
         group_id=self.group_id,
         independent=codecs.encode(pickle.dumps(indep), 'base64').decode(),
         create_time=datetime.datetime.utcnow(),
         active=False,
         key=key,
         info=codecs.encode(pickle.dumps(info), 'base64').decode()
         if info is not None else None)
     db.session.add(new_entry)
     db.session.commit()
     return new_entry.id
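save_for_classification deduplicates by filtering the stored independent column against a freshly encoded copy of indep. That works because encoding the same value again yields the same base64 text within a given interpreter, so the encoded string can serve as an exact-match key. A small sketch of that property (the caveat that pickle output can differ across Python versions or protocols is a general pickle fact, not something the method above addresses):

 import codecs
 import pickle

 def encode(value):
     # Same encoding save_for_classification uses for the independent column.
     return codecs.encode(pickle.dumps(value), 'base64').decode()

 # Encoding the same text twice gives identical strings, so an exact-match
 # query on the encoded column finds the earlier row.
 assert encode('I would like an apple') == encode('I would like an apple')

 # Different text gives a different key.
 assert encode('I would like an apple') != encode('a banana please')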
Example #5
 def _save_entry(self, **kwargs):
     self._initialize()
     the_id = kwargs.get('id', None)
     need_to_reset = False
     if the_id is None:
         the_entry = MachineLearning(group_id=self.group_id)
         existing = False
     else:
         the_entry = MachineLearning.query.filter_by(group_id=self.group_id, id=the_id).first()
         existing = True
     if the_entry is None:
         raise Exception("There was no entry in the database for id " + str(the_id) + " with group id " + str(self.group_id))
     if 'dependent' in kwargs:
         if existing and the_entry.dependent is not None and the_entry.dependent != kwargs['dependent']:
             need_to_reset = True
         the_entry.dependent = codecs.encode(pickle.dumps(kwargs['dependent']), 'base64').decode()
         the_entry.active = True
     if 'independent' in kwargs:
         if existing and the_entry.independent is not None and the_entry.independent != kwargs['independent']:
             need_to_reset = True
         the_entry.independent = codecs.encode(pickle.dumps(kwargs['independent']), 'base64').decode()
     if 'key' in kwargs:
         the_entry.key = kwargs['key']
     if 'info' in kwargs:
         the_entry.info = codecs.encode(pickle.dumps(kwargs['info']), 'base64').decode()
     the_entry.modtime = datetime.datetime.utcnow()
     if not existing:
         db.session.add(the_entry)
     db.session.commit()
     if need_to_reset:
         self.reset()
 def save_for_classification(self, text, key=None):
     self._initialize()
     if key is None:
         existing_entry = MachineLearning.query.filter_by(group_id=self.group_id, dependent=None, independent=codecs.encode(pickle.dumps(text), 'base64').decode()).first()
     else:
         existing_entry = MachineLearning.query.filter_by(group_id=self.group_id, key=key, independent=codecs.encode(pickle.dumps(text), 'base64').decode()).first()
     if existing_entry is not None:
         logmessage("entry is already there")
         return existing_entry.id
     new_entry = MachineLearning(group_id=self.group_id, independent=codecs.encode(pickle.dumps(text), 'base64').decode(), create_time=datetime.datetime.utcnow(), active=False, key=key)
     db.session.add(new_entry)
     db.session.commit()
     return new_entry.id
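Examples #5 and #7 use the legacy Flask-SQLAlchemy MachineLearning.query.filter_by(...).first() pattern, while examples #1 through #4 use db.session.execute(select(...)) with .first() or .scalar(). A self-contained sketch of how the two styles relate, assuming SQLAlchemy 1.4+ and using a throwaway Entry model on an in-memory SQLite database (placeholders, not the real MachineLearning table): the legacy .first() and the 2.x .scalar() both return a mapped instance or None, whereas execute(...).first() returns a Row wrapper, which is why example #1 only checks it against None.

 from sqlalchemy import Column, Integer, Text, create_engine, select
 from sqlalchemy.orm import Session, declarative_base

 Base = declarative_base()

 class Entry(Base):
     __tablename__ = 'entry'
     id = Column(Integer, primary_key=True)
     group_id = Column(Text)

 engine = create_engine('sqlite://')
 Base.metadata.create_all(engine)

 with Session(engine) as session:
     session.add(Entry(group_id='docassemble.demo:fruit'))
     session.commit()

     # Legacy style, roughly what Model.query.filter_by(...).first() does:
     # returns an Entry instance, or None when nothing matches.
     legacy = session.query(Entry).filter_by(group_id='docassemble.demo:fruit').first()

     # 2.x style with .scalar(): also an Entry instance or None.
     scalar = session.execute(select(Entry).filter_by(group_id='docassemble.demo:fruit')).scalar()

     # 2.x style with .first(): a Row wrapper (or None), hence the plain
     # "is not None" check in example #1.
     row = session.execute(select(Entry).filter_by(group_id='docassemble.demo:fruit')).first()

     assert legacy is scalar
     assert row[0] is scalar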
Example #7
 def start_from_file(self, fileref):
     #logmessage("Starting from file " + str(fileref))
     existing_entry = MachineLearning.query.filter_by(
         group_id=self.group_id).first()
     if existing_entry is not None:
         return
     file_info = get_info_from_file_reference(fileref, folder='sources')
     if 'fullpath' not in file_info or file_info[
             'fullpath'] is None or not os.path.exists(
                 file_info['fullpath']):
         raise Exception("File reference " + str(fileref) + " is invalid")
     with open(file_info['fullpath'], 'r', encoding='utf-8') as fp:
         content = fp.read()
     if 'mimetype' in file_info and file_info[
             'mimetype'] == 'application/json':
         aref = json.loads(content)
     elif 'extension' in file_info and file_info['extension'].lower() in [
             'yaml', 'yml'
     ]:
         aref = yaml.load(content, Loader=yaml.FullLoader)
     else:
         # Neither JSON nor YAML; nothing to import (avoids referencing an unbound aref below).
         return
     if isinstance(aref, list):
         nowtime = datetime.datetime.utcnow()
         for entry in aref:
             if 'independent' in entry:
                 new_entry = MachineLearning(
                     group_id=self.group_id,
                     independent=codecs.encode(
                         pickle.dumps(entry['independent']),
                         'base64').decode(),
                     dependent=codecs.encode(
                         pickle.dumps(entry.get('dependent', None)),
                         'base64').decode(),
                     modtime=nowtime,
                     create_time=nowtime,
                     active=True,
                     key=entry.get('key', None),
                     info=codecs.encode(pickle.dumps(
                         entry['info']), 'base64').decode() if entry.get(
                             'info', None) is not None else None)
                 db.session.add(new_entry)
         db.session.commit()