Example #1
class Object(pw.Model):
    array_field = ArrayField()
    binary_json_field = BinaryJSONField()
    datetime_tz_field = DateTimeTZField()
    hstore_field = HStoreField()
    interval_field = IntervalField()
    json_field = JSONField()
    ts_vector_field = TSVectorField()
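These field types come from peewee's playhouse.postgres_ext extension. A minimal setup sketch, assuming peewee 3.x; the database name, credentials, and the small Item model are placeholders rather than part of any example above:

import peewee as pw
from playhouse.postgres_ext import ArrayField, HStoreField, PostgresqlExtDatabase

# register_hstore=True makes peewee register the hstore adapter on connect;
# the hstore extension must already be enabled in the database
# (CREATE EXTENSION hstore).
ext_db = PostgresqlExtDatabase('example_db', register_hstore=True)

class Item(pw.Model):
    tags = ArrayField(pw.TextField, null=True)   # maps to TEXT[]
    attrs = HStoreField(default=dict)            # key/value pairs in one column

    class Meta:
        database = ext_db

ext_db.connect()
ext_db.create_tables([Item])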
Example #2
File: user.py Project: comger/qor
class User(DBBaseModel):
    # Use a callable default so each new row gets a fresh UUID
    # (a bare str(uuid.uuid1()) is evaluated only once, at import time).
    uid = CharField(default=lambda: str(uuid.uuid1()), index=True, unique=True)
    username = CharField(unique=True)
    nickname = CharField(null=True, default="")
    email = CharField(unique=True, default="")
    password = CharField()
    tel = CharField(null=True, default="")
    is_superuser = BooleanField(default=False, null=True)
    is_active = BooleanField(default=True, null=True)
    role = ForeignKeyField(UserRole, backref='user', null=True)
    attrs = HStoreField(default=dict)  # callable default avoids a shared mutable dict
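A usage sketch for the hstore attrs field above, using the lookup helpers playhouse.postgres_ext provides for HStoreField; the row values are hypothetical:

user = User.create(username='alice', email='alice@example.com',
                   password='...', attrs={'theme': 'dark', 'lang': 'ko'})

# Filter on key/value pairs stored in the hstore column.
dark_users = User.select().where(User.attrs.contains({'theme': 'dark'}))

# Merge new keys into the hstore column server-side (field || hstore(...)).
(User
 .update(attrs=User.attrs.update(lang='en'))
 .where(User.username == 'alice')
 .execute())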
Example #3
class PageView(BaseModel):
    account = ForeignKeyField(Account, backref='pageviews')
    url = TextField()
    timestamp = DateTimeField(default=datetime.datetime.now)
    title = TextField(default='')
    ip = CharField(default='')
    referrer = TextField(default='')
    headers = HStoreField()
    params = HStoreField()

    @classmethod
    def create_from_request(cls, account, request):
        parsed = urlparse(request.args['url'])
        params = dict(parse_qsl(parsed.query))

        return PageView.create(account=account,
                               url=parsed.path,
                               title=request.args.get('t') or '',
                               ip=request.headers.get('x-forwarded-for',
                                                      request.remote_addr),
                               referrer=request.args.get('ref') or '',
                               headers=dict(request.headers),
                               params=params)
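Because headers and params are hstore columns, page views can be filtered on them server-side. A sketch; the parameter key and header value are hypothetical:

# Inside a request handler (Flask-style request object assumed):
#     PageView.create_from_request(account, request)

# Page views whose query string carried a given parameter key.
campaign_views = (PageView
                  .select()
                  .where(PageView.params.contains('utm_source')))

# Page views that arrived with a specific header value.
curl_views = (PageView
              .select()
              .where(PageView.headers.contains({'User-Agent': 'curl/8.5.0'})))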
Example #4
class NetworkMetadata(BaseModel):
    network = ForeignKeyField(Network, related_name='network_metadata')
    epoch = IntegerField()
    best_epoch = IntegerField()
    rms_test = FloatField(null=True)
    rms_train = FloatField(null=True)
    rms_validation = FloatField()
    rms_validation_descaled = FloatField(null=True)
    loss_test = FloatField(null=True)
    loss_train = FloatField(null=True)
    loss_validation = FloatField()
    metadata = HStoreField()

    @classmethod
    def from_dict(cls, json_dict, network):
        with db.atomic() as txn:
            stringified = {
                str(key): str(val)
                for key, val in json_dict.items()
            }
            try:
                rms_train = json_dict['rms_train']
                loss_train = json_dict['loss_train']
            except KeyError:
                loss_train = rms_train = None
            try:
                loss_test = json_dict['loss_test']
                rms_test = json_dict['rms_test']
            except KeyError:
                rms_test = loss_test = None
            try:
                rms_validation_descaled = json_dict['rms_validation_descaled']
            except KeyError:
                rms_validation_descaled = None
            network_metadata = NetworkMetadata(
                network=network,
                epoch=json_dict['epoch'],
                best_epoch=json_dict['best_epoch'],
                rms_train=rms_train,
                rms_validation=json_dict['rms_validation'],
                rms_validation_descaled=rms_validation_descaled,
                rms_test=rms_test,
                loss_train=loss_train,
                loss_validation=json_dict['loss_validation'],
                loss_test=loss_test,
                metadata=stringified)
            network_metadata.save()
            return network_metadata
Example #5
class User(Model):
    username = CharField(unique=True)
    gistauth = CharField()
    pastebinauth = CharField()
    operationstatus = HStoreField()

    # operationstatus template
    # {
    #     'operation': 'gist' or 'pastebin',
    #     # gist params
    #     'description': 'gist description',
    #     'public': true or false,
    #     '<num>name': '<file name>',
    #     '<num>content': '<file content>',
    # }
    class Meta:
        database = pgs_db
        db_table = '__test_pastebot__'
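A sketch of filling in and querying the operationstatus template documented in the comments above; the values are hypothetical, and since hstore stores values as text the booleans end up as strings:

user = User.create(username='bob', gistauth='gist-token',
                   pastebinauth='pb-token',
                   operationstatus={
                       'operation': 'gist',
                       'description': 'scratch notes',
                       'public': 'false',        # hstore values are always text
                       '0name': 'notes.txt',
                       '0content': 'hello world',
                   })

# Users that currently have a pending gist operation.
pending = User.select().where(
    User.operationstatus.contains({'operation': 'gist'}))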
Example #6
class Media(Timestamped):
    # S3 bucket where the media lives
    media_uuid = UUIDField(primary_key=True, index=True)

    # Subscription's username is the Subscription's ID
    username = ForeignKeyField(User_profile, on_delete="CASCADE", index=True)

    # tags stored as hstore key/value pairs
    tags = HStoreField(null=True, index=True)

    # description of what the media is about - full text search
    description = TSVectorField(null=True)

    # geocode info
    latitude = DecimalField(max_digits=9, decimal_places=6, null=True)
    longitude = DecimalField(max_digits=9, decimal_places=6, null=True)

    # time taken
    media_created = DateTimeField(null=True)

    # likes
    likes = IntegerField(default=0)
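A sketch of storing and searching a Media row through the tsvector and hstore columns above; the profile lookup and the values are hypothetical:

import uuid
from peewee import fn

owner = User_profile.get()   # some existing profile row (hypothetical)

media = Media.create(
    media_uuid=uuid.uuid4(),
    username=owner,
    tags={'category': 'landscape', 'license': 'cc-by'},
    description=fn.to_tsvector('sunset over the harbour'),
)

# Full-text search against the tsvector column.
sunsets = Media.select().where(Media.description.match('sunset'))

# hstore tag lookup.
landscapes = Media.select().where(Media.tags.contains({'category': 'landscape'}))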
Example #7
class Network(BaseModel):
    filter = ForeignKeyField(Filter, related_name='filter', null=True)
    train_script = ForeignKeyField(TrainScript, related_name='train_script')
    feature_prescale_bias = HStoreField()
    feature_prescale_factor = HStoreField()
    target_prescale_bias = HStoreField()
    target_prescale_factor = HStoreField()
    feature_names = ArrayField(TextField)
    feature_min = HStoreField()
    feature_max = HStoreField()
    target_names = ArrayField(TextField)
    target_min = HStoreField()
    target_max = HStoreField()
    timestamp = DateTimeField(constraints=[SQL('DEFAULT now()')])

    @classmethod
    def find_partners_by_id(cls, network_id):
        q1 = Network.find_similar_topology_by_id(network_id,
                                                 match_train_dim=False)
        q2 = Network.find_similar_networkpar_by_id(network_id,
                                                   match_train_dim=False)
        return q1 & q2

    @classmethod
    def find_similar_topology_by_settings(cls, settings_path):
        with open(settings_path) as file_:
            json_dict = json.load(file_)
        query = cls.find_similar_topology_by_values(
            json_dict['hidden_neurons'],
            json_dict['hidden_activation'],
            json_dict['output_activation'],
            train_dim=json_dict['train_dim'])
        return query

    @classmethod
    def find_similar_topology_by_id(cls, network_id, match_train_dim=True):
        query = (Network.select(
            Hyperparameters.hidden_neurons, Hyperparameters.hidden_activation,
            Hyperparameters.output_activation).where(
                Network.id == network_id).join(Hyperparameters))

        train_dim, = (Network.select(Network.target_names).where(
            Network.id == network_id)).tuples().get()
        if match_train_dim is not True:
            train_dim = None
        query = cls.find_similar_topology_by_values(*query.tuples().get(),
                                                    train_dim=train_dim)
        query = query.where(Network.id != network_id)
        return query

    @classmethod
    def find_similar_topology_by_values(cls,
                                        hidden_neurons,
                                        hidden_activation,
                                        output_activation,
                                        train_dim=None):
        query = (Network.select().join(Hyperparameters).where(
            Hyperparameters.hidden_neurons == AsIs(hidden_neurons)).where(
                Hyperparameters.hidden_activation == AsIs(hidden_activation)
            ).where(
                Hyperparameters.output_activation == AsIs(output_activation)))

        if train_dim is not None:
            query = query.where(Network.target_names == AsIs(train_dim))
        return query

    @classmethod
    def find_similar_networkpar_by_settings(cls, settings_path):
        with open(settings_path) as file_:
            json_dict = json.load(file_)

        query = cls.find_similar_networkpar_by_values(
            json_dict['train_dim'], json_dict['goodness'],
            json_dict['cost_l2_scale'], json_dict['cost_l1_scale'],
            json_dict['early_stop_measure'])
        return query

    @classmethod
    def find_similar_networkpar_by_id(cls, network_id, match_train_dim=True):
        query = (Network.select(
            Hyperparameters.goodness, Hyperparameters.cost_l2_scale,
            Hyperparameters.cost_l1_scale,
            Hyperparameters.early_stop_measure).where(
                Network.id == network_id).join(Hyperparameters))

        filter_id, train_dim = (Network.select(
            Network.filter_id, Network.target_names).where(
                Network.id == network_id)).tuples().get()
        if match_train_dim is not True:
            train_dim = None

        query = cls.find_similar_networkpar_by_values(*query.tuples().get(),
                                                      filter_id=filter_id,
                                                      train_dim=train_dim)
        query = query.where(Network.id != network_id)
        return query

    @classmethod
    def find_similar_networkpar_by_values(cls,
                                          goodness,
                                          cost_l2_scale,
                                          cost_l1_scale,
                                          early_stop_measure,
                                          filter_id=None,
                                          train_dim=None):
        # TODO: Add new hyperparameters here?
        query = (Network.select().join(Hyperparameters).where(
            Hyperparameters.goodness == goodness).where(
                Hyperparameters.cost_l2_scale.cast('numeric') == AsIs(
                    cost_l2_scale)).where(
                        Hyperparameters.cost_l1_scale.cast('numeric') == AsIs(
                            cost_l1_scale)).where(
                                Hyperparameters.early_stop_measure ==
                                early_stop_measure))
        if train_dim is not None:
            query = query.where(Network.target_names == AsIs(train_dim))

        if filter_id is not None:
            query = query.where(Network.filter_id == AsIs(filter_id))
        else:
            print('Warning! Not filtering on filter_id')
        return query

    @classmethod
    def find_similar_trainingpar_by_id(cls, network_id):
        query = (Network.select(
            Network.target_names, Hyperparameters.minibatches,
            Hyperparameters.optimizer, Hyperparameters.standardization,
            Hyperparameters.early_stop_after).where(
                Network.id == network_id).join(Hyperparameters))

        filter_id = (Network.select(Network.filter_id).where(
            Network.id == network_id)).tuples().get()[0]
        query = cls.find_similar_trainingpar_by_values(*query.tuples().get())
        query = query.where(Network.id != network_id)
        return query

    @classmethod
    def find_similar_trainingpar_by_values(cls, train_dim, minibatches,
                                           optimizer, standardization,
                                           early_stop_after):
        query = (Network.select().where(
            Network.target_names == AsIs(train_dim)
        ).join(Hyperparameters).where(
            Hyperparameters.minibatches == minibatches).where(
                Hyperparameters.optimizer == optimizer).where(
                    Hyperparameters.standardization == standardization).where(
                        Hyperparameters.early_stop_after == early_stop_after))
        return query

    @classmethod
    def from_folders(cls, pwd, **kwargs):
        for path_ in os.listdir(pwd):
            path_ = os.path.join(pwd, path_)
            if os.path.isdir(path_):
                try:
                    Network.from_folder(path_, **kwargs)
                except IOError:
                    print('Could not parse', path_, 'is training done?')

    @classmethod
    def from_folder(cls, pwd):
        with db.atomic() as txn:
            script_file = os.path.join(pwd, 'train_NDNN.py')
            #with open(script_file, 'r') as script:
            #    script = script.read()
            train_script = TrainScript.from_file(script_file)

            json_path = os.path.join(pwd, 'nn.json')
            nn = QuaLiKizNDNN.from_json(json_path)
            with open(json_path) as file_:
                json_dict = json.load(file_)
                dict_ = {}
                for name in [
                        'feature_prescale_bias', 'feature_prescale_factor',
                        'target_prescale_bias', 'target_prescale_factor',
                        'feature_names', 'feature_min', 'feature_max',
                        'target_names', 'target_min', 'target_max'
                ]:
                    attr = getattr(nn, '_' + name)
                    if 'names' in name:
                        dict_[name] = list(attr)
                    else:
                        dict_[name] = {
                            str(key): str(val)
                            for key, val in attr.items()
                        }

            dict_['train_script'] = train_script

            with open(os.path.join(pwd, 'settings.json')) as file_:
                settings = json.load(file_)

            dict_['filter_id'] = Filter.find_by_path_name(
                settings['dataset_path'])
            network = Network(**dict_)
            network.save()
            hyperpar = Hyperparameters.from_settings(network, settings)
            hyperpar.save()
            if settings['optimizer'] == 'lbfgs':
                optimizer = LbfgsOptimizer(hyperparameters=hyperpar,
                                           maxfun=settings['lbfgs_maxfun'],
                                           maxiter=settings['lbfgs_maxiter'],
                                           maxls=settings['lbfgs_maxls'])
            elif settings['optimizer'] == 'adam':
                optimizer = AdamOptimizer(
                    hyperparameters=hyperpar,
                    learning_rate=settings['learning_rate'],
                    beta1=settings['adam_beta1'],
                    beta2=settings['adam_beta2'])
            elif settings['optimizer'] == 'adadelta':
                optimizer = AdadeltaOptimizer(
                    hyperparameters=hyperpar,
                    learning_rate=settings['learning_rate'],
                    rho=settings['adadelta_rho'])
            elif settings['optimizer'] == 'rmsprop':
                optimizer = RmspropOptimizer(
                    hyperparameters=hyperpar,
                    learning_rate=settings['learning_rate'],
                    decay=settings['rmsprop_decay'],
                    momentum=settings['rmsprop_momentum'])
            optimizer.save()

            activations = settings['hidden_activation'] + [
                settings['output_activation']
            ]
            for ii, layer in enumerate(nn.layers):
                nwlayer = NetworkLayer(
                    network=network,
                    weights=np.float32(layer._weights).tolist(),
                    biases=np.float32(layer._biases).tolist(),
                    activation=activations[ii])
                nwlayer.save()

            NetworkMetadata.from_dict(json_dict['_metadata'], network)
            TrainMetadata.from_folder(pwd, network)

            network_json = NetworkJSON(network=network,
                                       network_json=json_dict,
                                       settings_json=settings)
            network_json.save()
            return network

    def to_QuaLiKizNDNN(self):
        json_dict = self.network_json.get().network_json
        nn = QuaLiKizNDNN(json_dict)
        return nn

    to_QuaLiKizNN = to_QuaLiKizNDNN

    def to_matlab(self):
        js = self.network_json.get().network_json
        newjs = {}
        for key, val in js.items():
            newjs[key.replace('/', '_').replace(':', '_')] = val
        io.savemat('nn' + str(self.id) + '.mat', newjs)

    def summarize(self):
        net = self  # summarize this network; select().get() would grab an arbitrary row
        print({
            'target_names': net.target_names,
            'rms_test': net.network_metadata.get().rms_test,
            'rms_train': net.network_metadata.get().rms_train,
            'rms_validation': net.network_metadata.get().rms_validation,
            'epoch': net.network_metadata.get().epoch,
            'train_time': net.train_metadata.get().walltime[-1],
            'hidden_neurons': net.hyperparameters.get().hidden_neurons,
            'standardization': net.hyperparameters.get().standardization,
            'cost_l2_scale': net.hyperparameters.get().cost_l2_scale,
            'early_stop_after': net.hyperparameters.get().early_stop_after
        })
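A hypothetical driver sketch for the Network model above, assuming the per-network training output directories that from_folder expects; the path and the network id are placeholders:

networks_root = '/path/to/trained_networks'   # placeholder
Network.from_folders(networks_root)           # import every finished training run

net = Network.get(Network.id == 1)            # placeholder id
net.summarize()                               # prints the summary dict defined above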