Example #1
    def __init__(self,
                 word_vec_list,
                 args,
                 input_dimension=1500,
                 hidden_dimensions=None):
        self.session = load_session()
        self.args = args
        self.weights, self.biases = {}, {}
        self.input_dimension = input_dimension
        if hidden_dimensions is None:
            hidden_dimensions = [1024, 512, self.args.dim]
        self.hidden_dimensions = hidden_dimensions
        self.layer_num = len(self.hidden_dimensions)
        self.encoder_output = None
        self.decoder_output = None
        self.decoder_op = None

        self.word_vec_list = np.reshape(word_vec_list,
                                        [len(word_vec_list), input_dimension])
        if self.args.encoder_normalize:
            self.word_vec_list = preprocessing.normalize(self.word_vec_list)

        self._init_graph()
        self._loss_optimizer()
        tf.global_variables_initializer().run(session=self.session)
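The load_session() helper called above is defined elsewhere in the project and not shown on this page. As a minimal sketch, assuming it only wraps TF1 session creation with on-demand GPU memory growth (the config flags are an assumption, not the project's confirmed code):

import tensorflow as tf

def load_session():
    # assumed helper: build a TF1 session whose GPU memory grows on demand
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    return tf.Session(config=config)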
Example #2
File: bot.py  Project: jkim0304/BCDbot
async def load_session(ctx, *, arg):
    global sess
    if sess:
        utils.save_session(sess, f'Sessions/{sess.name}.json')
    session_name = arg
    sess = utils.load_session(f'Sessions/{session_name}.json')
    await ctx.send(f'Loaded session: {session_name}.')
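The utils.save_session and utils.load_session helpers used by this command are not shown. A hedged sketch of JSON-backed versions, assuming a simple Session record (the Session class and its fields are illustrative assumptions, not the bot's real code):

import json
from dataclasses import dataclass, asdict

@dataclass
class Session:
    # stand-in for the bot's real session object (assumption)
    name: str
    data: dict

def save_session(session, path):
    # write the session state out as JSON
    with open(path, 'w') as f:
        json.dump(asdict(session), f)

def load_session(path):
    # rebuild a Session from its JSON file
    with open(path) as f:
        return Session(**json.load(f))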
Example #3
    def play(self, f=None, dev=0):
        cap = cv.VideoCapture(dev)
        pausa = False
        img_index = 0
        roi_index = 0

        # timestamp (day_month) used to name this session's folder
        time_session = strftime('%d_%m', gmtime())
        path = 'images/' + time_session + '/'

        # restore a previously saved session for this date, if one exists
        if os.path.exists(path + 'save.pkl'):
            img_list, img_index = utils.load_session(path)

        # register mark_corner as the left-click handler on the preview window
        cv.namedWindow('frame')
        cv.setMouseCallback('frame', self.mark_corner)

        while True:
            key = cv.waitKey(1) & 0xFF
            ret, self.frame = cap.read()

            if key == 27:
                cap.release()
                break

            if key == 32:
                pausa = not pausa

            #creates a capture
            if key == ord('c'):
                if not os.path.exists(path):
                    os.makedirs(path)
                cv.imwrite(path + 'img' + str(img_index) + '.png', self.frame)
                print('#Capture: img' + str(img_index))
                img_index += 1

            if key == ord('t') and self.roi_capt:
                cv.imwrite(path + 'roi.png', self.roi)

            #saves the current session
            if key == ord('s'):
                utils.save_session(path)

            if pausa:
                continue

            if self.roi_capt:
                self.roi = self.roi_capture(self.frame, (self.ix, self.iy),
                                            (self.jx, self.jy), f, (255, 255, 255))
            cv.imshow('frame', self.frame)

        cv.destroyAllWindows()
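The utils.save_session/utils.load_session pair that handles save.pkl above is not shown. A minimal pickle-based sketch consistent with how they are called here (the stored fields are assumptions):

import os
import pickle

def save_session(path, img_list=None, img_index=0):
    # persist the capture state of the current session
    with open(os.path.join(path, 'save.pkl'), 'wb') as f:
        pickle.dump({'img_list': img_list or [], 'img_index': img_index}, f)

def load_session(path):
    # restore a previously saved capture session
    with open(os.path.join(path, 'save.pkl'), 'rb') as f:
        state = pickle.load(f)
    return state['img_list'], state['img_index']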
Example #4
    def __init__(self, args):
        self.args = args
        self.session = load_session()
        self.kgs = read_kgs_from_folder(args.training_data,
                                        args.dataset_division,
                                        args.alignment_module, False)
        self.entities = self.kgs.kg1.entities_set | self.kgs.kg2.entities_set
        self.word2vec_path = args.word2vec_path
        if os.path.exists(os.path.join(args.training_data, 'entity_local_name_1')) \
                and os.path.exists(os.path.join(args.training_data, 'entity_local_name_2')):
            self.entity_local_name_dict = read_local_name(
                args.training_data, set(self.kgs.kg1.entities_id_dict.keys()),
                set(self.kgs.kg2.entities_id_dict.keys()))
        else:
            self.entity_local_name_dict = self._get_local_name_by_name_triple()
        self._generate_literal_vectors()
        self._generate_name_vectors_mat()
        self._generate_attribute_value_vectors()
Example #5
    def __init__(self, data, args, attr_align_model):
        super().__init__(data, args, attr_align_model)

        self.flag1 = -1
        self.flag2 = -1
        self.early_stop = False

        self._define_variables()

        self._define_name_view_graph()
        self._define_relation_view_graph()
        self._define_attribute_view_graph()

        self._define_cross_kg_entity_reference_relation_view_graph()
        self._define_cross_kg_entity_reference_attribute_view_graph()
        self._define_cross_kg_relation_reference_graph()
        self._define_cross_kg_attribute_reference_graph()

        self._define_common_space_learning_graph()
        self._define_space_mapping_graph()

        self.session = load_session()
        tf.global_variables_initializer().run(session=self.session)
Example #6
    def __init__(self, data, args, predicate_align_model):
        super().__init__(data, args, predicate_align_model)
        self.out_folder = generate_out_folder(self.args.output,
                                              self.args.training_data, '',
                                              self.__class__.__name__)

        self.flag1 = -1
        self.flag2 = -1
        self.early_stop = False

        self._define_variables()

        self._define_name_view_graph()
        self._define_relation_view_graph()
        self._define_attribute_view_graph()

        self._define_cross_kg_entity_reference_relation_view_graph()
        self._define_cross_kg_entity_reference_attribute_view_graph()
        self._define_cross_kg_attribute_reference_graph()
        self._define_cross_kg_relation_reference_graph()
        self._define_common_space_learning_graph()

        self.session = load_session()
        tf.global_variables_initializer().run(session=self.session)
Example #7
        experiment.set_name(args.namestr)
        args.experiment = experiment

    # Because we all like reproducibility (...and also know where we keep our towels)
    # ------------------------------------------------------------------------------
    np.random.seed(42)
    torch.manual_seed(42)
    torch.cuda.manual_seed_all(42)

    # Obtain and train our model here:
    # ------------------------------------------------------------------------------
    model, optim = get_model()
    if use_cuda:
        model.cuda()

    training_loader, validation_loader = _dataloader(args)

    # load trained model if necessary
    if args.load_dir is not None:
        model, optim, start_epoch = load_session(model, optim, args)
    else:
        start_epoch = 0

    fit(model, training_loader, validation_loader, optim, start_epoch, args)

    args.experiment.end()

    # ------------------------------------------------------------------------------
    # So Long, and Thanks for All the Fish!   >< ((('>    >< ((('>    >< ((('>
    # ------------------------------------------------------------------------------
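Here load_session(model, optim, args) restores a training checkpoint from args.load_dir. A hedged sketch, assuming a single checkpoint file holding 'model', 'optim' and 'epoch' entries (the file name and keys are assumptions):

import os
import torch

def load_session(model, optim, args):
    # restore model/optimizer state and the epoch to resume from
    ckpt = torch.load(os.path.join(args.load_dir, 'checkpoint.pth'),
                      map_location='cpu')
    model.load_state_dict(ckpt['model'])
    optim.load_state_dict(ckpt['optim'])
    return model, optim, ckpt['epoch']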
Example #8
# load ONE and the list of mice / sessions
import numpy as np
import utils
from oneibl.one import ONE
one = ONE()
mice_names, ins, ins_id, sess_id, _ = utils.get_bwm_ins_alyx(one)
stimuli_arr, actions_arr, stim_sides_arr, session_uuids = [], [], [], []

# select a particular mouse
mouse_name = 'KS016'
for i in range(len(sess_id)):
    if mice_names[i] == mouse_name:  # keep only sessions from the selected mouse
        data = utils.load_session(sess_id[i])
        if data['choice'] is not None and data['probabilityLeft'][0] == 0.5:
            stim_side, stimuli, actions, pLeft_oracle = utils.format_data(data)
            stimuli_arr.append(stimuli)
            actions_arr.append(actions)
            stim_sides_arr.append(stim_side)
            session_uuids.append(sess_id[i])

# format data
stimuli, actions, stim_side = utils.format_input(stimuli_arr, actions_arr,
                                                 stim_sides_arr)
session_uuids = np.array(session_uuids)

# import models
from models.expSmoothing_stimside import expSmoothing_stimside as exp_stimside
from models.expSmoothing_prevAction import expSmoothing_prevAction as exp_prevAction
from models.optimalBayesian import optimal_Bayesian as optBay
from models.biasedApproxBayesian import biased_ApproxBayesian as baisedApproxBay
from models.biasedBayesian import biased_Bayesian