Code Example #1
    def _print_loss(self, iteration):
        """Prints iteration, loss mean, loss std, elapsed time since last print
        """

        dt = time.time() - self._time_start_print
        loss_string = "iteration {:d}".format(iteration)
        loss_string += ", loss: {:.2e} \u00b1 {:.2e}".format(
            *self._loss_std_mean(self.print_every))
        loss_string += ", time: " + utils.get_formatted_time(dt)
        self._time_start_print = time.time()

        print(loss_string)

    def _append_to_dataset(self, buffer, dataset_name):
        assert isinstance(buffer, list)
        assert isinstance(dataset_name, str)

        now = time.time()
        file = self.file

        ndarray = np.stack(buffer)

        shape = ndarray.shape
        dataset = file[dataset_name]

        current_length = self.accumulated_stored_samples[dataset_name]

        dataset_shape = dataset.shape
        if dataset_shape[0] < current_length + self.buffer_size:
            dataset.resize(dataset.shape[0] + self.preassigned_buffer_size,
                           axis=0)
        self.logger.debug(
            "Preparing to update dataset {} in index range [{}, {})".format(
                dataset_name, current_length, current_length + shape[0]))

        dataset[current_length:current_length + shape[0]] = ndarray

        self.accumulated_stored_samples[dataset_name] += shape[0]

        self.last_modified_timestamp[dataset_name] = time.time()
        self.last_modified_time[dataset_name] = get_formatted_time(
            self.last_modified_timestamp[dataset_name])

        dataset.attrs["last_modified_timestamp"] = json.dumps(
            self.last_modified_timestamp)
        dataset.attrs["last_modified_time"] = json.dumps(
            self.last_modified_time)

        self.logger.debug(
            "Data has been appended to {} with shape {}. Current dataset {} shape {}."
            .format(dataset.name, ndarray.shape, dataset_name, dataset.shape))
        buffer.clear()

        self.logger.debug(
            "TIMING: recorder took {} seconds to store data with shape {}.".format(
                time.time() - now, ndarray.shape))

        return dataset_shape
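
The pattern above grows the HDF5 dataset in fixed-size steps, writes the stacked buffer into the free slice, and clears the buffer. A minimal self-contained sketch of the same pattern, assuming only h5py and numpy; the file name demo.h5 and dataset name obs are illustrative, not from the project:

import h5py
import numpy as np

BUFFER_SIZE = 4  # grow the first axis in steps of this many rows

with h5py.File("demo.h5", "w") as f:
    # resizable dataset: first axis unlimited, preallocated to one buffer
    ds = f.create_dataset("obs", shape=(BUFFER_SIZE, 3),
                          maxshape=(None, 3), dtype="f4")
    stored = 0
    for _ in range(3):
        buffer = [np.random.rand(3).astype("f4") for _ in range(BUFFER_SIZE)]
        block = np.stack(buffer)
        # resize only when the next write would overflow the current extent
        if ds.shape[0] < stored + len(block):
            ds.resize(ds.shape[0] + BUFFER_SIZE, axis=0)
        ds[stored:stored + len(block)] = block
        stored += len(block)
        buffer.clear()
    ds.attrs["stored"] = stored  # like accumulated_stored_samples above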
Code Example #3
    def test(self, max_batch_size=16):
        """Creates predictions for submission by processing batches of
           molecules of the same size

        Args:
            max_batch_size (int): maximum batch size
        """

        print("Creating predictions for submission ...")
        time_start_total = time.time()

        # pass batch size to test loader
        self.test_loader.batch_size = max_batch_size

        pred_list = []
        id_list = []

        self.net.train(mode=False)
        # test loop
        for (single, pairs, type_data, id_data, batch_size,
             size) in self.test_loader:

            # get prediction
            with torch.no_grad():
                pred = self.net(single, pairs, batch_size, size)

            # multiply by std and add mean from the train set
            for i in range(8):
                pred[type_data == i] *= self.std[i]
                pred[type_data == i] += self.mean[i]

            pred_list.append(pred.cpu().flatten())
            id_list.append(id_data.flatten())

        self._create_submission_file(pred_list, id_list)

        # print total time
        dt = time.time() - time_start_total
        print("\ntotal time: " + utils.get_formatted_time(dt))
Code Example #4
    def train(self,
              iterations=100,
              optimizer='Adam',
              learning_rate=1e-4,
              weight_decay=0,
              momentum=0,
              betas=(0.9, 0.999),
              save_name=None,
              save_every=None,
              print_every=10):
        """Trains the network

        Args:
            iterations (int, optional): Number of iterations. Defaults to 100.
            optimizer (str, optional): 'Adam', 'SGD', 'SGD_Nesterov', 'RMSprop'
                                        or 'Adagrad'. Defaults to 'Adam'.
            learning_rate (float, optional): Learning rate. Defaults to 1e-4.
            weight_decay (float, optional): Regularization parameter.
                                            Defaults to 0.
            momentum (float, optional): Momentum of 'SGD', 'SGD_Nesterov'
                                        or 'RMSprop'. Defaults to 0.
            betas (tuple of floats, optional): Betas for Adam.
                                               Defaults to (0.9, 0.999).
            save_name (str, optional): String added to time_stamp.
                                       Defaults to None.
            save_every (int, optional): Saves every specified iteration.
                                        Defaults to None.
            print_every (int, optional): Prints every specified iteration.
                                         Defaults to 10.
        """

        # Store hyper parameters
        self.optimizer_name = optimizer
        self.learning_rate = learning_rate
        self.weight_decay = weight_decay
        self.momentum = momentum
        self.betas = betas
        self.print_every = print_every
        self.save_every = save_every

        # reset if not loaded
        if not self._loaded:
            self.train_loader.iteration = 0
            self.loss_best = float('inf')
            self._loss_list = []
            self._loaded = True

        # create new time stamp and folder if necessary
        if save_every is not None and self.time_stamp_path is None:
            time_stamp = utils.get_time_stamp()
            if save_name is not None:
                time_stamp = time_stamp + '_' + save_name
            print("timestamp: " + time_stamp + '\n')
            self.time_stamp_path = os.path.join(self.networks_path, time_stamp)
            os.mkdir(self.time_stamp_path)

        # save hyper parameters
        if save_every is not None:
            self._save_hyper()

        # pass number of iterations to train loader
        self.train_loader.set_iterations(iterations)

        # set optimizer and loss
        self._set_optimizer()
        self.loss = modules.MSELoss()

        # set timers
        time_start_total = time.time()
        self._time_start_print = time.time()
        self._time_start_save = time.time()

        self.net.train(mode=True)
        # training loop
        for (iteration, single, pairs, scc, type_data,
             size) in self.train_loader:

            self.optimizer.zero_grad()
            pred = self.net(single, pairs, 1, size)
            loss = self.loss(pred, scc)
            loss.backward()
            self.optimizer.step()

            self._loss_list.append(loss.item())

            # print message
            if print_every is not None and (iteration % print_every == 0):
                self._print_loss(iteration)

            # save to file
            if save_every is not None and (iteration % save_every == 0):
                self._save(iteration)

        # print total time
        dt = time.time() - time_start_total
        print("\ntotal time: " + utils.get_formatted_time(dt))
Code Example #5
File: main.py Project: niraj-chaudhary/ldapapi
def password_reset_form():
    msgs = []
    if request.method == "GET":
        return render_template("reset.html")
    elif request.method == "POST":
        empID = request.form.get("empID", None)
        if empID:
            otp_key = empID + "_ldapkey"
            result = ldhelper.search_user_mobile(empID)
            result2 = ldhelper.search_user_email(empID)
            if result:

                user_mobile = result[0][1]['mobile'][0]
                user_email = result2[0][1]['mail'][0]
                # reuse a pending OTP from Redis if one exists, otherwise
                # generate a fresh one
                cotp = redis_store.hmget(otp_key, 'otp')[0]
                if not cotp:
                    totp = pyotp.TOTP('base32secret')
                    cotp = totp.now()
                user_hash = hashlib.sha1(empID + user_email + cotp).hexdigest()
                # Store empID and OTP in redis, expiration time
                redis_store.hmset(
                    otp_key, {
                        'otp': cotp,
                        'key': user_hash,
                        'key1': user_hash,
                        'mail': user_email
                    })
                redis_store.expire(
                    otp_key,
                    app.config.get("KEY_EXPIRATION_MINS", 10) * 60)
                timestamp = urllib.quote_plus(get_formatted_time())
                print(timestamp)
                endpoint = "http://www.example.com/tools/sms.php?mobile={}&transaction_id=hjfkh&variable={}:{}".format(
                    user_mobile, cotp, timestamp)
                content = urllib2.urlopen(endpoint).read()
                if content == "Sent.":
                    msgs.append({
                        "class": "success",
                        "body": "We've sent the OTP to your registered mobile number."
                    })
                    flash(
                        "OTP sent successfully to your registered mobile number"
                    )
                    return redirect(
                        url_for("password_otp_verify",
                                empID=empID,
                                key=user_hash))
                else:
                    msgs.append({
                        "class": "danger",
                        "body": "There is a problem with your registered mobile number."
                    })
            else:
                msgs.append({
                    "class": "danger",
                    "body": "No mobile number found for the given Employee ID"
                })
        else:
            msgs.append({"class": "danger", "body": "Employee ID empty"})
        return render_template("reset.html", msgs=msgs)
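
The redirect target password_otp_verify is not shown here. A hedged sketch of how that route could check the Redis-stored hash and OTP; the route body, template names, and form field are assumptions, not the project's code:

@app.route("/password_otp_verify", methods=["GET", "POST"])
def password_otp_verify():
    empID = request.args.get("empID")
    key = request.args.get("key")
    otp_key = empID + "_ldapkey"
    if request.method == "POST":
        submitted_otp = request.form.get("otp", "")
        stored_otp, stored_key = redis_store.hmget(otp_key, 'otp', 'key')
        # both the hash from the redirect and the user-entered OTP must match
        if stored_key == key and stored_otp == submitted_otp:
            return render_template("new_password.html", empID=empID)
        flash("Invalid or expired OTP")
    return render_template("verify_otp.html", empID=empID, key=key)
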
    def __init__(self, config=None, logger=None, monitoring=False):
        self.created_timestamp = time.time()
        self.created_time = get_formatted_time(self.created_timestamp)
        if config:
            self.default_config.update(config)
        self.config = self.default_config
        self.logger = logging.getLogger() if not logger else logger
        self.monitoring = monitoring
        if self.monitoring:
            try:
                print("You are using the opencv-python library, version:",
                      cv2.__version__)
            except NameError:
                raise ValueError("OpenCV not installed!")

        if ("exp_name" in self.config) and (self.config["exp_name"]):
            self.exp_name = self.config["exp_name"]
        else:
            self.exp_name = self.created_time
            self.config["exp_name"] = self.exp_name
        self.save_dir = self.config["save_dir"]

        if not os.path.exists(self.save_dir):
            os.makedirs(self.save_dir)

        self.dataset_names = self.config["dataset_names"]
        self.initialized_dataset = {
            k: False
            for k in self.config["dataset_names"]
        }
        self.filename = self._get_file_name()

        self.buffer_size = self.config["buffer_size"]
        self.preassigned_buffer_size = self.buffer_size
        self.compress = self.config["compress"] or None
        self.file = None
        self.filemode = None

        self.use_video_writer = self.config["use_video_writer"]
        self.video_writer = None
        self.videofile = None

        if os.path.exists(self.filename):
            self.file = self._get_file('a')
        else:
            self.file = self._get_file('w')
            if self.use_video_writer:
                self.videofile = self.filename.replace("h5", "avi")
                self.logger.info(
                    "We will use OpenCV Video Writer to store video at {}.".
                    format(self.videofile))
                fourcc = cv2.VideoWriter_fourcc(*"XVID")
                self.video_writer = cv2.VideoWriter(self.videofile, fourcc, 10,
                                                    (1280, 960))
                self.dataset_names = list(self.dataset_names)
                self.dataset_names.remove('frame')
        file = self.file

        for ds_name in self.dataset_names:

            # skip datasets that already exist instead of aborting the loop
            if self.initialized_dataset[ds_name]:
                continue

            shape = self.config["dataset_shapes"][ds_name]
            shape = (self.preassigned_buffer_size, *shape)
            file.create_dataset(ds_name,
                                shape=shape,
                                dtype=self.config["dataset_dtypes"][ds_name],
                                compression=self.compress,
                                chunks=shape,
                                maxshape=(None, *shape[1:]))

        # file-level metadata only needs to be written once, outside the loop
        file.attrs['filename'] = self.filename
        file.attrs['created_timestamp'] = self.created_timestamp
        file.attrs['created_time'] = self.created_time
        file.attrs["video_file_name"] = self.videofile or ""

        config = json.dumps(self.config)
        file.attrs['config'] = config
        ds_names = json.dumps(self.dataset_names)
        file.attrs["dataset_names"] = ds_names
        timestamp = time.time()
        timen = get_formatted_time(timestamp)

        self.last_modified_timestamp = {
            k: timestamp
            for k in self.dataset_names
        }
        self.last_modified_time = {k: timen for k in self.dataset_names}
        self.buffers = {k: [] for k in self.dataset_names}
        self.accumulated_stored_samples = {k: 0 for k in self.dataset_names}

        info_msg = "{}: HDF5 file {} is ready, with metadata {} and datasets {}".format(
            timen, self.filename, config, ds_names)
        self.logger.info(info_msg)
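
The snippet above shows only __init__, so the class name is unknown; assuming it is called HDF5Recorder, a usage sketch built from the config keys the constructor actually reads:

config = {
    "save_dir": "./records",
    "exp_name": "demo_run",
    "dataset_names": ["frame", "action"],
    "dataset_shapes": {"frame": (960, 1280, 3), "action": (2,)},
    "dataset_dtypes": {"frame": "uint8", "action": "float32"},
    "buffer_size": 64,
    "compress": "gzip",
    "use_video_writer": False,
}

# HDF5Recorder is an assumed class name; only its __init__ appears above
recorder = HDF5Recorder(config=config, monitoring=False)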