def fit(self, train_df):
    """Fit one auto-ARIMA model per column of ``train_df``.

    Each column is cleaned (interpolated, then back-filled), optionally
    Box-Cox transformed (the fitted lambda is stored per column in
    ``self.lmbda_boxcox``), and modelled with a non-seasonal ARIMA whose
    seasonality is captured by Fourier-term exogenous regressors.

    Returns the dict of fitted models keyed by column name.
    """
    self.ds = pd.Series(train_df.index)
    print("Fitting...")
    columns = train_df.columns
    progress_bar = ProgressBar(len(columns))
    for column in columns:
        series = train_df[column].interpolate().bfill()
        if self.use_boxcox:
            original_index = series.index
            transformed, self.lmbda_boxcox[column] = boxcox(series)
            series = pd.Series(transformed, index=original_index)
        series.index.name = "ds"
        series.name = "y"
        # Seasonality is modelled through Fourier exogenous regressors
        # rather than a seasonal ARIMA, keeping the order search cheap.
        exog = fourier(
            len(series),
            seasonality=self.seasonality,
            n_terms=self.n_fourier_terms)
        self.models[column] = pm.auto_arima(
            series,
            seasonal=False,
            exogenous=exog,
            method="bfgs",
            suppress_warnings=True)
        progress_bar.update()
    progress_bar.finish()
    return self.models
def play_game(trie):
    """Run one interactive Boggle round.

    Shows a shuffled board, counts down three minutes (interruptible
    with Ctrl-C), then searches the board against *trie* and prints
    the answers.

    Fix: the original used Python 2 ``print`` statements and
    ``raw_input``, which are syntax/name errors under Python 3 — the
    rest of this file already uses Python 3 ``print(...)`` calls.
    Runtime output is unchanged.
    """
    letters = Cubes().shuffle()
    board = Board(letters)
    print(board)
    print('You have 3 minutes - press Ctrl-C to finish early')
    bar = ProgressBar(60)
    try:
        event = Event()
        # 60 ticks x 3 s waits = 180 s total game time.
        for i in range(60):
            bar.show(i)
            event.wait(3)
        bar.finish()
        os.system('say "time is up, stop boggling"')
    except KeyboardInterrupt:
        # Player chose to finish early; fall through to the answers.
        pass
    input('\nPress <ENTER> to see answers')
    results = board_search(board, trie)
    answers = Answers()
    answers.add(results)
    print(answers)
def process_frame_segments(args, segments, width, height):
    """Post-process frame segments to set frame images, etc.

    Walks every FrameSegment in *segments* and resolves where its frame
    image comes from: either the last frame of the previous segment
    (input file "^") or a page of a PDF input file. Exits the process
    (sys.exit(1)) on any unrecoverable segment error.

    args     -- parsed CLI options; .output, .quiet and .debug are read here
    segments -- full ordered segment list (indexed by segment_number)
    width    -- target frame width, forwarded to the generators
    height   -- target frame height, forwarded to the generators
    """
    fn = "process_frame_segments"
    globals.log.info("Processing frames...")
    # Only FrameSegment instances need processing; other segment types
    # are left untouched (but remain addressable via *segments*).
    frame_segments = [s for s in segments if isinstance(s, FrameSegment)]
    n = len(frame_segments)
    globals.log.debug("{fn}(): num frames = {n}".format(fn=fn, n=n))
    # Progress bar is suppressed in quiet/debug mode or when there is
    # nothing to do.
    progress = ProgressBar(max_value=n,
                           quiet=args.quiet or args.debug or n == 0)
    progress.update(0)
    for i, f in enumerate(frame_segments):
        try:
            globals.log.debug(
                "{fn}(): frame (before) = {b}".format(fn=fn, b=f))
            # Frame segments that use a frame from the previous segment.
            if (f.input_file == "^"):
                if (f.segment_number > 0):
                    prev = segments[f.segment_number - 1]
                    globals.log.debug(
                        "{fn}(): prev = {p}".format(fn=fn, p=prev))
                    # Materialise the previous segment first so a frame
                    # can be extracted from it.
                    prev.generate_temp_file(args.output, width=width,
                                            height=height)
                    f.use_frame(
                        prev.generate_frame(f.frame_number, args.output,
                                            width=width, height=height))
                else:
                    # "^" on the very first segment has no predecessor.
                    globals.log.error(
                        "frame segment {s} is attempting to use the last "
                        "frame of a non-existent previous "
                        "segment".format(s=f.segment_number))
                    sys.exit(1)
            # Frame segments whose frame comes from a PDF file.
            else:
                suffix = PurePath(f.input_file).suffix
                if (suffix.lower() == ".pdf"):
                    f.use_frame(f.generate_temp_file(args.output, width=width,
                                                     height=height))
                else:
                    # Only PDF inputs are supported for direct frames.
                    globals.log.error(
                        'unexpected input file type "{s}" for frame segment '
                        "{f}".format(s=suffix, f=f.segment_number))
                    sys.exit(1)
            # NOTE(review): update(i) means the bar never reaches n on
            # the last iteration; finish() below presumably completes it.
            progress.update(i)
            globals.log.debug("{fn}(): frame (after) = "
                              "{a}".format(fn=fn, a=f))
        except SegmentError as e:
            # Close the bar before logging so output is not interleaved.
            progress.finish()
            globals.log.exception(e)
            sys.exit(1)
    else:
        # for-else: runs when the loop completes without break/exit.
        progress.finish()
def predict(self, steps=365, freq="D"):
    """Forecast every fitted Prophet model *steps* periods ahead.

    steps -- forecast horizon length (default 365)
    freq  -- pandas frequency string for the future index (default "D")

    Stores each per-item forecast frame (yhat, yhat_lower, yhat_upper,
    Box-Cox inverted when applicable) in ``self.fcst`` and returns them
    concatenated column-wise with a sorted column index.
    """
    print("Forecasting...")
    progress_bar = ProgressBar(len(self.models.items()))
    for name, model in self.models.items():
        horizon = model.make_future_dataframe(steps, freq=freq)
        forecast = model.predict(horizon).set_index("ds")
        forecast = forecast[["yhat", "yhat_lower", "yhat_upper"]]
        if self.use_boxcox:
            # Undo the per-item Box-Cox transform applied during fit().
            forecast = inv_boxcox(forecast, self.lmbda_boxcox[name])
        self.fcst[name] = forecast
        progress_bar.update()
    progress_bar.finish()
    return pd.concat(self.fcst, axis=1).sort_index(axis=1)
def fit(self, train_df):
    """Fit one Prophet model per column of ``train_df``.

    Rows with missing values are dropped per column, an optional
    Box-Cox transform is applied (lambda kept in ``self.lmbda_boxcox``),
    and the series is reshaped into Prophet's expected ds/y frame.

    Returns the dict of fitted models keyed by column name.
    """
    print("Fitting...")
    progress_bar = ProgressBar(len(train_df.columns))
    for column in train_df.columns:
        history = train_df[column].dropna()
        if self.use_boxcox:
            original_index = history.index
            transformed, self.lmbda_boxcox[column] = boxcox(history)
            history = pd.Series(transformed, index=original_index)
        # Prophet expects a two-column frame named "ds" / "y".
        history.index.name = "ds"
        history.name = "y"
        model = Prophet(**self.prophet_config)
        model.fit(history.reset_index())
        self.models[column] = model
        progress_bar.update()
    progress_bar.finish()
    return self.models
def predict(self, steps=365):
    """Forecast every fitted ARIMA model *steps* periods ahead.

    Rebuilds the Fourier exogenous matrix for the horizon, requests the
    point forecast plus a confidence interval sized by
    ``self.interval_width``, and (when enabled) inverts the Box-Cox
    transform. Per-item frames are cached in ``self.fcst`` and returned
    concatenated column-wise.
    """
    print("Forecasting...")
    progress_bar = ProgressBar(len(self.models.items()))
    for name, model in self.models.items():
        exog = fourier(
            steps,
            seasonality=self.seasonality,
            n_terms=self.n_fourier_terms)
        mean, conf_int = model.predict(
            exogenous=exog,
            n_periods=steps,
            return_conf_int=True,
            alpha=(1.0 - self.interval_width))
        forecast = pd.DataFrame()
        # Column insertion order matches the original implementation.
        forecast["yhat_lower"] = conf_int[:, 0]
        forecast["yhat"] = mean
        forecast["yhat_upper"] = conf_int[:, 1]
        if self.use_boxcox:
            forecast = inv_boxcox(forecast, self.lmbda_boxcox[name])
        self.fcst[name] = forecast
        progress_bar.update()
    progress_bar.finish()
    return pd.concat(self.fcst, axis=1)
# Fragment: multipart-upload one file to Glacier with basic error handling.
# NOTE(review): relies on names bound outside this chunk (file_path, ind,
# total, filename, glacier, PART_SIZE, log) — presumably the body of a
# per-file loop like the one at the end of this file; confirm against caller.
file_size = os.path.getsize(file_path)
expected_part_count = ceil(file_size / PART_SIZE)
print('(' + str(ind) + '/' + str(total) + ') ' + filename + ': ' + str(round(file_size/1024/1024)) + 'MB')
upload_id = glacier.initiate_multipart_upload(file_path)
with open(file_path, mode = 'rb') as archive:
    progress_bar = ProgressBar(expected_part_count)
    part_ind = 0
    content = archive.read(PART_SIZE)
    while True:
        progress_bar.advance()
        try:
            # A truthy return from upload_multipart_part is treated as
            # "all parts sent" — presumably the client's completion
            # signal; verify against the glacier wrapper's contract.
            if glacier.upload_multipart_part(upload_id, part_ind, content):
                archive_id = glacier.complete_multipart_upload(upload_id, archive)
                print(filename + ',' + archive_id, file = log)
                progress_bar.finish()
                break;
            else:
                content = archive.read(PART_SIZE)
                part_ind += 1
        except TimeoutError as error:
            # Best-effort: skip this file on timeout and move on.
            # NOTE(review): the in-flight multipart upload is not aborted.
            print(repr(error) + ' skip file')
            break;
        except KeyboardInterrupt:
            print('\nUser stopped!')
            sys.exit(0)
# NOTE(review): redundant — the with-statement already closed *archive*.
archive.close()
ind += 1
log.close()
# Upload every file in *directory* whose extension is in *exts* to Glacier
# as a multipart upload, logging "<filename>,<archive_id>" lines to *log*.
# Fixes vs. original: manual index counter replaced by enumerate(); string
# path concatenation replaced by os.path.join(); redundant archive.close()
# removed (the with-statement already closes the file); stray ";" dropped.
filenames = [f for f in os.listdir(directory)
             if f.lower().endswith(tuple(exts))]
total = len(filenames)
for ind, filename in enumerate(filenames, start=1):
    file_path = os.path.join(directory, filename)
    file_size = os.path.getsize(file_path)
    expected_part_count = ceil(file_size / PART_SIZE)
    print('(' + str(ind) + '/' + str(total) + ') ' + filename + ': '
          + str(round(file_size/1024/1024)) + 'MB')
    upload_id = glacier.initiate_multipart_upload(file_path)
    with open(file_path, mode='rb') as archive:
        progress_bar = ProgressBar(expected_part_count)
        part_ind = 0
        content = archive.read(PART_SIZE)
        while True:
            progress_bar.advance()
            # A truthy return is treated as "all parts sent" — presumably
            # the client's completion signal; verify against the glacier
            # wrapper's contract.
            if glacier.upload_multipart_part(upload_id, part_ind, content):
                archive_id = glacier.complete_multipart_upload(upload_id,
                                                               archive)
                print(filename + ',' + archive_id, file=log)
                progress_bar.finish()
                break
            content = archive.read(PART_SIZE)
            part_ind += 1
log.close()