def post(self, bitbook_id, bitnote_id):
    """Handle a form POST that updates one field of a note.

    Dispatches on the submitted ``field_type``:

    * ``Heading``    -- renames the note itself.
    * ``CommentBox`` -- appends a Comment authored by the current user.
    * ``Link``       -- stores the link and scrapes OpenGraph-style
                        metadata from it via ``summary.Summary``.
    * ``Location``   -- geocodes the posted address with Nominatim.
    * anything else  -- treated as a model class name; a generated
                        WTForms model form copies the posted values
                        onto the matching bitfield.

    Returns the re-rendered note page.
    """
    note, bitbook, unused_bitfields = self.get_context(
        bitbook_id, bitnote_id)
    field_type = request.form['field_type']
    field_title = request.form['field_title']
    if field_type == 'Heading':
        note.title = request.form['Heading']
        note.save()
    else:
        # BUGFIX: the original did `[x for x in ... if ...][0]`, which
        # raised IndexError before the `if field:` guard could run
        # whenever no bitfield matched the submitted title.
        field = next((x for x in note.bitfields if x.title == field_title),
                     None)
        if field:
            if field_type == 'CommentBox':
                body = request.form['body']
                author = User.objects.get_or_404(id=current_user.id)
                field.comments.append(Comment(body=body, author=author))
            elif field_type == 'Link':
                if request.form['link']:
                    link = request.form['link']
                    field.link = link
                    # Best-effort scrape of the target page for a
                    # rich link preview.
                    s = summary.Summary(link)
                    s.extract()
                    if s.url:
                        field.og_url = s.url
                    if s.title:
                        field.og_title = s.title
                    if s.image:
                        field.og_image = s.image.url
                    if s.description:
                        field.og_description = s.description
            elif field_type == 'Location':
                geolocator = Nominatim()
                place = geolocator.geocode(request.form['address'])
                field.location = [place.latitude, place.longitude]
                field.addr = place.address
            else:
                # Generic path: resolve the model class by name and let
                # a generated form populate the field object.
                constructor = globals()[field_type]
                mform = model_form(constructor,
                                   exclude=['created_at', 'title'])
                form = mform(request.form)
                form.populate_obj(field)
        note.save()
    return render_template('notes/note.html', note=note, bitbook=bitbook,
                           unused_bitfields=unused_bitfields)
def pooled(df, outcomeKey, predictorKeys, poolByKey):
    """Fit log(outcome) = alpha_j + beta * x with emcee, one intercept
    per pool and a shared slope.

    Measurement Level Model:
        (y_i | alpha, beta, sigma_i) ~ normal(alpha + beta * values_i, sigma_i**2)
    Population Level Model; or Prior Level:
        No priors

    The per-pool measurement scatter dy_j is estimated as the standard
    deviation of y within each pool.
    """
    y = np.log(df[outcomeKey]).values.ravel()
    y[~np.isfinite(y)] = -1  # clamp -inf/nan from log of non-positive values
    x = df[predictorKeys].values.ravel()
    p = df[poolByKey].values.ravel()
    poolValues = df[poolByKey].unique()
    npar = len(poolValues)
    pidx = []
    dy = np.empty(npar)
    for i, poolValue in enumerate(poolValues):
        idx = (p == poolValue)
        pidx.append(idx)
        dy[i] = np.std(y[idx])
    # NOTE(review): this masks dy (length npar) against y (length n);
    # it only lines up when the lengths match -- verify intent.
    dy[dy == 0.0] = y[dy == 0.0]

    def lnlike(params, *args):
        """Log-likelihood: params = (alpha_1..alpha_npar, b)."""
        a = params[:-1]
        b = params[-1]
        xx, yy, dyy, ppidx = args
        lnl = 0
        for i, idx in enumerate(ppidx):
            # FIX: use the dyy passed through args instead of silently
            # closing over the outer `dy` (same array, clearer contract).
            lnl += np.sum([
                stats.norm.logpdf(yyy, a[i] + b * xxx, dyy[i])
                for yyy, xxx in zip(yy[idx], xx[idx])
            ])
        if not np.isfinite(lnl):
            return -np.inf
        return lnl

    guess = npar * [0.9] + [-0.61]
    ndim, nwalkers, nburn, nstep = len(guess), 2 * len(guess), 10, 100
    pos = [np.array(guess) + 1e-4 * np.random.randn(ndim)
           for i in range(nwalkers)]
    sampler = emcee.EnsembleSampler(nwalkers, ndim, lnlike,
                                    args=(x, y, dy, pidx))
    pos, prob, state = sampler.run_mcmc(pos, nburn)
    sampler.reset()
    pos, prob, state = sampler.run_mcmc(pos, nstep, rstate0=state)
    summ = summary.Summary()
    summ.summarize(sampler.chain)
    # BUGFIX: removed leftover `import pdb; pdb.set_trace()` breakpoint
    # that froze every call at the end of the fit.
def get_summary(url):
    """Return the scraped description for *url*, or '' on any failure."""
    res = ''
    s = summary.Summary(url)
    try:
        s.extract()
        res = s.description
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # propagate; extraction remains deliberately best-effort.
        print('Error parsing url: ' + url)
    return res
def __init__(self):
    """Load the chemistry meta config and build the replacement table."""
    self.config_file = '../chemistry/meta.yml'    # relative to working dir
    self.magic_dict = self.load_config(self.config_file)
    self.output_dir = ''
    self.working_dir = os.getcwd()
    self.s = summary.Summary()
    self.current_file = ''
    self.replace_dict = {}
    # FIX: plain loop instead of a throwaway list comprehension that was
    # built only for its side effects.
    for entry in self.magic_dict:
        self.add_replace_dict(entry)
def get_screen(screen_name, params):
    """Lazily import and instantiate the installer page *screen_name*.

    Each page module is imported only inside its own branch so unused
    screens cost nothing at startup. Returns None for unknown names.
    """
    if screen_name == "DesktopAsk":
        import desktop
        return desktop.DesktopAsk(params)
    if screen_name == "Check":
        from modules.pages import check
        return check.Check(params)
    if screen_name == "Timezone":
        import timezone
        page = timezone.Timezone(params)
        # Flag that timezone detection has been kicked off.
        params['settings'].set('timezone_start', True)
        return page
    if screen_name == "Wireless":
        import wireless
        return wireless.Wireless(params)
    if screen_name == "Welcome":
        import welcome
        return welcome.Welcome(params)
    if screen_name == "UserInfo":
        import user_info
        return user_info.UserInfo(params)
    if screen_name == "Location":
        import location
        return location.Location(params)
    if screen_name == "Language":
        import language
        return language.Language(params)
    if screen_name == "Keymap":
        import keymap
        return keymap.Keymap(params)
    if screen_name == "Features":
        import features
        return features.Features(params)
    if screen_name == "Summary":
        import summary
        return summary.Summary(params)
    if screen_name == "Slides":
        import slides
        return slides.Slides(params)
    if screen_name == "InstallationAsk":
        import ask
        return ask.InstallationAsk(params)
    if screen_name == "InstallationAdvanced":
        import advanced
        return advanced.InstallationAdvanced(params)
    if screen_name == "InstallationAlongside":
        import alongside
        return alongside.InstallationAlongside(params)
    if screen_name == "InstallationAutomatic":
        import automatic
        return automatic.InstallationAutomatic(params)
    if screen_name == "zfs":
        import zfs
        return zfs.InstallationZFS(params)
    return None
def load_pages(self):
    """Instantiate every installer page into self.pages and derive the
    progress-bar step from the number of pages actually shown.

    NOTE(review): indentation was reconstructed from a whitespace-mangled
    source -- verify which assignments sit inside the first `if`.
    """
    if not os.path.exists('/home/reborn/.config/openbox'):
        # Full session (not the minimal openbox one) gets a language page.
        self.pages["language"] = language.Language(self.params)
    self.pages["check"] = check.Check(self.params)
    self.pages["location"] = location.Location(self.params)
    self.pages["mirrors"] = mirrors.Mirrors(self.params)
    self.pages["timezone"] = timezone.Timezone(self.params)
    if self.settings.get('desktop_ask'):
        # Normal flow: keymap -> desktop chooser -> features.
        self.pages["keymap"] = keymap.Keymap(self.params)
        self.pages["desktop"] = desktop.DesktopAsk(self.params)
        self.pages["features"] = features.Features(self.params)
    else:
        # Desktop is fixed: skip the chooser and rewire prev/next links.
        self.pages["keymap"] = keymap.Keymap(self.params,
                                             next_page='features')
        self.pages["features"] = features.Features(self.params,
                                                   prev_page='keymap')
    self.pages["installation_ask"] = installation_ask.InstallationAsk(
        self.params)
    self.pages[
        "installation_automatic"] = installation_automatic.InstallationAutomatic(
            self.params)
    if self.settings.get("enable_alongside"):
        self.pages[
            "installation_alongside"] = installation_alongside.InstallationAlongside(
                self.params)
    else:
        self.pages["installation_alongside"] = None
    self.pages[
        "installation_advanced"] = installation_advanced.InstallationAdvanced(
            self.params)
    self.pages["installation_zfs"] = installation_zfs.InstallationZFS(
        self.params)
    self.pages["summary"] = summary.Summary(self.params)
    self.pages["user_info"] = user_info.UserInfo(self.params)
    self.pages["slides"] = slides.Slides(self.params)
    # Pages that never count toward progress (e.g. slides/ask screens).
    diff = 2
    if os.path.exists('/home/antergos/.config/openbox'):
        # In minimal (openbox) we don't have a welcome screen
        diff = 3
    num_pages = len(self.pages) - diff
    if num_pages > 0:
        self.progressbar_step = 1.0 / num_pages
def getFeedsSummary(themename, date, cursor):
    """Return a JSON document with one 20-item page of the most-shared
    URLs tweeted for *themename* within the given *date* window.

    Python 2 code (print statements). `cursor` is the number of results
    to skip; the response carries `next_cursor`, `cursor_length` and the
    scraped `feeds` entries.
    """
    dates = ['all', 'yesterday', 'week', 'month']
    result = {}
    if date not in dates:
        result['Error'] = 'invalid date'
        return json.dumps(result, indent=4)
    date = determine_date(date)  # symbolic window -> concrete timestamp
    themeid = str(logic.getAlertId(themename))
    # Total number of distinct expanded URLs in the window (runs the
    # same aggregation twice: once for the count, once for the page).
    length = len(list(Connection.Instance().db[themeid].aggregate(
        [{'$match': {'timestamp_ms': {'$gte': date}}},
         {'$unwind': "$entities.urls"},
         {'$group': {'_id': "$entities.urls.expanded_url",
                     'total': {'$sum': 1}}}])))
    feeds = Connection.Instance().db[themeid].aggregate(
        [{'$match': {'timestamp_ms': {'$gte': date}}},
         {'$unwind': "$entities.urls"},
         {'$group': {'_id': "$entities.urls.expanded_url",
                     'total': {'$sum': 1}}},
         {'$sort': {'total': -1}},
         {'$skip': cursor},
         {'$limit': 20}])
    feeds = list(feeds)
    last_feeds = []
    if len(feeds) == 0:
        # NOTE(review): always prints 0 here -- looks like debug output.
        print len(list(feeds))
        last_feeds.append("Cursor is Empty.")
    else:
        cursor = int(cursor) + 20
        if cursor >= length:
            cursor = length  # clamp so the client can't skip past the end
        result['next_cursor'] = cursor
        for link in feeds:
            if link['_id'] != None:
                # Best-effort metadata scrape per URL; failures are logged
                # and the entry is skipped.
                try:
                    s = summary.Summary(link['_id'])
                    s.extract()
                    last_feeds.append({
                        'url': link['_id'],
                        'im': str(s.image),
                        'title': str(s.title),
                        'description': str(s.description)
                    })
                except Exception as e:
                    print e
                    pass
    result['cursor_length'] = length
    result['feeds'] = last_feeds
    return json.dumps(result, indent=4)
def unpooled(df, outcomeKey, predictorKeys):
    """Fit log(outcome) = a + b * x with emcee using one shared intercept.

    NOTE(review): the description text below speaks of a per-county
    alpha_ji, but the likelihood uses a single scalar alpha -- confirm
    which is intended.
    """
    description = """ Measurement Level Model: (y_i | alpha_ji, beta, sigma_i) ~ normal(alpha_ji + beta * values_i, sigma_i**2) where j is the county from which the measurement was taken. Population Level Model; or Prior Level: No priors """
    y = np.log(df[outcomeKey]).values.ravel()
    y[~np.isfinite(y)] = -1  # clamp -inf/nan from log of non-positive values
    dy = np.std(y)
    x = df[predictorKeys].values.ravel()
    # BUGFIX: removed leftover `import pdb; pdb.set_trace()` breakpoint
    # that froze every call before the fit even started.

    def lnlike(params, *args):
        """Log-likelihood: params = (a, b)."""
        a, b = params
        xx, yy, dyy = args
        lnl = np.sum([stats.norm.logpdf(yyy, a + b * xxx, dyy)
                      for yyy, xxx in zip(yy, xx)])
        if not np.isfinite(lnl):
            return -np.inf
        return lnl

    aGuess, bGuess = 1.3, -0.61
    ndim, nwalkers, nburn, nstep = 2, 2 * 2, 100, 1000
    pos = [np.array((aGuess, bGuess)) + 1e-4 * np.random.randn(ndim)
           for i in range(nwalkers)]
    sampler = emcee.EnsembleSampler(nwalkers, ndim, lnlike, args=(x, y, dy))
    pos, prob, state = sampler.run_mcmc(pos, nburn)
    sampler.reset()
    pos, prob, state = sampler.run_mcmc(pos, nstep, rstate0=state)
    summ = summary.Summary()
    summ.summarize(sampler.chain)
def __init__(self, args):
    """Wire up source/destination locations, their folder factories,
    the transfer summary and the transfer manager from parsed args."""
    self.args = args
    gdrive = driveutils.drive(
        updateCredentials=self.args.updateCredentials,
        ignoreCredentials=self.args.ignoreCredentials)
    self.sourceLocations = list(map(location.create, self.args.sourceUrls))
    self.destLocation = location.create(self.args.destUrl)
    self.sourceFolderFactory = self.folderFactoryFromLocations(
        self.sourceLocations, gdrive)
    self.destFolderFactory = self.folderFactoryFromLocation(
        self.destLocation, gdrive)
    self.summary = summary.Summary()
    self.transferManager = self.createTransferManager(gdrive)
def main():
    """Parse one saved CoStar HTML page, log its summary, store it in Redis."""
    logging.basicConfig(filename='CoStar_log.log', level=logging.DEBUG)
    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    console.setFormatter(logging.Formatter("%(message)s"))
    logging.getLogger().addHandler(console)

    folder_name = 'Offices'  # alternative dataset: 'Multifamily'
    file_id = "020-1.html"
    file_path = (r'F:/code_test/CoStar_Project/2018_5_25_new_source_files/'
                 r'{0}/{1}'.format(folder_name, file_id))
    logging.info(file_path)
    page_html = file_operation.file_read(file_path)
    doc = pq(page_html)
    # BUGFIX: escape the dot so the lookahead matches ".htm" literally;
    # the original `(?=.htm)` matched any character followed by "htm".
    file_id = re.match(r".*(?=\.htm)", file_id).group()
    s = summary.Summary(doc, file_id)
    a = s.assessment()
    logging.info('=====>')
    logging.info("file_id is: {}".format(file_id))
    logging.info("main.py type(a) -> {}".format(type(a)))
    logging.info("{}".format(dict(a)))
    logging.info('=====>')
    logging.info("file_id is: {}".format(file_id))
    logging.info('main.py result --->')
    logging.info(s.result.__str__().encode('utf-8'))
    # Save results in Redis Database.
    redisdb.save_in_redis('Summary', s.result, 0)
def main():
    """Run the 5-class music-symbol recognition pipeline over the four
    0317 test photos and print a confusion-matrix summary."""
    # FIX: build both path lists with comprehensions instead of
    # append-in-a-loop; dead commented-out code removed.
    filename = [
        "../sample/camera_photo/0317_test/" + str(i + 1) + ".jpg"
        for i in range(4)
    ]
    # 5 classes model
    classes = [
        'musical_symbol_bass_clef', 'musical_symbol_half_note',
        'musical_symbol_quarter_note', 'musical_symbol_quarter_rest',
        'musical_symbol_g_clef'
    ]
    model_number = "0401_9"
    model_dir = "../Model/" + model_number + "/"
    model_name = "model_5_classes_" + model_number + ".meta"
    true_label_file = [
        "../sample/true_label/0317_test/" + str(i + 1) +
        "_true_label_5_classes.csv"
        for i in range(4)
    ]
    t_acc_matrix = init_Conf_mat.InitConfMat(classes)
    for i in range(4):
        # Detect staff lines, cut out symbols, classify, then score.
        y_position, y_position_bottom = five.five_lines(filename[i])
        symbol_Info = x_scan.x_cut(y_position, y_position_bottom,
                                   filename[i])
        allInfo_symbol = predict.predict_symbol(symbol_Info, filename[i],
                                                classes, model_dir,
                                                model_name)
        allInfo_symbol, t_acc_matrix = accuracy.acc(allInfo_symbol,
                                                    true_label_file[i],
                                                    classes, filename[i],
                                                    t_acc_matrix, model_name)
        OutputResult.Output(allInfo_symbol, classes, filename[i], model_name)
    summary.Summary(t_acc_matrix, model_name)
def get_field_value(self, owner, field, value):
    """Normalize a permit field value before storing it.

    For ``info_link`` fields returns an InfoLink (created and scraped on
    first sight); for other fields returns the raw value. Returns None
    when the owner already has this exact value, so callers can skip
    duplicates.

    NOTE(review): nesting of save/return was reconstructed from a
    whitespace-mangled source -- verify against the original file.
    """
    if field == 'info_link':
        if PermitData.objects.filter(owner=owner,
                                     info_link__link=value).count() == 0:
            link = InfoLink.objects.filter(link=value).first()
            if link is None:
                link = InfoLink(link=value)
                try:
                    # Parenthesized so this works on both py2 and py3.
                    print('Fetching %s' % value)
                    page = summary.Summary(value)
                    page.extract()
                    link.description = page.description
                    link.title = page.title
                    link.image = page.image
                except Exception:
                    # Best-effort scrape; narrowed from a bare `except:`.
                    pass
                link.save()
            return link
    elif PermitData.objects.filter(**{field: value,
                                      'owner': owner}).count() == 0:
        return value
    return None
def flip_to_magazine():
    """Flask endpoint: add a link ('flip') to an existing magazine.

    Expects JSON with ``magazine_id``, ``link`` and ``comment`` plus an
    authenticated user in the request headers. Link metadata (image,
    title, description) is scraped best-effort and stored alongside.
    """
    if not all(_ in request.json for _ in ('magazine_id', 'link', 'comment')):
        return jsonify({
            'success': False,
            'message': 'please provide all informations'
        })
    user = get_user(request.headers)
    if user is None:
        return jsonify({'success': False, 'message': 'Please log in'})
    print(f'magazine_id: {request.json["magazine_id"]}', file=sys.stderr)
    existing_magazine = Database().get_magazine(request.json['magazine_id'])
    if existing_magazine is None:
        return jsonify({'success': False, 'message': 'Magazine_id is wrong'})
    image_link = ''
    title = ''
    description = ''
    try:
        s = summary.Summary(request.json['link'])
        s.extract()
        image_link = str(s.image)
        title = str(s.title)
        description = str(s.description)
        print(f"Title: {s.title}", file=sys.stderr)
        print(f"Description: {s.description}", file=sys.stderr)
        print(f"Image: {s.image}", file=sys.stderr)
    except Exception:
        # FIX: narrowed from a bare `except:`; the scrape stays
        # best-effort but no longer swallows system-exit exceptions.
        pass
    Database().new_flip(request.json['magazine_id'], request.json['link'],
                        request.json['comment'], user['unique_login'],
                        image_link, title, description)
    return jsonify({
        'success': True,
        'message': 'Successfully flipped to magazine'
    })
else: schemapath = os.path.join(os.environ['XMLSUMMARYBASEROOT'], 'xml/XMLSummary.xsd') summarypath = os.path.join(os.environ['XMLSUMMARYBASEROOT'], 'python/XMLSummaryBase') sys.path.append(summarypath) import summary outputxml = os.path.join(os.getcwd(), 'summary.xml') if not os.path.exists(outputxml): sys.stderr.write( "XMLSummary not passed as 'summary.xml' not present in working dir" ) else: try: XMLSummarydata = summary.Summary(schemapath, construct_default=False) XMLSummarydata.parse(outputxml) except: sys.stderr.write( "Failure when parsing XMLSummary file 'summary.xml'") try: fn = open('__parsedxmlsummary__', 'w') for name, method in activeSummaryItems().iteritems(): try: fn.write('%s = %s\n' % (name, str(method(XMLSummarydata)))) except Exception, e: fn.write('%s = None\n' % name) sys.stderr.write( 'XMLSummary warning: Method "%s" not available for this job\n' % name)
def deamon(soc=-1):
    """ Main loop, gets battery data, gets summary.py to do logging

    Waits for a sane system clock, seeds the state-of-charge counters
    (from *soc*, or from the persisted summary when soc < 0), then
    samples, logs and rolls over summaries forever.

    NOTE(review): indentation below was reconstructed from a
    whitespace-mangled source -- verify nesting against the original.
    """
    try:
        import summary
        logsummary = summary.Summary()
        # NOTE: rebinds `summary` from the module to its data dict.
        summary = logsummary.summary
        printtime = time.strftime("%Y%m%d%H%M%S ", time.localtime())
        # Block until the clock is past the last recorded sample time.
        while int(printtime) <= int(summary['current']['timestamp']):
            print(printtime, summary['current']['timestamp'])
            print("Error: Current time before last sample time")
            time.sleep(30)
            printtime = time.strftime("%Y%m%d%H%M%S", time.localtime())
        batdata = Readings()  # initialise batdata after we have valid sys time
        alarms = Alarms(batdata)  # initialise alarms
        print(str(printtime))
        # Keep a timestamped backup of the summary file before touching it.
        filecopy(config['files']['summaryfile'],
                 config['files']['summaryfile'] + "R" + str(int(printtime)))
        if soc > config['battery']['capacity']:
            print("Battery DOD must be less than Battery Capacity")
        else:
            if soc < 0:
                # No explicit SOC given: restore from persisted summary.
                batdata.soc = summary['current']['ah'][0]
                batdata.socadj = summary['current']['dod'][0]
            else:
                batdata.soc = soc
                batdata.socadj = soc
                summary['current']['dod'][3] = 0
            summary['current']['dod'][
                3] = -100  # flag don't adjust leakage current
            prevtime = logsummary.currenttime
            prevbatvoltage = batdata.batvoltsav[numcells]
            # logsummary.startday(summary)
            # logsummary.starthour(summary)
            while True:
                try:
                    for i in range(config['sampling']['samplesav']):
                        # printvoltage = ''
                        # for i in range(numcells+1):
                        #     printvoltage = printvoltage + str(round(batdata.batvolts[i],3)).ljust(5,'0') + ' '
                        # print (printvoltage)
                        batdata.getraw()
                        # if batdata.batvoltsav[numcells] >= 55.2 and prevbatvoltage < 55.2:  # reset SOC counter?
                        # print batdata.socadj/(float(summary['current']['dod'][3])*24.0)
                        # Reset SOC counters when the pack crosses the
                        # reset voltage downward at low current.
                        if batdata.batvoltsav[numcells] < config['battery']['vreset'] \
                                and prevbatvoltage >= config['battery']['vreset'] \
                                and summary['current']['dod'][3] != 0 \
                                and -batdata.currentav[0] < config['battery']['ireset']:  # reset SOC counter?
                            if summary['current']['dod'][3] <= 0:
                                socerr = 0
                            else:
                                # Average SOC drift per day since last reset.
                                socerr = batdata.socadj / (
                                    float(summary['current']['dod'][3]) * 24.0)
                            # Clamp the correction to +/-1%.
                            socerr = max(socerr, -0.01)
                            socerr = min(socerr, 0.01)
                            config['battery']['ahloss'] = config['battery'][
                                'ahloss'] - socerr / 2
                            # Persist the tuned ahloss back to battery.cfg.
                            batconfigdata = SafeConfigParser()
                            batconfigdata.read('battery.cfg')
                            batconfigdata.set('battery', 'ahloss',
                                              str(config['battery']['ahloss']))
                            with open('battery.cfg', 'w') as batconfig:
                                batconfigdata.write(batconfig)
                            batconfig.closed
                            batdata.soc = 0.0
                            batdata.socadj = 0.0
                            summary['current']['dod'][3] = 0
                        else:
                            batdata.soc = batdata.soc + batdata.batah
                            batdata.socadj = batdata.socadj + batdata.batahadj
                        batdata.ah = batdata.ah + batdata.batah
                        batdata.inahtot = batdata.inahtot + batdata.inah
                        batdata.pwrbattot = batdata.pwrbattot + batdata.pwrbat
                        batdata.pwrintot = batdata.pwrintot + batdata.pwrin
                        prevbatvoltage = batdata.batvoltsav[numcells]
                        # check alarms
                        alarms.scanalarms(batdata)
                    # update summaries
                    logsummary.update(summary, batdata)
                    if logsummary.currenttime[4] != logsummary.prevtime[
                            4]:  # new minute
                        loadconfig()
                        logsummary.updatesection(summary, 'hour', 'current')
                        logsummary.updatesection(summary, 'alltime', 'current')
                        logsummary.updatesection(summary, 'currentday',
                                                 'current')
                        logsummary.updatesection(summary, 'monthtodate',
                                                 'current')
                        logsummary.updatesection(summary, 'yeartodate',
                                                 'current')
                        logsummary.writesummary()
                        # Zero the per-minute accumulators.
                        batdata.ah = 0.0
                        batdata.ahadj = 0.0
                        batdata.inahtot = 0.0
                        batdata.pwrbattot = 0.0
                        batdata.pwrintot = 0.0
                        for i in range(batdata.numiins):
                            batdata.kWhin[i] = 0.0
                            batdata.kWhout[i] = 0.0
                        for i in range(numcells):
                            batdata.baltime[i] = 0
                    if logsummary.currenttime[3] != logsummary.prevtime[
                            3]:  # new hour
                        logsummary.starthour(summary)
                    if logsummary.currenttime[3] < logsummary.prevtime[
                            3]:  # newday
                        logsummary.startday(summary)
                    if logsummary.currenttime[1] != logsummary.prevtime[
                            1]:  # new month
                        logsummary.startmonth(summary)
                    if logsummary.currenttime[0] != logsummary.prevtime[
                            0]:  # new year
                        logsummary.startyear(summary)
                except KeyboardInterrupt:
                    sys.stdout.write('\n')
                    logsummary.close()
                    sys.exit(9)
                    break
    except Exception as err:
        log.critical(err)
        raise
_("Error"), _("There has been an error while taking a screenshot. Please try again later" )) loading_window.destroy() def show_settings_window(self): loading_window = loading.Loading() while Gtk.events_pending(): Gtk.main_iteration() settings_window.show_window() loading_window.destroy() if __name__ == '__main__': notifications_window = notifications.Notifications() summary_window = summary.Summary() send_window = send.Send() controls_window = controls.Controls() settings_window = settings.Settings() tray = TrayIcon() # control notifications every 5 seconds interval_ms = 5 GLib.timeout_add_seconds(interval_ms, tray.get_notifications_interval) Gtk.main()
batdata.baltime[i] = 0 if currenttime[8:10] != prevtime[8:10]: # new hour logsummary.starthour(summary) if currenttime[6:8] != prevtime[6:8]: # newday logsummary.startday(summary) if currenttime[4:6] != prevtime[4:6]: # new month logsummary.startmonth(summary) if currenttime[0:4] != prevtime[0:4]: # new year logsummary.startyear(summary) import summary logsummary = summary.Summary() batdata = Readings() # initialise batdata alarms = Alarms(batdata, summary) # initialise alarms def deamon(soc=-1): """Battery Management deamon to run in background""" numtries = 0 while True: try: initmain(soc) while True: mainloop() numtries = 0 except KeyboardInterrupt:
def multilevel(df, outcomeKey, predictorKeys, poolByKey):
    """Hierarchical (partial pooling) fit of log(outcome) on x with emcee.

    Measurement Level Model:
        (y_i | alpha_ji, beta, sigma) ~ normal(alpha_ji + beta * values_i, sigma**2)
    Population Level Model; or Prior Level:
        (alpha_ji | mu_a, alpha_a) = normal(mu_a, alpha_a**2)
    Hyperprior Level:
        uniform(0, 100) priors on mu_a and sigma_a.

    BUGFIX: the original signature was (values, sigma) while the body
    referenced df/outcomeKey/predictorKeys/poolByKey, so every call died
    with NameError; the signature now matches pooled()/unpooled().
    BUGFIX: the initial guess now has npar + 3 entries
    (alpha_1..alpha_npar, b, mu_a, sigma_a) to match how lnlike slices
    params; the original npar + 1 guess misaligned every parameter.
    """
    y = np.log(df[outcomeKey]).values.ravel()
    y[~np.isfinite(y)] = -1  # clamp -inf/nan from log of non-positive values
    x = df[predictorKeys].values.ravel()
    p = df[poolByKey].values.ravel()
    poolValues = df[poolByKey].unique()
    npar = len(poolValues)
    pidx = []
    dy = np.empty(npar)
    for i, poolValue in enumerate(poolValues):
        idx = (p == poolValue)
        pidx.append(idx)
        dy[i] = np.std(y[idx])
    # NOTE(review): masks dy (length npar) against y (length n); only
    # lines up when the lengths match -- verify intent.
    dy[dy == 0.0] = y[dy == 0.0]

    priorMu_a = priors.Uniform(0, 100)
    priorSigma_a = priors.Uniform(0, 100)

    def lnprior(params):
        """Hyperprior on (mu_a, sigma_a), the last two parameters."""
        mu = params[-2]
        sigma = params[-1]
        return priorMu_a.lnlike(mu) + priorSigma_a.lnlike(sigma)

    def lnlike(params, *args):
        """Full log-posterior: params = (alpha_1..alpha_npar, b, mu, sigma)."""
        xx, yy, dyy, ppidx = args
        alpha = params[:-3]
        b = params[-3]
        mu = params[-2]
        sigma = params[-1]
        # Hyperprior
        lnpriorH = lnprior(params)
        if not np.isfinite(lnpriorH):
            return -np.inf
        # Likelihood of a_i under the population level model
        lnpriorP = np.sum([stats.norm.logpdf(a, mu, sigma) for a in alpha])
        if not np.isfinite(lnpriorP):
            return -np.inf
        # Likelihood of y_i under the measurement level model
        lnlikeM = 0
        for i, idx in enumerate(ppidx):
            lnlikeM += np.sum([
                stats.norm.logpdf(yyy, alpha[i] + b * xxx, dyy[i])
                for yyy, xxx in zip(yy[idx], xx[idx])
            ])
        if not np.isfinite(lnlikeM):
            return -np.inf
        return lnpriorH + lnpriorP + lnlikeM

    # alpha guesses, then slope b, then hyperparameters mu_a, sigma_a.
    guess = npar * [0.9] + [-0.61, 0.9, 1.0]
    ndim, nwalkers, nburn, nstep = len(guess), 2 * len(guess), 10, 100
    pos = [np.array(guess) + 1e-4 * np.random.randn(ndim)
           for i in range(nwalkers)]
    sampler = emcee.EnsembleSampler(nwalkers, ndim, lnlike,
                                    args=(x, y, dy, pidx))
    pos, prob, state = sampler.run_mcmc(pos, nburn)
    sampler.reset()
    pos, prob, state = sampler.run_mcmc(pos, nstep, rstate0=state)
    summ = summary.Summary()
    summ.summarize(sampler.chain)
    # BUGFIX: removed leftover `import pdb; pdb.set_trace()` breakpoint.
def randomEffects(treatN, treatD):
    """Random-effects meta-analysis of binomial trial outcomes with emcee.

    Parameters are (p_1..p_k, a, b): one success probability per trial
    plus the beta(a, b) hyperparameters they are drawn from.
    """
    # FIX: description said "scale = 0" although the priors below use
    # scale = 100.
    description = """In this case, we assume that the probability p is different in each trial, but that they are drawn from a common distribution. This distribution is a beta(a, b), where a and b are hyperparameters of the model. We will apply common priors on a and b of an exponential distribution with scale = 100. Measurement Level Model: (d_i | n_i p_i) ~ binomial(n_i, p_i) Population Level Model; or Prior Level: (p_i | a, b) ~ beta(a, b) Hyperprior Level: Define prior(a, b) """
    priorA = priors.ExponentialPrior(100.)
    priorB = priors.ExponentialPrior(100.)

    def lnprior(params):
        """Hyperprior on (a, b), the last two parameters."""
        a = params[-2]
        b = params[-1]
        return priorA.lnlike(a) + priorB.lnlike(b)

    def lnlike(params, *args):
        """Full log-posterior: params = (p_1..p_k, a, b)."""
        # Hyperprior
        lp = lnprior(params)
        if not np.isfinite(lp):
            return -np.inf
        # Likelihood of p_i under the population level model
        a = params[-2]
        b = params[-1]
        priorP = stats.beta(a, b)
        lnpriorP = np.sum(priorP.logpdf(params[:-2]))
        if not np.isfinite(lnpriorP):
            return -np.inf
        # Likelihood of d_i under the measurement level model
        nn, dd = args
        lnlikeP = np.sum([
            stats.binom.logpmf(d, n, p)
            for d, n, p in zip(dd, nn, params[:-2])
        ])
        return lp + lnpriorP + lnlikeP

    pGuess = np.mean(1.0 * treatD / treatN)
    aGuess = 3.0
    bGuess = 191.0
    ndim, nwalkers, nburn, nstep = len(treatN) + 2, 100, 1000, 10000
    guess = [pGuess] * len(treatN)
    guess.append(aGuess)
    guess.append(bGuess)
    pos = [np.array(guess) + 1e-3 * np.random.randn(ndim)
           for i in range(nwalkers)]
    sampler = emcee.EnsembleSampler(nwalkers, ndim, lnlike,
                                    args=(treatN, treatD))
    pos, prob, state = sampler.run_mcmc(pos, nburn)
    sampler.reset()
    pos, prob, state = sampler.run_mcmc(pos, nstep, rstate0=state)
    # BUGFIX: removed leftover `import pdb; pdb.set_trace()` breakpoint
    # that halted the run before the summary step.
    summ = summary.Summary()
    summ.summarize(sampler.chain)
def load_file(self, path, nameonly=False, decodewrap=False):
    """Load a CardWirth data file (wid/wsm) and return (data, filedata).

    (Translated from Japanese: "Reads the file given as argument
    (wid, wsm file).")

    A .wsm file is always a scenario Summary; otherwise the first byte
    of the file selects the card type, with the filename prefix used to
    disambiguate shared type codes. Returns (None, None) on any error.

    NOTE(review): indentation was reconstructed from a whitespace-mangled
    source -- verify nesting against the original file.
    """
    try:
        f = cwfile.CWFile(path, "rb", decodewrap=decodewrap)
        no = nameonly
        md = self.materialdir
        ie = self.image_export
        if path.lower().endswith(".wsm"):
            # Scenario summary file.
            data = summary.Summary(None, f, nameonly=no, materialdir=md,
                                   image_export=ie)
            data.skintype = self.skintype
        else:
            # Peek at the type byte, then rewind for the real parse.
            filetype = f.byte()
            f.seek(0)
            f.filedata = []
            if filetype == 0:
                data = area.Area(None, f, nameonly=no, materialdir=md,
                                 image_export=ie)
            elif filetype == 1:
                data = battle.Battle(None, f, nameonly=no, materialdir=md,
                                     image_export=ie)
            elif filetype == 2:
                # Type 2 is ambiguous: battle vs cast, by filename prefix.
                if os.path.basename(path).lower().startswith("battle"):
                    data = battle.Battle(None, f, nameonly=no, materialdir=md,
                                         image_export=ie)
                else:
                    data = cast.CastCard(None, f, nameonly=no, materialdir=md,
                                         image_export=ie)
            elif filetype == 3:
                data = item.ItemCard(None, f, nameonly=no, materialdir=md,
                                     image_export=ie)
            elif filetype == 4:
                # Type 4: package vs cast vs info, by filename prefix.
                lpath = os.path.basename(path).lower()
                if lpath.startswith("package"):
                    data = package.Package(None, f, nameonly=no,
                                           materialdir=md, image_export=ie)
                elif lpath.startswith("mate"):
                    data = cast.CastCard(None, f, nameonly=no, materialdir=md,
                                         image_export=ie)
                else:
                    data = info.InfoCard(None, f, nameonly=no, materialdir=md,
                                         image_export=ie)
            elif filetype == 5:
                # Type 5: item vs skill, by filename prefix.
                if os.path.basename(path).lower().startswith("item"):
                    data = item.ItemCard(None, f, nameonly=no, materialdir=md,
                                         image_export=ie)
                else:
                    data = skill.SkillCard(None, f, nameonly=no,
                                           materialdir=md, image_export=ie)
            elif filetype == 6:
                # Type 6: info vs beast, by filename prefix.
                if os.path.basename(path).lower().startswith("info"):
                    data = info.InfoCard(None, f, nameonly=no, materialdir=md,
                                         image_export=ie)
                else:
                    data = beast.BeastCard(None, f, nameonly=no,
                                           materialdir=md, image_export=ie)
            elif filetype == 7:
                data = skill.SkillCard(None, f, nameonly=no, materialdir=md,
                                       image_export=ie)
            elif filetype == 8:
                data = beast.BeastCard(None, f, nameonly=no, materialdir=md,
                                       image_export=ie)
            else:
                f.close()
                raise ValueError(path)
        if not nameonly:
            # Read all remaining unparsed bytes so filedata is complete.
            # (Translated from Japanese comment.)
            f.read()
        f.close()
        return data, "".join(f.filedata)
    except:
        # NOTE(review): bare except is deliberate best-effort loading --
        # any failure logs a traceback and yields (None, None).
        cw.util.print_ex()
        return None, None