def print_documentation(object_name):
    print "--------- %s ---------" % utils.bold(object_name)
    print ""
    classes = enumerate_all_test_classes()
    for test_class_name, test_class in classes:
        arr = object_name.split(".")
        if test_class_name == object_name:
            # get the class info
            print "%s: %s" % (utils.bold("Prereqs"), test_class.required_config)
            print "%s: %s" % (utils.bold("Collects"), test_class.collects)
            print ""
            print utils.format(test_class.__doc__)
            print ""
            print "%s:" % (utils.bold("Tests"))
            inst = test_class(None, {})
            for method in inst.list_tests():
                print method
            print ""
            sys.exit(0)
        elif len(arr) == 3 and ".".join(arr[:2]) == test_class_name:
            # get the method info
            print utils.format(getattr(test_class, arr[2]).__doc__)
            print ""
            sys.exit(0)
    print "The test name specified (%s) was incorrect. Please specify the full test name." % object_name
    sys.exit(0)
def get_data(bstream, endianness):
    val_type = bstream.read(8).int
    if val_type in int_types:
        data = int_types[val_type](bstream, endianness, val_type)
    elif -20 < val_type < -10:
        data = get_hour(bstream.read(format(val_type, endianness)))
    elif val_type < 0:
        data = bstream.read(format(val_type, endianness))
    elif 20 > val_type > 10:
        attributes = bstream.read(8).int
        length = bstream.read(format(INT, endianness))
        data = [get_hour(x) for x in bstream.readlist(format_list(val_type, endianness, length))]
    elif 10 > val_type > 0:
        attributes = bstream.read(8).int
        length = bstream.read(format(INT, endianness))
        nptype, bstype = format_raw_list(val_type, length)
        data = np.fromstring(bstream.read(bstype).bytes, dtype=nptype)
        #data = bstream.readlist(format_list(val_type, endianness, length))
    elif val_type > 90:
        data = []
    else:
        attributes = bstream.read(8).int
        length = bstream.read(format(INT, endianness))
        data = [get_data(bstream, endianness) for _ in range(length)]
    return data
def nextplaylist(self, nb):
    current = self.currentsong()
    playlist = self.playlistinfo()
    deb = int(current["pos"])
    end = int(self.status()["playlistlength"])
    res = ""
    for i in range(nb):
        song = playlist[(deb + i) % end]
        res += utils.format(song) + "\n"
    return res[:-1]
def nextplaylist(self, nb=5):
    nb = int(nb)
    if nb > 15:
        return "Non mais oh, faudrait pas trop exagérer..."
    current = self.currentsong()
    playlist = self.playlistinfo()
    deb = int(current["pos"])
    end = int(self.status()["playlistlength"])
    res = ""
    for i in range(nb):
        song = playlist[(deb + i) % end]
        res += utils.format(song) + "\n"
    return res[:-1]
def search(self, search, title, artist):
    req = []
    if search:
        req = self.playlistsearch("Artist", search)
        req.extend(self.playlistsearch("Title", search))
    elif title:
        req = self.playlistsearch("Title", title)
    elif artist:
        req = self.playlistsearch("Artist", artist)
    if req == []:
        return "Cherches un peu mieux que ça"
    res = ""
    for elt in req:
        res += "%s\n" % (utils.format(elt))
    return res[0:-1]
def search(self, args):
    l = args.split(" ")
    if len(l) == 2:
        filter = l[0]
        field = l[1]
        req = self.playlistsearch(filter, field)
    else:
        req = self.playlistsearch("Artist", l[0])
        req.extend(self.playlistsearch("Title", l[0]))
    if req == []:
        return "Cherches un peu mieux que ça"
    res = ""
    for elt in req:
        res += "%s\n" % (utils.format(elt))
    return res[0:-1]
    if not ent.pos.is_inside(Pos((0, 0)), Pos((1024, 720))):
        ent.die()
for ent in entities:
    ent.raycast(screen)
for ent in entities:
    ent.update(dt)

# update screen
screen.fill((200, 200, 200))
for ent in entities:
    ent.draw(screen)
pygame.display.flip()
pygame.display.update()
clock.tick(fps)
pygame.display.set_caption('nn-gen-survive ' + utils.format(clock.get_fps()))

pygame.quit()
print("Game Stopped")

if leveleditor:
    drawables.append(car)  # also save the car
    drawables.append(backgroundmap)
    utils.save(map, drawables)
    print("Data has been saved")
quit()
def toString(self):
    return utils.format(self.x, 2) + ":" + utils.format(self.y, 2)
def get_symbol_list(bstream, endianness, val_type):
    attributes = bstream.read(8).int
    length = bstream.read(format(INT, endianness))
    data = [str_convert(bstream, endianness) for i in range(length)]
    return data
def get_date(bstream, endianness, val_type):
    return datetime.datetime.fromordinal(bstream.read(format(val_type, endianness)) + Y2KDAYS)
def build():
    # STEP: clone FruityMod
    if not os.path.exists(mod_dir):
        print("Downloading {}".format("FruityMod"))
        fruity_url = r"https://github.com/gskleres/FruityMod-StS/archive/v0.6.2b.zip"
        utils.mkdir("cache")
        download_file = tempfile.NamedTemporaryFile(suffix=".zip", dir="cache", delete=False).name
        with urllib.request.urlopen(fruity_url) as response, open(download_file, "wb") as out_file:
            shutil.copyfileobj(response, out_file)
        utils.unzip(download_file, mod_dir, shift=1, remove=True)

    # STEP: fetch libs
    mod_jar = os.path.join(spire_dir, "ModTheSpire.jar")
    if not os.path.exists(mod_jar):
        print("Downloading ModTheSpire")
        download_file = tempfile.NamedTemporaryFile(suffix=".zip", dir="..", delete=False).name
        urllib.request.urlretrieve(
            "https://github.com/kiooeht/ModTheSpire/releases/download/v2.6.0/ModTheSpire.zip",
            download_file)
        with zipfile.ZipFile(download_file, "r") as archive, open(mod_jar, "wb") as file:
            jar_data = archive.read("ModTheSpire.jar")
            file.write(jar_data)
        os.remove(download_file)

    base_jar = os.path.join(spire_dir, "mods", "BaseMod.jar")
    if not os.path.exists(base_jar):
        print("Downloading BaseMod")
        urllib.request.urlretrieve(
            "https://github.com/daviscook477/BaseMod/releases/download/v2.9.1/BaseMod.jar",
            base_jar)

    from spire import name_id
    import textwrap
    import io
    import json

    print("Generating data")
    image_dir = os.path.join("assets", "images")
    if os.path.exists(os.path.join("cache", "DEBUG")):
        image_dir = os.path.join("todo", "images")

    # STEP: generate cards
    from engi_mod import cards
    with open(os.path.join("templates", "card.java"), encoding="utf-8") as file:
        card_template = file.read()
    for card in cards:
        with open(os.path.join(mod_dir, *r"src\main\java\fruitymod\cards".split("\\"),
                               name_id(card["name"]) + ".java"),
                  "w", encoding="utf-8") as file:
            file.write(format(card_template, card))

    # STEP: patch code
    templates_cache = os.path.join("cache", "templates")
    if not os.path.exists(templates_cache):
        utils.mkdir(templates_cache)
        shutil.copy(
            os.path.join(mod_dir, *r"src\main\java\fruitymod\FruityMod.java".split("\\")),
            os.path.join(templates_cache, "FruiyMod.java"))
        shutil.copy(
            os.path.join(mod_dir, *r"src\main\java\fruitymod\characters\TheSeeker.java".split("\\")),
            os.path.join(templates_cache, "TheSeeker.java"))
        shutil.copy(
            os.path.join(mod_dir, *r"src\main\resources\localization\FruityMod-CardStrings.json".split("\\")),
            os.path.join(templates_cache, "FruityMod-CardStrings.json"))

    image_code = io.StringIO()
    add_code = io.StringIO()
    unlock_code = io.StringIO()
    for card in cards:
        id = name_id(card["name"], upper=True).lower()
        image_file = os.path.join(image_dir, id + ".png")
        image_file = "cards/{}.png".format(id if os.path.exists(image_file) else "runic_binding")
        image_code.write(
            format('public static final String {{ name_id(card["name"], upper=True) }} = "{{ image_file }}";')
            + "\n")
        if card["rarity"] != "special":
            add_code.write(format('BaseMod.addCard(new {{ name_id(card["name"]) }}());') + "\n")
            unlock_code.write(format('UnlockTracker.unlockCard("{{ card["name"] }}");') + "\n")

    with open(os.path.join(templates_cache, "FruiyMod.java"), encoding="utf-8") as file:
        fruity_lines = [line for line in file]
    for i, line in enumerate(fruity_lines):
        if "public static final String PHASE_COIL" in line:
            fruity_lines.insert(i + 1, "\n" + textwrap.indent(image_code.getvalue(), " " * 4))
            break
    for i, line in enumerate(fruity_lines):
        if "BaseMod.addCard(new Nexus())" in line:
            fruity_lines.insert(i + 1, "\n" + textwrap.indent(add_code.getvalue(), " " * 4 * 2))
            fruity_lines.insert(i + 2, "\n" + textwrap.indent(unlock_code.getvalue(), " " * 4 * 2))
            break
    with open(os.path.join(mod_dir, *r"src\main\java\fruitymod\FruityMod.java".split("\\")),
              "w", encoding="utf-8") as file:
        file.write("".join(fruity_lines))

    with open(os.path.join(templates_cache, "TheSeeker.java"), encoding="utf-8") as file:
        seeker_lines = [line for line in file]

    # STEP: starting relic
    from engi_mod import relic
    for i, line in enumerate(seeker_lines):
        if "Arcanosphere" in line:
            del seeker_lines[i:i + 2]
            seeker_lines.insert(
                i, "\n{}\n\n".format(
                    textwrap.indent(
                        textwrap.dedent(
                            format("""
                            retVal.add("{{ relic }}");
                            UnlockTracker.markRelicAsSeen("{{ relic }}");
                            """)).strip(),
                        " " * 4 * 2)))
            break

    # STEP: starting deck
    from engi_mod import deck
    if not deck:
        deck = [card["name"] for card in cards if card["rarity"] != "special"]
    for i, line in enumerate(seeker_lines):
        if "Strike_P" in line:
            for j, line in enumerate(seeker_lines):
                if "AstralHaze" in line:
                    break
            del seeker_lines[i:j + 1]
            seeker_lines.insert(
                i, "\n{}\n\n".format(
                    textwrap.indent(
                        "\n".join('retVal.add("{}");'.format(card) for card in deck),
                        " " * 4 * 2)))
            break
    with open(os.path.join(mod_dir, *r"src\main\java\fruitymod\characters\TheSeeker.java".split("\\")),
              "w", encoding="utf-8") as file:
        file.write("".join(seeker_lines))

    card_strings = json.load(
        open(os.path.join(templates_cache, "FruityMod-CardStrings.json"), encoding="utf-8"))
    for card in cards:
        data = {
            "NAME": card["name"],
            "DESCRIPTION": card["desc"],
        }
        desc = card.get("upgrade_desc")
        if desc:
            data["UPGRADE_DESCRIPTION"] = desc
        card_strings[card["name"]] = data
    json.dump(
        card_strings,
        open(os.path.join(mod_dir,
                          *r"src\main\resources\localization\FruityMod-CardStrings.json".split("\\")),
             "w", encoding="utf-8"),
        sort_keys=True,
        indent=4)

    # STEP: generate powers
    from engi_mod import powers
    with open(os.path.join("templates", "power.java"), encoding="utf-8") as file:
        power_template = file.read()
    for power in powers:
        with open(os.path.join(mod_dir, *r"src\main\java\fruitymod\powers".split("\\"),
                               power["id"] + ".java"),
                  "w", encoding="utf-8") as file:
            file.write(format(power_template, power))

    # STEP: generate actions
    from engi_mod import actions
    with open(os.path.join("templates", "action.java"), encoding="utf-8") as file:
        action_template = file.read()
    for action in actions:
        with open(os.path.join(mod_dir, *r"src\main\java\fruitymod\actions\unique".split("\\"),
                               action["id"] + ".java"),
                  "w", encoding="utf-8") as file:
            file.write(format(action_template, action))

    # STEP: generate java files
    from engi_mod import javas
    with open(os.path.join("templates", "java.java"), encoding="utf-8") as file:
        java_template = file.read()
    for java in javas:
        with open(os.path.join(mod_dir, *r"src\main\java".split("\\"), *java["package"],
                               java["name"] + ".java"),
                  "w", encoding="utf-8") as file:
            file.write(format(java_template, java))

    # STEP: card images
    print("Generating images")
    import numpy as np
    portrait_masks = {}
    for type in "attack skill power".split():
        image = utils.open_data(
            os.path.join("templates", "1024Portraits_{}_mask.png".format(type)))
        image = image / 255
        image = np.repeat(image[:, :, :1], 4, axis=-1)
        portrait_masks[type] = image
    for card in cards:
        id = name_id(card["name"], upper=True).lower()
        image_file = os.path.join(image_dir, id + ".png")
        target_p_file = os.path.join(
            mod_dir, *r"src\main\resources\img\cards".split("\\"), id + "_p" + ".png")
        target_file = os.path.join(
            mod_dir, *r"src\main\resources\img\cards".split("\\"), id + ".png")
        if os.path.exists(target_p_file):
            continue
        if os.path.exists(image_file):
            image = utils.open_data(image_file)
            from skimage.transform import resize
            target = 500, 380
            r = image.shape[0] / image.shape[1]
            if r >= target[0] / target[1]:
                size = np.ceil(target[1] * r).astype("int"), target[1]
                x = np.round((size[0] - target[0]) / 2).astype("int")
                image = resize(image, size, mode="edge")[x:x + target[0]]
            else:
                size = target[0], np.ceil(target[0] / r).astype("int")
                image = resize(image, size, mode="edge")[:, :target[1]]
            image *= portrait_masks[card["type"]]
            from PIL import Image
            img = Image.fromarray(
                np.round(image * 255).astype("uint8").transpose((1, 0, 2)))
            img.save(target_p_file)
            target = 250, 190
            image = resize(image, target, mode="edge")
            img = Image.fromarray(
                np.round(image * 255).astype("uint8").transpose((1, 0, 2)))
            img.save(target_file)

    # STEP: card borders
    utils.sync(
        os.path.join("assets", "512"),
        os.path.join(mod_dir, *r"src\main\resources\img\512".split("\\")))
    utils.sync(
        os.path.join("assets", "1024"),
        os.path.join(mod_dir, *r"src\main\resources\img\1024".split("\\")))

    # STEP: keywords
    from engi_mod import keywords
    keyword_code = io.StringIO()
    for name, keyword in keywords.items():
        words = ", ".join('"{}"'.format(word) for word in [name.lower()] + keyword["words"])
        keyword_code.write(
            format('BaseMod.addKeyword(new String[] {"{{ name }}", {{ words }}}, "{{ keyword["desc"] }}");')
            + "\n")
    with open(os.path.join(mod_dir, *r"src\main\java\fruitymod\FruityMod.java".split("\\")),
              encoding="utf-8") as file:
        fruity_lines = [line for line in file]
    for i, line in enumerate(fruity_lines):
        if '{"intangible", "Intangible"}, "All damage and HP loss you suffer is reduced to 1."' in line:
            fruity_lines.insert(
                i + 1, "\n" + textwrap.indent(keyword_code.getvalue(), " " * 4 * 2))
            break
    with open(os.path.join(mod_dir, *r"src\main\java\fruitymod\FruityMod.java".split("\\")),
              "w", encoding="utf-8") as file:
        file.write("".join(fruity_lines))

    # STEP: mod info
    old_info = os.path.join(mod_dir, *r"src\main\resources\ModTheSpire.config".split("\\"))
    if os.path.exists(old_info):
        os.remove(old_info)
    from engi_mod import info
    json.dump(info,
              open(os.path.join(mod_dir, *r"src\main\resources\ModTheSpire.json".split("\\")),
                   "w", encoding="utf-8"),
              indent=4)

    # STEP: maven project
    pom_template = os.path.join(templates_cache, "pom.xml")
    if not os.path.exists(pom_template):
        shutil.copy(os.path.join(mod_dir, "pom.xml"), pom_template)
    with open(pom_template, encoding="utf-8") as file:
        pom = file.read()
    pom = pom.replace(
        "${basedir}/../lib/ModTheSpire.jar",
        "/".join(spire_dir.split(os.path.sep) + ["ModTheSpire.jar"]))
    pom = pom.replace(
        "${basedir}/../lib/BaseMod.jar",
        "/".join(spire_dir.split(os.path.sep) + ["mods", "BaseMod.jar"]))
    pom = pom.replace(
        "${basedir}/../lib/desktop-1.0.jar",
        "/".join(spire_dir.split(os.path.sep) + ["desktop-1.0.jar"]))
    jar_file = os.path.join(spire_dir, "mods", "EngiMod.jar")
    pom = pom.replace("../_ModTheSpire/mods/FruityMod.jar",
                      "/".join(jar_file.split(os.path.sep)))
    with open(os.path.join(mod_dir, "pom.xml"), "w", encoding="utf-8") as file:
        file.write(pom)

    # STEP: compile
    if os.path.exists(jar_file):
        os.remove(jar_file)
    with utils.cd(mod_dir):
        os.system("mvn package")
    if not os.path.exists(jar_file):
        print("Compilation failed")
        return

    # STEP: test
    with utils.cd(spire_dir):
        os.system("ModTheSpire.jar")
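# Sketch (an assumption about how the build script is invoked, not part of the
# original excerpt): a module-level entry point so the whole pipeline above can
# be run directly.
if __name__ == "__main__":
    build()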
parser.add_argument("--refine", type=bool, default=True, help="whether to refine the transmission estimated") opt = parser.parse_args() # path cal FILE_DIR = os.path.dirname(os.path.abspath(__file__)) BASE_ROOT = os.path.dirname(FILE_DIR) DST_ROOT = os.path.join(BASE_ROOT, 'res') utils.assure_dir(DST_ROOT) src_path = os.path.abspath(opt.input) dst_name = utils.format( os.path.split(src_path)[1], opt.window, opt.radius, opt.omega, opt.t_min, opt.refine) dst_path = os.path.join(DST_ROOT, dst_name) dehazer = dehaze.DarkPriorChannelDehaze(wsize=opt.window, radius=opt.radius, omega=opt.omega, t_min=opt.t_min, refine=opt.refine) src_img = cv2.imread(src_path, cv2.IMREAD_COLOR) img_dehaze = dehazer(src_img) cv2.imwrite(dst_path, img_dehaze) print('Saved to: {}'.format(dst_path))
print calculate_agreement(my_ratings, exclusive_fuzzy_rating)

data = {}
fuzzy_data = {}
for threshold in np.arange(10, 100, 10):
    data[threshold] = calculate_agreement(my_ratings, automatic_ratings[threshold])
    fuzzy_data[threshold] = calculate_agreement(
        [my_ratings[x] for x in fuzzy_idx[threshold]], fuzzy_ratings[threshold])

tpr, fpr = zip(*sorted(data.values(), key=lambda item: item[0]))
fuzzy_tpr, fuzzy_fpr = zip(*sorted(fuzzy_data.values(), key=lambda item: item[0]))
#print sorted(zip(tpr,fpr),key=lambda item:item[0])
#print np.dot(tpr,fpr)/float(len(tpr)) #(0.6529680365296804, 0.6295955882352942)

fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(fpr, tpr, 'k.-', label=tech.format('tokens'))
ax.plot([0.6666666666666666], [0.6341911764705882], 'k*', markersize=8, clip_on=False)
ax.plot([0.6529680365296804], [0.7795955882352942], 'kD', markersize=8, clip_on=False)
ax.plot(fuzzy_fpr, fuzzy_tpr, 'r.-', label=tech.format('fuzzy match'))
ax.plot([0, 1], [0, 1], 'k-')
tech.adjust_spines(ax)
ax.set_aspect('equal')
ax.set_xlabel(tech.format('False Positive Rate'))
ax.set_ylabel(tech.format('True Positive Rate'))
plt.legend(frameon=False, numpoints=1)
plt.savefig('all-fuzzy-roc-curve.png')
plt.savefig('all-fuzzy-roc-curve.tiff')
def get_datetime_list(bstream, endianness, val_type):
    attributes = bstream.read(8).int
    length = bstream.read(format(INT, endianness))
    dt = bstream.readlist(format_list(val_type, endianness, length))
    data = [datetime.datetime.fromordinal(int(x) + Y2KDAYS)
            + datetime.timedelta(milliseconds=x % 1 * MILLIS) for x in dt]
    return data
def get_nanodatetime_list(bstream, endianness, val_type):
    attributes = bstream.read(8).int
    length = bstream.read(format(INT, endianness))
    dt = bstream.readlist(format_list(val_type, endianness, length))
    data = [datetime.datetime.utcfromtimestamp(x / 1E9 + Y2KMILLIS) for x in dt]
    return data
def get_date_list(bstream, endianness, val_type):
    attributes = bstream.read(8).int
    length = bstream.read(format(INT, endianness))
    data = [datetime.datetime.fromordinal(x + Y2KDAYS)
            for x in bstream.readlist(format_list(val_type, endianness, length))]
    return data
def get_month_list(bstream, endianness, val_type):
    attributes = bstream.read(8).int
    length = bstream.read(format(INT, endianness))
    data = [get_date_from_q(x) for x in bstream.readlist(format_list(val_type, endianness, length))]
    return data
def get_char_list(bstream, endianness, val_type):
    attributes = bstream.read(8).int
    length = bstream.read(format(INT, endianness))
    nptype, bstype = format_raw_list(val_type, length)
    data = bstream.read(bstype).bytes
    return data
async def on_command_error(self, ctx, error):
    """The event triggered when an error is raised while invoking a command.

    ctx : Context
    error : Exception

    Adapted from https://gist.github.com/EvieePy/7822af90858ef65012ea500bcecf1612
    """
    # if hasattr(ctx.command, 'on_error'):
    #     return
    ignored = (commands.CommandNotFound, commands.CheckFailure)
    error = getattr(error, 'original', error)

    if isinstance(error, ignored):
        return
    elif isinstance(error, KeyError):
        command = self.client.get_command(ctx.invoked_with)
        responses = get_responses()[command.cog.qualified_name.lower()][command.name]
        embed = embed_template(
            title="An internal responder message error has occurred",
            description=f"Missing responder `{error.args[0]}` for command `{command.name}`")
        embed.color = 15138816
        await ctx.send(content=None, embed=embed)
    else:
        try:
            command = self.client.get_command(ctx.invoked_with)
            responses = get_responses()[command.cog.qualified_name.lower()][command.name]
            message = ""
            if isinstance(error, commands.MissingRequiredArgument):
                message = responses["error"][f"missing_{error.param.name}"]
            elif isinstance(error, commands.BadArgument):
                if len(error.args) == 1:
                    message = responses["error"][error.args[0]]
                elif len(error.args) == 2:
                    message = responses["error"][error.args[1]].format(
                        profile=format(error.args[0], 'single_code'))
            elif isinstance(error, discord.Forbidden):
                message = responses["error"]['forbidden']
            embed = embed_template(title="An error has occurred", description=message)
            embed.color = 15138816
            await ctx.send(content=None, embed=embed)
        except KeyError:
            command = self.client.get_command(ctx.invoked_with)
            responses = get_responses()[command.cog.qualified_name.lower()][command.name]
            embed = embed_template(
                title="An internal responder message error has occurred",
                description=f"Missing responder `{error.args[0]}` for command `{command.name}`")
            embed.color = 15138816
            await ctx.send(content=None, embed=embed)
            raise error
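# Sketch (not from the original cog): how a listener like on_command_error above
# is typically wired up in discord.py 1.x. ErrorHandler and setup() are
# illustrative names, and the listener body is elided.
from discord.ext import commands

class ErrorHandler(commands.Cog):
    def __init__(self, client):
        self.client = client

    @commands.Cog.listener()
    async def on_command_error(self, ctx, error):
        ...  # delegate to the handler shown above

def setup(client):
    client.add_cog(ErrorHandler(client))  # in discord.py 2.x, setup must be async and add_cog awaited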
def get(self, attr, default=""):
    return utils.format(super(BaseModel, self).get(attr, default))
epoch = 0

# TRAIN
for epoch in range(epoch + 1, NUM_EPOCHS + 1):
    total_loss = 0.0
    for batch_idx, batch in enumerate(dataloader):
        data, target = batch
        data = data.to(device)
        optimizer.zero_grad()
        output = model(data)
        if MODEL_TYPE == 'NEG':
            output = output.squeeze(1)
        target = format(target, MODEL_TYPE)
        loss = criterion(output, target)
        if MODEL_TYPE == 'NEG':
            negative_samples, negative_targets = dataset.generate_negative_samples(data, K)
            output = model(negative_samples).squeeze(1)
            negative_targets = format(negative_targets, MODEL_TYPE)
            loss += criterion(output, negative_targets)
        loss.backward()
        optimizer.step()
        total_loss += loss.item()
        if batch_idx % 100 == 0:
jaccard_matrices[disease] = np.array(
    [[tech.jaccard(corpus[sources[i]][disease], corpus[sources[j]][disease])
      for i in xrange(len(sources))] for j in xrange(len(sources))])
fig = plt.figure()
ax = fig.add_subplot(111)
cax = ax.imshow(jaccard_matrices[disease], interpolation='nearest', aspect='equal', vmin=0, vmax=1)
ax.set_xticks(range(len(sources)))
ax.set_yticks(range(len(sources)))
ax.set_xticklabels(map(tech.format, sources))
ax.set_yticklabels(map(tech.format, sources))
cbar = plt.colorbar(cax)
cbar.set_label(tech.format('Jaccard Similarity'))
fig.tight_layout()
plt.savefig('jaccard-similarity-%s-w-twitter' % disease)

cPickle.dump(jaccard_matrices, open('jaccard-similarities.json', WRITE))

#--- BOOTSTRAPPING
lens = [len(corpus[source][disease]) for source in sources]  # Does order matter?
amalgamated_corpus = ' '.join(' '.join(corpus[source][disease])
                              for disease in keywords for source in sources)
# N.B. Don't deduplicate -- must preserve original word frequencies for resampling
jaccard_distributions = tech.resample(amalgamated_corpus, n_partitions=len(lens),
                                      partition_sizes=lens, repetitions=10000,
                                      monitor=True, save=True)

fig = plt.figure()
ax = fig.add_subplot(111)
ax.hist(jaccard_distributions, color='k')
tech.adjust_spines(ax)
def transfer_to_target(source, target):
    print '--- '+source+' ---'
    corresponding_target = join(target, fname(source))
    print '\tTarget: '+corresponding_target
    if isdir(source):
        print '\tSource is a directory. Entering ...'
        for item in listdir(source):
            transfer_to_target(join(source, item), corresponding_target)
        print '\tLeaving '+source+' ...'
        if x.behavior in [smv, smerge]:
            try:
                rmdir(source)
            except:
                print 'FAILED TO REMOVE DIRECTORY'
        return
    elif isfile(source):
        print '\tSource is a file.'
        if not exists(corresponding_target):
            print '\tCorresponding target doesn\'t exist.'
            if not exists(target):
                mkdir(target)
            scp(source, target)
            hash_both(source, corresponding_target)
            if x.sourcehash == x.targethash and x.behavior in [smv, smerge]:
                remove(source)
            return
        elif isdir(corresponding_target):
            print 'Aborting: Unmergeable: Source File and Target Directory.'
            return
        elif isfile(corresponding_target):
            print '\tTarget exists and is a file.'
            sourcesize = getsize(source)
            print '\t'+source+': '+format(sourcesize)
            targetsize = getsize(corresponding_target)
            print '\t'+corresponding_target+': '+format(targetsize)
            if sourcesize == targetsize:
                hash_both(source, corresponding_target)
                if x.sourcehash != x.targethash:
                    if x.behavior != smerge:
                        print 'Aborting: Same filesize but content binary differs.'
                        return
                    else:
                        _source = source + '.' + x.sourcehash
                        print '\tDifferent file. Saving as '+_source+'.'
                        move(source, _source)
                        transfer_to_target(_source, target)
                        return
                else:
                    print '\tSuccess: File complete. Content binary equal.'
                    if x.behavior in [smv, smerge]:
                        remove(source)
                    return
            elif sourcesize > targetsize:  # smaller
                blocksize = 256*1024
                pos = (targetsize / blocksize) * blocksize

                def HashSource():
                    x.sourcehash = md5sum(source, end=targetsize)
                    print '\t'+source+' (partial): '+x.sourcehash
                thread1 = Thread(target=HashSource)

                def HashTarget():
                    x.targethash = md5sum(corresponding_target)
                    print '\t'+corresponding_target+': '+x.targethash
                thread2 = Thread(target=HashTarget)

                thread1.start()
                thread2.start()
                thread1.join()
                thread2.join()
                if x.sourcehash != x.targethash:
                    print 'Aborting: Partial content binary differs.'
                    return
                else:
                    from utils import run
                    from fsremote import login, path
                    if pos == 0:
                        print '\tRestarting transfer ...'
                        remove(corresponding_target)
                        scp(source, target)
                        hash_both(source, corresponding_target)
                        if x.sourcehash == x.targethash and x.behavior in [smv, smerge]:
                            remove(source)
                        return
                    else:
                        print '\tContinuing transfer ...'
                        truncate(corresponding_target, pos)
                        run('dd if="'+source+'" bs='+str(blocksize)+' skip='+str(pos/blocksize)
                            + ' | ssh -C '+login(target)+' dd of="'+path(corresponding_target)
                            + '" bs='+str(blocksize)+' seek='+str(pos/blocksize))
                        hash_both(source, corresponding_target)
                        if x.sourcehash == x.targethash and x.behavior in [smv, smerge]:
                            remove(source)
                        return
            elif sourcesize < targetsize:  # bigger
                print 'Aborting: Target file is bigger.'
                return
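# Sketch (an assumption, not the project's actual helper): a partial-capable
# md5sum(path, end=N) like the one referenced above would hash only the first
# `end` bytes, so a half-transferred target can be compared against the
# same-length prefix of the source before the dd/ssh resume step.
import hashlib

def md5sum(path, end=None, blocksize=256 * 1024):
    digest = hashlib.md5()
    remaining = end
    with open(path, 'rb') as f:
        while remaining is None or remaining > 0:
            n = blocksize if remaining is None else min(blocksize, remaining)
            chunk = f.read(n)
            if not chunk:
                break
            digest.update(chunk)
            if remaining is not None:
                remaining -= len(chunk)
    return digest.hexdigest()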
def get_nanodatetime(bstream, endianness, val_type):
    dt = bstream.read(format(val_type, endianness)) / 1E9
    return datetime.datetime.utcfromtimestamp(dt + Y2KMILLIS)
def get_datetime(bstream, endianness, val_type):
    dt = bstream.read(format(val_type, endianness))
    return (datetime.datetime.fromordinal(int(dt) + Y2KDAYS)
            + datetime.timedelta(milliseconds=dt % 1 * MILLIS))
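# Worked example of the epoch arithmetic above (the constants here are
# assumptions about the module-level Y2KDAYS/MILLIS): kdb+ encodes a datetime
# as fractional days since 2000-01-01, while fromordinal() counts days from
# 0001-01-01, so Y2KDAYS bridges the two epochs and MILLIS converts the
# fractional day to milliseconds.
import datetime
Y2KDAYS = datetime.date(2000, 1, 1).toordinal()   # 730120
MILLIS = 24 * 60 * 60 * 1000                      # milliseconds per day
dt = 42.5                                         # 42.5 days after 2000-01-01
value = (datetime.datetime.fromordinal(int(dt) + Y2KDAYS)
         + datetime.timedelta(milliseconds=dt % 1 * MILLIS))
assert value == datetime.datetime(2000, 2, 12, 12, 0)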
def get_month(bstream, endianness, val_type):
    return get_date_from_q(bstream.read(format(val_type, endianness)))
def currentsongf(self):
    song = self.currentsong()
    return utils.format(song)
def get_template(templater):
    return utils.format(templater.template, templater.template_args())
def get_bool_list(bstream, endianness, val_type):
    attributes = bstream.read(8).int
    length = bstream.read(format(INT, endianness))
    data = [bool(x) for i, x in enumerate(bstream.readlist(format_list(val_type, '', 2 * length)))
            if i % 2 == 1]
    return data
                    help='load trained model (default: off)')
parser.add_argument("-e", "--epoch-num", dest="n_epochs", default=1000, type=int,
                    help='Number of training epochs (default: 1000)')
parser.add_argument("-m", "--memory-size", dest="memory_size", default=5000, type=int,
                    help='Memory size (default: 5000)')
args = parser.parse_args()

start_date = utils.format(args.start_date)
end_date = utils.format(args.end_date)

env = Env(start_date, end_date)
agent = Agent(env.actions, len(env.columns), env.state_size, args.memory_size)
if args.load:
    print("[Agent] load model")
    agent.load_model()

terminal = False
n_epochs = args.n_epochs
loops = -1
e = 0
total_frame = 0
do_replay_count = 0
class SQLMixin(object):
    id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
    # `time` timestamps are in seconds; the others are in milliseconds
    created_time = Column(DateTime, default=lambda: format())
    updated_time = Column(DateTime, default=lambda: format())

    @classmethod
    def new(cls, form):
        m = cls()
        for name, value in form.items():
            setattr(m, name, value)
        m.save()
        return m.to_json()

    def save(self):
        db.session.add(self)
        db.session.commit()

    @classmethod
    def delete(cls, id):
        m = cls.one(id=id)
        db.session.delete(m)
        db.session.commit()

    @classmethod
    def update(cls, id, **kwargs):
        m = cls.query.filter_by(id=id).first()
        for name, value in kwargs.items():
            setattr(m, name, value)
        setattr(m, 'updated_time', format())
        m.save()
        return m.to_json()

    @classmethod
    def all(cls, **kwargs):
        ms = cls.query.filter_by(**kwargs).order_by(desc('updated_time')).all()
        ms = [m.to_json() for m in ms]
        return ms

    @classmethod
    def one(cls, **kwargs):
        m = cls.query.filter_by(**kwargs).first()
        return m

    @classmethod
    def columns(cls):
        return cls.__mapper__.c.items()

    def to_json(self):
        d = {}
        for attr, column in self.columns():
            if hasattr(self, attr):
                v = getattr(self, attr)
                if attr in ['updated_time', 'created_time']:
                    v = v.strftime("%Y-%m-%d %H:%M:%S")
                d[attr] = v
        return d

    def __repr__(self):
        """
        __repr__ is a magic method.
        In short, it produces the string representation of the class:
        print(u) is effectively print(u.__repr__()).
        Look it up if this is unclear.
        """
        name = self.__class__.__name__
        s = ''
        for attr, column in self.columns():
            if hasattr(self, attr):
                v = getattr(self, attr)
                s += '{}: ({})\n'.format(attr, v)
        return '< {}\n{} >\n'.format(name, s)