def chapter_one():
    """Chapter one of the game: a simple command loop in the first room.

    Recognised commands: "look", "search", "options", "stats", "test"
    (which ends the chapter). Fix vs. original: any *unrecognised* input
    previously left the loop spinning forever without re-prompting; we now
    prompt again on every iteration. The original also assigned
    room1/room2/room3 twice and never used them -- dead code removed.
    """
    #necessary imports
    from options import quick_pause
    from options import options

    #sets player health as "health" using SUD_classes
    obj = bat(100, 100)
    health = obj.HP

    #sets look variables using SUD_classes
    look = rooms("\nThis room is number one\n\n", "\nThis room is number two\n\n", "\nThis is room number three\n\n")

    #sets search variable using SUD_classes
    search = rooms("\nYou find one item, cuz it's room one\n\n", "\nYou find two items, cuz it's room two\n\n", "\nYou find three items, cuz it's room three\n\n")

    print(
        "\nYou totter towards the only exit of the room, the door, which is cracked open.\n"
    )
    quick_pause()

    choice1 = input("What do you do now? ")
    while True:
        if choice1 == "look":
            print(look.room_1)
        elif choice1 == "search":
            print(search.room_1)
        elif choice1 == "options":
            options()
        elif choice1 == "stats":
            print("\nYou are at: " + str(health) + " HP\n\n")
        elif choice1 == "test":
            break
        # re-prompt for every command, including unrecognised ones
        choice1 = input("What do you do now? ")
    print("test success!")
def login():
    """Prompt for credentials with dialog(1), then run the single-node
    HADOOP menu loop (create cluster / start / stop services).

    NOTE(review): the check accepts the login only when BOTH the username
    and the password read back from the temp files are empty strings --
    confirm this is the intended "no credentials" scheme.
    """
    # dialog writes the typed text to stderr, redirected into temp files;
    # the os.system return codes first stored in u/p are then overwritten
    # by the file contents.
    u = os.system("dialog --backtitle 'HADOOP' --title 'USERNAME' --inputbox 'enter your username' 7 30 2>/tmp/username.txt")
    p = os.system("dialog --backtitle 'HADOOP' --title 'PASSWORD' --insecure --passwordbox 'enter your password' 7 30 2>/tmp/password.txt")
    f = open("/tmp/username.txt")
    u = f.read()
    f.close()
    f1 = open("/tmp/password.txt")
    p = f1.read()
    f1.close()
    if u == "":
        if p == "":
            while True:
                # show the menu; the chosen item number lands in /tmp/menu.txt
                os.system("dialog --backtitle 'HADOOP' --title 'MENU' --menu 'select a option' 12 50 4 1 'create single node cluster' 2 'start all services' 3 'stop all services' 4 'Go back to main menu' 2>/tmp/menu.txt")
                m = open("/tmp/menu.txt")
                ch = m.read()
                m.close()
                #print type(ch)
                if ch == "1":
                    import single_node
                    single_node.chooseip()
                elif ch == "2":
                    import single_node
                    single_node.s_all()
                elif ch == "3":
                    import single_node
                    single_node.s_all1()
                elif ch == "4":
                    # confirm before leaving for the main options menu
                    q = os.system("dialog --backtitle 'HADOOP' --title 'WARNING' --yesno 'are you sure Y/N' 5 40")
                    if q == 0:  # dialog exit status 0 means "yes"
                        import options
                        options.options()
                        #exit()
                    else:
                        continue
                elif ch == "":
                    # Cancel pressed: same confirmation path as option 4
                    q = os.system("dialog --backtitle 'HADOOP' --title 'WARNING' --yesno 'are you sure Y/N' 5 40")
                    if q == 0:
                        import options
                        options.options()
                        #exit()
                    else:
                        continue
                else:
                    print "wrong choice"  # Python 2 print statement
        else:
            os.system("dialog --msgbox 'password is incorrect' 7 30")
            login()  # recurse back to the credential prompt
    else:
        os.system("dialog --msgbox 'username is incorrect' 7 30")
        login()
def menu():
    """Main pygame menu loop: draws four buttons (play / instructions /
    options / credits) and dispatches on left-click.

    Fix vs. original: `click` was read by the collidepoint checks before
    its first assignment (`click = False` only ran later in the frame),
    raising NameError on the very first iteration. It is now initialised
    before the loop.
    """
    click = False  # no click registered yet on the first frame
    while True:
        #!son.play()
        screen.blit(background, (0, 0))
        text('menu', font, (255, 255, 255), screen, 20, 20)
        mx, my = pygame.mouse.get_pos()
        button_1 = pygame.Rect(490, 300, 300, 75)
        button_2 = pygame.Rect(490, 400, 300, 75)
        button_3 = pygame.Rect(490, 500, 300, 75)
        button_4 = pygame.Rect(490, 600, 300, 75)
        # dispatch: a click inside a button opens the matching screen
        if button_1.collidepoint((mx, my)):
            if click:
                jeu()
        if button_2.collidepoint((mx, my)):
            if click:
                instructions()
        if button_3.collidepoint((mx, my)):
            if click:
                options()
        if button_4.collidepoint((mx, my)):
            if click:
                credits()
        pygame.draw.rect(screen, (0, 0, 0), button_1)
        pygame.draw.rect(screen, (0, 0, 0), button_2)
        pygame.draw.rect(screen, (0, 0, 0), button_3)
        pygame.draw.rect(screen, (0, 0, 0), button_4)
        text('JOUER', font, (255, 255, 255), screen, 575, 320)
        text('INSTRUCTIONS', font, (255, 255, 255), screen, 505, 420)
        text('OPTIONS', font, (255, 255, 255), screen, 555, 520)
        text('CREDITS', font, (255, 255, 255), screen, 555, 620)
        click = False
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                sys.exit()
            if event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    pygame.quit()
                    sys.exit()
            if event.type == MOUSEBUTTONDOWN:
                if event.button == 1:  # left button only
                    click = True
        pygame.display.update()
        mainClock.tick(60)  # cap at 60 FPS
def main_menu_enter(window, event, menu):
    """Handle pressing the Enter key in the main menu.

    Opens the screen matching the current selection:
    1 = game, 2 = scoreboard, 3 = options, 4 = quit.
    """
    if event.key != pygame.K_RETURN:
        return  # only the Enter key is handled here
    if menu == 1:
        tetris.tetris(window)
    elif menu == 2:
        scoreboard.scoreboard(window)
    elif menu == 3:
        options.options(window)
    elif menu == 4:
        pygame.quit()
def __init__(self, parent, id):
    """Initialise the DNA drawing window: load user options, reset the
    hover/click/selection state, and bind the mouse handlers."""
    # load options into self.opt
    self.opt = options.options()

    # variables to store current events like highlight etc.
    # for hover and click
    self.hittest = {}           # to store the hittest paths
    self.Highlight = None       # highlight this on mouse move (hover)
    self.HighlightClick = None  # highlight constantly (after a click)

    # for selection
    self.selectionDrawing = []
    self.selectionDrawingDirection = "right"
    self.selectionDrawingDiff = 0

    # initialise the window
    # NOTE(review): the `id` parameter is ignored; wx.ID_ANY is always passed
    DNApyBaseDrawingClass.__init__(self, parent, wx.ID_ANY)

    self.parent = parent
    # module-level selection shared with the rest of the application
    genbank.dna_selection = (1,1)

    self.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
    self.Bind(wx.EVT_LEFT_UP, self.OnLeftUp)
    #self.Bind(wx.EVT_RIGHT_UP, self.OnRightUp)
    self.Bind(wx.EVT_MOTION, self.OnMotion)
def loadConfluenceBrowser(self, window, confluence):
    """Build the Confluence browser side panel: fetch the site favicon,
    fill the tree with one row per Confluence space, and register the
    panel item on the window."""
    self.options = options.options()
    panel = window.get_side_panel()
    image = gtk.Image()
    # derive the favicon URL from the XML-RPC endpoint (Python 2 urllib)
    f = urllib.urlopen(self.options.url.replace('/rpc/xmlrpc', '/favicon.ico'))
    data = f.read()
    pbl = gtk.gdk.PixbufLoader()
    pbl.write(data)
    pixbuf = pbl.get_pixbuf()
    pbl.close()
    image.set_from_pixbuf(pixbuf)
    # columns: display name, space key, node kind tag
    self.treestore = gtk.TreeStore(str, str, str)
    self.confluence = confluence
    # one top-level row per Confluence space
    for parent in self.confluence.getSpaces():
        piter = self.treestore.append(None, (parent.name, parent.key, 'isSpace'))
    self.browser.set_model(self.treestore)
    self.browser.show_all()
    self.browser.queue_draw()
    #spaces = self.confluence.getSpaces()
    panel.add_item(self, "Confluence Browser", image)
    # store per window data in the window object
    # NOTE(review): windowdata is built but never attached to `window` here
    windowdata = {"ConfluenceBrowser": self}
def write(self):
    """Export the attendance rows for the current user id to the .xlsx
    file named in the filePath field, then return to the options window."""
    # class-level id set elsewhere (login screen); shadows the id() builtin
    id = database.database.id
    self.workbook = xlsxwriter.Workbook(self.filePath.text())
    self.worksheet = self.workbook.add_worksheet()
    self.data = database.database(
        "database", "attendance", 4,
        ["id", "TEXT", "name", "TEXT", "date", "TEXT", "state", "INTEGER"])
    info = np.asarray(self.data.select("attendance", 1, ["id", id]))
    print(info)
    titels = ["name", "date", "attendance"]
    # header row
    row = 0
    col = 0
    for i in titels:
        self.worksheet.write(row, col, i)
        col += 1
    dataToWrite = []
    for i in range(len(info)):
        # keep columns from index 2 on -- assumes select() rows carry the
        # (name, date, state) triple after two leading fields; TODO confirm
        dataToWrite.append(info[i, 2:])
    # drop duplicate rows before writing
    dataToWrite = np.unique(dataToWrite, axis=0)
    print(dataToWrite)
    row = 1
    for i in range(len(dataToWrite)):
        for j in range(3):
            self.worksheet.write(row, j, dataToWrite[i][j])
        row += 1
    self.workbook.close()
    # hand control back to the options window; keep a reference on self
    # so the new window is not garbage-collected
    self.op = options.options()
    self.close()
    self.op.show()
def activate(self, window):
    """Plugin entry point: show the options/login dialog and, if the
    login succeeded, connect to Confluence and populate the browser."""
    self.confluencewidget = ConfluenceBrowser(window)
    self.options = options.options(window, self.confluencewidget)
    if self.options.loginPassed is True:
        # second argument presumably enables verbose/secure mode -- TODO confirm
        self.confluence = confluencerpclib.Confluence(self.options.url, True)
        self.confluence.login(self.options.username, self.options.password)
        self.confluencewidget.loadConfluenceBrowser(window, self.confluence)
    pass
def bridge_chap_one():
    """Bridge scene before chapter one: keep asking until the player
    types "options", then show the options and start the chapter.

    Fix vs. original: the second branch was `elif choice_options !=
    "options"`, which is just the negation of the `if` -- replaced with a
    plain `else`.
    """
    #imports
    from options import options
    choice_options = input('Type "options" for available options: ')
    if choice_options == "options":
        options()
        quick_pause()
        chapter_one()
    else:
        # anything else: scold the player and ask again (recursion)
        quick_pause()
        print(
            "\nThat wasn't what I told you to do. Try again and don't f**k with me\n"
        )
        quick_pause()
        bridge_chap_one()
def main(argv): # These are features under development: # 1) @TODO: Generate the name using the path folder pieces. This is # working but we need to find a way to detect automatically when using # pieces or not. # 3) @TODO: Words to add to the special words list. # 4) @TODO: Add deep search higher than 1 using pagination links # get options from parameters rootpath, debug, force, use_pieces, deep, min_match, number_format = options( argv) for extension in VIDEO_EXTENSIONS: for infile in get_files(rootpath, extension): # remove path and extension filename = os.path.basename(infile)[:-len(extension) - 1] path = os.path.dirname(infile) + os.sep if debug == 1: print """ ------------------------------------------------------------------------------- """ print 'Starting process for "' + path + filename + '"' # do not search if already have subtitles and is not in force mode # @TODO: Allow different subtitle types if force == 0 and os.path.isfile(path + filename + '.' + SUBTITLE_EXTENSION): if debug == 1: print 'Subtitle already exists' else: # find season and episode in file name or path (use_pieces), # search and full search search, season_episode, full_search = real_name( filename, path, rootpath, use_pieces, number_format) if search in BANNED_SEARCH: if debug == 1: print 'Not a valid search' else: if (season_episode == False): if debug == 1: print 'Not valid series name' else: if debug == 1: print 'Valid series name' subtitle_url = best_subtitle_url(search, full_search, path + filename, min_match, force, deep, debug) if subtitle_url is False: if debug == 1: print "Best subtitle not found" if debug == 1: print 'Ending process for "' + path + filename + '"' print """
def main():
    """Application entry point: build the Qt app, show the main widget
    and enter the event loop until the user quits."""
    app = QApplication(sys.argv)
    opts = options()
    window = alapWidget(opts)
    window.show()
    # exec_() blocks until the last window closes; propagate its exit code
    sys.exit(app.exec_())
def getExperiment(config_file):
    """Assemble a full training experiment (model, data, losses,
    optimizer, scheduler, metrics, logger) from a config file and
    return it."""
    CONFIG_FILE_NAME = config_file
    # slower, but pinpoints the op producing NaN/inf during backward
    torch.autograd.set_detect_anomaly(True)
    args = options(CONFIG_FILE_NAME)
    print("The system will use following resource: {:}".format(
        args.argsCommon.device))
    print("Experiment Name: " + args.argsCommon.experiment_name)
    print("Experiment will be saved to " +
          args.argsCommon.experiment_save_path)
    device = torch.device("cuda:0" if torch.cuda.is_available()
                          and args.argsCommon.device == "gpu" else "cpu")
    dataloaders = getFlirAdasKaistCombinedDataLoaders(args.argsDataset)
    currentModel = model.make_model(args.argsModel)
    currentModel.to(device)
    # factories are resolved by name from this module's globals:
    # "get<Name>Loss" and "get<Name>Optimizer"
    possibles = globals().copy()
    loss_dict = dict()
    loss_dict["types"] = []
    loss_dict["functions"] = []
    loss_dict["weights"] = []
    # loss spec format: "w1*Name1+w2*Name2+..."
    for loss in args.argsLoss.loss.split('+'):
        weight, loss_type = loss.split('*')
        loss_dict["functions"].append(
            possibles.get('get' + loss_type + 'Loss')(args.argsLoss))
        loss_dict["weights"].append(float(weight))
        loss_dict["types"].append(loss_type)
    # extra slot for the combined/total loss in reporting
    loss_dict["types"].append('total')
    lr_scheduler, optimizer = possibles.get('get' + args.argsMethod.optimizer +
                                            'Optimizer')(
                                                currentModel.parameters(),
                                                args.argsMethod)
    method = getBaseMethod(currentModel, loss_dict, optimizer, args.argsMethod)
    benchmark = BenchmarkMetrics(args.argsBenchmark)
    logger = LoggerTensorBoard(
        args.argsCommon.experiment_save_path,
        args.argsCommon.experiment_save_path + '/tensorboard')
    experiment = getBaseExperiment(currentModel, dataloaders, loss_dict,
                                   method, optimizer, lr_scheduler, benchmark,
                                   logger, args.argsExperiment)
    return experiment
def correct(self): self.data=database.database("database","User",2,["name","TEXT","password","TEXT"]) # name=self.data.select("User","name",self.lineEdit.text().replace(" ", "")) # print(name) database.database.id=self.lineEdit.text().replace(" ", "") print(database.database.id) if (self.lineEdit.text() == "" or self.lineEdit_2.text() == ""): self.ui = null.null() if self.data.idFound("User",2,["name",self.lineEdit.text().replace(" ", ""),"password",self.lineEdit_2.text().replace(" ", "")]) : print("enter1") self.ui = options.options() self.close() self.ui.show() else: self.ui = incorrect.incorrect() self.close() self.ui.show()
def run_reprocess(wd=None):
    """Re-image the very-low-resolution QU polarisation cubes and
    optionally fpack-compress them in background threads.

    wd: working directory to chdir into first (None = stay where we are).
    """
    # by default assume we're in the working directory at this point
    if wd is not None:
        os.chdir(wd)
    update_status(None,'Running')
    solsfile = glob.glob('DDS3_full*smoothed.npz')
    if len(solsfile) < 1:
        die('Cannot find the correct solution file -- exiting')
    # NOTE(review): solsfile is not referenced again after this point
    solsfile = str(solsfile[0])
    o = options('reprocess-vlow.cfg',option_list)
    cubefiles=['image_full_vlow_QU.cube.dirty.fits','image_full_vlow_QU.cube.dirty.corr.fits']
    cthreads=[]   # running compression threads
    flist=[]      # files whose compression we started (for later cleanup)
    ddf_kw = {}
    # produce the QU cubes at vlow resolution
    do_polcubes('DATA','[DDS3_full_smoothed,DDS3_full_slow]',[o['image_uvmin'],1.600000],'image_full_vlow',ddf_kw,beamsize=o['vlow_psf_arcsec'],imsize=o['vlow_imsize'],cellsize=o['vlow_cell'],robust=o['vlow_robust'],options=o,catcher=None)
    if o['compress_polcubes']:
        for cubefile in cubefiles:
            if o['restart'] and os.path.isfile(cubefile+'.fz'):
                warn('Compressed cube file '+cubefile+'.fz already exists, not starting compression thread')
            else:
                report('Starting compression thread for '+cubefile)
                thread = threading.Thread(target=compress_fits, args=(cubefile,o['fpack_q']))
                thread.start()
                cthreads.append(thread)
                flist.append(cubefile)
    if o['compress_polcubes']:
        # cthreads and flist exist
        for thread in cthreads:
            # Python 2 API; spelled is_alive() in Python 3
            if thread.isAlive():
                warn('Waiting for a compression thread to finish')
                thread.join()
        if o['delete_compressed']:
            for f in flist:
                warn('Deleting compressed file %s' % f)
                os.remove(f)
    update_status(None,'Complete')
def login():
    """Prompt for credentials with dialog(1), then run the 17-option
    multi-node HADOOP administration menu.

    NOTE(review): the login is accepted only when BOTH the username and
    password read back from the temp files are empty strings -- confirm
    this "no credentials" scheme is intended.
    """
    # dialog writes the typed text to stderr, redirected into temp files;
    # the os.system return codes in u/p are overwritten by the file contents.
    u = os.system("dialog --backtitle 'HADOOP' --title 'USERNAME' --inputbox 'enter your username' 7 30 2>/tmp/username.txt")
    p = os.system("dialog --backtitle 'HADOOP' --title 'PASSWORD' --insecure --passwordbox 'enter your password' 7 30 2>/tmp/password.txt")
    f = open("/tmp/username.txt")
    u = f.read()
    f.close()
    f1 = open("/tmp/password.txt")
    p = f1.read()
    f1.close()
    if u == "":
        if p == "":
            while True:
                # main menu; the chosen item number lands in /tmp/menu.txt
                os.system("dialog --backtitle 'HADOOP' --title 'MENU' --menu 'select a option' 25 60 17 1 'create namenode and jobtracker' 2 'format, start namenode and jobtracker' 3 'create datanode and tasktracker' 4 'start datanode and tasktracker' 5 'HIVE (use this option when a cluster is ready)' 6 'use PIG (when a cluster is ready)' 7 'set high priority to a job' 8 'make a client' 9 'Decommision nodes' 10 'commision nodes' 11 'hbase' 12 'create hdfs users' 13 'set quota on space' 14 'set qouta on file' 15 'setup fair scheduler' 16 'upload a file' 17 'Go back to main menu' 2>/tmp/menu.txt")
                m = open("/tmp/menu.txt")
                ch = m.read()
                m.close()
                #print type(ch)
                if ch == "1":
                    #allip.all_ip()
                    namenode.chooseip()
                    #namenode()
                elif ch == "2":
                    namenode.chooseipstart()
                elif ch == "3":
                    datanode.chooseip1()
                elif ch == "4":
                    datanode.chooseipstart1()
                elif ch == "5":
                    datanode.hive()
                elif ch == "6":
                    datanode.pig()
                elif ch == "7":
                    datanode.priority()
                elif ch == "8":
                    datanode.client()
                elif ch == "9":
                    # decommission nodes: scan the subnet, pick the host whose
                    # index is cached in /tmp/snon.txt, then exclude the ips
                    # the user enters
                    os.system("dialog --infobox 'processing please wait...' 3 34")
                    ii = commands.getoutput('nmap -sP 192.168.109.0/24 | grep 192 | cut -d: -f 2 | cut -c 22-36 > /root/Desktop/hup.txt')
                    f = open("/root/Desktop/hup.txt")
                    ii = f.read()
                    f.close()
                    iilist = ii.split('\n')
                    f20 = open("/tmp/snon.txt")
                    d = f20.read()
                    f20.close()
                    i = d
                    i = int(i)
                    k = iilist[i]
                    name = k
                    allip.all_ip()
                    os.system("dialog --backtitle 'HADOOP' --inputbox 'enter ips to make that exclude from cluster' 8 60 2>/tmp/decomm.txt")
                    f20 = open("/tmp/decomm.txt")
                    d = f20.read()
                    f20.close()
                    dlist = d.split()  # NOTE(review): dlist unused; raw string passed on
                    deco = d
                    datanode.decomm(name, deco)
                elif ch == "10":
                    # commission nodes: mirror image of option 9
                    os.system("dialog --infobox 'processing please wait...' 3 34")
                    ii = commands.getoutput('nmap -sP 192.168.109.0/24 | grep 192 | cut -d: -f 2 | cut -c 22-36 > /root/Desktop/hup.txt')
                    f = open("/root/Desktop/hup.txt")
                    ii = f.read()
                    f.close()
                    iilist = ii.split('\n')
                    f20 = open("/tmp/snon.txt")
                    d = f20.read()
                    f20.close()
                    i = d
                    i = int(i)
                    k = iilist[i]
                    name = k
                    allip.all_ip()
                    os.system("dialog --backtitle 'HADOOP' --inputbox 'enter ips to make that include from cluster' 8 60 2>/tmp/comm.txt")
                    f20 = open("/tmp/comm.txt")
                    d = f20.read()
                    f20.close()
                    dlist = d.split()  # NOTE(review): dlist unused; raw string passed on
                    co = d
                    datanode.comm(name, co)
                    #datanode.comm()
                elif ch == "11":
                    datanode.hbase()
                elif ch == "12":
                    datanode.users()
                elif ch == "13":
                    datanode.spacequota()
                elif ch == "14":
                    datanode.filequota()
                elif ch == "15":
                    datanode.fairsch()
                elif ch == "16":
                    datanode.uploadfiles()
                elif ch == "17":
                    #os.system("dialog --backtitle 'HADOOP' --title 'WARNING' --inputbox 'are you sure Y/N' 5 40 2>/tmp/decision.txt")
                    #f2=open("/tmp/decision.txt")
                    #d=f2.read()
                    #f2.close()
                    #if d=="y" or d=="Y" or d=="yes" or d=="YES" or d=="Yes":
                    # confirm before leaving for the main options menu
                    q = os.system("dialog --backtitle 'HADOOP' --title 'WARNING' --yesno 'are you sure Y/N' 5 40")
                    if q == 0:  # dialog exit status 0 means "yes"
                        import options
                        options.options()
                        #exit()
                    else:
                        continue
                elif ch == "":
                    # Cancel pressed: same confirmation path as option 17
                    q = os.system("dialog --backtitle 'HADOOP' --title 'WARNING' --yesno 'are you sure Y/N' 5 40")
                    if q == 0:
                        import options
                        options.options()
                        #exit()
                    else:
                        continue
                else:
                    print "wrong choice"  # Python 2 print statement
        else:
            os.system("dialog --msgbox 'password is incorrect' 7 30")
            login()  # recurse back to the credential prompt
    else:
        os.system("dialog --msgbox 'username is incorrect' 7 30")
        login()
import numpy as np import numpy.random as rand import matplotlib.pyplot as plt import options import gol_utils as utils import copy import time import h5py print "Welcome to Game of Life" #Instantiates "options" object from "options" module and assigns it to options options = options.options() #Gave the user multiple options to modify attributes for the simulation options.parseArguments() #The following attributes are imported from options class in options module numx = options.numx numy = options.numy numsteps = options.numsteps numsleep = options.numsleep outputfile = options.outputfile inputfile = options.inputfile if inputfile == "": #Checks if an input file has been given, board = rand.rand(numx, numy) #Seeds a board with random numbers #between (inclusive) and #1 (noninclusive) for i in range(numx): #Rounds numbers to 0 or 1
r.add_facet_labels(t) plot_offsets(t, r.clist, 'red') if savefig is not None: plt.savefig(savefig) if __name__ == '__main__': # Main loop if len(sys.argv) < 2: warn( 'quality_pipeline.py must be called with at least one parameter file\nor a command-line option list.\nE.g "pipeline.py example.cfg second_example.cfg --solutions-robust=0.1"\nSee below for a complete list of possible options with their default values.' ) print_options(option_list) sys.exit(1) o = options(sys.argv[1:], option_list) if o['pbimage'] is None: die('pbimage must be specified') if o['nonpbimage'] is None: die('nonpbimage must be specified') if o['list'] is not None: # fix up the new list-type options for i, cat in enumerate(o['list']): try: o[cat] = o['filenames'][i] except: pass try: o[cat + '_matchrad'] = o['radii'][i] except: pass
def main(msin, config_path):
    """Run the LOFAR image quality pipeline: source finding, catalogue
    cross-matching/filtering, astrometric and flux-scale plots, and an
    image noise estimate. Returns 0 on completion. (Python 2 syntax.)

    NOTE(review): msin is never used in this function -- confirm it is
    kept only for interface compatibility.
    """
    o = options(config_path, option_list)
    if o['pbimage'] is None:
        die('pbimage must be specified')
    # fix up the new list-type options: per-catalogue filename, match
    # radius and flux factor (missing entries are silently skipped)
    for i, cat in enumerate(o['list']):
        try:
            o[cat] = o['filenames'][i]
        except:
            pass
        try:
            o[cat + '_matchrad'] = o['radii'][i]
        except:
            pass
        try:
            o[cat + '_fluxfactor'] = o['fluxfactor'][i]
        except:
            pass
    if o['logging'] is not None and not os.path.isdir(o['logging']):
        os.mkdir(o['logging'])
    # pybdsm source finding
    sfind_image(o['catprefix'], o['pbimage'], o['sfind_pixel_fraction'])
    # matching with catalogs
    for cat in o['list']:
        print 'Doing catalogue', cat
        crossmatch_image(o['catprefix'] + '.cat.fits', cat)
        # Filter catalogs (only keep isolated compact sources within 3deg of pointing centre)
        filter_catalog(o['catprefix'] + '.cat.fits',
                       o['catprefix'] + '.cat.fits_' + cat + '_match.fits',
                       o['pbimage'],
                       o['catprefix'] + '.cat.fits_' + cat + '_match_filtered.fits',
                       cat, options=o)
    # Astrometric plots
    if 'FIRST' in o['list']:
        report('Plotting position offsets')
        plot_position_offset(
            '%s.cat.fits_FIRST_match_filtered.fits' % o['catprefix'],
            o['pbimage'],
            '%s.cat.fits_FIRST_match_filtered_positions.png' % o['catprefix'],
            'FIRST', options=o)
        t = Table.read(o['catprefix'] + '.cat.fits_FIRST_match_filtered.fits')
        # bootstrap 16th/84th percentiles give a 1-sigma interval on the mean
        bsra = np.percentile(bootstrap(t['FIRST_dRA'], np.mean, 10000), (16, 84))
        bsdec = np.percentile(bootstrap(t['FIRST_dDEC'], np.mean, 10000), (16, 84))
        mdra = np.mean(t['FIRST_dRA'])
        mddec = np.mean(t['FIRST_dDEC'])
        print 'Mean delta RA is %.3f arcsec (1-sigma %.3f -- %.3f arcsec)' % (
            mdra, bsra[0], bsra[1])
        print 'Mean delta DEC is %.3f arcsec (1-sigma %.3f -- %.3f arcsec)' % (
            mddec, bsdec[0], bsdec[1])
        report('Plotting flux ratios')
        # Flux ratio plots (only compact sources)
        plot_flux_ratios(
            '%s.cat.fits_FIRST_match_filtered.fits' % o['catprefix'],
            o['pbimage'],
            '%s.cat.fits_FIRST_match_filtered_fluxerrors.png' % o['catprefix'],
            options=o)
    report('Plotting flux scale comparison')
    # Flux scale comparison plots
    if 'TGSS' in o['list']:
        plot_flux_errors(
            '%s.cat.fits_TGSS_match_filtered.fits' % o['catprefix'],
            o['pbimage'],
            '%s.cat.fits_TGSS_match_filtered_fluxratio.png' % o['catprefix'],
            'TGSS', options=o)
        t = Table.read(o['catprefix'] + '.cat.fits_TGSS_match_filtered.fits')
        # scale TGSS fluxes by the configured flux factor before comparing
        ratios = t['Total_flux'] / (t['TGSS_Total_flux'] / o['TGSS_fluxfactor'])
        bsratio = np.percentile(bootstrap(ratios, np.median, 10000), (16, 84))
        print 'Median LOFAR/TGSS ratio is %.3f (1-sigma %.3f -- %.3f)' % (
            np.median(ratios), bsratio[0], bsratio[1])
    if 'NVSS' in o['list']:
        t = Table.read(o['catprefix'] + '.cat.fits_NVSS_match_filtered.fits')
        # keep only sources brighter than 10 mJy
        t = t[t['Total_flux'] > 10e-3]
        ratios = t['Total_flux'] / t['NVSS_Total_flux']
        bsratio = np.percentile(bootstrap(ratios, np.median, 10000), (16, 84))
        print 'Median LOFAR/NVSS ratio is %.3f (1-sigma %.3f -- %.3f)' % (
            np.median(ratios), bsratio[0], bsratio[1])
    # Noise estimate
    hdu = fits.open(o['pbimage'])
    imagenoise = get_rms(hdu)
    print 'An estimate of the image noise is %.3f muJy/beam' % (imagenoise * 1E6)
    return 0
#Local Repo from options import options #Web service from flask import Flask, make_response, jsonify, request, redirect, url_for from flask.views import MethodView #GPIO import RPi.GPIO as GPIO #Ext REPO import time #Adding options op = options() #GPIO COMMANDS def setupGPIO(PIN): # which naming convention is to be used. GPIO.setmode(GPIO.BCM) # Open the pin #DEFAULT = 17 op.printText("+ SETUP FOR GPIO {} STARTED".format(PIN), False) GPIO.setup(PIN, GPIO.OUT) # Turning it off GPIO.output(PIN, GPIO.LOW) op.printText("+ GPIO {} TURNED OFF".format(PIN), False) op.printText("+ SETUP FOR GPIO {} ENDED".format(PIN), False)
if isinstance(t,str): t=Table.read(t) if 'Facet' not in t.columns: r.add_facet_labels(t) plot_offsets(t,r.clist,'red') if savefig is not None: plt.savefig(savefig) if __name__=='__main__': # Main loop if len(sys.argv)<2: warn('quality_pipeline.py must be called with at least one parameter file\nor a command-line option list.\nE.g "pipeline.py example.cfg second_example.cfg --solutions-robust=0.1"\nSee below for a complete list of possible options with their default values.') print_options(option_list) sys.exit(1) o=options(sys.argv[1:],option_list) if o['pbimage'] is None: die('pbimage must be specified') if o['nonpbimage'] is None: die('nonpbimage must be specified') if o['list'] is not None: # fix up the new list-type options for i,cat in enumerate(o['list']): try: o[cat]=o['filenames'][i] except: pass try: o[cat+'_matchrad']=o['radii'][i] except: pass
def napoleon(type_cartes, taille_jeu):
    """Napoleon solitaire game loop (pygame).

    type_cartes: card-image theme directory name.
    taille_jeu: total deck size; rules scale is taille_jeu / 4.
    """
    pygame.init()
    pygame.display.set_caption("MIASHS")
    ## Rule setup
    regles = int(taille_jeu / 4)
    fenetreX, fenetreY = (200+(regles+1)*80, 750)
    fenetre = pygame.display.set_mode((fenetreX, fenetreY))
    ## Load images
    fond = pygame.image.load("images/fond/fond.png")
    fond = pygame.transform.scale(fond, (fenetreX, fenetreY))
    repertoire_cartes = ("images/" + type_cartes + "/cartes/")
    liste_images = fonctions_generales.generation_jeu_aleatoire(repertoire_cartes, regles, 1)
    nombre_cartes = len(liste_images)
    cartes_dico = images(liste_images, type_cartes)
    lignes = 4
    colonnes = int(nombre_cartes / lignes)
    ## build a base list from cartes_dico, then shuffle it
    liste_cartes = [name for name in cartes_dico]  ## every card name in the dictionary
    ## append empty ("V00") cards at the end
    cartes_dico = rajoute_carte_vide(cartes_dico)
    for i in range(lignes):
        liste_cartes.append("V00.png")
    shuffle(liste_cartes)
    ## two-dimensional list (lignes x colonnes+1) holding the card names
    shuffled = [liste_cartes[x:x + colonnes + 1] for x in range(0, len(liste_cartes), colonnes + 1)]
    ## selection overlays (with transparency)
    select1 = pygame.image.load("images/select.png").convert_alpha()
    select2 = pygame.image.load("images/select2.png").convert_alpha()
    mouseX, mouseY = (-1,-1)
    select_depart = False  ## True once the source card has been selected
    coord_depart = (-1,-1)
    select_dest = False    ## True once the destination slot has been selected
    coord_dest = (-1,-1)
    myfont = pygame.font.SysFont("monospace", 20)
    game_started = False   ## becomes True when the user starts playing (used for the options)
    allow_redo = False     ## limits the user to a single undo
    redo = False           ## True when the user wants to undo one move
    regles_jeu = [regles, "start", "sup", "same_symbol", "ace on empty", "napoleon"]
    all_options = False
    while True:
        mouseX, mouseY = pygame.mouse.get_pos()
        for event in pygame.event.get():
            if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
                pygame.quit()
                sys.exit()
            elif event.type == MOUSEBUTTONDOWN:
                ## convert raw pixel coordinates to board coordinates
                tableauX, tableauY = (mouseX - 30) // 80, (mouseY - 30) // 118
                ## source card selection
                # NOTE(review): `selection` below is assigned only later in the
                # frame (from barre_laterale) -- a side-bar click during the
                # very first frame would hit an unbound name; confirm.
                if select_depart == False and 0 <= tableauX < colonnes + 1 and 0 <= tableauY < lignes and shuffled[tableauY][tableauX] != "V00.png":
                    coord_depart = tableauX, tableauY
                    select_depart = True
                ## if the user clicks a card instead of an empty slot as second choice
                elif select_dest == False and 0 <= tableauX < colonnes + 1 and 0 <= tableauY < lignes and shuffled[tableauY][tableauX] != "V00.png":
                    coord_depart = tableauX, tableauY
                    select_depart = True
                ## destination slot selection
                elif select_dest == False and 0 <= tableauX < colonnes + 1 and 0 <= tableauY < lignes and shuffled[tableauY][tableauX] == "V00.png":
                    coord_dest = tableauX, tableauY
                    select_dest = True
                elif selection == "menu":
                    principale.main()
                elif selection == "options":
                    type_cartes, restart = options.options(fenetre, type_cartes, taille_jeu, 1, all_options)
                    if restart:
                        napoleon(type_cartes, taille_jeu)
                    # reload the card images for the (possibly new) theme
                    # NOTE(review): placement relative to `if restart` reconstructed
                    repertoire_cartes = "images/" + type_cartes + "/cartes/"
                    cartes_dico = images(liste_images, type_cartes)
                    cartes_dico = rajoute_carte_vide(cartes_dico)
                elif selection == "retour" and allow_redo:
                    redo = True
            elif event.type == KEYDOWN:
                if event.key == K_o:
                    ## cheat key: sort the board
                    shuffled = cheat_ordonne(shuffled, lignes, colonnes)
                    time.sleep(0.2)
        ## draw background
        fenetre.blit(fond, (0,0))
        ## undo one move
        if redo:
            shuffled[memory_depart[1]][memory_depart[0]] = shuffled[memory_dest[1]][memory_dest[0]]
            shuffled[memory_dest[1]][memory_dest[0]] = "V00.png"
            redo = False
            allow_redo = False
        ## draw the cards
        for y in range(lignes):
            for x in range(colonnes + 1):  # + 1 for the empty card at the end of each row
                fenetre.blit(cartes_dico[shuffled[y][x]], (x * 80 + 30 , y * 118 + 30))
        ## outline the source card
        if select_depart:
            fenetre.blit(select1, (coord_depart[0] * 80 + 30 , coord_depart[1] * 118 + 30))  ## selection transparency
        ## outline the destination slot
        if select_dest:
            fenetre.blit(select2, (coord_dest[0] * 80 + 30 , coord_dest[1] * 118 + 30))  ## selection transparency
        ## draw the side-bar buttons and read which one is hovered/clicked
        selection = fonctions_generales.barre_laterale(fenetre, fenetreX, (mouseX,mouseY))
        pygame.display.flip()
        ## both ends selected: try to perform the move
        if select_dest:
            carte_depart = shuffled[coord_depart[1]][coord_depart[0]]
            carte_compare = shuffled[coord_dest[1]][coord_dest[0]-1]  # card left of the target slot
            regles_jeu[4] = coord_dest[0]
            if fonctions_generales.check_move(carte_depart, carte_compare, regles_jeu):
                # remember the move so one undo is possible
                memory_card = shuffled[coord_depart[1]][coord_depart[0]]
                memory_depart = coord_depart
                memory_dest = coord_dest
                shuffled[coord_dest[1]][coord_dest[0]] = shuffled[coord_depart[1]][coord_depart[0]]
                shuffled[coord_depart[1]][coord_depart[0]] = "V00.png"
                game_started = True  ## the game has started
                allow_redo = True
            # NOTE(review): reset/pause/end-check placement reconstructed from
            # collapsed source -- confirm against the original file
            select_depart,select_dest = False, False
            time.sleep(0.2)  ## small pause so the outline colours stay visible
            check_end(shuffled, lignes, colonnes, regles)
def login():
    """Prompt for credentials with dialog(1), then run the 17-option
    multi-node HADOOP administration menu.

    NOTE(review): the login is accepted only when BOTH the username and
    password read back from the temp files are empty strings -- confirm
    this "no credentials" scheme is intended.
    """
    # dialog writes the typed text to stderr, redirected into temp files;
    # the os.system return codes in u/p are overwritten by the file contents.
    u = os.system(
        "dialog --backtitle 'HADOOP' --title 'USERNAME' --inputbox 'enter your username' 7 30 2>/tmp/username.txt"
    )
    p = os.system(
        "dialog --backtitle 'HADOOP' --title 'PASSWORD' --insecure --passwordbox 'enter your password' 7 30 2>/tmp/password.txt"
    )
    f = open("/tmp/username.txt")
    u = f.read()
    f.close()
    f1 = open("/tmp/password.txt")
    p = f1.read()
    f1.close()
    if u == "":
        if p == "":
            while True:
                # main menu; the chosen item number lands in /tmp/menu.txt
                os.system(
                    "dialog --backtitle 'HADOOP' --title 'MENU' --menu 'select a option' 25 60 17 1 'create namenode and jobtracker' 2 'format, start namenode and jobtracker' 3 'create datanode and tasktracker' 4 'start datanode and tasktracker' 5 'HIVE (use this option when a cluster is ready)' 6 'use PIG (when a cluster is ready)' 7 'set high priority to a job' 8 'make a client' 9 'Decommision nodes' 10 'commision nodes' 11 'hbase' 12 'create hdfs users' 13 'set quota on space' 14 'set qouta on file' 15 'setup fair scheduler' 16 'upload a file' 17 'Go back to main menu' 2>/tmp/menu.txt"
                )
                m = open("/tmp/menu.txt")
                ch = m.read()
                m.close()
                #print type(ch)
                if ch == "1":
                    #allip.all_ip()
                    namenode.chooseip()
                    #namenode()
                elif ch == "2":
                    namenode.chooseipstart()
                elif ch == "3":
                    datanode.chooseip1()
                elif ch == "4":
                    datanode.chooseipstart1()
                elif ch == "5":
                    datanode.hive()
                elif ch == "6":
                    datanode.pig()
                elif ch == "7":
                    datanode.priority()
                elif ch == "8":
                    datanode.client()
                elif ch == "9":
                    # decommission nodes: scan the subnet, pick the host whose
                    # index is cached in /tmp/snon.txt, then exclude the ips
                    # entered by the user
                    os.system("dialog --infobox 'processing please wait...' 3 34")
                    ii = commands.getoutput(
                        'nmap -sP 192.168.109.0/24 | grep 192 | cut -d: -f 2 | cut -c 22-36 > /root/Desktop/hup.txt'
                    )
                    f = open("/root/Desktop/hup.txt")
                    ii = f.read()
                    f.close()
                    iilist = ii.split('\n')
                    f20 = open("/tmp/snon.txt")
                    d = f20.read()
                    f20.close()
                    i = d
                    i = int(i)
                    k = iilist[i]
                    name = k
                    allip.all_ip()
                    os.system(
                        "dialog --backtitle 'HADOOP' --inputbox 'enter ips to make that exclude from cluster' 8 60 2>/tmp/decomm.txt"
                    )
                    f20 = open("/tmp/decomm.txt")
                    d = f20.read()
                    f20.close()
                    dlist = d.split()  # NOTE(review): dlist unused; raw string passed on
                    deco = d
                    datanode.decomm(name, deco)
                elif ch == "10":
                    # commission nodes: mirror image of option 9
                    os.system(
                        "dialog --infobox 'processing please wait...' 3 34")
                    ii = commands.getoutput(
                        'nmap -sP 192.168.109.0/24 | grep 192 | cut -d: -f 2 | cut -c 22-36 > /root/Desktop/hup.txt'
                    )
                    f = open("/root/Desktop/hup.txt")
                    ii = f.read()
                    f.close()
                    iilist = ii.split('\n')
                    f20 = open("/tmp/snon.txt")
                    d = f20.read()
                    f20.close()
                    i = d
                    i = int(i)
                    k = iilist[i]
                    name = k
                    allip.all_ip()
                    os.system(
                        "dialog --backtitle 'HADOOP' --inputbox 'enter ips to make that include from cluster' 8 60 2>/tmp/comm.txt"
                    )
                    f20 = open("/tmp/comm.txt")
                    d = f20.read()
                    f20.close()
                    dlist = d.split()  # NOTE(review): dlist unused; raw string passed on
                    co = d
                    datanode.comm(name, co)
                    #datanode.comm()
                elif ch == "11":
                    datanode.hbase()
                elif ch == "12":
                    datanode.users()
                elif ch == "13":
                    datanode.spacequota()
                elif ch == "14":
                    datanode.filequota()
                elif ch == "15":
                    datanode.fairsch()
                elif ch == "16":
                    datanode.uploadfiles()
                elif ch == "17":
                    #os.system("dialog --backtitle 'HADOOP' --title 'WARNING' --inputbox 'are you sure Y/N' 5 40 2>/tmp/decision.txt")
                    #f2=open("/tmp/decision.txt")
                    #d=f2.read()
                    #f2.close()
                    #if d=="y" or d=="Y" or d=="yes" or d=="YES" or d=="Yes":
                    # confirm before leaving for the main options menu
                    q = os.system(
                        "dialog --backtitle 'HADOOP' --title 'WARNING' --yesno 'are you sure Y/N' 5 40"
                    )
                    if q == 0:  # dialog exit status 0 means "yes"
                        import options
                        options.options()
                        #exit()
                    else:
                        continue
                elif ch == "":
                    # Cancel pressed: same confirmation path as option 17
                    q = os.system(
                        "dialog --backtitle 'HADOOP' --title 'WARNING' --yesno 'are you sure Y/N' 5 40"
                    )
                    if q == 0:
                        import options
                        options.options()
                        #exit()
                    else:
                        continue
                else:
                    print "wrong choice"  # Python 2 print statement
        else:
            os.system("dialog --msgbox 'password is incorrect' 7 30")
            login()  # recurse back to the credential prompt
    else:
        os.system("dialog --msgbox 'username is incorrect' 7 30")
        login()
def main():
    """Read a YAML config, generate a palette mesh and compute ignition
    delay times in parallel over all CPU cores, optionally dumping the
    results to a text file. (Python 2 syntax.)"""
    # Read config file from command line
    config_file = sys.argv[1]
    yaml_file = open(config_file)
    config = yaml.safe_load(yaml_file)
    # Set options structure from the config mapping
    opt = options()
    opt.palette = config["test_palette"]
    opt.test_comp = config["test_composition"]
    opt.outer_center = config["outer_center"]
    opt.outer_intervals = config["outer_intervals"]
    opt.cti_file = config["cti_file"]
    opt.mixture = config["mixture_name"]
    opt.nx = config["palette_resolution"]
    opt.pres = config["pressure"]
    opt.temp = config["temperature"]
    opt.phi = config["equivalence_ratio"]
    opt.t_fin = config["final_time"]
    opt.target_mw = config["target_mw"]
    opt.target_hc = config["target_hc"]
    opt.output_file = config["output_file"]
    opt.override_targets = config["override_targets"]
    opt.write_output = config["write_output"]
    # Close YAML file
    yaml_file.close()
    # Set gas from the Cantera mechanism file
    gas = ct.Solution(opt.cti_file)
    # Check override
    check_override(gas, opt)
    # Dump options
    opt.dump()
    # Generate mesh
    mesh_data = mesh_generate_box(gas, opt)
    print "Mesh generation complete!"
    print "Number of points = ", (np.array(mesh_data)).shape[0]
    # Estimate number of processes: one worker per core
    nProcs = multiprocessing.cpu_count()
    print "Number of cores = ", nProcs
    # Create gas objects for all processes (each worker loads the mechanism once)
    pool = multiprocessing.Pool(processes=nProcs,
                                initializer=init_process,
                                initargs=(opt.cti_file, ))
    # start from a clean output file; ignore "file not found"
    try:
        os.remove(opt.output_file)
    except OSError:
        pass
    f = open(opt.output_file, 'a')
    res = pool.map(
        eval_idt,
        zip(itertools.repeat(opt.cti_file), itertools.repeat(opt), mesh_data))
    # End parallelism
    pool.close()
    pool.join()
    print "Computation finished!"
    # append "mesh point + result" rows to the output file
    dump_mat = []
    if (opt.write_output):
        for idx, mesh_point in enumerate(mesh_data):
            dump_point = np.hstack((mesh_point, res[idx]))
            dump_mat.append(dump_point)
        np.savetxt(f, dump_mat, fmt='%.3e')
    f.close()
import gym
import torch
from util import NormalizedActions
from collections import deque
from agent import Agent
import numpy as np
from options import options

# Parse command-line options; only the batch size is read here.
options = options()
opts = options.parse()
batch = opts.batch

# Wrap the walker environment so actions are normalized to its bounds.
env = NormalizedActions(gym.make('BipedalWalker-v2'))

from IPython.display import clear_output
import matplotlib.pyplot as plt

policy = Agent(env)

def plot(frame_idx, rewards):
    """Clear the notebook output and plot the reward history so far.

    frame_idx -- current frame/episode counter shown in the title.
    rewards   -- list of episode rewards; the last entry is displayed.
    """
    clear_output(True)
    plt.figure(figsize=(20,5))
    plt.subplot(131)
    plt.title('Episode %s. reward: %s' % (frame_idx, rewards[-1]))
    plt.plot(rewards)
    plt.show()

# Accumulates episode rewards for plotting.
rewards = []
def __init__(self, confluence):
    """Store the Confluence client and load this module's options.

    confluence -- client object used for all subsequent Confluence calls.
    """
    self.confluence = confluence
    self.options = options.options()
def exit(self):
    """Close this window and show the options window instead."""
    # NOTE(review): the options window is created before self.close() —
    # presumably so there is never a moment with no window; confirm.
    self.op = options.options()
    self.close()
    self.op.show()
stateDict = { 'net': net.state_dict(), 'acc': acc, 'epoch': epoch, } torch.save(stateDict, checkpointPath+dataset+modelName+'Epoch'+str(epoch)+'.pth') if __name__ == '__main__': ##Parsing options dataset,checkpointPath,resumeTrainingBool,learningRates,epochs, modelName, sgdMomentum, sgdWeightDecay, trainingBatchSize = options().parseOptions() ##Check whether learning rate array and epoch array has same dimension if len(learningRates) != len(epochs): sys.exit('Learning rate array and epoch array must have same dimension') device = 'cuda' if torch.cuda.is_available() else 'cpu' ##defining device net = defineModelArchitecture().importModel(dataset,modelName) ##importing the network net = net.to(device) ##copying network to device if device == 'cuda': net = torch.nn.DataParallel(net) cudnn.benchmark = True currentEpochNumber = 0 ##current epoch number starts from 0 unless training is resumed from checkpoint if resumeTrainingBool: # checking whether to resume from checkpoint
def login():
    """Prompt for username/password via dialog(1), then run the
    automatic-cluster setup menu loop.

    NOTE(review): authentication "succeeds" only when both temp files are
    empty, i.e. the user submits empty input boxes — confirm this is the
    intended credential check.
    """
    # dialog writes the typed value to the redirected temp file; the
    # os.system return codes bound to u/p here are overwritten below.
    u=os.system("dialog --backtitle 'HADOOP' --title 'USERNAME' --inputbox 'enter your username' 7 30 2>/tmp/username.txt")
    p=os.system("dialog --backtitle 'HADOOP' --title 'PASSWORD' --insecure --passwordbox 'enter your password' 7 30 2>/tmp/password.txt")
    f=open("/tmp/username.txt")
    u=f.read()
    f.close()
    f1=open("/tmp/password.txt")
    p=f1.read()
    f1.close()
    if u=="":
        if p=="":
            while True:
                # Main menu; the chosen item number is written to /tmp/menu.txt.
                os.system("dialog --backtitle 'HADOOP' --title 'MENU' --menu 'select a option' 12 50 2 1 'create automatical cluster' 2 'go back to main menu' 2>/tmp/menu.txt")
                m=open("/tmp/menu.txt")
                ch=m.read()
                m.close()
                #print type(ch)
                if ch=="1":
                    # Find the net id of the current computer.
                    os.system("dialog --infobox 'processing please wait...' 3 34")
                    net=commands.getoutput("ifconfig eth0 | grep 192 | awk '{print$2}'|cut -c 6-20")
                    net=net+'/24'
                    # Scan the subnet for live hosts.
                    netip=commands.getoutput("nmap -sP %s | grep 192 | awk '{print$5}'"%net)
                    netiplist=netip.split('\n')
                    # Find total ram memory of each active ip in the network.
                    memo=dict()  # dictionary containing memory info of all active ips
                    #print "hi"
                    for item in netiplist:
                        # Skip infrastructure addresses (gateway/host/broadcast-ish).
                        if item=='192.168.109.1' or item=='192.168.109.2' or item=='192.168.109.254':
                            continue
                        x=commands.getoutput("ssh %s free -m | grep Mem | awk '{print$2}'"%item)
                        memo[item]=x
                    # Sort hosts by RAM (NOTE(review): string comparison of
                    # free(1) output, not numeric — verify ordering is intended);
                    # the first entry becomes the namenode.
                    sorted_x = sorted(memo.items(), key=operator.itemgetter(1))
                    nn_ip = sorted_x[0][0]
                    k=nn_ip
                    #print k
                    # Configure and start the namenode, then prepare datanode configs.
                    n_hdfs(k)
                    n_core(k)
                    n_mapred(k)
                    n_start(k)
                    d_hdfs(k)
                    d_core(k)
                    d_mapred(k)
                    # Count hosts, then start every non-namenode host as a
                    # datanode in its own thread.
                    i=0
                    for df in sorted_x:
                        i+=1
                    #print i
                    j=1
                    for fd in sorted_x:
                        if j<i:
                            #print sorted_x[j][0]
                            k=sorted_x[j][0]
                            thread.start_new_thread(copy,(k,))
                            thread.start_new_thread(d_start,(k,))
                            j+=1
                elif ch=="2":
                    # Back to the main menu after a yes/no confirmation.
                    q=os.system("dialog --backtitle 'HADOOP' --title 'WARNING' --yesno 'are you sure Y/N' 5 40")
                    if q==0:
                        import options
                        options.options()
                        #exit()
                    else:
                        continue
                elif ch=="":
                    # Cancel/ESC leaves the menu file empty; treat like "go back".
                    q=os.system("dialog --backtitle 'HADOOP' --title 'WARNING' --yesno 'are you sure Y/N' 5 40")
                    if q==0:
                        import options
                        options.options()
                        #exit()
                    else:
                        continue
                else:
                    print "wrong choice"
        else:
            os.system("dialog --msgbox 'password is incorrect' 7 30")
            login()
    else:
        os.system("dialog --msgbox 'username is incorrect' 7 30")
        login()
from gen_tpu_for_vtr import bit_vector
from gen_tpu_for_vtr import process_arrays
from gen_tpu_for_vtr import process_LO
import os

# Concatenate every Verilog source under verilog/ into one buffer.
# Fix: all file handles are now managed with `with` blocks so they are
# closed even if a read/write raises (the original leaked handles on error).
entries = os.listdir("verilog")
data = ""
for entry in entries:
    filename = "verilog/" + entry
    with open(filename, 'r') as f:
        data += f.read()
        data += "\n"

# Write the generated option and matmul-define headers first...
with open("verilog/final.v", "w") as f:
    uut = options(f)
    uut.write()
    uut = matmul_defines(f)
    uut.write()

# ...then append the concatenated sources after the headers.
with open("verilog/final.v", 'a') as f:
    f.write(data)

# Post-process array constructs from final.v into final2.v.
with open("verilog/final.v", 'r') as f, open("verilog/final2.v", 'w') as f2:
    process_arrays(f, f2)
from dataloaders.kaistDataset import KaistDataset
from options import options
import torch

# Experiment configuration used to build both dataset splits.
CONFIG_FILE_NAME = "../configs/encoderDecoderFusionv2.ini"
args = options(CONFIG_FILE_NAME)

dataset_train = KaistDataset(args.argsDataset, train=True)
dataset_test = KaistDataset(args.argsDataset, train=False)

def checkImageValidity(image):
    """Return a scalar boolean tensor that is True when `image` is invalid,
    i.e. contains any NaN, any Inf, or any negative value."""
    result1 = torch.isnan(image).any()
    result2 = torch.isinf(image).any()
    result3 = (image < 0).any()
    # NOTE(review): `result1 | result2 | result3` is the idiomatic spelling
    # of these explicit __or__ calls.
    result = result1.__or__(result2)
    result = result.__or__(result3)
    return result

# Scan the training split and record the paths of images that fail validation.
bad_indexs_inp = []
bad_indexs_gt = []
f = open('bad_sections.txt', 'a')
print(len(dataset_train))
for index in range(len(dataset_train)):
    if index % 1000 == 0:
        print(f'{index} file processed')
    data = dataset_train.__getitem__(index)
    # assumes data['inputs'] is an iterable of image tensors — TODO confirm
    for image in data['inputs']:
        if checkImageValidity(image):
            bad_indexs_inp.append(dataset_train.imageFiles[0][index])
            print(f'Invalid image path in inputs: {dataset_train.imageFiles[0][index]}, {index}')
import sys
sys.path.append('../utils')
sys.path.append('../model')
import options
import importlib
from utils_loadModel import *
from collections import OrderedDict

# get input argument
args = options.options()
args.initialize()
args = args.parser.parse_args()
print args.coarseModel

# load model: coarse, fine and detail networks from the parsed checkpoints
coarseModel, fineModel, detailModel = loadModels(args)
print type(coarseModel)

from skimage import io
import os
import numpy as np
import time
import torch.optim as optim
from torch.autograd import Variable
import torchvision.transforms as transforms
import torch
from tensorboardX import SummaryWriter

# set random seed for all possible randomness
import torch.nn as nn
import torch.nn.functional as F
from utils import ReverseLayerF
from options import options
import torchvision.models as models

# options
opt = options().opt
extractor = opt.extractor          # name of the torchvision model used as backbone
classifier = opt.classifier
discriminator = opt.discriminator

class Extractor(nn.Module):
    """Feature extractor built from a torchvision backbone with its last
    four child modules removed."""

    def __init__(self):
        super(Extractor, self).__init__()
        # Instantiate the backbone by name and drop its last 4 children
        # (typically the deepest block(s), pooling and the fc head).
        model = models.__dict__[extractor]()
        newmodel = nn.Sequential(*(list(model.children())[:-4]))
        self.extractor = newmodel

    def forward(self, x):
        x = self.extractor(x)
        # NOTE(review): debug print left in the forward pass — consider removing.
        print(x.size())
        # Flatten to (N, 128*32*32); assumes the truncated backbone emits
        # (N, 128, 32, 32) feature maps — TODO confirm for the chosen model.
        x = x.view(-1, 128 * 32 * 32)
        return x

# class Extractor(nn.Module):
#     def __init__(self):
#         super(Extractor, self).__init__()
#         self.extractor = nn.Sequential(
#option5 = options() #option5.eta = 0.02 #option5.maxepoch = 100 #option5.num_hid =61 #config.append(option1) #config.append(option5) #config.append(option3) # initialize CUDA print 'initializing CUDA' cm.cublas_init() cm.CUDAMatrix.init_random(1) print 'CUDA initialized' config = [] option0 = options() option0.eta = 0.002 option0.maxepoch = 200 option0.num_hid = 2000 option1 = options() option1.eta = 0.02 option1.maxepoch = 100 option1.num_hid = 2000 option2 = options() option2.eta = 0.01 option2.maxepoch = 200 option2.num_hid = 61 config.append(option1)#gaussian layer config.append(option1)#binary layer2 config.append(option1)#binary layer3 config.append(option1)#binary layer4
import copy
import random
import threading
import time
import os
import translate
import kotoha_player
from privval import *
import sys
import options
from idol_db import *

# Module-wide bot state.
client = Client()
kot_player = None
current_vc = None            # currently joined voice channel, if any
option = options.options()   # shared options instance
active = True
music_list = []
# NOTE(review): kot_player is assigned None twice; the second assignment
# below is redundant.
kot_player = None
flg = False
connected = False
idolJson = GetIdolJson()

async def reload():
    """Refresh the cached server and channel lists for the client."""
    update_servers(client)
    update_channels(client)
This is called globals.py, but it actually parses all the arguments and performs the global OpenRAM setup as well. """ import os import debug import shutil import optparse import options import sys import re import importlib USAGE = "Usage: openram.py [options] <config file>\nUse -h for help.\n" # Anonymous object that will be the options OPTS = options.options() def parse_args(): """ Parse the optional arguments for OpenRAM """ global OPTS option_list = { optparse.make_option("-b", "--backannotated", action="store_true", dest="run_pex", help="Back annotate simulation"), optparse.make_option("-o", "--output",
def __init__(self,argvs,rpt): ## { g_opt = options(); g_opt.set_support('eda','\n\tspecify the eda tool, for now, valid only for vcs tool, \n\tand interfaces will be retained for other eda tools.','edatool'); g_opt.set_support('help','\n\thelp option to display all valid options for fgen tool.'); g_opt.set_support('rtl','\n\tspecify the rtl type to lead the tool to generate the different \n\tcombination of compile.lis. Default rtl_type is \'asic\'.','rtl_type'); g_opt.set_support('path','\n\tspecify the path to generate the filelist, this is an optional param, \n\tif not specified, the default path: <proj_home>/out/sim/<rtl_type>/tests/ will be used.','dest_path'); g_opt.set_support('proj_home','\n\tthe project home specified by caller.','path'); g_opt.set_support('erf','\n\texternal rtl file, this means the file should be added to rtl.lis, this option used to load common rtls, \n\twith different rtl mode, the erf can specify corresponding rtl modules.','file_name'); g_opt.set_support('esf','\n\texternal sim_model file, this means the file should be added to sim_model.lis, this option used to load common sim_models, \n\twith different rtl mode, the esf can specify corresponding sim_model modules.','file_name'); g_opt.set_support('uvc','\n\tspecify the uvc package with path.','uvc_pkg'); g_opt.set_support('y','\n\tto specify the library path, by different eda tools, this option will be \n\ttranslated to the corresponding options recognized by the tool. Besides, \n\tthis option only support the eda tool that can support filelist option insertion.','library'); g_opt.set_support('libext','\n\tthis option work together with -y option, to specify the \n\tsearching file with specified extension. This option can be input \n\twith multiple times for specifying multiple extensions.','extensions'); g_opt.set_support('target','\n\tthe option specify the destined usage for calling this tool. 
\n\tthe target name for now are valid within: \n <sim>: for simulation, in this target, the compile.lis, rtl.lis, \n\tdpi.lis, fsim.lis and sim_model.lis will be generated nomatter files above exists or not. \n <wav>: for wave vision, in this target, wave.lis, rtl.lis, fsim.lis \n\tand sim_model.lis will be generated nomatter files above exists or not.','target_name'); g_opt.set_support('pt','\n\tthe project type option, valid only within IP, SUBS and SOC.','proj_type'); g_opt.set_support('debug','\n\tthe debug enable switch, use this option to display debug information.'); ## -- option process ------------------------------------------------------------------------- if g_opt.load(argvs) == False: ## { rpt.fatal("program fatal occurred when check option."); exit(1); ## } self.g_proj_home = g_opt.get_param('proj_home'); if self.g_proj_home == False: self.g_proj_home = './'; ## if the proj_home is not specified by user, then use default value: './' ## the var that indicates the eda tool user want to use, then we will generate corresponding file list. ## value valid in range of: 'vcs' and 'xcelium'. self.g_eda = g_opt.get_param('eda'); if self.g_eda == False: self.g_eda = 'vcs'; ## default use vcs tool if no user entered -eda while g_opt.exists('y'): ## { ## while the 'y' param exists, then to pop the param until all user entered -y are poped from the argv pool self.g_lpaths.append(g_opt.get_param('y')); ## } while g_opt.exists('libext'): ## { ## and this option can be specified by user with multiple times self.g_libext.append(g_opt.get_param('libext')); ## } self.g_help = g_opt.exists('help'); self.g_debug = g_opt.exists('debug'); self.g_rtl_t = g_opt.get_param('rtl'); if self.g_rtl_t == False: self.g_rtl_t = 'asic'; ## if get the False type, then use the default value: 'asic' self.g_target = g_opt.get_param('target'); if self.g_target == False: self.g_target = 'sim'; ## if get the False param, then use default: 'sim'. 
self.g_pt = g_opt.get_param('pt'); if self.g_pt == False: self.g_pt = 'IP'; ## if get the False param, then use default: 'IP' self.g_o_path = g_opt.get_param('path'); if self.g_o_path == False: self.g_o_path = self.g_proj_home+'/out/sim/'+self.g_rtl_t+'/tests'; ## if the path not specified by user, then use default output path . while g_opt.exists('erf'); ## {
SocketServer.ThreadingTCPServer.__init__(self,bind_to,handler) def verify_request(self,reqsocket,client_address): hosts = map(gethostbyname,self.options.get('hosts',[])) if hosts and client_address[0] not in hosts: print "Hosts allowed:",hosts print "Rejecting connection from", client_address[0] return 0 else: return 1 # MAIN ------------------------------------------------------------------------ if __name__ == '__main__': syntax={'--port':'21','--hosts':[],'--start':0,'--dir':'.', '--allow':('.txt','.ux','.csv','.doc','.html','.rtf'), '--allow-also':[], '--help':0,'--once':0} opts=options(syntax,sys.argv[1:]) try: hosts = opts['--hosts'] start = opts['--start'] base = opts['--dir'] allow = list(opts['--allow']) + opts['--allow-also'] port = int(opts['--port']) once = opts['--once'] help = opts['--help'] except: port = 21 start = 0 base = '.' hosts = [] allow = ['.txt'] once = 0
def golf(type_cartes, taille_jeu):
    """Run the Golf solitaire game loop with pygame.

    type_cartes -- card-image theme directory name under images/.
    taille_jeu  -- total deck size; the rule set is derived as taille_jeu / 4.
    """
    pygame.init()
    pygame.display.set_caption("MIASHS")

    ## Rule definition
    regles = int(taille_jeu / 4)
    ## derive rows and columns from the rules
    lignes, colonnes = taille_golf(regles)
    fenetreX, fenetreY = 80+(colonnes*75)+((colonnes-1)*45)+80, 750
    fenetre = pygame.display.set_mode((fenetreX, fenetreY))

    ## Load the images
    fond = pygame.image.load("images/fond/fond.png").convert()
    fond = pygame.transform.scale(fond, (fenetreX, fenetreY))
    select = pygame.image.load("images/select.png").convert_alpha()
    repertoire_cartes = ("images/" + type_cartes + "/cartes/")
    liste_images = fonctions_generales.generation_jeu_aleatoire(repertoire_cartes, regles, 1)
    cartes_dico = images(liste_images, type_cartes)

    ## build a base card list from cartes_dico and shuffle it
    liste_cartes = [name for name in cartes_dico]
    shuffle(liste_cartes)
    # Deal the tableau column by column; the remainder becomes the stock pile.
    tableau_cartes = [liste_cartes[x:x+lignes] for x in range(0, colonnes * lignes, lignes)]
    pioche_cartes = liste_cartes[colonnes * lignes:]

    ## card back (for the stock pile), plus the empty placeholder card
    dos = pygame.image.load("images/" + type_cartes + "/dos/dos.png").convert_alpha()
    cartes_dico = rajoute_carte_vide(cartes_dico)
    carte_pioche = "V00.png"
    mouseX, mouseY = (-1,-1)
    select_card = False
    coord_card = (-1,-1)
    pioche = False
    myfont = pygame.font.SysFont("monospace", 20)
    allow_redo = False  ## limits the user to a single consecutive 'redo'
    redo = False  ## set when the user wants to take back one move
    last_move = ''  ## 'pioche' or 'tableau': records the user's last move type
    regles_jeu = [regles, "start", "both+", "any", "hello", "golf"]  ## consumed by check_move
    all_options = False
    while True:
        mouseX, mouseY = pygame.mouse.get_pos()
        for event in pygame.event.get():
            if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
                pygame.quit()
                sys.exit()
            elif event.type == MOUSEBUTTONDOWN:
                click_type, ind = check_mouse((mouseX, mouseY), tableau_cartes, len(pioche_cartes), colonnes, (fenetreX, fenetreY))
                if click_type == "cartes" and carte_pioche != "V00.png":
                    # Select the bottom card of the clicked column.
                    coord_card = (mouseX - 80) // 120, len(tableau_cartes[ind]) - 1
                    select_card = True
                    game_started = True
                    last_move = click_type
                elif click_type == "pioche":
                    # Draw from the stock pile, remembering the previous face-up card.
                    memory_pioche = carte_pioche
                    carte_pioche = pioche_cartes.pop()
                    allow_redo = True
                    last_move = click_type
                # NOTE(review): `selection` is assigned near the end of the
                # loop body; on the very first iteration these branches read
                # it before assignment — confirm this cannot be reached then.
                elif selection == "menu":
                    principale.main()
                elif selection == "options":
                    type_cartes, restart = options.options(fenetre, type_cartes, taille_jeu, 1, all_options)
                    if restart:
                        golf(type_cartes, taille_jeu)
                    # Reload card images for the (possibly new) theme.
                    repertoire_cartes = "images/" + type_cartes + "/cartes/"
                    cartes_dico = images(liste_images, type_cartes)
                    cartes_dico = rajoute_carte_vide(cartes_dico)
                elif selection == "retour" and allow_redo:
                    start_time = pygame.time.get_ticks()
                    redo = True
            elif event.type == KEYDOWN:
                if event.key == K_p:
                    print(fonctions_generales.ordre_valeurs(regles, "start"))

        ## draw the background
        fenetre.blit(fond, (0,0))

        ## take back one move
        if redo:
            if last_move == 'cartes':
                # Return the face-up card to the column it came from.
                tableau_cartes[memory_coord[0]].append(carte_pioche)
                carte_pioche = memory_pioche
                redo = False
                allow_redo = False
            elif last_move == 'pioche':
                # Put the drawn card back on the stock pile.
                pioche_cartes.append(carte_pioche)
                carte_pioche = memory_pioche
                redo = False
                allow_redo = False

        ## draw the tableau cards
        for x in range(colonnes):
            for y in range(lignes):
                try:
                    fenetre.blit(cartes_dico[tableau_cartes[x][y]], (x * 120 + 80, y * 50 + 30))
                # NOTE(review): bare except used to stop at a short column;
                # catching IndexError explicitly would be safer.
                except:
                    break

        ## draw the stock pile
        for i in range(len(pioche_cartes)):
            fenetre.blit(dos, (80 + i * 5, 400))

        ## draw the selection outline (+ 1 on colonnes for the empty start column)
        if select_card == True:
            fenetre.blit(select, (coord_card[0] * 120 + 80 , coord_card[1] * 50 + 30))

        ## draw the face-up card
        fenetre.blit(cartes_dico[carte_pioche], (300, 400))

        ## draw the side buttons
        selection = fonctions_generales.barre_laterale(fenetre, fenetreX, (mouseX,mouseY))
        pygame.display.flip()

        ## after the display flip so that the time.sleep is perceptible
        if select_card:
            carte_select = tableau_cartes[coord_card[0]][coord_card[1]]
            if fonctions_generales.check_move(carte_select, carte_pioche, regles_jeu):
                # Legal move: remember state for redo, then move the card
                # from the tableau onto the face-up pile.
                memory_pioche = carte_pioche
                memory_coord = coord_card[0], coord_card[1]
                carte_pioche = tableau_cartes[coord_card[0]][coord_card[1]]
                del(tableau_cartes[coord_card[0]][coord_card[1]])
                allow_redo = True
            select_card = False
            time.sleep(0.2)
        check_end(tableau_cartes, carte_pioche, pioche_cartes, regles_jeu)
''' msg_data = json.loads(msg_json) msg_id = msg_data['msg_id'] response = { 'status_code': 200, 'status_text': 'OK', 'data': 'what up?', 'msg_id': msg_id, } response_json = json.dumps(response) print 'Sending back to Tornado: %s' % (response_json) stream.send_multipart(response_json) ''' if __name__ == '__main__': opts = options.options() # print 'opts=', repr(opts) # print ' logging=', opts.logging # logging.info('WTF???') ''' goptions = dict() if len(sys.argv) > 1: goptions['immediate_flush'] = False ''' #EchoRouter = SockJSRouter(ModSock, '/sock', options) # Модуль 1. Постоянное соединение ''' TestRouter = [('/info/control', ModHTTP), ('/info/clients', Clients), (r'/test(.*)', TestHandler)] # Модуль 2. HTTP управление settings = { "template_path": os.path.join(os.path.dirname(__file__), "templates"), "static_path": os.path.join(os.path.dirname(__file__), "static"), "cookie_secret": "61oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=",
hat_x = utils.crop_valid(hat_x, k) x = utils.crop_valid(x, k) y = utils.crop_valid(y, k) val_psnr += loss.psnr(hat_x, x) val_ssim += loss.ssim(hat_x, x) val_l1 += F.l1_loss(hat_x, x).item() return val_psnr / len(loader), val_ssim / len(loader), val_l1 / len(loader) if __name__ == '__main__': device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') parser = options.options() opts = parser.parse_args() ### data loader datapath = opts.datapath savepath = './results' savepath = os.path.join(savepath, 'uniform_T_%02d_S_%02d' % (opts.n_out, opts.n_in)) if opts.blind: savepath += '_blind_0.5_to_%2.2f' % (255 / 100 * opts.sigma) else: savepath += '_nonblind_%2.2f' % (255 / 100 * opts.sigma) os.makedirs(savepath, exist_ok=True) modelpath = os.path.join(savepath, 'weights') os.makedirs(modelpath, exist_ok=True) dt_tr = datasets.UniformTrainDataset(opts.datapath,