def GET(self, srv_id):
    """Render the details page for a single server.

    Gathers the server row, per-service analytics and the parsed master
    config file for this server, then renders the details template.
    """
    help.check_access()
    # Get Server Information
    Server = list(db().executeLiteral(
        "SELECT * FROM servers WHERE srv_id = ?", [srv_id])[0])
    if Server[1] == 0:
        # No environment assigned to this server yet.
        Server = ["N/A"] + Server
    else:
        name = db().executeLiteral(
            "SELECT name FROM environments WHERE eid = ?",
            [Server[1]])[0][0]
        Server = [name] + Server
    # Get Service information for the server
    Services = db().execute("Server_Details", 1, [Server[1]])
    data = []
    for ser in Services:
        result = Logger().getAnalytics(srv_id, ser[0])
        data.append([ser[2], ser[1], result[2], result[3], result[4],
                     str(result[0]) + '-' + str(result[1])])
    # Parse the stored master configuration.  `with` guarantees the file
    # handle is closed even if cfg.Config raises (the original leaked the
    # handle in that case).
    path = join("./configs/", srv_id)
    with open(path, "r") as file_reader:
        master = cfg.Config(file_reader.read())
    masterInfo = [['Analytics Server', master.Analytic_Settings.items()],
                  ['Management Server', master.Management_Settings.items()],
                  ['Data Logger', master.Logging.items()]]
    return help.renderPage("Server Details",
                           render.server_details(Server, data, masterInfo))
def gatherContributions(aminoSeqList):
    """
    Takes a list of amino acids and returns a list of dictionaries where
    every entry in the dictionary is the base and the contribution of
    that base to the information for that position.

    Args:
        aminoSeqList (string): List of amino acids
    Returns:
        Dictionary: List of Dictionaries
    """
    countsList = HelperFunctions.gatherCounts(aminoSeqList)
    listOfDicts = []
    for oneCountList in countsList:
        probabilityPerCount = HelperFunctions.calcProbs(oneCountList)
        probabilityList = list(probabilityPerCount.values())
        entropy = HelperFunctions.entropy(probabilityList)
        info = HelperFunctions.information(entropy, 4)
        # Scale each base's probability by the positional information
        # content.  (The original compared each dict value to itself —
        # an always-true check — while mutating the dict it iterated.)
        listOfDicts.append({key: value * info
                            for key, value in probabilityPerCount.items()})
    return listOfDicts
def during_day_check(api, stock_list):
    """While the market is open, place a stop order on any position that
    is down more than 2% from today's open, then top the portfolio back
    up to `target_positions` holdings.
    """
    clock = api.get_clock()
    if clock.is_open:
        logging.info('During Day Check...')
        global df
        df = stock_list
        if df['5 day avg'].iloc[0] == 0:
            # First run of the day: no averages recorded yet.
            first_of_day_trades(api, df)
        positions = {p.symbol: p for p in api.list_positions()}
        max_price_loss = -.02  # sell once down 2% from today's open
        for sym in positions:
            stock = df.loc[df['Symbol'] == sym]
            open_price = stock['Todays open'].iloc[0]
            if ((positions[sym].current_price - open_price) / open_price) <= max_price_loss:
                stop_price = float(positions[sym].current_price) * .95
                logging.info('Trying to sell {qty_to_sell} shares of {sym} stock for {price}'.format(qty_to_sell=positions[sym].qty, sym=sym,price=stop_price))
                HelperFunctions.make_order(api, 'sell', sym, positions[sym].qty,
                                           order_type='stop', stop_price=stop_price)
        # If any stocks sold, new stocks need bought
        if len(positions) < target_positions:
            df = HelperFunctions.buy_positions(api, df, target_positions)
    else:
        logging.info('Markets Closed...')
        df.iloc[0:0]
def GET(self, eid):
    # TODO get format from somewhere
    """Return chart JSON for an environment: a weekday x-axis plus
    per-service request counts (random placeholder data)."""
    help.check_access()
    services = db().execute("Environment_Details", 0, [eid])
    page_info = {
        'xAxis': ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
    }
    version_requests = {}
    for ser in services:
        data = [randint(0, 100) for _ in range(7)]
        key = ser[0]
        if key in version_requests:
            # Accumulate counts for services sharing the same key.
            version_requests[key] = [old + new for old, new
                                     in zip(version_requests[key], data)]
        else:
            version_requests[key] = data
    page_info["services"] = version_requests
    return json.dumps(page_info)
def next_frame(self):
    """Animation generator: fire bullets from each hex's spin center,
    advance and cull bullets, drift the spin center, shift colors.

    Yields the per-frame delay (random, set in __init__).
    """
    while True:
        # Randomly fire a bullet
        for h in helpfunc.all_hexes():
            spin_center = (h, 4, 0)
            self.bullets.append(Bullet(hexmodel=self.hexes,
                                       color=self.spincolor,
                                       pos=choice(self.mirror_hexes(spin_center))))
            self.draw_background()
            self.draw_spinners(spin_center)
        # Advance the bullets, keeping only those still on the board.
        # (The original removed elements from self.bullets while
        # iterating it, which skips the element after each removal.)
        self.bullets = [bullet for bullet in self.bullets
                        if bullet.move_bullet()]
        self.time += 1
        # Random move the spin centers
        if helpfunc.one_in(10):
            new_spot = choice(helpfunc.hex_ring(spin_center, 1))
            # Check the spot we actually intend to move to (the original
            # tested a second, independent random choice instead).
            if self.hexes.cell_exists(new_spot):
                spin_center = new_spot
        # Change the colors
        self.background = change_color(self.background, 0.05)
        self.spincolor = change_color(self.background, -0.1)
        yield self.speed  # random time set in init function
def POST(self):
    """Accept an uploaded file and store it under ./temp/uploads/<user>/.

    Raises web.forbidden when there is no logged-in user in the session.
    Returns 200 on success.
    """
    help.check_access()
    session = web.ctx.session
    usr = session.get("username", False)
    if usr:
        filedir = "./temp/uploads/" + usr + "/"
        try:
            stat(filedir)
        except OSError:
            # Directory does not exist yet — create it.  (The original
            # used a bare `except:`, which would also hide unrelated
            # errors such as KeyboardInterrupt.)
            mkdir(filedir)
    else:
        raise web.forbidden()
    x = web.input(file={})
    if 'file' in x:
        # replaces the windows-style slashes with linux ones.
        filepath = x.file.filename.replace('\\', '/')
        # splits by / and chooses the last part (the filename with extension)
        filename = filepath.split('/')[-1]
        # `with` closes the destination even if the write raises (the
        # original leaked the handle on error).
        with open(filedir + filename, 'w') as fout:
            fout.write(x.file.file.read())
    return 200
def draw_planet(self):
    """Paint the planet: a bright core cell plus two dimmer rings."""
    self.trails.cycle_faders(refresh=False)
    core_color = hue_to_color(self.color)
    # Core cell at full intensity
    self.trails.add_fader(color=core_color, pos=self.pos,
                          intense=1.0, growing=False, change=0.25)
    # Draw a middle ring
    mid_color = gradient_wheel(hue_to_color(self.color), 0.9)
    for cell in helpfunc.hex_ring(self.pos, 1):
        self.trails.add_fader(color=mid_color, pos=cell,
                              intense=0.9, growing=False, change=0.25)
    # Draw an outer ring
    outer_color = gradient_wheel(hue_to_color(self.color), 0.7)
    for cell in helpfunc.hex_ring(self.pos, 2):
        self.trails.add_fader(color=outer_color, pos=cell,
                              intense=0.7, growing=False, change=0.25)
def pick_start_rain_spot():
    """Pick a random cell on the outside ring, retrying until the cell
    falls in (approximately) the top third of the display."""
    while True:
        # Starting position on outside ring
        candidate = choice(
            helpfunc.hex_ring(helpfunc.get_random_center(), 5))
        _, x, y = candidate
        if x + y < 0:  # Approximately top third of display
            return candidate
def redirect():
    """Bounce the user to the environment-creation form when no
    environments exist yet (raises web.seeother)."""
    count = db().executeLiteral("SELECT count(*) FROM environments", [])[0][0]
    if count == 0:
        help.append_user_alerts(
            'info', 'Need Environments first!',
            'You cannot create a new path without having Environments to traverse, please create an Environment now.'
        )
        raise web.seeother("environment_form_0")
def redirect():
    """Bounce the user to the path-creation form when no development
    paths exist yet (raises web.seeother)."""
    count = db().executeLiteral("SELECT count(*) FROM paths", [])[0][0]
    if count == 0:
        help.append_user_alerts(
            'info', 'Need a Development path first!',
            'You cannot create a new service without having a path to follow, please create one now.'
        )
        raise web.seeother("path_form_0")
def problem6(x):
    """Return the difference between the square of the sum and the sum
    of the squares of the first `x` natural numbers (Project Euler #6).

    The original looped with a variable that shadowed the parameter `x`
    and round-tripped through a helper just to sum two lists; the
    direct sums below avoid both.
    """
    numbers = range(1, x + 1)
    square_of_sum = sum(numbers) ** 2
    sum_of_squares = sum(n ** 2 for n in numbers)
    return square_of_sum - sum_of_squares
def draw_stuff(self, clock):
    """Draw two parallel hex lines in a gray tone anchored at this
    object's coordinate."""
    anchor = get_cell((self.h, self.x_coord, self.y_coord))
    gray = rgb_to_hsv((self.gray, self.gray, self.gray))
    # First line from the anchor cell
    self.hexes.set_cells(helpfunc.hex_line_in_direction(anchor, 1, 1), gray)
    # Second line, offset one cell in direction 0
    neighbor = helpfunc.hex_in_direction(anchor, 0, 1)
    self.hexes.set_cells(helpfunc.hex_line_in_direction(neighbor, 1, 1), gray)
def generate(self):
    """Copy each tagged image into a per-species folder under the
    dataset's destination path."""
    hf.create_folder(self.dataset.destination_path)
    for img in self.dataset:
        # `img is not None` is the idiomatic (and correctly-parsed)
        # form of the original `not img is None`.
        if img is not None:
            path = os.path.join(self.dataset.destination_path,
                                img.get_property_name("Species"))
            self._add_folder(path)
            self._add_tagged_img(img, path)
def GET(self, sgid):
    """Render the edit form for an existing service group."""
    help.check_access()
    redirect()  # ensure at least one path exists first
    paths = db().executeLiteral("SELECT pid, name FROM paths", [])
    row = db().executeLiteral(
        "SELECT * FROM services WHERE sgid = ?", [sgid])[0]
    return help.renderPage("Edit Service",
                           render.service_group_form(sgid, row, paths))
def processEvents(self, event):
    """Handle one input event on the battle screen: ESC quits, RETURN
    activates the battle menu, and the menu selection is updated."""
    if event.type == KEYDOWN:
        if event.key == K_ESCAPE:
            HF.exitAction()  # quit the game entirely
        if event.key == K_RETURN:
            self.player.interactWithBattleMenu()
        # NOTE(review): in the flattened source it is ambiguous whether
        # this selection update sits in the KEYDOWN branch (as written
        # here) or inside the K_RETURN branch — confirm against the
        # original file.
        self.selected = self.player.battleMenu.change_child(
            event, self.enemies)
def __init__(self, gameSettings = None):
    """Set up pygame, the window, the snake, and the initial food/balls.

    Args:
        gameSettings: unused in the visible code — TODO confirm intent.
    """
    ### --------- initialise pygame and set up the window
    pygame.init()
    pygame.display.set_caption(Config.PAGE_TITLE)
    Config.screen.fill(Config.BACKGROUND_COLOUR)
    self.gameScore = 0
    self.userEscape = False  # User ends game by ESC
    self.gameOver = False  # Game over by death
    self.freezeActiveBallsTimer = 0  # countdown while balls are frozen
    self.clock = pygame.time.Clock()
    ### --------- Generate the snake
    self.snake = Snake_Module.Snake()
    self.snakeSprite = pygame.sprite.Group()
    self.snakeSprite.add(self.snake.get_sections())  # needed for clear/draw
    self.snakeSections = self.snake.get_sections()
    ### --------- Initial food
    self.foodGroup = pygame.sprite.Group()
    # config['food']['standard']['initial']
    for n in range(Config.INITIAL_FOOD_NUM):
        self.foodGroup.add(Food_Module.make_food(self.snake))
    for n in range(Config.INITIAL_FOOD_SUPER_NUM):
        self.foodGroup.add(Food_Module.make_food(self.snake,'FoodSuper'))
    for n in range(Config.INITIAL_FOOD_MYSTERIOUS_NUM):
        self.foodGroup.add(Food_Module.make_food(self.snake, 'FoodMysterious'))
    for n in range(Config.INITIAL_FOOD_CURSE_NUM):
        self.foodGroup.add(Food_Module.make_food(self.snake, 'FoodCurse'))
    ### --------- Initial balls
    self.ballGroup = pygame.sprite.Group()
    for n in range(Config.INITIAL_BALL_NUM):
        # Balls are spawned at positions that avoid the snake and food.
        self.ballGroup.add(Ball_Module.BallStandard(
            HelperFunctions.generateSafeXY(self.snake, self.ballGroup,
                                           None, self.foodGroup)))
    self.ballKillerGroup = pygame.sprite.Group()
    if Config.INITIAL_BALL_KILLER_NUM != 0:
        self.ballKillerGroup.add(Ball_Module.BallKiller(
            HelperFunctions.generateSafeXY(self.snake, self.ballGroup,
                                           None, self.foodGroup)))
def GET(self):
    """Render the dashboard: summary tiles, services in development, and
    a per-environment overview (up to five services each)."""
    help.check_access()
    log = Logger()
    # Counts the number of services and the number of servers known to
    # the Management Console.
    counts = db().executeLiteral(
        "SELECT (SELECT count(*) FROM services), (SELECT count(*) FROM servers)",
        [])[0]
    analytics = log.getAnalytics()
    tiles = [
        analytics[0], analytics[3], analytics[1], counts[0], counts[1],
        analytics[5]
    ]
    data = db().execute("Dashboard", 0, [])
    developing = []
    for row in data:
        percentage = (row[2] * 100) / row[3]
        developing.append([row[0], help.strVersion(row[1]), percentage, row[4]])
    # ENVIRONMENTS
    envinfo = db().execute("Dashboard", 1, [])
    environments = []
    for env in envinfo:
        machines = db().executeLiteral(
            "SELECT srv_id FROM servers WHERE eid = ?", [env[0]])
        envServices = db().execute("Dashboard", 3, [env[0]])
        # Skip environments with no servers or no services.
        if len(machines) == 0 or len(envServices) == 0:
            continue
        processedServices = []
        for ser in envServices[:5]:
            processedServices.append(
                [ser[0], ser[1], help.strVersion(ser[2]), ser[3]])
        reqtCount = 0
        for server in machines:
            reqtCount += log.getAnalytics(server[0])[0]
        environments.append([
            env[1], env[2], reqtCount, len(machines), processedServices,
            env[0]
        ])
    # (Leftover debug `print developing` / `print environments`
    # statements removed.)
    return help.renderPage(
        "Dashboard", render.dashboard(tiles, developing, environments))
def data_preparation(pet_image_location_all, loss_function, batch_size=1,
                     shuffle=True, num_workers=4, pretrained=True,
                     fully_connected_layer_size=2, num_epochs=1):
    """Build the Animal10 dataloader, fine-tune a ResNet18's final layer
    on it, and save the trained weights to ``Animal10_model.pth``.

    Args:
        pet_image_location_all: root folder containing the images.
        loss_function: criterion passed through to the training loop.
        batch_size / shuffle / num_workers: DataLoader settings.
        pretrained: load ImageNet weights for the backbone.
        fully_connected_layer_size: number of output classes.
        num_epochs: number of training epochs.
    """
    # this part will not work for CustomAnimal10DataSet
    # -> TODO: figure out the way to make it generic
    #dataset_dict = CustomDataSet.split_to_train_validate_dataset(pet_image_location_all)
    # figure out how to make the dataset object type generic
    """
    image_datasets = {x: CustomAnimal10DataSet(pet_image_location_all, dataset_dict[x], HelperFunctions.my_transform) for x in ['train', 'val']}
    dataloaders = {x: DataLoader(image_datasets[x], batch_size=batch_size, shuffle=shuffle, num_workers=num_workers) for x in ['train', 'val']}
    dataset_sizes = {x: len(image_datasets[x]) for x in ['train', 'val']}
    """
    image_dataset = CustomAnimal10DataSet(pet_image_location_all,
                                          HelperFunctions.my_transform)
    dataloader = {
        "train": DataLoader(image_dataset, batch_size=batch_size,
                            shuffle=shuffle, num_workers=num_workers)
    }
    dataset_size = {"train": len(image_dataset)}
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    # add mechanism to make the model generic
    model_ft = models.resnet18(pretrained=pretrained)
    num_ftrs = model_ft.fc.in_features
    # Freeze the backbone so only the replacement head below trains.
    HelperFunctions.freeze_all_layers(model_ft)
    model_ft.fc = nn.Linear(num_ftrs, fully_connected_layer_size)
    model_ft = model_ft.to(device)
    # Figure out to make those generic as well
    # Observe that all parameters are being optimized
    optimizer_ft = optim.SGD(model_ft.parameters(), lr=0.001, momentum=0.9)
    # Decay LR by a factor of 0.1 every 7 epochs
    exp_lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer_ft,
                                                       step_size=7, gamma=0.1)
    model_ft = HelperFunctions.train_model(model_ft, loss_function,
                                           optimizer_ft, exp_lr_scheduler,
                                           dataloader, device, dataset_size,
                                           num_epochs)
    path_to_model = r".\Animal10_model.pth"
    torch.save(model_ft.state_dict(), path_to_model)
def median_string(dna, k):
    """Return a k-mer minimizing the total distance to the strings in
    `dna` (the median string problem).

    Enumerates all 4**k patterns; ties keep the earliest pattern found,
    matching the original's strict `>` comparison.
    """
    distance = float('inf')
    median = None
    for i in range(0, 4**k):
        pattern = hf.NumberToPattern(i, k)
        # Compute the (expensive) distance once per pattern; the
        # original called it a second time whenever a new minimum was
        # found.
        d = hf.distanceBetweenPatternAndStrings(pattern, dna)
        if distance > d:
            distance = d
            median = pattern
    return median
def calculateGeneticTraits(parent1, parent2):
    """Build all candidate child genomes from two parents.

    Pairs alleles from each parent, normalises each pair into sorted
    string form, chunks them into per-trait groups, and expands those
    into full genomes.
    """
    allele_pairs = hp.generateSinglePairs(parent1, parent2)
    normalised = [''.join(sorted(pair)) for pair in allele_pairs]
    trait_groups = hp.createTraits(hp.chunk(normalised, int(len(parent1) / 2)))
    return hp.buildGenome(trait_groups)
def GET(self):  # Return service add form
    """Render the form for adding a new service."""
    help.check_access()
    redirect()  # bounce away if there are no paths yet
    paths = db().executeLiteral("SELECT pid, name FROM paths", [])
    return help.renderPage(
        "Add Service",
        render.service_version_form([], [], paths, si().getVersion()))
def change_name(self, name):
    """Rename the bot, persist its pickled state to BotMeta/<name>, put
    it to sleep, and register it as the current bot."""
    self.name = name
    root = os.getcwd()
    filename = root + '/BotMeta/' + name
    # `with` guarantees the handle is closed even if pickling fails
    # (the original leaked the handle on error).
    with open(filename, 'wb') as pickle_out:
        pickle.dump(self, pickle_out)
    self.sleep()
    hf.set_current_bot(self)
def __init__(self, hexmodel):
    """Initialise the Spinners show with random colors, speed, and a
    random spin center."""
    self.name = "Spinners"
    self.hexes = hexmodel
    self.bullets = []  # List that holds Bullets objects
    self.speed = random() + 0.05  # per-frame delay yielded by next_frame
    self.background = random_color()  # Background color
    self.spincolor = random_color()  # Spinner color
    self.center = helpfunc.get_center(helpfunc.get_random_hex())
    self.time = 0  # frame counter
def edit(self, running=False):
    """
    Open a ``tk.TopLevel`` dialog to edit the settings of this command

    Parameters
    ----------
    running : bool (optional)
        Whether or not the sequence is actively running: dictates\
        if the window will actually allow edits or be just for show.
    """
    self.running = running
    self.updateInstList()
    self.rows = int(self.rows)
    self.window = tk.Toplevel(self.gui.root)
    hf.centerWindow(self.window)
    if not running:
        self.window.resizable(height=False, width=False)
        self.window.geometry('400x20')
    else:
        self.window.resizable(False, False)
    self.rowheight = 30
    self.window.grab_set()  # modal: grab all input while open
    self.window.wm_title('Edit Single Measurement')
    self.window.attributes("-topmost", True)
    self.window.protocol(
        "WM_DELETE_WINDOW", self.accept
    )  # if they delete the window, assume they liked their settings
    self.instBoxes = []  # Keep track of the combobox widgets
    self.paramBoxes = []
    self.selInstsVar = []
    self.selParamsVar = []
    self.instTraces = []
    # Rows cannot be edited while the sequence is running.
    state = tk.DISABLED if self.running else tk.NORMAL
    self.addRow = tk.Button(self.window, text='...', command=self.createRow,
                            width=50, state=state, height=self.rowheight)
    self.addRow.grid(column=1, columnspan=4, row=self.rows, sticky='NSEW')
    self.window.grid_columnconfigure(0, weight=0)
    self.window.grid_columnconfigure(1, weight=1)
    self.window.grid_columnconfigure(2, weight=1)
    self.window.grid_columnconfigure(3, weight=1)
    self.window.grid_columnconfigure(4, weight=0)
    self.window.grid_rowconfigure(0, weight=1, minsize=self.rowheight)
    for ii in range(
            self.rows
    ):  # how many commands are added here? (default 0 for first edit)
        self.createRow(new=False)
    self.window.minsize(400, self.rowheight * (self.rows + 1))
    # Block until the dialog is dismissed.
    self.gui.root.wait_window(self.window)
def GET(self, sgid):
    """Render the details page for a single service group."""
    help.check_access()
    row = db().executeLiteral(
        "SELECT * FROM services WHERE sgid = ?", [int(sgid)])[0]
    service_group = list(row)
    service_group[0] = str(service_group[0])  # template expects a string id
    path_graphic = getPathInfo(sgid)
    version_info = getVersionInfo(sgid)
    return help.renderPage(
        "Service Details",
        render.service_details(service_group, path_graphic, version_info))
def move_planet(self):
    """Advance the planet one step around its orbital center.

    Stationary planets are killed; the center occasionally drifts.
    Returns whether the planet is still alive (or None when killed for
    being stationary).
    """
    next_pos = helpfunc.clock_cell(self.pos, self.direction, self.center)
    if next_pos == self.pos:
        # Kill stationary planets
        self.life = 0
        return
    self.pos = next_pos
    self.life -= 1
    # Occasionally drift the orbital center by one cell.
    if helpfunc.one_in(5):
        self.center = choice(helpfunc.hex_ring(self.center, 1))
    return self.life > 0
def move_branch(self):
    """Step the dendron forward one cell (with a 1-in-6 chance of
    turning first) and decrement its remaining life.

    Returns the new position.
    """
    # Random chance that path changes
    if helpfunc.one_in(6):
        self.direction = helpfunc.turn_left_or_right(self.direction)
    # Where is the dendron going?
    self.pos = helpfunc.hex_in_direction(self.pos, self.direction)
    if self.life > 0:
        self.life -= 1
    return self.pos
def testMilliTimeStamp(self):
    """millitimestamp() must tick in milliseconds: a one-second sleep
    should advance it by roughly 1000."""
    before = HelperFunctions.millitimestamp()
    time.sleep(1)
    after = HelperFunctions.millitimestamp()
    elapsed = after - before
    self.assertGreater(
        elapsed, 900,
        "millitimestamp counts units smaller than milliseconds")
    self.assertLess(
        elapsed, 1500,
        "millitimestamp counts units greater than milliseconds")
def move_ring(self):
    """Move the ring one cell, re-rolling the direction until the move
    stays on the board (|x| and |y| both under 7)."""
    while True:
        candidate = helpfunc.hex_in_direction(self.pos, self.direction, 1)
        h, x, y = candidate
        if abs(x) < 7 and abs(y) < 7:
            # On board: commit the move.
            self.pos = candidate
            break
        # Off board: pick a new direction and try again.
        self.direction = helpfunc.rand_dir()
def handleRandoms(self): time = pygame.time.get_ticks() if ((time % Config.RANDOM_FOOD_MYSTERIOUS_CHANCE) == 0): print "Random Melon of Mystery generated" self.foodGroup.add(Food_Module.FoodMysterious(None, None, None, HelperFunctions.generateSafeXY(self.snake, self.ballGroup, self.ballKillerGroup, self.foodGroup))) if ((time % Config.RANDOM_FOOD_CURSE_CHANCE) == 0): print "Random Berries of Bane generated" self.foodGroup.add(Food_Module.FoodCurse(None, None, None, HelperFunctions.generateSafeXY(self.snake, self.ballGroup, self.ballKillerGroup, self.foodGroup)))
def create_life_model(self, size):
    """Build a LifeModel covering, for every hex, the center cell plus
    every ring out to radius `size`."""
    model = LifeModel(self.hexes)
    for h in helpfunc.all_hexes():
        center = helpfunc.get_center(h)
        model.add(center)
        for radius in range(1, size + 1):
            for cell in helpfunc.hex_ring(center, radius):
                model.add(cell)
    return model
def draw_stuff(self, clock):
    """Draw a house: a rectangular wall box topped by a triangular roof,
    sized from this object's y coordinate."""
    anchor = get_cell((self.h, self.x_coord, self.y_coord))
    width = int((self.y_coord / 2) + 2)
    height = int((self.y_coord / 2) + 1)
    # Walls
    self.hexes.set_cells(get_box(anchor, width, height), self.wall)
    # Roof: step up to the wall top, offset one cell, then triangle.
    wall_top = helpfunc.hex_in_direction(anchor, 2, height)
    roof_corner = helpfunc.hex_in_direction(wall_top, 4, 1)
    self.hexes.set_cells(get_triangle(roof_corner, height + 1, width),
                         self.roof)
def __init__(self, hexmodel, h, main_color):
    """Create a rock on hex `h` colored near `main_color`."""
    self.hexes = hexmodel
    self.h = h
    self.color = random_color_range(main_color, 0.1)
    self.direction = choice((0, 1, 1.5, 2, 3))
    self.pos = helpfunc.get_center(self.h)
    self.life = randint(5, 15)  # How long the rock is around
    # One rock in ten is a (bigger) boulder.
    self.boulder = 1 if helpfunc.one_in(10) else 0
def testChoke(self):
    """__choke() must delay roughly 0.08 s between successive calls."""
    self.connection._GitHubConnection__choke()
    start = HelperFunctions.millitimestamp()
    self.connection._GitHubConnection__choke()
    end = HelperFunctions.millitimestamp()
    # Allow 79-90 ms of measured delay around the nominal 80 ms.
    self.assertGreater(
        end, start + 79,
        "The choke delay is less than 0.08 seconds: "
        + str((end - start) / 1000) + " (" + str(end) + ")",
    )
    self.assertLess(
        end, start + 90,
        "The choke delay is too much over 0.08 seconds: "
        + str((end - start) / 1000) + " (" + str(end) + ")",
    )
def adjust_tail_size(self, number):
    """Grow (number > 0) or shrink (number < 0) the snake's tail.

    New sections are appended behind the current last segment, offset
    opposite the direction of travel; shrinking deletes from the end
    but always leaves the head segment.
    """
    size = self.size[0]
    current_direction = self.direction
    if number > 0:
        for count in range(number):
            # ### TODO - randomly generate from the colour of the food eaten
            if self.curseTail > 0:
                # While cursed, new tail sections are invisible
                # (background colour).
                colour = Config.BACKGROUND_COLOUR
            else:
                colour = HelperFunctions.random_rgb()  # Randomise colour of new tail section
            lastindex = len(self.segments) - 1
            X = self.segments[lastindex].rect.topleft[0]
            Y = self.segments[lastindex].rect.topleft[1]
            # New tail section position
            if current_direction == Snake.SnakeMove.RIGHT:
                X = X - size + (count*size)
            elif current_direction == Snake.SnakeMove.LEFT:
                X = X + size + (count*size)
            elif current_direction == Snake.SnakeMove.UP:
                Y = Y - size + (count*size)
            elif current_direction == Snake.SnakeMove.DOWN:
                Y = Y + size + (count*size)
            self.segments.append(Snake._SnakeSegment(colour, self.size, [X, Y]))
    else:
        for count in range(abs(number)):
            # leave at least the head
            if(len(self.segments) > 1):
                del self.segments[-1]
def __init__(self, colour=None, size=None, position=None): pygame.sprite.Sprite.__init__(self) # Make sure snake valid parameters if colour == None: colour = Snake._DEFAULT_COLOUR if size == None: size = Snake._DEFAULT_SIZE #if size[0] != size[1]: # raise Exception('Invalid tile size. Width and height must be equal.') if position == None: position = Snake._DEFAULT_POSITION self.color = colour self.size = size #self.head = Snake._SnakeHead(colour, size, position) self.segments.append(Snake._SnakeSegment(colour, size, position)) self.head = self.segments[0] #self.tail = Snake._SnakeTail() self.curseTail = 0 self.direction = Snake.SnakeMove.UP print "TODO: make this use self.adjust_tail_size()" for x in range(1, Config.INITIAL_LENGTH): # Initial Length tailposition = [(position[0] - x*size[0]), position[1]] # self.tail.add_tail_section(colour, size, tailposition) self.segments.append(Snake._SnakeSegment(HelperFunctions.random_rgb(), size, tailposition))
def output_results():
    """
    This function writes results to the output feature class.

    Adds a "SpCluster" text field (falling back to "SpCluster_1" when
    the name is taken) and writes each feature's cluster label from the
    module-level `clusterAttri` array, reporting progress in 20% steps.
    """
    HF.pGP.AddMessage("--------" + getCurTime() + "--------")
    HF.pGP.AddMessage("Outputting results...")
    outField = "SpCluster"
    # Add results field.
    if not properties.dcFields.has_key(outField.upper()):
        try:
            HF.pGP.AddField(inputs.sOutputFC, outField, "text")
        except:
            HF.pGP.GetMessages(2)
    else:
        # Name collision: use an alternate field name instead.
        outField = "SpCluster_1"
        if not properties.dcFields.has_key(outField.upper()):
            try:
                HF.pGP.AddField(inputs.sOutputFC, outField, "text")
            except:
                HF.pGP.GetMessages(2)
    # Add results to output FC
    HF.pGP.AddMessage(HF.sWritingResults)
    # sFieldList = properties.sFID + ";" + sField + ";" + cField
    pRows = HF.pGP.UpdateCursor(inputs.sOutputFC)
    pRow = pRows.Next()
    # add message of processes
    iError = 0
    iCnt = 0
    fInterval = len(attri) / 5.0  # progress is reported in 20% chunks
    fMore = fInterval
    iComplete = 20
    iKey = 0
    # test = []
    while pRow <> None:
        iKey = pRow.GetValue("FID")
        # print iKey
        try:
            # Last column of clusterAttri holds the cluster label.
            pRow.SetValue(outField, clusterAttri[iKey, -1])
            pRows.UpdateRow(pRow)
            iCnt = iCnt + 1
            if iCnt > fInterval:
                fInterval, iComplete = HF.check_progress(fInterval, fMore, iComplete)
        # except: pass
        except:
            iError = iError + 1
        pRow = pRows.Next()
    # print "++++++++++++++++++++++"
    # print iError
    HF.pGP.AddMessage(HF.s100Percent)
    HF.pGP.AddMessage(" ")
    pRows = None
def problem7():
    """Return the 10001st prime number (Project Euler #7)."""
    primes_found = 0
    candidate = 1
    while primes_found < 10001:
        candidate += 1
        if hf.isPrime(candidate):
            primes_found += 1
    return candidate
def problem4():
    """Return the largest palindrome that is the product of two 3-digit
    numbers (Project Euler #4).

    The inner loop starts at `i` because products are symmetric — the
    original scanned every (i, j) pair and so tested each product twice.
    The cheap comparison runs before the palindrome check.
    """
    biggest = 0
    for i in range(100, 1000):
        for j in range(i, 1000):
            product = i * j
            if product > biggest and hf.isPalindrome(product):
                biggest = product
    return biggest
def problem5():
    """Return the smallest number evenly divisible by every integer from
    1 to 20 (Project Euler #5), i.e. their LCM, computed by accumulating
    the union of prime-factor multisets."""
    factors = mset()
    for i in range(21):
        prime_factors = mset(hf.primeFactorization(i))
        # Add only the factors not yet covered, so `factors` ends up as
        # the multiset union (max multiplicity per prime).
        factors += prime_factors - factors
    return reduce(lambda acc, f: acc * f, list(factors.elements()))
def curse_tail(self):
    """Apply the curse: re-roll the head colour and hide every tail
    segment (background colour) until the curse timer runs out."""
    self.randomize_snake_colour()
    # Frames remaining until the curse wears off.
    self.curseTail = Config.FOOD_CURSE_TIME_TO_WEAR_OFF * Config.FPS
    head = self.segments[0]
    head.set_colour(HelperFunctions.random_rgb())
    for segment in self.segments[1:]:
        segment.set_colour(Config.BACKGROUND_COLOUR)
def __init__(self, config): self.config = config self.stringSanitizer = StringSanitizer() self.gitHubUserName = config.user self.gitHubRepoName = config.repo self.github = Github(config.get('authentication', 'ghusername'), config.get('authentication', 'ghpassword')) try: # Try to open the repository self.repo = self.github.get_repo(config.user+"/"+config.repo) except UnknownObjectException: print "Repository "+config.user+"/"+config.repo+" not found." GitHubResearchDataMiner.printHowToUse() sys.exit() self.requestRateTimer = HelperFunctions.millitimestamp()
def addOfflineChallenge(self , fromUser, toUid , challengeData, offlineChallengeId=None):
    """Create, index, and save an offline challenge from `fromUser` to
    the user identified by `toUid`.  Returns the saved challenge."""
    toUser = self.getUserByUid(toUid)
    challenge = OfflineChallenge()
    # Use the caller-supplied id when given, otherwise mint a fresh key.
    if offlineChallengeId is None:
        challenge.offlineChallengeId = HelperFunctions.generateKey(10)
    else:
        challenge.offlineChallengeId = offlineChallengeId
    # Composite index keys: "<uid>_<per-user challenge index>".
    challenge.fromUid_userChallengeIndex = (
        fromUser.uid + "_" + str(fromUser.userChallengesIndex.index))
    challenge.toUid_userChallengeIndex = (
        toUid + "_" + str(toUser.userChallengesIndex.getAndIncrement(toUser).index))
    challenge.challengeData = challengeData
    challenge.save()
    return challenge
def addOfflineChallenge(self , fromUser, toUid , challengeData, offlineChallengeId=None):
    """Create, index, and save an offline challenge from `fromUser` to
    the user `toUid`.  (Appears to duplicate an earlier definition of
    the same name in this file.)  Returns the saved challenge."""
    recipient = self.getUserByUid(toUid)
    offlineChallenge = OfflineChallenge()
    # Mint a key unless the caller supplied an explicit id.
    offlineChallenge.offlineChallengeId = (
        offlineChallengeId if offlineChallengeId is not None
        else HelperFunctions.generateKey(10))
    # Composite index keys: "<uid>_<per-user challenge index>".  Note
    # the sender's index is read without incrementing, while the
    # recipient's index is incremented.
    offlineChallenge.fromUid_userChallengeIndex = (
        fromUser.uid + "_" + str(fromUser.userChallengesIndex.index))
    offlineChallenge.toUid_userChallengeIndex = (
        toUid + "_" + str(recipient.userChallengesIndex.getAndIncrement(recipient).index))
    offlineChallenge.challengeData = challengeData
    offlineChallenge.save()
    return offlineChallenge
def problem12():
    """Return the first triangular number with more than 500 divisors
    (Project Euler #12).

    The divisor count of n is the product of (exponent + 1) over its
    prime factorization.  (An unused `flag` variable was removed.)
    """
    n = 1
    i = 2
    while True:
        divisor_count = 1
        for _, exponent in mset(hf.primeFactorization(n)).most_common():
            divisor_count *= exponent + 1
        if divisor_count > 500:
            return n
        # Next triangular number: T(k) = T(k-1) + k
        n += i
        i += 1
def publishFeedToUser(self,fromUid , user, _type, message, message2):
    """Save a Feed entry from `fromUid` and link it into `user`'s feed
    index."""
    f = Feed()
    f.fromUid = fromUid
    f.type = _type
    f.message = message
    # `is not None` is the idiomatic identity test (was `!= None`).
    if message2 is not None:
        f.message2 = message2
    f.timestamp = HelperFunctions.toUtcTimestamp(datetime.datetime.now())
    f.save()
    userFeed = UserFeed()
    # Composite key: "<uid>_<per-user feed index>".
    userFeed.uidFeedIndex = (
        user.uid + "_" + str(user.userFeedIndex.getAndIncrement(user).index))
    userFeed.feedMessage = f
    userFeed.save()
def check_high_t(T=6000, metal=0.0, vsini=10): filenames = [f for f in os.listdir("./") if f.endswith("smoothed.fits") and f.startswith("H")] corrdir = "Cross_correlations/" logg = 4.5 HelperFunctions.ensure_dir("Figures/") for rootfile in sorted(filenames): corrfile = "{0:s}{1:s}.{2:d}kps_{3:.1f}K{4:+.1f}{5:+.1f}".format(corrdir, rootfile.split(".fits")[0], vsini, T, logg, metal) print corrfile try: vel, corr = np.loadtxt(corrfile, unpack=True) except IOError: continue plt.plot(vel, corr, 'k-') plt.xlabel("Velocity") plt.ylabel("CCF") plt.title(rootfile.split(".fits")[0]) plt.show()
def detectFacesInImage(cvImage, detectionDebug=False):
    """Run the OpenCV frontal-face Haar cascade over `cvImage` with a
    series of minimum-size settings and return the union of detected
    face rectangles as a set.

    Args:
        cvImage: OpenCV image to scan.
        detectionDebug: when True, display each pass's hits.
    Exits the process when the cascade file cannot be found.
    """
    logger = logging.getLogger('ELIME.OpenCVFunctions.detectFacesInImage')
    width, height = cv.GetSize(cvImage)
    minDimension = min(width, height)
    # scale_factor = 1.1
    # min_neighbors = 3
    # flags = 0
    # min_size = (20,20)
    # Try progressively smaller minimum face sizes, expressed as
    # fractions of the shorter image side, so both close-ups and small
    # faces are found.
    arguments = [(1.1, 3, 0, (20, 20)),
                 (1.1, 3, 0, (int(1.0 * minDimension), int(1.0 * minDimension))),
                 (1.1, 3, 0, (int(0.7 * minDimension), int(0.7 * minDimension))),
                 (1.1, 3, 0, (int(0.4 * minDimension), int(0.4 * minDimension))),
                 (1.1, 3, 0, (int(0.1 * minDimension), int(0.1 * minDimension))),
                 (1.1, 3, 0, (int(0.01 * minDimension), int(0.01 * minDimension)))]
    path = os.path.join(PATHTOCASCADES, 'haarcascade_frontalface_default.xml')
    path = HelperFunctions.checkFile(path)
    if path is None:
        logger.critical("Path to opencv haarcascades is wrong: %s", PATHTOCASCADES)
        sys.exit(1)
    print path
    faceCascade = cv.Load(path)
    storage = cv.CreateMemStorage()
    returnFaces = set()
    for (scale_factor, min_neighbors, flags, min_size) in arguments:
        detectedFaces = cv.HaarDetectObjects(cvImage, faceCascade, storage,
                                             scale_factor, min_neighbors,
                                             flags, min_size)
        debugString = '{0:d} faces found, args: {1} {2} {3} {4}'.format(
            len(detectedFaces), str(scale_factor), str(min_neighbors),
            str(flags), str(min_size))
        logger.debug(debugString)
        for face, n in detectedFaces:
            returnFaces.add(face)
        if detectionDebug:
            debugFaces = []
            for face, n in detectedFaces:
                debugFaces.append((face, cv.RGB(0, 0, 255)))
            UiFunctions.displayColoredRects(cvImage, debugString, debugFaces)
    logger.debug("returning Faces: %s", returnFaces)
    return returnFaces
def publishFeed(self, user, _type , message, message2=None):
    """Save a Feed entry authored by `user` and fan it out to all of
    their subscribers' feed indexes."""
    f = Feed()
    f.fromUid = user.uid
    f.message = message
    f.type = _type
    if message2 is not None:
        f.message2 = message2
    f.timestamp = HelperFunctions.toUtcTimestamp(datetime.datetime.now())
    f.save()
    #### move to tasks other server if possible
    for uid in user.subscribers:
        # Use a distinct name for each subscriber: the original rebound
        # `user` inside the loop, shadowing the author parameter.
        subscriber = self.getUserByUid(uid)
        userFeed = UserFeed()
        userFeed.uidFeedIndex = uid + "_" + str(
            subscriber.userFeedIndex.getAndIncrement(subscriber).index)
        userFeed.feedMessage = f
        userFeed.save()
def getAllUpdates(response, user=None):
    """Assemble the "everything since last sync" payload for a client.

    Always includes the user object, recent feed and pending offline
    challenges; on login additionally sends new quizzes, categories,
    badges, the server address map, unseen messages and the server time.
    NOTE(review): the source was flattened onto one line, so some of the
    nesting below (which statements sit inside which `if`) is a best
    reconstruction — confirm against the original file.
    """
    isLogin = response.get_argument("isLogin",False)
    isFistLogin = response.get_argument("isFirstLogin",False)  # (sic) "first login"
    lastOfflineChallengeIndex = int(response.get_argument("lastOfflineChallengeIndex",0));
    retObj = {"messageType":OK_UPDATES,
              "payload7":user.toJson(),
              "payload3":"["+','.join(map(lambda x:x.to_json(),dbUtils.getRecentUserFeed(user)))+"]",
              "payload5":"["+','.join(map(lambda x:x.to_json(),dbUtils.getUserChallenges(user , fromIndex=lastOfflineChallengeIndex)))+"]"
              }
    if(isLogin):
        quizzes = None
        categories= None
        badges = None
        # Only send quizzes/categories newer than the client's latest.
        userMaxQuizTimestamp = response.get_argument("maxQuizTimestamp",None)
        if(userMaxQuizTimestamp):
            userMaxQuizTimestamp = datetime.datetime.utcfromtimestamp(float(userMaxQuizTimestamp)+1)
        quizzes = dbUtils.getAllQuizzes(userMaxQuizTimestamp)
        categories = dbUtils.getAllCategories(userMaxQuizTimestamp)
        retObj["payload"]="["+','.join(map(lambda x:x.toJson() , quizzes ))+"]"
        retObj["payload1"] ="["+','.join(map(lambda x:x.toJson() , categories ))+"]"
        userMaxBadgesTimestamp = response.get_argument("maxBadgesTimestamp",None)
        if(userMaxBadgesTimestamp):
            userMaxBadgesTimestamp = datetime.datetime.utcfromtimestamp(max(0,float(userMaxBadgesTimestamp)+1))
            badges = dbUtils.getNewBadges(userMaxBadgesTimestamp)
            retObj["payload2"] = "["+",".join(map(lambda x:x.toJson(),badges))+"]"
        retObj["payload6"]=json.dumps({server.serverId : server.addr for server in routerServer.servers.values()})#id:serveraddr
        if(isFistLogin):
            retObj["payload8"]= json.dumps(dbUtils.getPeopleWithWhomUserConversed(user))
        recentMessages = None
        lastSeenTimestamp = response.get_argument("lastSeenTimestamp",None)
        if(lastSeenTimestamp):
            lastSeenTimestamp = datetime.datetime.utcfromtimestamp(float(lastSeenTimestamp))
            recentMessages = "["+','.join(map(lambda x:x.to_json(),dbUtils.getRecentMessagesIfAny(user, lastSeenTimestamp)))+"]"
        retObj["payload4"] = recentMessages  #unseen messages if any
        retObj["payload10"] = json.dumps({"serverTime":HelperFunctions.toUtcTimestamp(datetime.datetime.now())})
    responseFinish(response, retObj)
    if(isLogin):
        #every time user logs in lets increment the index
        dbUtils.incrementLoginIndex(user)
def output_results(mDict,cDict):
    """
    This function writes results to the output feature class.

    Writes each feature's max Gi* statistic (`mDict`) into a "MaxGi"
    float field and its cluster label (`cDict`) into a "Core" text
    field; features with no cluster entry are marked "Outside Clusters".
    Returns the name of the statistic field.
    """
    sField = "MaxGi"
    cField = "Core"
    # Add results field.
    if not properties.dcFields.has_key(sField.upper()):
        HF.pGP.AddField(inputs.sOutputFC, sField, "FLOAT")
    if not properties.dcFields.has_key(cField.upper()):
        HF.pGP.AddField(inputs.sOutputFC, cField, "TEXT")
    # Add results to output FC
    HF.pGP.AddMessage(HF.sWritingResults)
    sFieldList = properties.sFID + ";" + sField + ";" + cField
    pRows = HF.pGP.UpdateCursor(inputs.sOutputFC,"","",sFieldList)
    #pRows = pGP.UpdateCursor(inputs.sOutputFC)
    pRow = pRows.Next()
    iCnt = 0
    fInterval = len(keys) / 5.0  # progress is reported in 20% chunks
    fMore = fInterval
    iComplete = 20
    while pRow <> None:
        iKey = pRow.GetValue(properties.sFID)
        try:
            if mDict [iKey]:  # make sure we have a non-Null result.
                pRow.SetValue(sField, mDict[iKey])
                pRows.UpdateRow(pRow)
                iCnt = iCnt + 1
                if iCnt > fInterval:
                    fInterval, iComplete = HF.check_progress(fInterval, fMore, iComplete)
        except:
            pass
        try:
            if cDict [iKey]:  # make sure we have a non-Null result.
                pRow.SetValue(cField, cDict[iKey])
                pRows.UpdateRow(pRow)
        except:
            # No cluster entry for this feature.
            pRow.SetValue(cField, "Outside Clusters")
            pRows.UpdateRow(pRow)
        pRow = pRows.Next()
    HF.pGP.AddMessage(HF.s100Percent)
    HF.pGP.AddMessage(" ")
    pRows = None
    return sField
def update(self):
    """Advance the game by one frame: input, collisions, expiry, movement.

    When the player has paused (self.userEscape) only the pause overlay is
    drawn and the rest of the frame is skipped; that branch returns the value
    of clock.tick(), every other path returns None.
    """
    # handle all updates - ALL CODE BELOW:
    self.handleKeyPress()
    if self.userEscape:
        # Paused: render the overlay and skip all simulation this frame.
        ourFont = pygame.font.SysFont('Arial', 28)
        text = HelperFunctions.makeTextRect('Paused. Score: ' + str(self.gameScore) + '. Press q to quit, ESC to unpause.', (0,255,0), (400, 300), Config.screen, ourFont, True)
        # draw a rectangle big enough for the text background
        pygame.draw.rect(Config.screen, Config.BACKGROUND_COLOUR, (text.x, text.y, text.width, text.height), 1)
        # only repaint the dirty rect, not the whole screen
        pygame.display.update([text])
        # keep the frame rate steady even while paused
        return self.clock.tick(Config.FPS)
    self.handleCollisions()
    self.handleExpiry()
    self.handleRandoms()
    ### ---------------- NEEDS TO BE SORTED OUT --------------
    # while >=0 aka not active
    # NOTE(review): decrementing by Config.FPS each frame assumes the timer is
    # stored in frame-rate units -- confirm against where it is set.
    if self.freezeActiveBallsTimer > 0:
        self.freezeActiveBallsTimer -= Config.FPS
    if self.gameOver == False:
        # move() signals a fatal collision (wall/self) by returning False
        movement = self.snake.move(Config.DEFAULT_SCREEN_SIZE[0], Config.DEFAULT_SCREEN_SIZE[1])
        if movement != False:
            self.handleUpdates()
        else:
            self.gameOver = True
            self.exitGame()
    if ((self.gameOver == True) or (self.userEscape == True)):
        self.exitGame()
def generateProgressiveQuiz(quizId , uids):
    """Create and register a new progressive-quiz session.

    Fetches the quiz details and a set of random questions, builds a fresh
    per-session state dict for the given users, stores it in the module-level
    runningQuizes registry under a freshly generated key, and returns both.

    Args:
        quizId: id of the quiz to run (falsy => ad-hoc quiz of 7 questions).
        uids: iterable of user ids taking part in the session.

    Returns:
        (sessionKey, quizState) tuple.
    """
    quiz = dbUtils.getQuizDetails(quizId)
    # NOTE(review): guard tests quizId, not quiz -- presumably they are
    # truthy/falsy together; confirm getQuizDetails' behaviour for falsy ids.
    nQuestions = quiz.nQuestions if quizId else 7
    questions = dbUtils.getRandomQuestions(quiz)

    sessionKey = HelperFunctions.generateKey(10)
    perUserState = {uid: {} for uid in uids}  # {uid: per-user state}

    quizState = {
        QUESTIONS: questions,
        CURRENT_QUESTION: 0,
        N_CURRENT_QUESTION_ANSWERED: [],
        USERS: perUserState,
        CREATED_AT: datetime.datetime.now(),
        POINTS: {},
        N_CURRENT_REMATCH_REQUEST: set(),
        N_CURRENT_USERS_READY: set(),
    }
    runningQuizes[sessionKey] = quizState
    return sessionKey , quizState
def initAppConfig(response , user=None):
    """Answer the app-config handshake with the current server time.

    Sends a message of type OK whose payload1 is a JSON object carrying the
    server's current time as a UTC epoch timestamp.
    """
    serverTime = HelperFunctions.toUtcTimestamp(datetime.datetime.now())
    reply = {
        "messageType": OK,
        "payload1": json.dumps({"serverTime": serverTime}),
    }
    responseFinish(response, reply)
def randomize_snake_colour(self):
    """Give every body segment its own independent random RGB colour."""
    for segment in self.segments:
        segment.set_colour(HelperFunctions.random_rgb())
def output_results(Cluster):
    """Write cluster ids and normalized attribute values to the output FC.

    Adds one float "<field>_N" column per z-field (plus optional centroid
    columns) and a long "CLUSTER" (or "CLUSTER_1") column, then copies the
    normalized values and the cluster id of every feature into them.

    Args:
        Cluster: mapping FID -> cluster id (numpy.int32); only non-null
            entries are written.

    Relies on module globals: HF, inputs, properties, attri and
    clusterAttri_Norm (column i of clusterAttri_Norm corresponds to
    addField[i]).
    """
    HF.pGP.AddMessage ("--------" + getCurTime() + "--------")
    HF.pGP.AddMessage ("Adding results fields...")
    addField = []
    for Field in inputs.sZField:
        # shapefile field names are length-limited; truncate before suffixing
        if len(Field) > 8:
            Field = Field[0:8]
        addField.append(Field + "_N")
    if inputs.sXY == "Yes":
        addField.append("X_cent_N")
        addField.append("Y_cent_N")
    # (was dcFields.has_key(...) -- 'in' is the equivalent, portable form)
    for Field in addField:
        if Field.upper() not in properties.dcFields:
            HF.pGP.AddField(inputs.sOutputFC, Field, "FLOAT")
    outField = "CLUSTER"
    if outField.upper() not in properties.dcFields:
        HF.pGP.AddField(inputs.sOutputFC, outField, "LONG")
    else:
        # "CLUSTER" already taken by the input data: fall back to "CLUSTER_1"
        outField = "CLUSTER_1"
        if outField.upper() not in properties.dcFields:
            HF.pGP.AddField(inputs.sOutputFC, outField, "LONG")

    # Add results to output FC
    HF.pGP.AddMessage ("--------" + getCurTime() + "--------")
    HF.pGP.AddMessage (HF.sWritingResults)
    pRows = HF.pGP.UpdateCursor(inputs.sOutputFC)
    pRow = pRows.Next()
    iError = 0
    iCnt = 0
    fInterval = len(attri) / 5.0  # progress tick roughly every 20% of features
    fMore = fInterval
    iComplete = 20
    # (was 'while pRow <> None' -- archaic Python 2 inequality operator)
    while pRow is not None:
        iKey = pRow.GetValue("FID")
        try:
            if Cluster[iKey]: # make sure we have a non-Null result.
                # enumerate instead of addField.index(Field): index() returns
                # the FIRST occurrence, so duplicate truncated field names
                # would all have received column 0's value.
                for iIndex, Field in enumerate(addField):
                    pRow.SetValue(Field, float(clusterAttri_Norm[iKey, iIndex]))
                # Cluster[iKey] is numpy.int32; geoprocessor needs a plain int
                pRow.SetValue(outField, int(Cluster[iKey]))
                pRows.UpdateRow(pRow)
                iCnt = iCnt + 1
                if iCnt > fInterval:
                    fInterval, iComplete = HF.check_progress(fInterval, fMore, iComplete)
        except Exception:
            # was a bare 'except:' -- keep the best-effort behaviour but stop
            # swallowing SystemExit/KeyboardInterrupt; failures are counted.
            iError = iError + 1
        pRow = pRows.Next()

    print(iError)
    HF.pGP.AddMessage (HF.s100Percent)
    HF.pGP.AddMessage(" ")
    pRows = None  # release the geoprocessor update cursor
#if iNumRecs < 30: # HF.pGP.AddWarning (msgFewRecsWrn) if HF.pGP.exists(inputs.sOutputFC): HF.pGP.delete(inputs.sOutputFC) #Copy the input feature class to the output feature class. try: HF.pGP.QualifiedFieldNames = 0 HF.pGP.Copyfeatures(inputs.sInputFC, inputs.sOutputFC) except: sMessage = HF.msgOverwriteErr % (inputs.sOutputFC) raise HF.ReportError (sMessage) properties = HF.get_featureclass_properties(inputs.sOutputFC) attri = build_value_lists() #HF.pGP.AddMessage ("--------build_value_lists finished--------") clusterAttri = attri[:,1:] #print clusterAttri clusterAttri_Norm = normalization(clusterAttri) #print "----------------------" #print clusterAttri_Norm #KData is the clusterAttri plus the sqrt of weight
def main(): configTemplate = """ [ELIME] # dbFile - The file path to where your eye position database will be # stored dbFile = ~/Documents/ELIME Project/Database/eyepositions.db # sourceFolder - The folder where ELIME's pre(process) command will find # your unrenamed digital cameras photos sourceFolder = ~/Documents/ELIME Project/Drop Regular Files/ # prefix - The prefix ELIME's pre(process) command will prepend to your # photo's creation date to create the new filename prefix = christoph # delete - If ELIME should move (and not copy) your photos while renaming # from sourceFolder to photoFolder delete = true # photoFolder - The folder where all your (preprocessed) daily photos # savely and permanently are stored. The names of the photos in that # folder get stored in the eye position database. photoFolder = ~/Documents/ELIME Project/Photo Storage/ # targetFolder - The folder where the rendered (scaled and roated) images # that make up the frames of your project's video get saved. Must be # different from photoFolder for "security reasons" (tm) targetFolder = ~/Documents/ELIME Project/temp/ # maxSize - The maximum x or y of the image's dimensions on which ELIME # will automatically detect eye positions and show in window. Do not go # over 1024! The final size of the rendered images is completey # independent from this! maxSize = 1024 # posDebug - Draws a colored pixel at the the eyes' positions in the rendered # output images. posDebug = false # detectionDebug - Shows all detected eyes and faces before manual fine # control. detectionDebug = false # openCVHaarcascadesFolder - Path to where your opencv installation's # haarcascades reside. 
openCVHaarcascadesFolder = /usr/local/opt/opencv/share/OpenCV/haarcascades/ """ defaultConfigPath = os.path.expanduser('~/.ELIME.cfg') defaultValues = {'delete': 'false', 'maxSize': '1024', 'prefix': 'elime', 'posDebug': 'false', 'detectionDebug': 'false', 'openCVHaarcascadesFolder': '/usr/local/opt/opencv/share/OpenCV/haarcascades/'} conf_parser = argparse.ArgumentParser(add_help=False) conf_parser.add_argument("-c", "--conf", help="Use config file not located in '~/.ELIME.cfg' (which is the default path for ELIME's config file)", metavar="FILE") conf_parser.add_argument("-cc", "--createConf", action='store_true', help="Create new config file from config file template") args, remainingArgv = conf_parser.parse_known_args() if args.conf: defaultConfigPath = args.confFile if args.createConf: if os.path.exists(defaultConfigPath): print "File exists:", defaultConfigPath, "will not overwrite! Exit." sys.exit(1) with open(defaultConfigPath, 'wb') as configfile: configfile.write(textwrap.dedent(configTemplate)) print "Created config file template at", defaultConfigPath, "Go now and customize it! ELIME's waiting here." sys.exit(0) if os.path.exists(defaultConfigPath): config = ConfigParser.SafeConfigParser(defaults=defaultValues, allow_no_value=True) config.read([defaultConfigPath]) if not config.has_section('ELIME'): print "The config file at", defaultConfigPath, "is not a valid ELIME config file. No 'ELIME' section found. Exit." 
sys.exit(1) # print config.items('ELIME') if config.has_option('ELIME', 'dbFile'): defaultValues['dbFile'] = config.get('ELIME', 'dbFile') if config.has_option('ELIME', 'sourceFolder'): defaultValues['sourceFolder'] = config.get('ELIME', 'sourceFolder') if config.has_option('ELIME', 'prefix'): defaultValues['prefix'] = config.get('ELIME', 'prefix') if config.has_option('ELIME', 'delete'): defaultValues['delete'] = config.getboolean('ELIME', 'delete') if config.has_option('ELIME', 'photoFolder'): defaultValues['photoFolder'] = config.get('ELIME', 'photoFolder') if config.has_option('ELIME', 'targetFolder'): defaultValues['targetFolder'] = config.get('ELIME', 'targetFolder') if config.has_option('ELIME', 'maxSize'): defaultValues['maxSize'] = config.getint('ELIME', 'maxSize') if config.has_option('ELIME', 'posDebug'): defaultValues['posDebug'] = config.getboolean('ELIME', 'posDebug') if config.has_option('ELIME', 'detectionDebug'): defaultValues['detectionDebug'] = config.getboolean('ELIME', 'detectionDebug') if config.has_option('ELIME', 'openCVHaarcascadesFolder'): defaultValues['openCVHaarcascadesFolder'] = config.get('ELIME', 'openCVHaarcascadesFolder') #print defaultValues if not isinstance(defaultValues['delete'], bool): defaultValues['delete'] = defaultValues['delete'] in ['true', 'True'] if not isinstance(defaultValues['posDebug'], bool): defaultValues['posDebug'] = defaultValues['posDebug'] in ['true', 'True'] if not isinstance(defaultValues['detectionDebug'], bool): defaultValues['detectionDebug'] = defaultValues['detectionDebug'] in ['true', 'True'] if not isinstance(defaultValues['maxSize'], int): defaultValues['maxSize'] = int(defaultValues['maxSize']) # print defaultValues parser = argparse.ArgumentParser(parents=[conf_parser], description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, epilog = "Everyday, look into my eyes!") parser.set_defaults(**defaultValues) # main parser parser.add_argument('--logFile', help='To enable log to file 
specify path of logfile') subparsers = parser.add_subparsers(dest='subparser_name') # create the parser for the "pre" command parser_pre = subparsers.add_parser('pre', help='Tries to determines your photos creation date and renames and moves your photos to the permanent photo folder.') parser_pre.add_argument('-sF', '--sourceFolder', help="The folder where ELIME's pre(process) command will find your unrenamed digital cameras photos") parser_pre.add_argument('-pF', '--photoFolder', help='The folder where all your (preprocessed) daily photos savely and permanently are stored. The names of the photos in that folder get stored in the eye position database.') parser_pre.add_argument('-p', '--prefix', help="The prefix ELIME's pre(process) command will prepend to your photo's creation date to create the new filename") parser_pre.add_argument('-d', '--delete', action='store_true', help='If ELIME should move (and not copy) your photos while renaming from sourceFolder to photoFolder') parser_pre.add_argument('-mS', '--maxSize', type=int, help="The maximum x or y of the image's dimensions on which ELIME will automatically detect eye positions and show in window. Do not go over 1024! The final size of the rendered images is completey independent from this!") parser_pre.set_defaults(func=preProcessImageFiles) # the lines in the subparsers like the next line was not needed before. Just a quick hack. Might be not the optimal solution for why it suddenly does not work anymore without. parser_pre.set_defaults(**defaultValues) # create the parser for the "add" command parser_add = subparsers.add_parser('add', help='"Automagically" detects your eyes in your photos from the photoFolder, lets you do fine adjustments and saves eye locations to database file.') parser_add.add_argument('-pF', '--photoFolder', help='The folder where all your (preprocessed) daily photos savely and permanently are stored. 
The names of the photos in that folder get stored in the eye position database.') parser_add.add_argument('-dF', '--dbFile', help='The file path to where your eye position database will be stored') parser_add.add_argument('-mS', '--maxSize', type=int, help="The maximum x or y of the image's dimensions on which ELIME will automatically detect eye positions and show in window. Do not go over 1024! The final size of the rendered images is completey independent from this!") parser_add.add_argument('--detectionDebug', action='store_true', help="Shows all detected eyes and faces before manual fine control.") parser_add.add_argument('-oF', '--openCVHaarcascadesFolder', help="Path to where your opencv installation's haarcascades reside.") parser_add.set_defaults(func=addMissingEyeData) parser_add.set_defaults(**defaultValues) # create the parser for the "check" command parser_check = subparsers.add_parser('check', help='If you want to correct saved eye positions in database, here you can.') parser_check.add_argument('-pF', '--photoFolder', help='The folder where all your (preprocessed) daily photos savely and permanently are stored. The names of the photos in that folder get stored in the eye position database.') parser_check.add_argument('-dF', '--dbFile', help='The file path to where your eye position database are be stored') parser_check.add_argument('-mS', '--maxSize', type=int, help="The maximum x or y of the image's dimensions on which ELIME will automatically detect eye positions and show in window. Do not go over 1024! The final size of the rendered images is completey independent from this!") parser_check.add_argument('beginWith', nargs='*', help='Filename to begin with checking.') parser_check.set_defaults(func=checkEyeData) parser_check.set_defaults(**defaultValues) # create the parser for the "tidy" command parser_tidy = subparsers.add_parser('tidy', help='Did you delete photos from your photoFolder? 
Run tidy to tidy the eyeposition database from deleted pictures.') parser_tidy.add_argument('-pF', '--photoFolder', help='The folder where all your (preprocessed) daily photos savely and permanently are stored. The names of the photos in that folder get stored in the eye position database.') parser_tidy.add_argument('-dF', '--dbFile', help='The file path to where your eye position database are be stored') parser_tidy.set_defaults(func=tidyDB) parser_tidy.set_defaults(**defaultValues) # create the parser for the "render" command parser_render = subparsers.add_parser('render', help='Render your photos - scaled, moved and roated based on your eye positions stored in database into JPGs for further processing.') parser_render.add_argument('-pF', '--photoFolder', help='The folder where all your (preprocessed) daily photos savely and permanently are stored. The names of the photos in that folder get stored in the eye position database.') parser_render.add_argument('-dF', '--dbFile', help='The file path to where your eye position database are be stored') parser_render.add_argument('-tF', '--targetFolder', help="The folder where the rendered (scaled and roated) images that make up the frames of your project's video get saved. 
Must be different from photoFolder for 'security reasons' (tm)") parser_render.add_argument('--posDebug', action='store_true', help="Draws a colored pixel at the the eyes' positions in the rendered output images") parser_render.set_defaults(func=renderPhotos) parser_render.set_defaults(**defaultValues) #print parser_pre.get_default("sourceFolder") #print remainingArgv args = parser.parse_args(remainingArgv) #print args args.logFile = HelperFunctions.checkFile(args.logFile) setupLogging(logFile=args.logFile) if args.func == preProcessImageFiles: args.sourceFolder = HelperFunctions.checkFolder(args.sourceFolder) args.photoFolder = HelperFunctions.checkFolder(args.photoFolder) args.func(args.sourceFolder, args.photoFolder, args.prefix, args.delete) if args.func == addMissingEyeData: args.openCVHaarcascadesFolder = HelperFunctions.checkFolder(args.openCVHaarcascadesFolder) OpenCvFunctions.PATHTOCASCADES = args.openCVHaarcascadesFolder args.photoFolder = HelperFunctions.checkFolder(args.photoFolder) args.dbFile = HelperFunctions.checkFile(args.dbFile) args.func(args.photoFolder, args.dbFile, args.maxSize, detectionDebug=args.detectionDebug) if args.func == checkEyeData: args.photoFolder = HelperFunctions.checkFolder(args.photoFolder) args.dbFile = HelperFunctions.checkFile(args.dbFile) args.func(args.photoFolder, args.dbFile, args.beginWith, args.maxSize) if args.func == tidyDB: args.photoFolder = HelperFunctions.checkFolder(args.photoFolder) args.dbFile = HelperFunctions.checkFile(args.dbFile) args.func(args.photoFolder, args.dbFile) if args.func == renderPhotos: args.photoFolder = HelperFunctions.checkFolder(args.photoFolder) args.dbFile = HelperFunctions.checkFile(args.dbFile) args.targetFolder = HelperFunctions.checkFolder(args.targetFolder) args.func(args.photoFolder, args.targetFolder, args.dbFile, posDebug=args.posDebug) sys.exit(0)
def addMissingEyeData(srcPath, dbPath, maxDimension=1024, detectionDebug=False, zoomSize=640, customDateFormat=''):
    """Add eye positions of photos not yet in database to database.

    Walks every photo in srcPath, auto-detects eye positions via OpenCV on a
    scaled-down copy, lets the user adjust them manually, and inserts (or
    completes) the corresponding row in the SQLite database at dbPath.

    Args:
        srcPath: folder containing the (preprocessed) photos.
        dbPath: path of the SQLite eye-position database file.
        maxDimension: max width/height used for the detection/display image.
        detectionDebug: show all detected eyes/faces before manual fine control.
        zoomSize: window size used for per-eye manual fine adjustment.
        customDateFormat: optional date format for creation-date extraction.
    """
    logger = logging.getLogger('ELIME.addToDB')

    if dbPath is None:
        logger.error("dbPath is invalid")
        return
    if not os.path.exists(dbPath):
        logger.info("No Database file at %s ,yet.", dbPath)
    if srcPath is None:
        logger.error("srcPath is not valid")
        return

    logger.debug("Preparing database tables...")
    # create database if it does not exist yet
    DatabaseFunctions.prepareDataBaseTable(dbPath)

    # connect to database file
    conn = sqlite3.connect(dbPath, detect_types=sqlite3.PARSE_DECLTYPES)
    c = conn.cursor()

    # get all jpgs in source directory (list(filter(...)) so len() below also
    # works on Python 3's lazy filter)
    srcFiles = [f for f in os.listdir(srcPath) if os.path.isfile(os.path.join(srcPath, f))]
    srcPhotos = list(filter(HelperFunctions.filefilter, srcFiles))

    numPhotos = len(srcPhotos)
    if numPhotos == 0:
        # BUG FIX: was 'srcpath' -- NameError on this warning path
        logger.warning("No photos found in source path %s", srcPath)
        return

    # get the number of pictures already in the database
    numAllDBPhotos = DatabaseFunctions.numberOfPhotosInDB(c)

    # simple consistency check on database: are there at least as many
    # pictures in db as in source path?
    if numPhotos < numAllDBPhotos:
        # BUG FIX: was 'srcPAth' -- NameError on this warning path
        logger.warning("There are just %d photos in source path %s, but %d photos in database %s", numPhotos, srcPath, numAllDBPhotos, dbPath)
        logger.warning("Please run a database tidy before, if you know what you are doing!")
        return

    # step through all pictures in sourcepath
    for inputImageFileName in srcPhotos:
        logger.debug("Image name: %s", inputImageFileName)
        inputImageFilePath = os.path.join(srcPath, inputImageFileName)

        # get picture's creation date and time
        photoDateTime = ImageFunctions.getCreationDateTimeOfPicture(inputImageFilePath, customDateFormat)

        # check if photo is already in database
        c.execute('''SELECT * FROM eyesInPhotos WHERE photoFileName=?''', (inputImageFileName,))
        dbPhotos = c.fetchall()
        numDBPhotos = len(dbPhotos)

        # Process when the photo is missing entirely, or present once but with
        # at least one eye coordinate (columns 3..6) still NULL.
        if numDBPhotos == 0 or (numDBPhotos == 1 and ((dbPhotos[0][3] is None) or (dbPhotos[0][4] is None) or (dbPhotos[0][5] is None) or (dbPhotos[0][6] is None))):
            if numDBPhotos == 0:
                # the picture with this filename is not in database yet
                logger.info("Photo %s not in database yet", inputImageFileName)
            if numDBPhotos == 1:
                # there is one picture with the filename but data is incomplete
                # BUG FIX: use %s placeholders -- at least one coordinate is
                # None on this branch, and %d on None makes logging raise a
                # formatting error instead of emitting the record.
                logger.info("Eye info for photo %s in db incomplete (%s,%s), (%s,%s)", inputImageFileName, dbPhotos[0][3], dbPhotos[0][4], dbPhotos[0][5], dbPhotos[0][6])

            # find eye positions and add everything to database
            # create a opencv image from PIL image
            pilImage = ImageFunctions.loadAndTransposePILImage(inputImageFilePath)
            cvImage = ImageFunctions.convertPIL2CV(pilImage)

            # get the image size
            size = cv.GetSize(cvImage)

            # create scaling factor for too large images
            maxDimension = float(maxDimension)
            scale = 1.0
            if size[0] > maxDimension or size[1] > maxDimension:
                scale = max(size[0] / maxDimension, size[1] / maxDimension)
            logger.debug("Image scale factor is %f", scale)
            newSize = (int(size[0] / scale), int(size[1] / scale))

            # create a scaled down version of the original picture
            scaledImage = cv.CreateImage(newSize, cvImage.depth, cvImage.nChannels)
            cv.Resize(cvImage, scaledImage)

            # find eye coordinates in scaled picture automatically
            scaledEyeRects = OpenCvFunctions.eyeRectsInImage(scaledImage, inputImageFileName, detectionDebug)
            logger.debug("Scaled eye rectangles detected %s", scaledEyeRects)

            scaledEyeCoordinates = []
            for scaledEyeRect in scaledEyeRects:
                scaledEyeCoordinates.append(HelperFunctions.middleOfRect(scaledEyeRect))
            logger.debug("Scaled eye positions detected %s", scaledEyeCoordinates)

            # manually adjust eye positions in scaled image
            scaledEyeCoordinates = UiFunctions.manuallyAdjustEyePositions(scaledImage, inputImageFileName, scaledEyeCoordinates)
            logger.debug("Scaled eye positions manually corrected %s", scaledEyeCoordinates)

            eyeCoordinates = []
            # scale back eye position to original sized image
            for eyeIndex, scaledEyePos in enumerate(scaledEyeCoordinates):
                (sx, sy) = scaledEyePos
                (eyecenterX, eyecenterY) = (int(sx * scale), int(sy * scale))
                logger.debug("True eye position of eye %d before manual correction %s", eyeIndex, (eyecenterX, eyecenterY))
                (x, y) = UiFunctions.manuallyDetailAdjustEyePosition(inputImageFileName, eyeIndex, cvImage, eyecenterX, eyecenterY, zoomSize)
                logger.debug("True eye position of eye %d after manual correction %s", eyeIndex, (x, y))
                eyeCoordinates.append((x, y))

            # save everything to database
            middleLeftEye = eyeCoordinates[0]
            middleRightEye = eyeCoordinates[1]

            if len(dbPhotos) == 0:
                # create new entry in db
                logger.debug("Executing: 'INSERT INTO eyesInPhotos (photoFileName, date, lEyeX, lEyeY, rEyeX, rEyeY) VALUES (%s, %s, %d, %d, %d, %d)'", inputImageFileName, photoDateTime, middleLeftEye[0], middleLeftEye[1], middleRightEye[0], middleRightEye[1])
                c.execute('INSERT INTO eyesInPhotos (photoFileName, date, lEyeX, lEyeY, rEyeX, rEyeY) VALUES (?, ?, ?, ?, ?, ?)', (inputImageFileName, photoDateTime, middleLeftEye[0], middleLeftEye[1], middleRightEye[0], middleRightEye[1]))
            else:
                # update entry in database
                # BUG FIX: was 'inputImageFileNam' -- NameError whenever an
                # incomplete row was being completed with debug logging on
                logger.debug("Executing: 'UPDATE eyesInPhotos SET lEyeX=%d, lEyeY=%d, rEyeX=%d, rEyeY=%d WHERE photoFileName=%s'", middleLeftEye[0], middleLeftEye[1], middleRightEye[0], middleRightEye[1], inputImageFileName)
                c.execute('UPDATE eyesInPhotos SET lEyeX=?, lEyeY=?, rEyeX=?, rEyeY=? WHERE photoFileName=?', (middleLeftEye[0], middleLeftEye[1], middleRightEye[0], middleRightEye[1], inputImageFileName))
            conn.commit()

        # we found the image in the database with complete data or there are more than 1 image
        else:
            if numDBPhotos > 1:
                logger.critical("Database in bad shape. Found %d occurences of photo named %s", numDBPhotos, inputImageFileName)
                conn.close()
                sys.exit(1)
            else:
                logger.info("Photo %s already in db", inputImageFileName)

    newNumAllDBPhotos = DatabaseFunctions.numberOfPhotosInDB(c)
    logger.info("Added %d photos with eyeinfo to database %s", newNumAllDBPhotos - numAllDBPhotos, dbPath)
    conn.close()
spt_full = data.SpectralType().split()[0] spt = spt_full[0] + re.search(r'\d*\.?\d*', spt_full[1:]).group() d = {'Object': object, 'plx': plx, 'SpT': spt, 'exptime': header['exptime']} return d if __name__ == '__main__': scale = True early, late = parse_input(sys.argv[1:]) # Add each late file to all of the early-type files HelperFunctions.ensure_dir('GeneratedObservations') for late_file in late: for early_file in early: outfilename = 'GeneratedObservations/{}_{}.fits'.format(early_file.split('/')[-1].split( '.fits')[0], late_file.split('/')[-1].split('.fits')[0]) if scale: outfilename = outfilename.replace('.fits', '_scalex10.fits') if outfilename.split('/')[-1] in os.listdir('GeneratedObservations/'): print "File already generated. Skipping {}".format(outfilename) continue total, early_dict, late_dict = combine(early_file, late_file, increase_scale=scale) # Prepare for output column_list = [] for order in total: