def prewittEdgeDetector(self, img, shape=(5, 5)):
    """Detect edges in *img* with the Prewitt operator.

    Args:
        img: Input image array.
        shape: Kernel shape forwarded to Filters.prewittKernel
            (default ``(5, 5)``).

    Returns:
        Per-pixel gradient magnitude ``sqrt(dx**2 + dy**2)``.
    """
    filters = Filters()  # renamed: 'filter' shadowed the builtin
    ops = Operations()
    # Horizontal (axis=0) and vertical (axis=1) gradient components.
    dx = ops.doConvolution(filters.prewittKernel(shape, axis=0), img)
    dy = ops.doConvolution(filters.prewittKernel(shape, axis=1), img)
    # np.hypot computes sqrt(dx**2 + dy**2) in one pass and promotes to
    # float, so squaring cannot overflow an integer input dtype.
    return np.hypot(dx, dy)
def test_clean_string_column(df):
    """Tests operations.clean_string_column.

    Args:
        :param df: DataFrame object from fixture.
    """
    ops = Operations()
    cleaned = ops.clean_string_column(df, 'pagename')
    row = cleaned.where(cleaned['pageviews'] == 60).select("pagename").first()
    assert row['pagename'] == "john person s first 100 days"
class Backup(object):
    """Manage snapshot-style backups of a source directory onto a backup disk.

    The flow is: rsync the source into a 'latest' working directory on the
    backup drive, then snapshot 'latest' into a timestamped directory.
    """

    def __init__(self, backup_disk, source_dir = None, backup_sub_dir = ''):
        """Validate the backup drive and precompute the backup paths.

        :param backup_disk: DiskInfo for the (already mounted) backup drive.
        :param source_dir: Directory tree to back up.
        :param backup_sub_dir: Subdirectory on the drive that holds backups.
        :raises Exception: if the drive is unmounted or has no mount path.
        """
        assert(isinstance(backup_disk, DiskInfo))
        if not backup_disk.is_mounted():
            raise Exception("Drive %s is not mounted"%backup_disk.dev_file())
        if not backup_disk.mount_path():
            raise Exception("Could not determine mount path of backup drive %s"%backup_disk.dev_file())
        self.backup_disk = backup_disk
        self.source_dir = source_dir
        # All backups live under <mount_path>/<backup_sub_dir>; 'latest' is
        # the rsync working copy that gets snapshotted after each sync.
        self.backup_base_dir = os.path.join(backup_disk.mount_path(), backup_sub_dir)
        self.latest_dir = os.path.join(self.backup_base_dir, 'latest')
        self.op = Operations()

    def can_backup(self, dinfo):
        """Stub: always True.  The commented list below names the checks
        that are still to be implemented."""
        #has_backup_drive
        #backup_base_dir_ok
        #subvol_ok
        #volume_mounted
        return True

    def prepare_backup(self):
        """Stub: no preparation implemented yet."""
        pass

    def do_backup(self):
        """Run a full backup: dry-run to count work, sync, then snapshot.

        :raises PrepareException: if the dry-run fails.
        :raises SyncException: if the data sync fails.
        :raises FinalizeException: if the snapshot cannot be created.
        """
        logger.info("* dryrun")
        lines = 0
        # The dry-run returns the number of lines/items to transfer; that
        # count drives the progress callback during the real sync.
        ok, ret = self.op.sync_dryrun(self.source_dir, self.latest_dir)
        if ok:
            lines = ret
        else:
            raise PrepareException()
        logger.info("* sync")
        ret = self.op.sync_data(self.source_dir, self.latest_dir, lambda x: progress(lines, x))
        if not ret:
            raise SyncException()
        logger.info("* snapshot")
        # Snapshot name encodes the backup time as a unix timestamp.
        destname = "backup_%d"%int(time.time())
        dest_dir = os.path.join(self.backup_base_dir, destname)
        ret = self.op.create_snapshot(self.latest_dir, dest_dir)
        if not ret:
            raise FinalizeException()

    def delete_backup(self, unix_time):
        """Delete the snapshot created at *unix_time* (matches the naming
        scheme used by do_backup)."""
        voldir = "backup_%d"%int(unix_time)
        logger.info("* deleting snapshot %s", voldir)
        voldir = os.path.join(self.backup_base_dir, voldir)
        ret = self.op.delete_snapshot(voldir)
def test_append_tokens(df):
    """Tests operations.append_tokens.

    Args:
        :param df: DataFrame object from fixture.
    """
    ops = Operations()
    tokenized = ops.append_tokens(ops.clean_string_column(df, 'pagename'))
    # The stop word 'first' is removed
    # see for a list of stop words: http://ir.dcs.gla.ac.uk/resources/linguistic_utils/stop_words
    row = tokenized.where(tokenized['pageviews'] == 60).select("tokens").first()
    assert row['tokens'] == ['john', 'person', '100', 'day']
def run(self):
    """Resolve the target device/hardware and execute the operations.

    Returns:
        0 on success (or after listing devices), 1 when a PgmError occurs.
    """
    if self._args.device == '?':
        # '?' lists every known part name instead of programming anything.
        names = sorted(part.getName() for part in Parts().list())
        print(names)
        return 0
    try:
        part = Parts().getPartByName(self._args.device)
        if self._args.hardware is not None:
            # Fixed: original read 'sef._args.hardware' -- a NameError typo.
            hw = self._args.hardware
        else:
            hw = part.listHardware()
            if len(hw) != 1:
                # NOTE(review): this raises PrgError but the handler below
                # catches PgmError -- confirm whether these are related types.
                raise PrgError("Cannot determine hardware select one of: %s" % hw)
            hw = hw[0]
        io = self._getIOByHardwareName(hw)
        self._operations = Operations(part, io, self._args.sync)
        return self._doOperations()
    except PgmError as e:
        print(e)
        return 1
def run(self):
    """Fetch-decode-execute loop: run opcodes until the program counter
    walks past the last instruction."""
    self.program_scanner = ProgramScanner(self.program_memory, self.registers)
    self.ops = Operations(self.program_scanner, self.memory, self.registers)
    while self.running:
        handler = self.ops.fromCode(self.program_scanner.nextOpcode())
        handler()
        # Stop once pc has moved beyond the final instruction.
        if self.registers.pc > self.last_op_index:
            self.running = False
def load_operations(self):
    '''Instantiate the Operations registry and cache the table headers.

    Args:
    Returns:
    Raises:
    '''
    self.ops = Operations(self)
    self.vert_headers = ["S1", "S2", "Change", "Goal"]
    # dict.keys() is a view in Python 3 and has no .sort(); sorted()
    # builds the sorted list and works on Python 2 as well.
    self.horiz_headers = sorted(self.ops.get_ops("table").keys())
def df(spark_context, hive_context):
    """Fixture for creating a test dataframe.

    Args:
        :param spark_context: SparkContext object from fixture.
        :param hive_context: HiveContext object from fixture.
    Returns:
        :return: DataFrame object.
    """
    # Renamed from 'input' to avoid shadowing the builtin.
    sample_lines = ['ace Beubiri 10 12744',
                    'ace Bhutan 20 31284',
                    'ace Bireu%c3%abn 30 20356',
                    'ace Bireuen 40 20347',
                    'ace Bishkek 50 14665',
                    'ace John_Person%27s_first_100_days 60 14576',
                    'ace Bolivia 70 32058',
                    'ace Bosnia_H%c3%a8rz%c3%a8govina 80 38777']
    rdd = spark_context.parallelize(sample_lines)
    ops = Operations()
    return ops.create_dataframe(rdd, hive_context)
def __init__(self, backup_disk, source_dir=None, backup_sub_dir=''):
    """Validate the backup drive and precompute the backup paths.

    Args:
        backup_disk: DiskInfo for the (already mounted) backup drive.
        source_dir: Directory tree to back up; may be supplied later.
        backup_sub_dir: Subdirectory on the drive that holds backups.

    Raises:
        TypeError: if *backup_disk* is not a DiskInfo.
        Exception: if the drive is unmounted or has no mount path.
    """
    # 'assert' is stripped when Python runs with -O, so validate the
    # argument explicitly instead.
    if not isinstance(backup_disk, DiskInfo):
        raise TypeError("backup_disk must be a DiskInfo, got %r" % type(backup_disk))
    if not backup_disk.is_mounted():
        raise Exception("Drive %s is not mounted" % backup_disk.dev_file())
    if not backup_disk.mount_path():
        raise Exception("Could not determine mount path of backup drive %s" % backup_disk.dev_file())
    self.backup_disk = backup_disk
    self.source_dir = source_dir
    # 'latest' is the rsync working copy that snapshots are taken from.
    self.backup_base_dir = os.path.join(backup_disk.mount_path(), backup_sub_dir)
    self.latest_dir = os.path.join(self.backup_base_dir, 'latest')
    self.op = Operations()
class Processor(object):
    """Tiny virtual machine: loads a program image (magic header, data
    segment, code segment) and executes it opcode by opcode."""

    def __init__(self, debug=False):
        self.name = "Untitled Program"
        # Initialize critical elements: registers, memory, etc
        self.debug = debug
        self.program_memory = bytearray()
        self.memory = bytearray()
        self.registers = Registers()
        self.running = True

    def load(self, code, name=None):
        """Parse a program image and populate memory/program memory.

        Layout: 3-byte magic header, 2-byte data size, <data size> bytes of
        initial memory, remainder is program code.

        Args:
            code: mutable int sequence (list or bytearray); consumed from
                the front as it is parsed.
            name: optional program name.
        """
        if name is not None:
            self.name = name

        def take(n):
            # Pop n bytes off the front of the image.
            return [code.pop(0) for _ in range(n)]

        # Check file header
        self.check_header(take(3))
        data_size = pack_bytes(take(2))
        self.memory.extend(take(data_size))
        self.program_memory.extend(code)
        self.last_op_index = len(self.program_memory) - 1

    def load_from_file(self, filename):
        """Load a program image from disk.

        Fixed: f.read() returns bytes, which has no .pop(); wrap it in a
        bytearray so load() can consume it from the front.
        """
        with open(filename, "rb") as f:
            self.load(bytearray(f.read()), name=filename)

    def check_header(self, header):
        """Raise exp.InvalidFile unless the image starts with the magic bytes."""
        if bytearray(header) != bytearray(b'\x84\x56\x49'):
            raise exp.InvalidFile

    def run(self):
        """Fetch-decode-execute until pc passes the last instruction."""
        self.program_scanner = ProgramScanner(self.program_memory, self.registers)
        self.ops = Operations(self.program_scanner, self.memory, self.registers)
        while self.running:
            opcode = self.program_scanner.nextOpcode()
            op = self.ops.fromCode(opcode)
            op()
            if self.registers.pc > self.last_op_index:
                self.running = False

    def __repr__(self):
        string = "\tMain Memory:\t%s\n" % str(self.memory)
        string += "\tProgram Memory:\t%s\n" % str(self.program_memory)
        string += "\tRegisters:\t%s\n" % str(self.registers)
        return string
def get_all_username_and_score(start_datetime, end_datetime, count=20):
    """Build a score ranking of players active within a time window.

    Args:
        start_datetime: Window start for the games query.
        end_datetime: Window end for the games query.
        count: Only the first `count` entries receive ranking/reward fields.

    Returns:
        List of dicts sorted by score descending.  The first `count` entries
        carry masked username, user_id, score, ranking and reward; entries
        beyond `count` carry only username/user_id/score (unchanged from the
        original behaviour).
    """
    result = []
    for username in Games.get_all_username(start_datetime, end_datetime):
        name = username["username"]
        entry = {
            # Mask the middle of the username for privacy.
            "username": name[:3] + "****" + name[7:],
            "score": Games.get_max_score(name, start_datetime, end_datetime),
        }
        user = Users.get_user(name)
        entry["user_id"] = user.id if user is not None else 0
        result.append(entry)
    result = Operations.sort_list_with_dict(result, "score", True)
    # Removed a large block of commented-out tie-handling code; ranking is
    # simply the 1-based position after sorting.
    for ranking, r in enumerate(result[:count], start=1):
        r["ranking"] = ranking
        reward_node_info = GameDayRewards.get_some_node_info(ranking)
        r["reward"] = reward_node_info.get("text") if reward_node_info else 0
    return result
import os
from operations import Operations

print("Calculator in python")
print("\nChoice operation ")

MENU = ("1 - Addition\n2 - Subtraction\n3 - Multiplication\n"
        "4 - Division\n5 - root\n6 - potentiation")

# Loop forever, reading an operator choice and two operands each round
# (the original never sets its exit flag, so the loop never terminates).
keep_running = True
while keep_running:
    print(MENU)
    operator = int(input("Operator: "))
    value1 = float(input("\nValue 1: "))
    value2 = float(input("Value 2: "))
    operation = Operations(value1, value2)
    # Dispatch table replaces the if/elif chain; unrecognised choices are
    # silently ignored, matching the original behaviour.
    actions = {
        1: operation.setAddition,
        2: operation.setSubtraction,
        3: operation.setMultiplication,
        4: operation.setDivision,
        5: operation.setRoot,
        6: operation.setPotentiation,
    }
    if operator in actions:
        actions[operator]()
    #os.system('cls' if os.name == 'nt' else 'clear')
def test_operation_addition(self):
    """setAddition of 100 and 100 should yield 200."""
    result = Operations(100, 100).setAddition()
    self.assertEqual(result, 200, 'Incorrect result')
def test_operation_division(self):
    """Dividing equal operands should yield 1."""
    result = Operations(100, 100).division()
    self.assertEqual(result, 1, 'Incorrect result')
def test_operation_root(self):
    """Root of 100 should be 10."""
    result = Operations(100, 1).root()
    self.assertEqual(result, 10, 'Incorrect result')
else: sc = SparkContext(appName="wikistats") lines = sc.textFile("s3n://my.wiki.bucket.com/wikidata") sc._jsc.hadoopConfiguration().set("fs.s3n.impl", "org.apache.hadoop.fs.s3native.NativeS3FileSystem") sc._jsc.hadoopConfiguration().set("fs.s3n.awsAccessKeyId", "###") sc._jsc.hadoopConfiguration().set("fs.s3n.awsSecretAccessKey", "#####") sqlContext = HiveContext(sc) from operations import Operations ops = Operations() #Create the dataframe from the lines RDD df = ops.create_dataframe(lines, sqlContext) #Clean the 'pagename' column of encoded characters df = ops.clean_string_column(df, 'pagename') #Add columns for hour, day, month, year from the file name df = ops.append_date_columns(df) #Group by timeframes hour_df, day_df, month_df, year_df = ops.aggregate_times(df) #Create tokens from the pagename hour_df = ops.append_tokens(hour_df) #Add term frequency and inverse document frequency hour_df = ops.append_tf_idf(hour_df) #Create ranking hour_df, day_df, month_df, year_df = ops.append_ranks(hour_df, day_df, month_df, year_df)
def test_operation_subtraction(self):
    """Subtracting equal operands should yield 0."""
    result = Operations(100, 100).subtraction()
    self.assertEqual(result, 0, 'Incorrect result')
def test_must_add_value_on_credit(self):
    """make_operation with a positive amount credits the account in full."""
    op = Operations("accounts.csv")
    op.accounts = {"1": 2359415, "2": 335498, "3": -5579741}  # mocking data
    original_value = op.accounts["1"]
    op.make_operation("1", 100)
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(op.accounts["1"], long(original_value + 100))
def test_should_burst_exceptions_if_try_execute_with_number_of_operations_less_than_to_0(
):
    """Adding more operations than the book's limit (4) must raise."""
    book = Book(Operations(Fifo()), 4)
    with raises(ValueError, match='Exceeded number of operations allowed'):
        for _ in range(10):
            book.add_operation('3')
def test_must_subtract_value_on_debit(self):
    """make_operation with a negative amount debits the account in full."""
    op = Operations("accounts.csv")
    op.accounts = {"1": 2359415, "2": 335498, "3": -5579741}  # mocking data
    original_value = op.accounts["2"]
    op.make_operation("2", -200)
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(op.accounts["2"], long(original_value - 200))
def test_must_apply_tax_on_negative_account(self):
    """A debit on an already-negative account costs more than its amount.

    The expected delta is -800 for a -300 debit; the extra 500 is
    presumably the negative-balance tax -- confirm against Operations.
    """
    op = Operations("accounts.csv")
    op.accounts = {"1": 2359415, "2": 335498, "3": -5579741}  # mocking data
    original_value = op.accounts["3"]
    op.make_operation("3", -300)
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(op.accounts["3"], long(original_value - 800))
action='store_true', help="Enable full multi-page animated PDF.") parser.add_argument('-g', '--graphs', action='store_true', help="Enable addition of tree image to data output.") parser.add_argument('-c', '--clean_off', action='store_true', help="Disable cleanup of temporary files.") args = parser.parse_args() ops = Operations() for file in args.operations: ops.read_ops(file) algs = ['simple', 'rb', 'splay', 'avl', 'wavl', 'tango', 'static'] algo = "" algarg = args.algorithm.lower().replace(' ', '') if algarg in {'simple', 'bst', 'simplebst'}: algo = algs[0] elif algarg in {'rb', 'redblack', 'redblacktree'}: algo = algs[1]
def main(): while True: user_operation = GetNumbers() operator = user_operation.get_operator() get_x = GetNumbers() x = get_x.get_x() get_y = GetNumbers() y = get_y.get_y() if operator == 'a': add = Operations(x, y) add_result = round(add.addition(), 2) add_print = PrintResults(add_result) print(add_print.printed_string()) print() elif operator == 's': subtract = Operations(x, y) sub_result = round(subtract.subtraction(), 2) sub_print = PrintResults(sub_result) print(sub_print.printed_string()) print() elif operator == 'm': multiply = Operations(x, y) multiply_result = round(multiply.multiplication(), 2) multiply_print = PrintResults(multiply_result) print(multiply_print.printed_string()) print() elif operator == 'd': divide = Operations(x, y) try: div_result = round(divide.division(), 2) div_print = PrintResults(div_result) print(div_print.printed_string()) except TypeError: print(f"You can't divide zero by zero, that's just ludicrous!") print()
else: cmd = "help" # Clients: Compute, Blockstorage, Database, Identity, Load Balancer cp = oci.core.compute_client.ComputeClient( config ) bs = oci.core.blockstorage_client.BlockstorageClient( config ) vn = oci.core.virtual_network_client.VirtualNetworkClient( config ) db = oci.database.database_client.DatabaseClient( config ) fs = oci.file_storage.FileStorageClient( config ) id = oci.identity.IdentityClient( config ) lb = oci.load_balancer.LoadBalancerClient( config ) os = oci.object_storage.ObjectStorageClient( config ) o = Operations( cp, bs, vn ) if cmd == "auth": print( config ) print( OCI_SSH_KEY_PUB ) print( dir( cp ) ) elif cmd == "objectStorageBuckets": nameSpace = sys.argv[3] buckets = os.list_buckets( namespace_name=nameSpace, compartment_id=config["compartment_id"] ).data for i in buckets: out = ", ".join( map( str, [ i.name, "..."+i.etag[-6:] ] ) ) print( out ) elif cmd == "objectStorageObjects": nameSpace = sys.argv[3]
def create_jobs():
    """Queue every pending link for the workers, wait, then keep crawling."""
    links = Operations.file_to_set(QUEUE_FILE)
    for pending in links:
        queue.put(pending)
    queue.join()  # block until the workers have drained the queue
    crawl()       # then look for newly discovered links
if (right == 'C'): print("displaying data for calls for %s" % (row["ticker"])) if (right == 'P'): print("displaying data for puts") for strike_index, row_ in temp_df.iterrows(): if (row_["delta"] == None): continue if (strike_index in error_list or abs(row_["delta"]) <= min_buy_wing_delta or abs(row_["delta"]) > .2): temp_df.drop(strike_index, inplace=True) for strike_index, row_ in temp_df.iterrows(): print(strike_index, row_) #at this point, should have all data for calls for this underlying. Do analysis/select strikes and determine number of contracts here before moving on to next underlying #sell leg rules: gamma less than .046, vega greater than .040, sub 20 delta, sub 20 theta (this can wiggle but must be sub 20 delta) send_order = Operations(temp_df, exp, account_size) spreads_dict = send_order.checkCriteria() #stop at first spread that satisfies margin impact print("looking for spreads") print(row["ticker"]) sell_leg, buy_leg = 0, 1 bid_, ask_, contract_ = 0, 1, 2 print(spreads_dict) spreads = True for strike_index, (spread, values) in enumerate(spreads_dict.items()): print("showing spread/bid_ask/contract for %s" % (right)) print(spread, values) legs = {} leg1 = tools.createContract(row["ticker"], "OPT", exp, float(spread.split(',')[sell_leg]),
def test_should_burst_exceptions_if_the_number_of_operations_less_than_or_equal_to_100000(
):
    """A Book may not be created with a limit above 100000."""
    over_limit = 100001
    with raises(ValueError):
        Book(Operations(Fifo()), over_limit)
print("Here are your letters: {}".format(letter_list)) print("Choose the letters in the order you want the word to be written") for letter in letter_list: new_letter = input("Next letter: ").capitalize() for letter2 in letter_list: if new_letter == letter2.key: returning_list.append(letter2) continue return returning_list if __name__ == "__main__": print("--Welcome to Scrabble!--") player_amount = 0 operations = Operations() while player_amount < 2 or player_amount > 4: player_amount = input( "Choose how many are playing (2-4 players) or enter 1 to quit: ") try: player_amount = int(player_amount) except ValueError(): print("You have to enter a number!") player_amount = 5 if player_amount == 1: quit() player_lis = []
def test_should_burst_exceptions_if_the_number_of_operations_greater_than_or_equal_to_1(
):
    """A negative operation limit must be rejected."""
    negative_limit = -1
    with raises(ValueError):
        Book(Operations(Fifo()), negative_limit)
import os

from operations import Operations

home_dir = os.path.expanduser('~')

op = Operations()
# os.path.join builds paths portably; the original concatenated with
# os.sep and left embedded '/' characters un-normalised.
kwargs = {
    'src': os.path.join(home_dir, 'temp', '123.txt'),
    'dest': os.path.join(home_dir, 'copyto', '1234.txt'),
}
# srcdirpath = os.path.join(home_dir, '{test1.txt,test.txt}')
# srcdirpath = os.path.join(home_dir, 'temp', '*')
# srcdirpath = home_dir
# op.copy(**kwargs)
# op.move(**kwargs)
op.delete(**kwargs)
from operations import Operations

ops = Operations()
count = 1
a, b = True, True
c, d = False, False


def report(passed):
    # Print PASS/FAIL with the running test number (reads global count).
    print("{}. {}".format(count, "PASS" if passed else "FAIL"))


report(ops.negate_op(c))
count += 1
report(not ops.negate_op(a))
count += 1
report(ops.and_op(a, b))
count += 1
report(not ops.and_op(a, c))
class Player: def __init__(self): self.op = Operations() self.op.init_player() def sticky_fingers(self, current_room, item): room_data = [] with open("room_data.txt", "r") as dat: room_data = json.loads(dat.read()) self.go(current_room, item) data = self.op.take() for room in room_data: if room['room_id'] == current_room: if len(room['items']) > 0: room['items'] = [] with open("room_data.txt", "w") as da: da.write(json.dumps(room_data)) if data: return True return False def go(self, current_room, end_room): room_data = [] room_conns = {} with open("room_data.txt", "r") as rdat: room_data = json.loads(rdat.read()) with open("room_conns.txt", "r") as rcon: room_conns = json.loads(rcon.read()) print('Going to ', str(end_room)) traversing = self.traverse(room_conns, room_data, current_room, end_room) if not traversing: print("Cant get there") return for step in range(len(traversing)): data = {} print('Walking') next_room = room_conns[str(current_room)][traversing[step]] data = self.op.move(traversing[step]) # if "cooldown" in data: # sleep(data["cooldown"]) # break current_room = str(data['room_id']) sleep(data["cooldown"]) print(data) def traverse(self, room_conns, room_data, room_id, end_room): cue = Queue() checked = set() paths = {} cue.enqueue(room_id) paths[room_id] = [room_id] while cue.size() > 0: current = cue.dequeue() checked.add(current) for possibles in room_conns[str(current)].values(): if possibles in checked or possibles == "?": continue new = paths[current][:] new.append(possibles) paths[possibles] = new found = False for data in room_data: if possibles == str(data['room_id']): if data['title'].lower() == end_room.lower(): found = True break if 'items' in data and 'small treasure' in data[ 'items']: found = True break elif 'items' in data and 'tiny treasure' in data[ 'items']: found = True break if possibles == end_room: found = True if found: the_path = paths[possibles] exits = [] for step in range(len(the_path) - 1): exits.append( 
self.compass(room_conns, str(the_path[step]), the_path[step + 1])) return exits cue.enqueue(possibles) return None def compass(self, room_conns, room_id, next): for connection, room in room_conns[room_id].items(): if room == next: return connection return None
def test_operation_multiplication(self):
    """Multiplying by 1 should return the value unchanged."""
    result = Operations(100, 1).multiplication()
    self.assertEqual(result, 100, 'Incorrect result')
def __init__(self):
    # Create the API wrapper and register this player with the server.
    self.op = Operations()
    self.op.init_player()
def test_operation_division_by_zero(self):
    """Division by zero is expected to yield None rather than raise."""
    result = Operations(100, 0).division()
    # assertIsNone gives a clearer failure message than assertEqual(x, None).
    self.assertIsNone(result, 'Incorrect result')
sys.modules[str(name).split(" ")[0]] = None def restrictResources(self): resource.setrlimit(resource.RLIMIT_AS, (1024000000, 10240000000000)) resource.setrlimit(resource.RLIMIT_NPROC, (1,1)) resource.setrlimit(resource.RLIMIT_CPU, (1,1)) resource.setrlimit(resource.RLIMIT_NOFILE, (4,4)) if __name__ == "__main__": ob_restrictions = Restrictions() ob_restrictions.load() print "\n\nRestrictions Loaded !\n" try: ob_operations = Operations() print "Computing: Fibonacci" ob_operations.computeFibonacci(10) print "computing: Power of 2" ob_operations.computePower(128) except IOError: print "Exception: Too Many Files Open" except MemoryError: print "Exception: Memory Usage" except: print "Exception: ", sys.exc_info()[0] print "\nLoading Exploit Program\n" try: ob_security = Security()
def test_operation_potentiation(self):
    # NOTE(review): expecting 100 from Operations(10, 1).potentiation()
    # looks suspicious -- 10 ** 1 == 10.  Either the operands or the
    # expected value is likely wrong; confirm potentiation()'s semantics
    # before trusting this test.
    operation = Operations(10,1)
    self.assertEqual(operation.potentiation(), 100, 'Incorrect result')
def crawl():
    """Report how many links are queued and dispatch jobs if any remain."""
    queued_links = Operations.file_to_set(QUEUE_FILE)
    if queued_links:
        print('{} links in the queue'.format(len(queued_links)))
        create_jobs()
parser_prep.add_argument('--mountpoint', '-m', dest='backup_dir', type=str, help="Where to mount the backup volume.") parser_back = subparsers.add_parser('backup', help='Do the Backup') parser_back.add_argument('--source', '-s', dest='source_dir', type=str, help="What to backup") parser_back.add_argument('--drive', '-d', dest='drive', type=str, help="Disk drive where the backups should be put to.") parser_del = subparsers.add_parser('delete', help='Delete a Backup') parser_del.add_argument('--date', '-t', dest='date', type=int, required=True, help="Unix time of the backup to delete") parser_del.add_argument('--drive', '-d', dest='drive', type=str, help="Disk drive where the backups should be put to.") options = parser.parse_args() #print #print "Options:",options #print op = Operations() if options.subcommand == 'ld': from disks import Disks disks = Disks() for i, dinfo in enumerate(disks.list_devices()): print "%2d. Device: %s Label: %s"%(i+1, dinfo.dev_file(), dinfo.label()) if options.subcommand == 'prepare': latest_dir = os.path.join(options.backup_dir, 'latest') if 1: logger.info("* Unmounting Volume") ret = op.unmount_backup(options.drive) logger.info(ret) if 1: logger.info("* Creating Volume")
class BatchISP: def __init__(self): parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description='Linux remake of Atmel\'s BatchISP utility.') parser.add_argument('-baudrate', type=int, help="{ 9600 | 19200 | 38400 | 57600 | *115200* }") parser.add_argument('-device', type=str, required=True, help="Device type, ? for list.") parser.add_argument('-port', type=str, help="Port/interface to connect.") parser.add_argument('-hardware', type=str, help="{ RS232 | TODO }") parser.add_argument('-version', action='version', version='%(prog)s 0.0.0') parser.add_argument('-sync', type=int, default=1, choices=(1, 0), help="Synchronize protocol (for development only)") parser.add_argument( '-operation', nargs=argparse.REMAINDER, help="<operation> <operation> ..., use help for help") #CANOPEN <node_number> #CANCLOSE <node_number> #SERIALIZE <dest_addr> <serial_number> <number | ascii | unicode> <step> #WAIT <Nsec> #FILLBUFFER <data> #ASSERT < PASS | fail > #RBOOTID1 [ expected_data ] #RBOOTID2 [ expected_data ] #WSBV <data> #RSBV [ expected_data ] #WBSB <data> #RBSB [ expected_data ] #WHWB <data> #RHWB [ expected_data ] #WEB <data> #REB [ expected_data ] #SSL1 #SSL2 #RSSB [ expected_data ] #RSIGB #WCRIS <data> #RCRIS [ expected data ] #WNNB <data> #RNNB [ expected data ] #WBTC1 <data> #RBTC1 [ expected data ] #WBTC2 <data> #RBTC2 [ expected data ] #WBTC3 <data> #RBTC3 [ expected data ] #WP1CFG <data> #RP1CFG [ expected data ] #WP3CFG <data> #RP3CFG [ expected data ] #WP4CFG <data> #RP4CFG [ expected data ] #ENAX2 #DISX2 #ENABLJB #DISBLJB #ENAOSC #DISOSC #ENASELBOOT #DISSELBOOT #INCLUDE <cmd_file> #ONFAIL < ASK | abort | retry | ignore > #ADDRANGE <start> <end> operations_help = """ BLANKCHECK ECHO "<your comment>" ERASE { F | <n> } LOADBUFFER <in_hexfile> MEMORY { FLASH | EEPROM | <id> } PROGRAM READ SAVEBUFFER <hex_file_name> { 386HEX | ? 
} START { RESET | NORESET } <address> VERIFY """ parser.epilog = operations_help self._args = parser.parse_args() self._parser = parser def _getIOByHardwareName(self, hardware): if hardware == 'RS232': if self._args.port is None: raise PrgError("Port not specified for RS232") if not self._args.baudrate is None: return SerialIO(self._args.port, self._args.baudrate) else: return SerialIO(self._args.port) else: raise PrgError("Unsupported hardware: %s" % hardware) def run(self): if self._args.device == '?': parts = Parts() parts = [part.getName() for part in parts.list()] parts.sort() print(parts) return 0 try: part = Parts().getPartByName(self._args.device) if not self._args.hardware is None: hw = sef._args.hardware else: hw = part.listHardware() if len(hw) != 1: raise PrgError( "Cannot determine hardware select one of: %s" % hw) hw = hw[0] io = self._getIOByHardwareName(hw) self._operations = Operations(part, io, self._args.sync) return self._doOperations() except PgmError as e: print(e) return 1 def _doOperations(self): """Go trought all operations and try to execute them.""" if self._args.operation is None: return iop = iter(self._args.operation) self._buffer = IHex() try: while True: try: op = next(iop) time.sleep(1) except StopIteration: return 0 if op == 'BLANKCHECK': self._operations.opBlankCheck(0) elif op == 'ECHO': print(next(iop)) elif op == 'ERASE': op = next(iop) if op != 'F': raise PgmError("Expected 'F' not %s" % op) self._operations.opErase() elif op == 'LOADBUFFER': filename = next(iop) self._buffer = IHex.read_file(filename) elif op == 'PROGRAM': for start, data in self._buffer.areas.items(): self._operations.opProgram(data, start) elif op == 'MEMORY': self._operations.opMemory(next(iop)) self._addr_start = 0 self._addr_end = None elif op == 'READ': if self._addr_end is None: size = None #size = 1024 # debug only, set to None!!! 
else: size = self._addr_end - self._addr_start data = self._operations.opRead(self._addr_start, size) self._buffer.insert_data(self._addr_start, data) elif op == 'SAVEBUFFER': filename = next(iop) if next(iop) != '386HEX': raise PgmError("Invalid output format") self._buffer.write_file(filename) elif op == 'START': reset = next(iop) if reset == 'RESET': reset = True addr = next(iop) elif reset == 'NORESET': reset = False addr = next(iop) else: addr = reset reset = True addr = int(addr, 0) if addr != 0: raise PgmError("Only address 0 supported for START") self._operations.opStartAppl(reset) try: next(iop) except StopIteration: continue raise PgmError( "START cannot be folowed by anny instruction!!!") elif op == 'VERIFY': for start, data in self._buffer.areas.items(): data_r = self._operations.opRead(start, len(data)) if data != data_r: while not data_r.startswith(data): data = data[:-1] addr = len(data) addr = addr + start raise PgmError( "Verification failed at address: 0x%X" % addr) else: raise PgmError("Unknown or unsupported operation: %s" % op) except StopIteration: raise PgmError("Missing argument for cmd: %s" % cmd)
def test_calculate(self):
    """main() evaluates the byte-string expression and returns the result
    as bytes."""
    expression = b'26 * 99 - 37 * 38 + 50 + 48 / 45 + 90 + 22 - 44'
    expected = b'1287.0666666666666'
    self.assertEqual(Operations().main(expression), expected)
from player import Player from operations import Operations import sys # for api-key from decouple import config api_key = config('API_KEY') player = Player() operations = Operations() def to_shop(c_map=c_map): current_room = operations.init_player() check_inv = operations.check_status() <<<<<<< HEAD <<<<<<< HEAD #print('CHECK INVENTORY', check_inv['inventory']) #print(current_room, 'LOOKIE') #print(current_room['room_id'],current_room['title'],'NOW LOOK') #print(current_room['exits'], 'NOW LOOK') cur_room_id = current_room['room_id'] path = to_room(c_map[cur_room_id], 105) # path = bfs(current room, 1(shop room))
class Model(qt4.QObject): ''' Main work horse. Model of the data. ''' #*************************************__init__()************************************* def __init__(self): ''' Constructor ''' qt4.QObject.__init__( self ) self.view_mode = "R" self.cp1 = CircuitProfile(None) self.cp2 = CircuitProfile(None) self.cpgoal = CircuitProfile(None) self.ops = {} self.results = {"S1": {}, "S2": {}, "Change": {}, "Goal": {}} self.load_operations() # create the GUI app self.app = qt4.QApplication.instance() self.app.processEvents() # instantiate the main window self.ui = MainWindow(self) self.plots_S1 = Plots(self, self.ui, "S1", self.ui.mplS1) self.plots_S2 = Plots(self, self.ui, "S2", self.ui.mplS2) self.plots_S2.hide() # full screen self.ui.showMaximized() # start the Qt main loop execution, exiting from this script # with the same return code of Qt application sys.exit(self.app.exec_()) self.log_file = open("log_file.txt", "w") #*************************************load_circuit_profile()************************************* def load_circuit_profile(self, identifier, fname): ''' Args: Returns: Raises: ''' if identifier == "S1": cp = self.cp1 elif identifier == "S2": cp = self.cp2 self.plots_S2.unhide() if len(cp.results) == 0: self.plots_S1.hide() elif identifier == "Goal": cp = self.cpgoal else: print "Unknown circuit profile identifier" sys.exit() cp.open_file(fname) results = self.compute_operations(cp) cp.set_results(results) self.results[identifier] = results if identifier == "S1": self.plots_S1.update_plots(cp) elif identifier == "S2": self.plots_S2.update_plots(cp) self.check_change() self.emit(qt4.SIGNAL("sigModified")) #***************************check_change()*************************** def check_change(self): ''' Args: Returns: Raises: ''' rS1 = self.results["S1"] rS2 = self.results["S2"] if len(rS1) > 0 and len(rS2) > 0: self.results["Change"] = self.cp2 - self.cp1 self.plots_S1.unhide() self.plots_S2.unhide() 
#***************************load_operations()*************************** def load_operations(self): ''' Args: Returns: Raises: ''' self.ops = Operations(self) self.vert_headers = ["S1", "S2", "Change", "Goal"] self.horiz_headers = self.ops.get_ops("table").keys() self.horiz_headers.sort() #***************************compute_operations()*************************** def compute_operations(self, cp): ''' Args: Returns: Raises: ''' return self.ops.compute(cp) #***************************change_view()*************************** def change_view(self, view): ''' Args: Returns: Raises: ''' self.view_mode = view temp = [] temp_plots = [] if self.view_mode == "R": temp = self.ops.get_ops("table").keys() temp_plots = self.ops.get_ops("plot").keys() if self.view_mode == "T": for opname, op in self.ops.get_ops("table").items(): if op.therapist is True or op.patient is True: temp.append(opname) for opname, op in self.ops.get_ops("plot").items(): if op.therapist is True or op.patient is True: temp_plots.append(opname) if self.view_mode == "P": for opname, op in self.ops.get_ops("table").items(): if op.patient is True: temp.append(opname) for opname, op in self.ops.get_ops("plot").items(): if op.patient is True: temp_plots.append(opname) self.plots_S1.change_view(temp_plots) self.plots_S2.change_view(temp_plots) temp.sort() self.horiz_headers = temp self.emit(qt4.SIGNAL("sigModified")) #***************************get_result_at()*************************** def get_result_at(self, row, col): ''' Args: Returns: Raises: ''' cpname = self.vert_headers[col] cp_results = self.results[cpname] if len(cp_results) == 0: return None opname = self.horiz_headers[row] if self.view_mode == "R": return cp_results[opname] if self.view_mode == "T": op = self.ops.ops[opname] if op.therapist is True or op.patient is True: return cp_results[opname] if self.view_mode == "P": op = self.ops.ops[opname] if op.patient is True: return cp_results[opname] 
#***************************get_description_at()*************************** def get_description_at(self, row, col): ''' Args: Returns: Raises: ''' cpname = self.vert_headers[col] opname = self.horiz_headers[row] return self.ops.get_description(opname) #***************************get_vert_headers()*************************** def get_vert_headers(self): ''' Args: Returns: Raises: ''' return self.vert_headers #***************************get_horiz_headers()*************************** def get_horiz_headers(self): ''' Args: Returns: Raises: ''' return self.horiz_headers
import threading from queue import Queue from crawler import Crawler from operations import Operations PROJECT_NAME = 'Gaiaonline' HOMEPAGE = 'http://www.gaiaonline.com/profiles/ie-batman/9487660/' DOMAIN_NAME = Operations.get_domain_name(HOMEPAGE) QUEUE_FILE = PROJECT_NAME + '/queue.txt' CRAWLED_FILE = PROJECT_NAME + '/crawled.txt' EDGES_FILE = PROJECT_NAME + '/edges.txt' URL_MATCH_FILE = PROJECT_NAME + '/url_match.txt' NAME_MAPPING = PROJECT_NAME + '/name_mapping.txt' NUMBER_OF_THREADS = 128 queue = Queue() Crawler(PROJECT_NAME, HOMEPAGE, DOMAIN_NAME) # Create worker threads (will die when main exits) def create_workers(): for _ in range(NUMBER_OF_THREADS): t = threading.Thread(target=work) t.daemon = True t.start() # Do the next job in the queue def work(): while True: url = queue.get()
from connection import Connection
from operations import Operations
from flask import Flask, jsonify, render_template, send_from_directory
from dotenv import load_dotenv
import os

# Pull DB credentials from .env into the environment before reading them.
load_dotenv()

app = Flask(__name__, static_url_path='')
connection = Connection(os.getenv('DB_HOST'), os.getenv('DB_USER'), os.getenv('DB_PASSWORD'), os.getenv('DB_NAME'))
operations = Operations(connection)

# Module-level mutable state for the "gravity car" race controller.
__corrida_ativa = 0          # id of the active race
__corredor_ativo = 0         # id of the active runner
__running = False            # whether a run is in progress
__infras = [0, 0, 0, 0, 0, 0]    # infrared sensor readings -- presumably one per gate; confirm
__timer_infras = [0, 0, 0]       # timestamps per sensor pair -- TODO confirm semantics
__tempos = [0, 0, 0]             # measured split times -- TODO confirm semantics


@app.route("/")
def index():
    """Health-check / landing endpoint."""
    return "gravity car system"


@app.route("/ativar-corredor/<int:id_corredor>", methods=["POST"])
def ativar_corredor(id_corredor):
    """Set the active runner to the given id.

    Args:
        id_corredor: runner id taken from the URL path.

    Returns:
        The literal string "OK".
    """
    global __corredor_ativo
    __corredor_ativo = id_corredor
    return "OK"
class BatchISP:
    """Linux remake of Atmel's BatchISP command line utility.

    The command line is parsed on construction; run() selects the part and
    hardware interface and then executes the requested -operation list.
    """

    def __init__(self):
        """Build the argument parser and parse sys.argv."""
        parser = argparse.ArgumentParser(
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description='Linux remake of Atmel\'s BatchISP utility.')
        parser.add_argument('-baudrate', type=int,
                            help="{ 9600 | 19200 | 38400 | 57600 | *115200* }")
        parser.add_argument('-device', type=str, required=True,
                            help="Device type, ? for list.")
        parser.add_argument('-port', type=str,
                            help="Port/interface to connect.")
        parser.add_argument('-hardware', type=str,
                            help="{ RS232 | TODO }")
        parser.add_argument('-version', action='version', version='%(prog)s 0.0.0')
        parser.add_argument('-sync', type=int, default=1, choices=(1, 0),
                            help="Synchronize protocol (for development only)")
        parser.add_argument('-operation', nargs=argparse.REMAINDER,
                            help="<operation> <operation> ..., use help for help")
        # Operations not yet implemented (kept here as a TODO list):
        #CANOPEN <node_number>
        #CANCLOSE <node_number>
        #SERIALIZE <dest_addr> <serial_number> <number | ascii | unicode> <step>
        #WAIT <Nsec>
        #FILLBUFFER <data>
        #ASSERT < PASS | fail >
        #RBOOTID1 [ expected_data ]
        #RBOOTID2 [ expected_data ]
        #WSBV <data>
        #RSBV [ expected_data ]
        #WBSB <data>
        #RBSB [ expected_data ]
        #WHWB <data>
        #RHWB [ expected_data ]
        #WEB <data>
        #REB [ expected_data ]
        #SSL1
        #SSL2
        #RSSB [ expected_data ]
        #RSIGB
        #WCRIS <data>
        #RCRIS [ expected data ]
        #WNNB <data>
        #RNNB [ expected data ]
        #WBTC1 <data>
        #RBTC1 [ expected data ]
        #WBTC2 <data>
        #RBTC2 [ expected data ]
        #WBTC3 <data>
        #RBTC3 [ expected data ]
        #WP1CFG <data>
        #RP1CFG [ expected data ]
        #WP3CFG <data>
        #RP3CFG [ expected data ]
        #WP4CFG <data>
        #RP4CFG [ expected data ]
        #ENAX2
        #DISX2
        #ENABLJB
        #DISBLJB
        #ENAOSC
        #DISOSC
        #ENASELBOOT
        #DISSELBOOT
        #INCLUDE <cmd_file>
        #ONFAIL < ASK | abort | retry | ignore >
        #ADDRANGE <start> <end>
        operations_help = """
BLANKCHECK
ECHO "<your comment>"
ERASE { F | <n> }
LOADBUFFER <in_hexfile>
MEMORY { FLASH | EEPROM | <id> }
PROGRAM
READ
SAVEBUFFER <hex_file_name> { 386HEX | ? }
START { RESET | NORESET } <address>
VERIFY
"""
        parser.epilog = operations_help
        self._args = parser.parse_args()
        self._parser = parser

    def _getIOByHardwareName(self, hardware):
        """Return an IO object for the named hardware interface.

        Args:
            hardware: interface name; only 'RS232' is supported.

        Raises:
            PrgError: when the hardware is unsupported or -port is missing.
        """
        if hardware == 'RS232':
            if self._args.port is None:
                raise PrgError("Port not specified for RS232")
            # Honour an explicit -baudrate, otherwise use SerialIO's default.
            if self._args.baudrate is not None:
                return SerialIO(self._args.port, self._args.baudrate)
            return SerialIO(self._args.port)
        else:
            raise PrgError("Unsupported hardware: %s" % hardware)

    def run(self):
        """Execute the command line: list parts or run the operations.

        Returns:
            0 on success, 1 on a reported programming error.
        """
        if self._args.device == '?':
            parts = Parts()
            parts = [part.getName() for part in parts.list()]
            parts.sort()
            print(parts)
            return 0
        try:
            part = Parts().getPartByName(self._args.device)
            if self._args.hardware is not None:
                # Fix: original read 'sef._args.hardware' -- a NameError
                # whenever -hardware was given on the command line.
                hw = self._args.hardware
            else:
                hw = part.listHardware()
                if len(hw) != 1:
                    raise PrgError("Cannot determine hardware select one of: %s" % hw)
                hw = hw[0]
            io = self._getIOByHardwareName(hw)
            self._operations = Operations(part, io, self._args.sync)
            return self._doOperations()
        except PgmError as e:
            # NOTE(review): _getIOByHardwareName raises PrgError while this
            # catches PgmError -- confirm both names exist and are related,
            # otherwise PrgError escapes uncaught here.
            print(e)
            return 1

    def _doOperations(self):
        """Go through all operations and try to execute them.

        Returns:
            0 when the operation list is exhausted; None when no
            -operation argument was given.

        Raises:
            PgmError: on an unknown operation, a malformed argument, or a
                missing argument for the last operation.
        """
        if self._args.operation is None:
            return
        iop = iter(self._args.operation)
        self._buffer = IHex()
        try:
            while True:
                try:
                    op = next(iop)
                    # NOTE(review): fixed 1 s pause between operations --
                    # presumably device pacing; confirm it is still needed.
                    time.sleep(1)
                except StopIteration:
                    return 0
                if op == 'BLANKCHECK':
                    self._operations.opBlankCheck(0)
                elif op == 'ECHO':
                    print(next(iop))
                elif op == 'ERASE':
                    op = next(iop)
                    if op != 'F':
                        raise PgmError("Expected 'F' not %s" % op)
                    self._operations.opErase()
                elif op == 'LOADBUFFER':
                    filename = next(iop)
                    self._buffer = IHex.read_file(filename)
                elif op == 'PROGRAM':
                    for start, data in self._buffer.areas.items():
                        self._operations.opProgram(data, start)
                elif op == 'MEMORY':
                    self._operations.opMemory(next(iop))
                    self._addr_start = 0
                    self._addr_end = None
                elif op == 'READ':
                    # NOTE(review): relies on a preceding MEMORY operation to
                    # set _addr_start/_addr_end; READ first raises
                    # AttributeError -- confirm intended.
                    if self._addr_end is None:
                        size = None
                        #size = 1024 # debug only, set to None!!!
                    else:
                        size = self._addr_end - self._addr_start
                    data = self._operations.opRead(self._addr_start, size)
                    self._buffer.insert_data(self._addr_start, data)
                elif op == 'SAVEBUFFER':
                    filename = next(iop)
                    if next(iop) != '386HEX':
                        raise PgmError("Invalid output format")
                    self._buffer.write_file(filename)
                elif op == 'START':
                    reset = next(iop)
                    if reset == 'RESET':
                        reset = True
                        addr = next(iop)
                    elif reset == 'NORESET':
                        reset = False
                        addr = next(iop)
                    else:
                        # Bare address: default to RESET behaviour.
                        addr = reset
                        reset = True
                    addr = int(addr, 0)
                    if addr != 0:
                        raise PgmError("Only address 0 supported for START")
                    self._operations.opStartAppl(reset)
                    # START must be the last operation on the command line.
                    try:
                        next(iop)
                    except StopIteration:
                        continue
                    raise PgmError("START cannot be folowed by anny instruction!!!")
                elif op == 'VERIFY':
                    for start, data in self._buffer.areas.items():
                        data_r = self._operations.opRead(start, len(data))
                        if data != data_r:
                            # Trim until data_r starts with data to locate
                            # the first mismatching byte.
                            while not data_r.startswith(data):
                                data = data[:-1]
                            addr = len(data)
                            addr = addr + start
                            raise PgmError("Verification failed at address: 0x%X" % addr)
                else:
                    raise PgmError("Unknown or unsupported operation: %s" % op)
        except StopIteration:
            # Fix: original formatted undefined name 'cmd' here, so the
            # error report itself crashed with NameError; report 'op'.
            raise PgmError("Missing argument for cmd: %s" % op)
# system import ntpath import logging from functools import partial logging.basicConfig(level=logging.DEBUG) # constants SLIDER_MIN_VAL = -100 SLIDER_MAX_VAL = 100 SLIDER_DEF_VAL = 0 THUMB_SIZE = 120 # global image_original = None operations = Operations() drawer = Drawer() strategies = {} class ActionTabs(QTabWidget): def __init__(self, parent): super().__init__() self.parent = parent self.filters_tab = FiltersTab(self) self.adjustment_tab = AdjustingTab(self) self.modification_tab = ModificationTab(self) self.rotation_tab = RotationTab(self) self.addTab(self.filters_tab, "Filters")
"""Blocking scheduler that refreshes device locations every 15 seconds."""
from apscheduler.schedulers.blocking import BlockingScheduler

from operations import Operations

sched = BlockingScheduler()
ops = Operations()


def update_15_seconds():
    """Log a heartbeat and refresh device locations via Operations."""
    print('Update device locations every 15 seconds.')
    ops.update_device_locale()


# Explicit registration -- equivalent to decorating update_15_seconds with
# @sched.scheduled_job('interval', seconds=15).
sched.add_job(update_15_seconds, 'interval', seconds=15)

sched.start()
def marrHildrethDetector(self, img, slope_threshold, shape=(5, 5), sigma=5):
    """Marr-Hildreth edge response: convolve the image with a
    Laplacian-of-Gaussian kernel.

    Args:
        img: input image, in whatever form Operations.doConvolution
            accepts (presumably a 2-D array -- TODO confirm).
        slope_threshold: unused in this step; kept for interface
            compatibility with callers.
        shape: LoG kernel shape, default (5, 5).
        sigma: Gaussian standard deviation for the LoG kernel.

    Returns:
        The convolution response as returned by doConvolution.
    """
    kernel_factory = Filters()
    convolver = Operations()
    log_kernel = kernel_factory.laplaceOfGaussianKernel(shape, sigma)
    return convolver.doConvolution(log_kernel, img)
def test_addition(self):
    """Exercise Operations.addition across sign and int/float combinations.

    Each case pairs constructor operands with the expected sum, checked to
    two decimal places.
    """
    cases = [
        ((36, 45), 81),
        ((-5, -9), -14),
        ((10, -8), 2),
        ((42.0, 5.3), 47.3),
        ((-85.6, -37.9), -123.5),
        ((54.1, -12.2), 41.9),
    ]
    for (lhs, rhs), expected in cases:
        assert round(Operations(lhs, rhs).addition(), 2) == expected