def searchTweetsByHashtag(self, hashtag):
    """Search recent tweets matching *hashtag* and persist each result.

    Builds an authenticated tweepy client from the application consumer
    keys plus the (empty here) user access tokens, then iterates the
    search cursor saving every tweet into a fresh Storage instance.
    """
    # You can generate these values with the AccessKeyGenerator class.
    TWITTER_ACCESS_TOKEN = ''
    TWITTER_ACCESS_TOKEN_SECRET = ''
    auth = tweepy.OAuthHandler(AccessKeyGenerator.CONSUMER_KEY,
                               AccessKeyGenerator.CONSUMER_SECRET)
    auth.set_access_token(TWITTER_ACCESS_TOKEN, TWITTER_ACCESS_TOKEN_SECRET)
    api = tweepy.API(auth)
    storage = Storage()
    # NOTE(review): count=1000 exceeds Twitter's per-page search maximum
    # (100); tweepy pages/clamps this — confirm the intended page size.
    for tweet in tweepy.Cursor(api.search, q=hashtag, count=1000,
                               result_type="recent").items():
        storage.save(tweet)
def generateLog():
    """Stream every stored message to the serial port wrapped in <LOG> tags.

    Reads messages from Storage until read() returns 0 (exhausted);
    each message is sent in two chunks with a pause between them.
    """
    SER.send('\n\n<LOG>')
    while 1:
        message = Storage.read()
        if message == 0:
            # Storage signals end-of-data with 0 rather than None/''.
            break
        # Send in two pieces; the sleep lets the serial buffer drain.
        SER.send(message[0:1000])
        MOD.sleep(2)
        SER.send(message[1000:])
    # Next time we'll be writing in a new sector.
    Storage.incrementActiveSector()
    SER.send('</LOG>\n\n')
def storagecli(argv):
    """storagecli [-?rg] [<scriptfile>...]
    Provides an interactive session to the configuration server. This allows
    you to interactively view and change the persistent database.
    Options:
        -? = This help text.
        -g = used paged output (like 'more').
    """
    # NOTE(review): the usage line advertises -r but only "?g" is parsed below.
    import nmsgetopt
    paged = False
    try:
        optlist, longopts, args = nmsgetopt.getopt(argv[1:], "?g")
    except GetoptError:
        print storagecli.__doc__
        return
    for opt, val in optlist:
        if opt == "-?":
            print storagecli.__doc__
            return
        elif opt == "-g":
            paged = True
    # Choose an I/O backend, paged or plain console.
    if paged:
        import termtools
        io = termtools.PagedIO()
    else:
        io = CLI.ConsoleIO()
    ui = CLI.UserInterface(io)
    # Long-form options seed the persistent configuration.
    cf = Storage.get_config(initdict=longopts)
    cf.reportfile = __name__.replace(".", "_")
    cf.logbasename = "%s.log" % (__name__.replace(".", "_"),)
    cf.arguments = argv
    cmd = RootContainerEditor(ui)
    cmd._setup(cf, "root")
    parser = CLI.CommandParser(
        cmd, historyfile=os.path.expandvars("$HOME/.hist_storagecli"))
    if args:
        # Script files given on the command line are executed in order;
        # Ctrl-C aborts the remaining scripts.
        for arg in args:
            try:
                parser.parse(arg)
            except KeyboardInterrupt:
                break
    else:
        parser.interact()
def add_storage(self, storage_params):
    """Create a new storage, register it, and return its index.

    Algorithm (unchanged from the original notes):
      1. If the storage is already in the shared db, it is just added.
      2. Otherwise the storage location is rescanned; every storage
         location found there is added as a base storage, and a new
         one is created.
    """
    idx = self.assign_storage_idx()
    block_listener = self.create_block_listener(idx)
    new_storage = Storage.create_storage(
        self.db_manager, self.txn_manager, idx, storage_params,
        block_listener)
    new_storage.set_report_manager(self.report_manager)
    self.storages[idx] = new_storage
    return idx
def setup():
    """One-time boot sequence: settings, battery check, network, storage."""
    initSettings()
    VOLT = Gauge.getBatteryVoltage()
    SER.send('Voltage: %s\n' % VOLT)
    # Don't start the network on a missing/near-empty battery
    # NOTE(review): assumed only the network init is gated by the voltage
    # check; original indentation was lost — confirm the branch extent.
    if VOLT > 2500:
        initNetworkRelated()
    SER.send('Starting storage initialization at: %s\n' % MOD.secCounter())
    sector = Storage.initialize()
    SER.send('End at: %s. Sector: %s\n' % (MOD.secCounter(), sector))
    Module.CPUclock(0)  # Clock back to default (@26Mhz)
    SER.send('CPU back down\n')
def load_storages(self):
    """Load every storage from the db and record which one is active."""
    #
    # All storages except for the specified one are inactive, i.e., base.
    # Inactive storages can be used to pull data blocks from, and must
    # be updated on each invocation, since somebody else might be adding
    # blocks there
    #
    logging.debug("StorageManager loading storages")
    self.storages = {}
    self.active_storage_idx = None
    for storage_idx in self.get_storage_idxs():
        logging.debug("StorageManager loading storage %d" % storage_idx)
        handler = self.create_block_listener(storage_idx)
        storage = Storage.load_storage(self.db_manager, self.txn_manager,
                                       storage_idx, handler)
        storage.set_report_manager(self.report_manager)
        self.storages[storage_idx] = storage
        if storage.is_active():
            logging.debug("Storage is active")
            seq_id = storage.get_active_sequence_id()
            # NOTE(review): seq_idx is unpacked but never used afterwards —
            # confirm whether it is needed or the tuple unpack is vestigial.
            self.active_storage_idx, seq_idx = self.seq_to_index[seq_id]
# File: JSON Mode Test File for EDD # License: Released under MIT License # Notice: Copyright (c) 2020 TytusDB Team # Developer: Luis Espino import Storage as j # assume no data exist or execute the next optional drop function j.dropAll() print("\nCREATE DATABASE: ") print(j.createDatabase("DataBase1"), end="/ 0 \n") # 0 print(j.createDatabase("DATABASE1"), end="/ 2 \n") # 2 print(j.createDatabase("DataBase2"), end="/ 0 \n") # 0 print(j.createDatabase("DataBase3"), end="/ 0 \n") # 0 print(j.createDatabase("DataBase4"), end="/ 0 \n") # 0 print(j.createDatabase("@hola"), end="/ 1 \n") # 1 print(j.createDatabase(" "), end="/ 1 \n") # 1 print(j.createDatabase(5), end="/ 1 \n") # 1 print(j.createDatabase("DataBase4"), end="/ 2 \n") # 2 print(j.showDatabases()) print("\nALTER DATABASE: ") print(j.alterDatabase("Database1", "database2"), end="/ 3 \n") # 3 print(j.alterDatabase("Database10", "database5"), end="/ 2 \n") # 2 print(j.alterDatabase("Database4", "DataBase0"), end="/ 0 \n") # 0 print(j.alterDatabase("DataBase4", "1DataBase0"), end="/ 1 \n") # 1 print(j.showDatabases()) print("\nDROP DATABASE: ") print(j.dropDatabase("DataBase0"), end="/ 0 \n") # 0 print(j.dropDatabase("1DataBase0"), end="/ 2 \n") # 2 print(j.dropDatabase("DataBase0"), end="/ 2 \n") # 2
def setUp(self):
    """Setup the storage, that has student index and course index."""
    self.storage = Storage.Storage()
    # Seed the storage from both sample workbooks, in order.
    for workbook in ("tablea.xls", "tableb.xls"):
        self.storage.update_from_excel(workbook)
def commandVRFY(self, argument, module):
    """Handle a VRFY request; a bare '-' argument is answered with 501."""
    if argument == "-":
        self.code501("", module)
        return
    reply = Storage.commandVRFY(self.accountEmailRegistry, argument)
    CommonFunctions.sendData(reply, module, self.securityServer)
def test01_export_xml_fail(self):
    """Exporting invalid data must raise IndexError and create no file."""
    with self.assertRaises(IndexError):
        self.assertFalse(Storage.export_xml_tree(self.invalid_data))
    # The failed export must not leave a partial output file behind.
    self.assertFalse(os.path.exists(self.output_filename))
def follow_lane_PID(position, targetPosition, auto_drive):
    """Run one PID control step steering toward targetPosition.

    Controller state (previous position, integral of error, measured loop
    period delta_t) is persisted between invocations via Storage at
    file_path, since this function is re-entered once per control cycle.
    Gains Kp/Ti/Td and the output limits are module-level constants.
    """
    t = auto_drive.getBasicTimeStep()
    delta_t = Storage.loadData("delta_t", file_path)
    start_time = time.time()
    if delta_t is None:
        delta_t = 0.01  # estimated loop period for the very first cycle
    # estimated_position = Storage.loadData("estimated_position", file_path)
    # if estimated_position is not None:
    #     if estimated_position > position:
    #         position = position + (estimated_position - position)
    e = round((targetPosition - position), 2)
    prev_position = Storage.loadData("prev_position", file_path)
    prev_integral_of_error = Storage.loadData("prev_integral_of_error", file_path)
    if prev_position is not None:
        # calculate starting on second cycle
        derivative_of_position = (position - prev_position) / t
        integral_of_error = prev_integral_of_error + e * delta_t
        Storage.storeData("prev_integral_of_error", round(integral_of_error, 2), file_path)
    elif prev_integral_of_error is None:
        # First cycle: initialise the persisted state and zero the memory.
        Storage.storeData("prev_integral_of_error", 0, file_path)
        Storage.storeData("prev_position", position, file_path)
        derivative_of_position, integral_of_error = 0, 0
        # NOTE(review): if prev_position is None while prev_integral_of_error
        # is not, neither branch runs and derivative_of_position /
        # integral_of_error are unbound below — confirm that state is
        # unreachable. Also, prev_position does not appear to be refreshed on
        # later cycles; verify against the original file's indentation.
    P = Kp * e  # Proportional result
    I = (Kp / Ti) * integral_of_error  # Integral result
    D = -(Kp * Td) * derivative_of_position  # Derivative result
    steering_angle = first_controller_output + P + I + D
    # Persist the measured cycle duration for the next invocation.
    Storage.storeData("delta_t", round((time.time() - start_time), 4), file_path)
    if steering_angle > control_out_max:  # check upper limit
        steering_angle = control_out_max
        integral_of_error = integral_of_error - e * delta_t  # anti-reset windup
        Storage.storeData("prev_integral_of_error", round(integral_of_error, 2), file_path)
    if steering_angle < control_out_min:  # check lower limit
        steering_angle = control_out_min
        integral_of_error = integral_of_error - e * delta_t  # anti-reset windup
        Storage.storeData("prev_integral_of_error", round(integral_of_error, 2), file_path)
    # To prevent derivative kick: the value of the error changes suddenly
    # whenever the set point is adjusted.
    y = odeint(derivative_kick, position, [0, delta_t], args=(steering_angle, Ku, Tu))
    # estimated position error
    estimated_position = int(y[-1])
    Storage.storeData("estimated_position", estimated_position, file_path)
    Log.extend(
        [str(position), str(targetPosition), str(round(steering_angle, 2))])
    auto_drive.setSteeringAngle(round(steering_angle, 2))
    Log.append(str(delta_t))
# File: JSON Mode Test File for EDD # License: Released under MIT License # Notice: Copyright (c) 2020 TytusDB Team # Developer: Luis Espino import Storage as j from time import time import random # assume no data exist or execute the next optional drop function j.dropAll() # print(j.createDatabase("DataBase2")) #0 # print(j.createDatabase("DataBase3")) #0 # print(j.createDatabase("DataBase4")) #0 # print(j.createDatabase("@hola")) #1 # print(j.createDatabase("DataBase4")) #2 # print(j.showDatabases()) # print(j.alterDatabase("Database1","database2")) #3 # print(j.alterDatabase("Database","database2")) #2 # print(j.alterDatabase("Database4","DataBase0")) #0 # print(j.alterDatabase("DataBase4","1DataBase0")) #1 # print(j.showDatabases()) # print(j.dropDatabase("DataBase0")) #0 # print(j.dropDatabase("1DataBase0")) #1 # print(j.dropDatabase("DataBase0")) #2 # print(j.showDatabases()) # print(j.createTable("DataBase1","Tabla1",5)) #0 # print(j.createTable("DataBase1","Tabla2",5)) #0 # print(j.createTable("DataBase1","Tabla3",5)) #0 # print(j.createTable("DataBase2","Tabla4",4)) #0 # print(j.createTable("DataBase2","Tabla5",4)) #0
# File: JSON Mode Test File for EDD # License: Released under MIT License # Notice: Copyright (c) 2020 TytusDB Team # Developer: Luis Espino import Storage as j # assume no data exist or execute the next optional drop function j.dropAll() # test Databases CRUD print(j.createDatabase('db1')) # 0 print(j.createDatabase('db1')) # 2 print(j.createDatabase('db4')) # 0 print(j.createDatabase('db5')) # 0 print(j.createDatabase(0)) # 1 print(j.alterDatabase('db5', 'db1')) # 3 print(j.alterDatabase('db5', 'db2')) # 0 print(j.dropDatabase('db4')) # 0 print(j.showDatabases()) # ['db1','db2'] # test Tables CRUD print(j.createTable('db1', 'tb4', 3)) # 0 print(j.createTable('db1', 'tb4', 3)) # 3 print(j.createTable('db1', 'tb1', 3)) # 0 print(j.createTable('db1', 'tb2', 3)) # 0 print(j.alterAddPK('db1', 'tb1', [0, 1])) # 0 print(j.showTables('db1')) # ['tb1', 'tb2'] # print(j.alterDropPK('db1','tb1')) # 0 print(j.alterDropPK('db1', 'tb2')) # 4
record_list = storage.listRecord(info[0]) if record_list == [] or record_list == None: return record_list.sort(key=lambda record: record.fieldValues[0], reverse=False) print_list = (' '.join(map(str, record.fieldValues)) for record in record_list) print_str.append('\n'.join(map(str, print_list))) if __name__ == "__main__": if len(sys.argv) != 3: print("Run with the following: \n program.exe $input_path $output_path") sys.exit(0) if not File.existFile(sys.argv[1]): print("invalid input file for open: " + sys.argv[1]) sys.exit(0) print_str: list = [] storage = Storage() input_file: str = open(sys.argv[1], 'r') output_file: str = open(sys.argv[2], 'w+') for line in input_file: o1, o2, *info = " ".join(line.split()).replace('\n','').split(" ") operation: str = o1 + o2 s = Switcher() s.indirect(operation, info) output_file.write('\n'.join(map(str, print_str)))
C=C, gL=gL, gCa=gCa, gK=gK, VL=VL, VCa=VCa, VK=VK, V1=V1, V2=V2, V3=V3, V4=V4, phi=phi, I_v=0.1, C_v=C_v, gL_v=gL_v, gCa_v=gCa_v, gK_v=gK_v, VL_v=VL_v, VCa_v=VCa_v, VK_v=VK_v, V1_v=V1_v, V2_v=V2_v, V3_v=V3_v, V4_v=V4_v, phi_v=phi_v) storage = Storage(brain, '/u/eot/hariria2/scratch/DataStoreTest', 5) brain.SetStorage(storage) brain.DevelopNetwork(120) brain.Simulate(source='script')
class HUD():
    """Heads-up display client.

    Polls a socket server for the list of users and each user's state,
    then renders the aggregate on a ScopeCanvas. Python 2 code.
    """

    def __init__(self):
        SOCKETHOST = socket.gethostname()  # 'localhost' '192.168.3.92'
        SOCKETPORT = 12345
        self.SOCKET = (SOCKETHOST, SOCKETPORT)
        self.data = Storage()
        self.data.log_file = 'gui_dump.log'
        self.scope_canvas = ScopeCanvas()
        self.myUID = 0  # Hardcoded for now

    def work(self):
        """One refresh cycle: fetch users, rebuild user_data, redraw."""
        print timestamp(), 'Starting...'
        send = '{"list_users":1}'
        print timestamp(), 'Requesting list of users...'
        user_list = socketExchange(send, self.SOCKET)
        try:
            user_list = json.loads(user_list)
        except:
            # Malformed/empty server reply: continue with no users.
            user_list = []
            print 'User List Not Found...'
        #print user_list
        # Update self.data with user data structures for each user on server
        try:
            for uid in user_list:
                #print 'itter ', uid
                id = int(uid)
                try:
                    print timestamp(), 'Requesting user...'
                    # NOTE(review): the '******' below is a redacted
                    # credential/payload fragment from the original source
                    # and is NOT valid Python — restore the real request
                    # string before running.
                    send = str('{"get_user":'******'}')
                    #print send
                    #print 'Sending get_user message to socketExchange()'
                    this_user = socketExchange(send, self.SOCKET)
                    #print this_user
                    self.data.users[id] = json.loads(this_user)
                except:
                    print 'No data received for ' + str(id)
        except:
            print 'user_list cannot be iterated over'
        self.data.dump()
        self.user_data = {
            'My Coords': [],
            'My Heading': 0,
            'Paint': 0,
            'Air': 0,
            'Users': {}
        }
        for uid in user_list:
            print timestamp(), 'Building user_data...'
            id = int(uid)
            print 'Opening UID', id
            if id == self.myUID:
                #self.user_data['My Coords'].append(self.data.users[id]['posStruct']['lat'])
                #self.user_data['My Coords'].append(self.data.users[id]['posStruct']['long'])
                #self.user_data['My Heading'] = self.data.users[id]['posStruct']['heading']
                self.user_data['My Coords'].append(
                    self.data.users[118]['posStruct']['lat'])  # hardcode uid
                self.user_data['My Coords'].append(
                    self.data.users[118]['posStruct']['long'])  # hardcode uid
                self.user_data['My Heading'] = self.data.users[118][
                    'posStruct']['heading']  # hardcode uid
                self.user_data['Paint'] = self.data.users[id]['markerStruct'][
                    'paint_level']
                self.user_data['Air'] = self.data.users[id]['markerStruct'][
                    'tank_pressure']
            else:
                pass
                #self.user_data['Users'][id] = [self.data.users[id]['posStruct']['lat'], self.data.users[id]['posStruct']['long'], self.data.users[id]['posStruct']['heading']]
        #print 'self.user_data', self.user_data
        #--DUMMY DATA--
        # user_data = {'My Coords': ['N03742.179', 'E13555.237'],
        #              'My Heading': 0,
        #              'Paint': 33,
        #              'Air': 29,
        #              'Users':{1018: ['N03742.178', 'E13555.236', 210],
        #                       2022: ['N03742.181', 'E13555.240', 270]}}
        # Update display image according to values in user_data
        print timestamp(), 'Drawing scope_canvas...'
        self.scope_canvas.updateDisplay(self.user_data)
        print timestamp(), 'Done'
        print ''
from NiaPy.benchmarks import Ackley, Rastrigin, Rosenbrock, Griewank, Sphere, Whitley, Zakharov, Perm, Powell, Pinter #input parameters ArrayOfNP = [10, 20] #,30,50,75,100 ArrayOfBenchmarks = [ Ackley(), Rastrigin() ] #,Rosenbrock(),Griewank(), Sphere(), Whitley(), Zakharov(), Perm(), Powell(), Pinter() ArrayOfnFES = [10000] #,20000,30000 ArrayOfD = [10] #,20,30 NUM_RUNS = 1 resultFilePath = 'results.xlsx' Processor = Procesing.Procesing() Runner = runner.Runner() Storage = Storage.Storage() beginningTime = time.time() with pd.ExcelWriter('results.xlsx') as writer: for Np in ArrayOfNP: for nFES in ArrayOfnFES: for D in ArrayOfD: tempDataFrame = [] for BenchFunction in ArrayOfBenchmarks: #start the timer start = time.time() #Execute the Firefly Algorithm rawData = Runner.RunAlgorithm(NUM_RUNS, D, Np, nFES, BenchFunction) end = time.time()
def storeMessage(message):
    """Persist a single message via the Storage module."""
    Storage.write(message)
def main(args):
    """Poll NSE-500 quotes during market hours and maintain OHLC data.

    Scrapes the moneycontrol page roughly once a minute between 09:14 and
    15:31 (tz-local), keeps running open/high/low/close in Data.csv, and
    pushes each cycle's records to BigQuery via args.storage.
    """
    count = 0
    args.storage = Storage.Storage(google_key_path=args.google_key_path)
    url = 'https://www.moneycontrol.com/markets/indian-indices/top-nse-500-companies-list/7?classic=true'
    #datetime.time(9, 14, tzinfo=tz) < time_now < datetime.time(15, 31, tzinfo=tz)
    time_now = datetime.datetime.now(tz).time()
    while (datetime.time(9, 14, tzinfo=tz) < time_now < datetime.time(
            15, 31, tzinfo=tz)):
        try:
            df = Real(url, count)
            df['LTP'] = df['LTP'].apply(
                lambda x: pd.to_numeric(x, errors='coerce'))
            ltp = list(map(float, list(df['LTP'])))
            volume = list(map(int, list(df['Volume'])))
            print(time_now)
            df['per_change'] = df['per_change'].apply(
                lambda x: pd.to_numeric(x, errors='coerce'))
            per = list(map(float, list(df['per_change'])))
            if (count == 0):
                # First cycle: seed OHLC with the current LTP.
                df['open'] = df['LTP']
                df['close'] = df['LTP']
                df['high'] = df['LTP']
                df['low'] = df['LTP']
                df = df.drop(['LTP'], axis=1)
                df.to_csv('Data.csv', index=False)
                prev = list(map(float, list(df['close'])))
                prev1 = list(map(float, list(df['close'])))
            elif (count == 5):
                # If prices haven't moved after several cycles, assume the
                # market is closed and stop.
                if (prev == prev1):
                    print('Market Closed!')
                    break
                else:
                    pass
            else:
                # Subsequent cycles: fold the new LTP into high/low/close.
                df1 = pd.read_csv('Data.csv')
                high = list(map(float, list(df1['high'])))
                low = list(map(float, list(df1['low'])))
                #Since len of high,low,open,close is the same
                for i in range(len(high)):
                    if (ltp[i] > high[i]):
                        high[i] = ltp[i]
                    if (ltp[i] < low[i]):
                        low[i] = ltp[i]
                df1['high'] = high
                df1['low'] = low
                df1['close'] = ltp
                df1['volume'] = volume
                df1['per_change'] = per
                df1['open'] = prev
                df1["datetime"] = (datetime.datetime.now(tz))
                df1['datetime'] = df1['datetime'].apply(
                    lambda x: x.strftime('%Y-%m-%d %H:%M:%S'))
                df1['datetime'] = df1['datetime'].apply(
                    lambda x: pd.to_datetime(x))
                df1['datetime'] = df1['datetime'].dt.round('min')
                df1 = df1[[
                    'datetime', 'company_name', 'open', 'low', 'high',
                    'close', 'volume', 'per_change'
                ]]
                df1.to_csv('Data.csv', index=False)
                prev = list(map(float, list(df1['close'])))
                records = df1.to_dict('records')
                args.storage.insert_bigquery_data(args.environment,
                                                  args.table, records)
            count += 1
            # Sleep ~50 seconds between scrapes (one-second countdown).
            t = 50
            while t >= 0:
                mins, secs = (00, t)
                timer = '{:02d}:{:02d}'.format(mins, secs)
                # print(timer, end='\r')
                time.sleep(1)
                t -= 1
            time_now = datetime.datetime.now(tz).time()
        except KeyboardInterrupt:
            break
'craftbukkit': {}, 'nukkit': {}, 'paper': {}, 'spigot': {}, 'vanila': {}, }, 'snapshot': { 'craftbukkit': {}, 'nukkit': {}, 'paper': {}, 'spigot': {}, 'vanila': {}, }, } Storage.init_logger() Storage.logger.info('JarFetcher starting') Storage.logger.info('Getting CraftBukkit links') # Using hardcoded values for easier dev all_links['stable']['craftbukkit'].update(CraftBukkitProvider.get()) all_links['stable']['spigot'].update(SpigotProvider.get()) all_links['stable']['vanila'].update(VanillaProvider.get()) # all_links['snapshot']['vanila'].update(VanillaSnapshotProvider.get()) all_paths = [] Storage.logger.debug(all_links) with concurrent.futures.ThreadPoolExecutor() as executor: results = [] for stage in all_links: for jar_type in all_links[stage]: Storage.logger.info(f'Downloading {jar_type} jars')
return note_list def get_all_locations(cur_user): loc_list = storage.get_all_locations(cur_user) return loc_list def get_all_photos(cur_user): photo_list = storage.get_all_photos(cur_user) return photo_list def get_all_notes(cur_user): note_list = storage.get_all_notes(cur_user) return note_list server = SimpleXMLRPCServer(("localhost", 8000)) database = Database.Database() storage = Storage.Storage() print "Listening on port 8000..." server.register_function(login_, "login_") server.register_function(create_account, "create_account") server.register_function(get_loc_photos, "get_loc_photos") server.register_function(get_loc_notes, "get_loc_notes") server.register_function(get_all_locations, "get_all_locations") server.register_function(get_all_photos, "get_all_photos") server.register_function(get_all_notes, "get_all_notes") server.serve_forever()
# File: JSON Mode Test File for EDD # License: Released under MIT License # Notice: Copyright (c) 2020 TytusDB Team # Developer: Luis Espino import Storage as j # assume no data exist or execute the next drop function j.dropAll() # test Databases CRUD print(j.createDatabase('db1')) # 0 print(j.createDatabase('db1')) # 2 print(j.createDatabase('db4')) # 0 print(j.createDatabase('db5')) # 0 print(j.createDatabase(0)) # 1 print(j.alterDatabase('db5','db1')) # 3 print(j.alterDatabase('db5','db2')) # 0 print(j.dropDatabase('db4')) # 0 print(j.showDatabases()) # ['db1','db2'] # test Tables CRUD print(j.createTable('db1','tb4',3)) # 0 print(j.createTable('db1','tb4',3)) # 3 print(j.createTable('db1','tb1',3)) # 0 print(j.createTable('db1','tb2',3)) # 0 print(j.alterTable('db1','tb4','tb3')) # 0 print(j.dropTable('db1','tb3')) # 0 print(j.alterAddPK('db1','tb1',0)) # 1 print(j.alterAddPK('db1','tb1',[0])) # 0 print(j.showTables('db1')) # ['tb1', 'tb2']
def main(max_storage_bytes=4500000000):
    """Repeatedly run the batch job until storage usage reaches the cap.

    Args:
        max_storage_bytes: stop once Storage.calculateStorageUse()
            reports at least this many bytes. Default preserves the
            original hard-coded 4.5 GB cap.
    """
    while Storage.calculateStorageUse() < max_storage_bytes:
        try:
            runProcessFourTimes()
        except Exception:
            # Best-effort: ignore per-run failures, but — unlike the old
            # bare `except:` — never swallow KeyboardInterrupt/SystemExit,
            # so the loop remains interruptible.
            pass
from Storage import *
from Marble import *

# 900x900 black canvas used as the static background.
img = np.zeros((900, 900, 3), np.uint8)
winName = "ellipses"
cv2.namedWindow(winName)
# Build a 4x4 grid of storages spaced 300 px apart.
ss = []
for i in range(4):
    for j in range(4):
        ss.append(Storage([300 * (i), 300 * (j)], baseSize=75))
m = Marble([70, 70], [4, 2])
# Render loop: redraw everything each frame until 'q' is pressed.
while 1:
    img2 = img.copy()
    for s in ss:
        s.draw(img2)
    m.draw(img2)
    for s in ss:
        s.update(m)
    m.move(img)
    key = cv2.waitKey(1)
    if key == ord("q"):
        break
    cv2.imshow(winName, img2)
def __init__(self):
    """Initialise the token storage and the ordered list of config keys."""
    self.tstorage = Storage()
    # Keys expected in storage — presumably email/password plus two
    # key/secret credential pairs; confirm against the storage's writers.
    self.lst = [
        'd_email', 'd_pass', 'akey', 'asecret', 'otoken', 'osecret'
    ]
import Storage

# Exercise the Storage driver three times: initialize, dump sector state,
# write two ~600-byte payloads, then read everything back. Python 2 code.
i = 3
while i:
    i = i - 1
    sector = Storage.initialize()
    print 'Sector: %s' % sector
    # NOTE(review): readSector/activeSector are referenced without call
    # parentheses — this prints the attribute objects themselves unless
    # they are plain module-level values; confirm against Storage.
    print 'ReadSector: %s' % Storage.readSector
    print 'activeSector: %s' % Storage.activeSector
    Storage.write("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aliquam facilisis elit vitae ex mollis consequat. Nunc eu sapien ipsum. Morbi urna sapien, dapibus eu lectus eget, malesuada pretium arcu. Praesent vehicula mi eu mauris consectetur, sit amet sodales nisi rhoncus. Donec eu sollicitudin mi. Sed et lacus congue sem iaculis tincidunt. Donec nec lorem ac velit commodo ullamcorper. Suspendisse nulla felis, sodales at sodales vitae, scelerisque a urna. Suspendisse mollis eros non risus semper porttitor. Quisque quis nunc ornare purus massa nunc. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aliquam facilisis elit vitae ex mollis consequat. ")
    Storage.write("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque ac ligula vitae nibh placerat ullamcorper ut id ipsum. Nunc vehicula ligula quis ex lacinia, et viverra arcu posuere. Fusce pretium sapien odio, in blandit sapien efficitur eget. Aliquam sagittis imperdiet nunc vel convallis. Duis fermentum nisl et vestibulum suscipit. Nullam eget orci eleifend, euismod massa non, molestie turpis. Vivamus tortor elit, tempor vel urna at, semper laoreet nibh. Phasellus tristique efficitur metus vitae suscipit. Praesent eget nisi at massa cras amet. Nunc vehicula ligula quis ex lacinia, et viverra arcu posuere. ")
    print 'ReadSector: %s' % Storage.readSector
    print 'activeSector: %s' % Storage.activeSector
    print '--------------------'
    # Can't do assignment in condition
    while 1:
        data = Storage.read()
        if data == 0:
            # 0 signals the stored data is exhausted.
            break
        print data
def storeMessage(message):
    """Persist a single message via the Storage module."""
    Storage.write(message)
def __init__(self):
    """Set up the ADC, a cross-process lock, persistent storage, and an
    empty measurement buffer."""
    self.adc = Adafruit_ADS1x15.ADS1115()
    self.manager = multiprocessing.Manager()
    # Manager-backed lock so access can be coordinated across processes.
    self.lock = self.manager.Lock()
    self.storage = Storage.Storage()
    self.measurement_list = []