def __init__(self, accounts=None, debug=False):
    """Initializes the Watcher"""
    self.datastore = datastore.Datastore()
    if accounts:
        self.accounts = accounts
    else:
        # No explicit account list: watch every active, non-third-party account.
        active = Account.query.filter(Account.third_party == False).filter(Account.active == True).all()
        self.accounts = [acct.name for acct in active]
    self.debug = debug

    # Results of the most recent slurp/compare pass.
    self.created_items = []
    self.deleted_items = []
    self.changed_items = []
    self.ephemeral_items = []

    # TODO: grab these from DB, keyed on account
    self.rate_limit_delay = 0
    self.honor_ephemerals = False
    self.ephemeral_paths = []

    # Batching attributes:
    self.batched_size = 0        # Don't batch anything by default
    self.done_slurping = True    # Don't batch anything by default
    self.total_list = []         # Holds the full list of items to batch over
    self.batch_counter = 0       # Tracks which batch we are on -- can be used for retry logic
    self.current_account = None  # Tuple holding the current account and account index we are on.
    self.technology = None
def __init__(self, id, node_name, ip, port, coordinate):
    """Set up this fog node's identity, geo helpers and datastore."""
    self.node = Node(id, node_name, ip, port, coordinate)

    # Geographic helpers: the geomap's regional list plus a Nominatim geocoder.
    self.geomap = geomap.Geomap()
    self.region_list = self.geomap.regional_list

    # Datastore is keyed on this node's id.
    self.datastore = datastore.Datastore(id)
    self.geolocator = Nominatim(user_agent="App_Name")

    self.active_fog_nodes = []
    self.InitializeDatastore()
def main():
    """Sample-collection loop: read sensors, cache samples, send/clean periodically."""
    hat = Board()
    #parameters = hat.config("parameters")
    parameters = hat.config("shortlist")
    station_id = hat.config("name")

    samplesDB = ds.Datastore("mem")
    sender = dsend.Datasender()

    sampleFreq = 1020       # in seconds (17 minutes)
    sendFreq = 15 * 60      # in seconds (15 minutes)
    cleanFreq = 14400       # in seconds (4x60 minutes)
    threshold = 1.1         # in days

    collect = True
    lastSendTime = time.time()
    lastCleanTime = time.time()

    while collect:
        starttime = time.time()

        # Clean the cache database once the clean interval has elapsed.
        if time.time() - lastCleanTime >= cleanFreq:
            samplesDB.clean_samples_table(threshold)
            lastCleanTime = time.time()

        # Collect a fresh sample for each configured parameter.
        for parameter in parameters:
            value, units = hat.read(parameter)
            samplesDB.store_sample(station_id, parameter, value, units)

        # Push any not-yet-sent samples once the send interval has elapsed.
        if time.time() - lastSendTime >= sendFreq:
            newSamples = samplesDB.read_new_samples()
            result = sender.send_samples(newSamples)
            if result != "error":
                samplesDB.update_sample_status(newSamples, "send")
                lastSendTime = time.time()
        else:
            print(time.time() - lastSendTime)

        # Show the samples currently held in the cache.
        samples = samplesDB.read_all_samples()
        df = pd.DataFrame(samples, columns=[
            'sample_id', 'status', 'station_id', 'parameter',
            'time_at', 'time_for', 'values', 'units'
        ])
        df = df.set_index('status')
        print(df)

        # Sleep out whatever remains of the sample interval.
        remaining = sampleFreq - (time.time() - starttime)
        if remaining > 0:
            time.sleep(remaining)
def __init__(self, datacenter_name):
    """Wrap the named pyVmomi datacenter and collect its folders and datastores."""
    self.name = datacenter_name
    self.pyVmomiDatacenter = utils.get_obj_by_name(
        name=datacenter_name,
        vimtypes=[vim.Datacenter],
    )

    # Names of the folders in the datacenter.
    self._folders = folder.Folder('vm').folders

    # Wrap every datastore attached to this datacenter.
    self.datastores = [
        datastore.Datastore(ds.name)
        for ds in self.pyVmomiDatacenter.datastore
    ]
def __init__(self, parent, id, title):
    """Create the 800x600 main frame, build the UI, and open the datastore."""
    super(MainFrame, self).__init__(
        parent, id, title, wx.DefaultPosition, wx.Size(800, 600))

    self._load_config()
    self._create_menu()
    self._do_layout()
    self.Centre()
    self._reset()

    # NOTE(review): datastore_file is presumably populated by _load_config()
    # above -- confirm before reordering these calls.
    self.ds = datastore.Datastore(self.datastore_file)
def __init__(self, accounts=None, debug=False):
    """Initializes the Watcher"""
    self.datastore = datastore.Datastore()
    if accounts:
        self.accounts = accounts
    else:
        # Default to every active, non-third-party account.
        active = Account.query.filter(Account.third_party == False).filter(Account.active == True).all()
        self.accounts = [acct.name for acct in active]
    self.debug = debug

    # Results of the most recent slurp/compare pass.
    self.created_items = []
    self.deleted_items = []
    self.changed_items = []
    self.rate_limit_delay = 0
def __init__(self, accounts=None, debug=False):
    """Initialize the Auditor and build its notification e-mail list.

    The list contains the security team's address(es) plus the e-mail of
    every user who opted into the daily audit e-mail for any of the given
    accounts.

    :param accounts: list of account names to audit
    :param debug: enable debug behaviour
    """
    self.datastore = datastore.Datastore()
    self.accounts = accounts
    self.debug = debug
    self.items = []
    # Default to an empty list so extend() below cannot fail with None
    # when SECURITY_TEAM_EMAIL is not configured.
    self.team_emails = app.config.get('SECURITY_TEAM_EMAIL', [])
    self.emails = []
    self.emails.extend(self.team_emails)
    for account in self.accounts:
        # BUG FIX: the query previously filtered on accounts[0] for every
        # iteration, so only the FIRST account's subscribers were ever
        # collected; filter on the current loop account instead.
        users = User.query.filter(User.daily_audit_email == True).filter(
            User.accounts.any(name=account)).all()
        self.emails.extend(user.email for user in users)
def __init__(self, accounts=None, debug=False):
    """Initializes the Watcher"""
    self.datastore = datastore.Datastore()
    if accounts:
        self.accounts = accounts
    else:
        # Default to every active, non-third-party account.
        active = Account.query.filter(Account.third_party == False).filter(Account.active == True).all()
        self.accounts = [acct.name for acct in active]
    self.debug = debug

    # Results of the most recent slurp/compare pass.
    self.created_items = []
    self.deleted_items = []
    self.changed_items = []
    self.ephemeral_items = []

    # TODO: grab these from DB, keyed on account
    self.rate_limit_delay = 0
    self.interval = 15
    self.honor_ephemerals = False
    self.ephemeral_paths = []
def __init__(self, accounts=None, debug=False):
    """Initialize the Auditor and build its notification e-mail list.

    SECURITY_TEAM_EMAIL may be configured as a single address or as a
    sequence of addresses; both forms are accepted. Subscribers of the
    daily audit e-mail for each given account are appended as well.

    :param accounts: list of account names to audit
    :param debug: enable debug behaviour
    """
    self.datastore = datastore.Datastore()
    self.accounts = accounts
    self.debug = debug
    self.items = []
    self.team_emails = app.config.get('SECURITY_TEAM_EMAIL', [])
    self.emails = []
    # isinstance instead of `type(x) in (...)`: also accepts subclasses
    # of str/unicode/list/tuple, which the exact-type check rejected.
    if isinstance(self.team_emails, (str, unicode)):
        self.emails.append(self.team_emails)
    elif isinstance(self.team_emails, (list, tuple)):
        self.emails.extend(self.team_emails)
    else:
        app.logger.info("Auditor: SECURITY_TEAM_EMAIL contains an invalid type")
    for account in self.accounts:
        users = User.query.filter(User.daily_audit_email == True).filter(
            User.accounts.any(name=account)).all()
        self.emails.extend(user.email for user in users)
def __init__(self):
    """Back this object's configuration with a freshly-opened Datastore."""
    self.config = datastore.Datastore()
# Ensure the 'api' config section exists on disk; (re)write the defaults when
# the section or the config file itself is missing.
if not api_config_object.has_section('api') or not os.path.isfile(api_config_file):
    # Write default config
    api_config_object.add_section('api')
    for key in DEFAULTCONF:
        api_config_object.set('api', key, str(DEFAULTCONF[key]))
    conffile = codecs.open(api_config_file, 'w', 'utf-8')
    api_config_object.write(conffile)
    conffile.close()
api_config = multiscanner.common.parse_config(api_config_object)

# TODO: fix this mess
# Needs api_config in order to function properly
# NOTE(review): deliberately imported mid-file because these modules read
# api_config at import time -- do not hoist to the top of the file.
from celery_worker import multiscanner_celery, ssdeep_compare_celery
from ssdeep_analytics import SSDeepAnalytic

db = database.Datastore(config=api_config.get('Database'),
                        goog_cred_file=os.path.join(MS_WD, 'pwned-google-cred.json'))
# To run under Apache, we need to set up the DB outside of __main__
db.init_db()

# set credentials
# used for google storage and pub/sub or
# for any service that connects to google cloud
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = os.path.join(MS_WD, 'pwned-google-cred.json')

# Pub/Sub and upload-bucket settings come from the 'api' config section.
upload_bucket_folder = api_config['api']['upload_bucket']
pub_sub_project = api_config['api']['pub_sub_project']
pub_sub_topic = api_config['api']['pub_sub_topic']

# Storage backends are configured separately in the 'storage' config file.
storage_conf = multiscanner.common.get_config_path(multiscanner.CONFIG, 'storage')
storage_handler = multiscanner.storage.StorageHandler(configfile=storage_conf)
# NOTE(review): this line is the tail of a method whose `def` starts before
# this chunk; it appears to return the object's display name -- confirm
# against the enclosing class.
        return self.name


class SearchResults():
    # Plain container for one search response; __slots__ keeps instances small.
    __slots__ = ['tracks', 'artists', 'albums', 'album_track_map']

    def __init__(self, tracks, artists, albums, album_track_map):
        self.tracks = tracks
        self.artists = artists
        self.albums = albums
        # presumably maps albums to their tracks -- verify against callers
        self.album_track_map = album_track_map


# Spotify OAuth scopes: library reads, followed artists, playback state.
scope = "user-library-read,user-follow-read,user-read-playback-state"
DATASTORE = datastore.Datastore()
sp = spotipy.Spotify(auth_manager=SpotifyOAuth(scope=scope))
pageSize = 50
has_internet = False
print(DATASTORE)


def check_internet(request):
    """Invoke *request* and set the global has_internet flag on success.

    Any exception is treated as "no connectivity"; the error is discarded.
    """
    global has_internet
    try:
        result = request()
        has_internet = True
    except Exception as _:
        print("no ints")
#!/usr/bin/python3 """ Test stub using PyTest""" import datastore import pytest from random import randint obj = datastore.Datastore(str(randint(1000, 9999)) + ".json") t_dict = {"brand": "Ford", "model": "Mustang", "year": 1964} def test_create(): """ Test Stub for datastore.Datastore.create() Returns: bool: True if test cases are passed, False otherwise """ return obj.create("1", t_dict) def test_read(): """ Test Stub for datastore.Datastore.read() Returns: bool: True if test cases are passed, False otherwise """ t = obj.read("1")[0] return t == t_dict def test_delete(): """ Test Stub for datastore.Datastore.create()