def __init__(self):
    self.db = boto3.resource(
        'dynamodb',
        region_name='eu-central-1',
        aws_access_key_id=Secrets.get_aws_access_key(),
        aws_secret_access_key=Secrets.get_aws_secret_access_key())
    self.cart_id = Secrets.get_cart_id()
    self.table = self.db.Table('cart_' + self.cart_id)
    self.products = self.db.Table('product_dataset')
    self.cart_info = self.db.Table('cart_info')
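# A minimal usage sketch for the DynamoDB table handles created above. The key
# names ('product_id', 'sku') and both helper functions are illustrative
# assumptions, not taken from the original class.
def add_product_to_cart(cart_table, product_id, quantity=1):
    # put_item/get_item are standard boto3 Table operations.
    cart_table.put_item(Item={'product_id': product_id, 'quantity': quantity})

def lookup_product(products_table, sku):
    response = products_table.get_item(Key={'sku': sku})
    # get_item omits the 'Item' key when nothing matches.
    return response.get('Item')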
def execute(self):
    if not os.path.isdir(self.file_locations.cluster_root()):
        dir_util.mkpath(self.file_locations.cluster_root())
    if os.path.isdir(self.file_locations.release_root()):
        dir_util.remove_tree(self.file_locations.release_root())
    dir_util.mkpath(self.file_locations.release_root())
    for source, destination in self.file_locations.location_mappings().iteritems():
        dir_util.copy_tree(source, destination)

    print "Preparing OpenStack release for {} environment.".format(self.cluster)

    print "Preparing secrets"
    secrets = Secrets(self.file_locations)
    secrets.process_secrets()

    cluster_config = self._merge_dicts(
        self.cluster_config(),
        self.secrets_config(),
        self.images_config(),
    )

    print "Preparing config"
    self._prepare_specs(self.file_locations.release_etc(), cluster_config)

    print "Preparing start scripts"
    self._prepare_specs(self.file_locations.release_bin(), cluster_config)

    print "Preparing consolidated config maps"
    self._prepare_configmaps()
    file_util.copy_file(
        os.path.join(self.file_locations.source_local_configmaps(),
                     "cluster-configmap.yaml"),
        os.path.join(self.file_locations.release_configmaps()))
    file_util.copy_file(
        os.path.join(self.file_locations.source_local_configmaps(),
                     "image-versions.yaml"),
        os.path.join(self.file_locations.release_configmaps()))

    print "Preparing checksums"
    self._prepare_checksums(self.file_locations.release_configmaps(),
                            self.file_locations.release_secrets())

    print "Preparing specs"
    self._prepare_specs(self.file_locations.release_openstack(), cluster_config)
def generate_auth(self):
    components = [self.formatted_date, self.request_type.upper(),
                  self.host.lower(), self.path]
    args = []
    for key, val in sorted(self.params, key=lambda (k, v): k):
        args.append('{k}={v}'.format(k=urllib.quote(key, '~'),
                                     v=urllib.quote(val, '~')))
    components.append('&'.join(args))
    signature_string = '\n'.join(components)
    signature = hmac.new(Secrets.get('secret_key'), signature_string,
                         hashlib.sha1).hexdigest()
    auth_string = '{key}:{signature}'.format(key=Secrets.get('integration_key'),
                                             signature=signature)
    return base64.b64encode(auth_string)
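# A minimal sketch of how the value returned above could be used, assuming the
# API expects the base64-encoded 'key:signature' pair in a Basic-style
# Authorization header alongside a Date header matching formatted_date; the
# header scheme and the requests-based call are illustrative assumptions, not
# taken from the original snippet.
import requests

def send_signed_request(client):
    headers = {
        'Authorization': 'Basic {}'.format(client.generate_auth()),
        'Date': client.formatted_date,
    }
    url = 'https://{host}{path}'.format(host=client.host, path=client.path)
    # The same params that were signed must be sent with the request.
    return requests.request(client.request_type, url,
                            params=client.params, headers=headers)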
def main():
    args = parse_command_line()
    keyphrase = getpass('Keyphrase:')
    # keyphrase = 'password'
    try:
        s = Secrets('vault.yml', keyphrase)
        s.read()
    except ansible.parsing.vault.AnsibleVaultError:
        print('Vault password failed.')
        sys.exit(1)
    if not args.action:
        interactive(s)
    elif args.action == 'jsondump':
        dump_secrets(s)
class Reddit():
    _my_client_id = 'RJU0Gi10B4FtCw'
    _my_client_secret = 'gX733gRRV_g-7T55kzNc4ItWsl0'
    _platform = 'web'
    _app_ID = 'com.bagosocks.redditSandbox'
    _version_string = 'v0.0.1'
    _user_string = '(by /u/hknecht)'
    _my_user_agent = "{}:{}:{} {}".format(_platform, _app_ID,
                                          _version_string, _user_string)
    _secret = Secrets()

    def __init__(self, *args, **kwargs):
        return super().__init__(*args, **kwargs)

    def open_reddit_read(self):
        self._reddit = praw.Reddit(client_id=self._my_client_id,
                                   client_secret=self._my_client_secret,
                                   user_agent=self._my_user_agent)

    def open_reddit_write(self):
        self._reddit = praw.Reddit(client_id=self._my_client_id,
                                   client_secret=self._my_client_secret,
                                   user_agent=self._my_user_agent,
                                   username=self._secret.get_username(),
                                   password=self._secret.get_password())

    def is_read_only(self):
        return self._reddit.read_only

    def top_submissions(self, subreddit, sub_limit):
        return self._reddit.subreddit(subreddit).hot(limit=sub_limit)
class Graph(object):
    app_id = Secrets.get('app_id')
    version = 2.4
    access_token_store = RedisStore('facelock:facebook:graph:access_tokens')

    def __init__(self, access_token=None):
        self.access_token = access_token or Secrets.get('access_token')
        self._graph = facebook.GraphAPI(self.access_token, self.version)

    @property
    def auth_code(self):
        return AuthCode(self, self.new_auth_code())

    @property
    def fb_id(self):
        return self._graph.get_object('me')['id']

    @classmethod
    def for_user(cls, user_id=None):
        if user_id and cls.access_token_store.exists(user_id):
            access_token = cls.access_token_store.get(user_id)
        else:
            access_token = cls().auth_code.poll()
            if user_id:
                cls.access_token_store.setex(user_id, access_token, 5184000)  # 60 days
        return Graph(access_token)

    def new_auth_code(self):
        args = {'client_id': self.app_id,
                'type': 'device_code',
                'scope': 'user_photos'}
        return self._graph.request('oauth/device', post_args=args)

    def poll_auth_code(self, code):
        args = {'client_id': self.app_id, 'type': 'device_token', 'code': code}
        return self._graph.request('oauth/device', post_args=args)

    @photo_collection
    def photos(self):
        FIELDS = 'id,images,tags,width,height'
        id = self.fb_id
        photos = self._graph.get_connections('me', 'photos', fields=FIELDS)
        after = True
        while after:
            for photo in photos['data']:
                yield Photo(photo, id)
            try:
                after = photos['paging']['cursors']['after']
            except KeyError:
                # End of pagination; returning ends the generator cleanly
                # (raising StopIteration inside a generator is an error under
                # PEP 479).
                return
            photos = self._graph.get_connections('me', 'photos',
                                                 fields=FIELDS, after=after)
def __init__(self, record):
    self.action = record['messageAttributes']['Action']['stringValue']
    self.dmls = int(record['messageAttributes']['DMLS']['stringValue'])
    self.dbname = record['messageAttributes']['DBNAME']['stringValue']
    secrets = Secrets(self.dbname)
    self.passw = secrets.secret_values['password']
    self.user = secrets.secret_values['username']
    self.host = record['messageAttributes']['HOST']['stringValue']
def main():
    secretsDb = Secrets()
    keyEngine = key_engine.KeyEngine(secretsDb)

    with open(sys.argv[1], 'rb') as f:
        disa = io.BytesIO(f.read())
    game_id = int(sys.argv[2], 16)

    # Recompute the save's CMAC from the SHA-256 digest of the DISA header
    # and write it back at offset 0.
    disa.seek(0x100, 0)
    header = disa.read(0x100)
    digest = hashlib.sha256(
        disa_extract.getDigestBlock("sd", game_id, header)).digest()
    disa.seek(0x00, 0)
    disa.write(cmac.AesCmac(digest, keyEngine.getKeySdNandCmac()))

    disa.seek(0x00, 0)
    disa = disa_extract.cryptoUnwrap(disa, "sd", game_id,
                                     keyEngine.getKeySdDecrypt())
    with open("./out/00000001.sav", 'wb') as save:
        save.write(disa.getbuffer())
def secrets_config(self):
    secrets = Secrets(self.file_locations)
    return secrets.plain_secrets()
def host(self):
    return Secrets.get('api_hostname')
import logging

from pyemvue import PyEmVue
from pyemvue.enums import Scale, Unit
from tesla_api import TeslaApiClient
from secrets import Secrets

logging.basicConfig(
    level=logging.DEBUG,
    format='(%(threadName)-9s) %(message)s',
)

json_file = "keys.json"
token_file = "token.txt"

_LOGGER = logging.getLogger()
logging.basicConfig(level=logging.INFO)

auth = Secrets()


async def tesla_wake(my_tesla):
    if my_tesla.state != 'online':
        pass


def get_energy_total(vue):
    channel_usage_list = vue.get_recent_usage(scale=Scale.SECOND.value,
                                              unit=Unit.WATTS.value)
    for channel in channel_usage_list:
        # usage is numeric, so convert it before concatenating.
        print(channel.name + " : " + str(channel.usage))


def get_vue():
    'Title': 'title',
    'Username': '******',
    'Password': '******',
    'URL': 'url',
    'Notes': 'notes'
}

# Map csv fields to a column number in the source csv
mapping2 = {}
for k in mapping.keys():
    mapping2[k] = headers.index(k)

# Copy the data into our format
new_entries = []
for row in entries:
    new_entry = {}
    for k in mapping:
        if mapping[k] is not None:
            new_entry[mapping[k]] = row[mapping2[k]]
    new_entries.append(new_entry)
print(new_entries)

s = Secrets(out_file, keyphrase)
s.new_vault()
for entry in new_entries:
    s.add_entry(entry)
s.write()

print('{} read and exported to {}, {} entries'.format(in_file, out_file,
                                                       len(new_entries)))
import logging
import time
import sys
from threading import Thread

from secrets import Secrets

EMULATE_HX711 = False

referenceUnit = Secrets.get_reference_unit()

if not EMULATE_HX711:
    import RPi.GPIO as GPIO
    from hx711 import HX711
    print('1. if')
else:
    from emulated_hx711 import HX711
    print('2. if')

print('now accessing hx')
hx = HX711(5, 6)
print(hx)

hx.set_reading_format("MSB", "MSB")
hx.set_reference_unit(referenceUnit)
hx.reset()
hx.tare()

tolerance = 20


def call_weight():
import logging
import argparse
import os
import sys

import schedule

parser = argparse.ArgumentParser(description='Run YNAB Importers')
parser.add_argument('importers', nargs='*',
                    help='list of importers to run (default none runs all). '
                         'Options: brim, wealthica')
parser.add_argument('--once', default=False, action='store_true',
                    help="Disable cron and only run the importer once.")
args = parser.parse_args()
importers_to_run = list(map(lambda x: x.strip().lower(), args.importers))

logger = logging.getLogger("YNAB Importer")

s = Secrets(os.environ.get("YNAB_IMPORTER_SECRETS_FILE", "./secrets.ejson"))

logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"),
                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')

if len(importers_to_run) == 0 or "brim" in importers_to_run:
    if s.getSecret('brim.enable') != False:
        logger.info("Starting brim")
        brim = importers.BrimImporter(s)
        brim.run()
        schedule.every().day.at("22:00").do(brim.run)

if len(importers_to_run) == 0 or "splitwise" in importers_to_run:
    if s.getSecret('splitwise.enable') != False:
        logger.info("Starting splitwise")
        splitwise = importers.SplitwiseImporter(s)
        splitwise.run()
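# A minimal sketch of the polling loop this script would need so the jobs
# registered with `schedule` above actually fire; the loop shape, the
# 60-second poll interval, and treating --once as "skip the loop" are
# assumptions, not shown in the original snippet.
import time

if not args.once:
    while True:
        schedule.run_pending()  # run any job whose scheduled time has passed
        time.sleep(60)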
    new_entry = {}
    for k in mapping:
        if mapping[k] is not None:
            new_entry[mapping[k]] = row[mapping2[k]]
    new_entries.append(new_entry)
print(new_entries)


def entries_equal(a, b):
    return (a['title'] == b['title'] and
            a['username'] == b['username'] and
            a['password'] == b['password'] and
            a['notes'] == b['notes'])


# Read in the existing secrets file, then look for duplicates.
s = Secrets(secrets_file, keyphrase)
s.read()
entries = s.entries()

dupe_count = 0
add_count = 0
for new_entry in new_entries:
    for entry in entries:
        if entries_equal(entry, new_entry):
            # Entry already exists, avoid adding a duplicate
            dupe_count += 1
            break
    else:
        # All secrets searched, no duplicates found.
        # Add to secrets.
        s.add_entry(new_entry)
        add_count += 1
def main():
    prog = sys.argv[0]
    version = "%prog 1.2.0"
    usage = "%prog OPTIONS"
    epilog = "Build configured .fed package."

    parser = OptionParser(usage=usage, version=version, epilog=epilog)
    parser.add_option("-v", "--verbose", dest="verbose",
                      help="Enable verbose messages [optional]",
                      action="store_true")
    parser.add_option("-e", "--env", dest="env_file_path",
                      help="Path of environment archive (.env)",
                      metavar="FILEPATH")
    parser.add_option("-p", "--pol", dest="pol_file_path",
                      help="Path of policy archive (.pol)",
                      metavar="FILEPATH")
    parser.add_option("-c", "--config", dest="config_file_path",
                      help="Path of JSON configuration file",
                      metavar="FILEPATH")
    parser.add_option("--prop", dest="prop_file_path_list",
                      help="Path of JSON property file [multiple]",
                      metavar="FILEPATH", action="append")
    parser.add_option("--cert", dest="cert_file_path",
                      help="Path of JSON certificate configuration file [optional]",
                      metavar="FILEPATH")
    parser.add_option("--cert-expiration", dest="cert_expiration_days",
                      help="Check if certificates expire within the next days [optional]",
                      metavar="DAYS")
    parser.add_option("--cert-config-update", dest="cert_config_update",
                      help="Enable writing of info section for 'update' certificates "
                           "within the configuration file [optional]",
                      action="store_true")
    parser.add_option("--output-fed", dest="out_fed_file_path",
                      help="Path of output deployment archive file (.fed) [optional]",
                      metavar="FILEPATH")
    parser.add_option("--output-env", dest="out_env_file_path",
                      help="Path of output environment archive file (.env) [optional]",
                      metavar="FILEPATH")
    parser.add_option("-D", "--define", dest="cli_properties",
                      help="Define a command line property [multiple]",
                      metavar="NAME:VALUE", action="append")
    parser.add_option("-F", "--fromFile", dest="file_properties",
                      help="Define a command line property from file content [multiple]",
                      metavar="NAME:FILE", action="append")
    parser.add_option("--passphrase-in", dest="passphrase_in",
                      help="Passphrase of input archive files [optional]",
                      metavar="PASSPHRASE")
    parser.add_option("--passphrase-out", dest="passphrase_out",
                      help="Passphrase for output archive files [optional]",
                      metavar="PASSPHRASE")
    parser.add_option("-s", "--simulate", dest="simulate",
                      help="Enable simulation mode [optional]",
                      action="store_true")
    parser.add_option("-b", "--base-dir", dest="base_dir",
                      help="Base directory for certificate files [optional]",
                      metavar="DIRECTORY")
    parser.add_option("--secrets-file", dest="secrets_file",
                      help="Path of JSON file containing confidential properties [optional]",
                      metavar="FILEPATH")
    parser.add_option("--secrets-key", dest="secrets_key_file",
                      help="Path to key file to decrypt confidential properties [optional]",
                      metavar="FILEPATH")

    (options, args) = parser.parse_args()

    if not options.env_file_path:
        parser.error("Environment archive option is missing!")
    if not options.pol_file_path:
        parser.error("Policy archive option is missing!")
    if not options.config_file_path:
        parser.error("Configuration file option is missing!")
    if options.secrets_file and not options.secrets_key_file:
        parser.error("Key file for secrets is missing!")

    logging.basicConfig(format='%(levelname)s: %(message)s')
    if options.verbose:
        logging.getLogger().setLevel(logging.DEBUG)
    else:
        logging.getLogger().setLevel(logging.INFO)

    # Add some standard command line properties
    cli_properties = {}
    cli_properties["_system.build.datetime"] = datetime.now().isoformat()

    # Add command line properties
    if options.cli_properties:
        for sp in options.cli_properties:
            (name, value) = parse_cli_property(sp)
            cli_properties[name] = value

    if options.file_properties:
        for fp in options.file_properties:
            (name, prop_file) = parse_cli_property(fp)
            if not os.path.isfile(prop_file):
                raise ValueError(
                    "File for command line property '%s' doesn't exist: %s"
                    % (name, prop_file))
            logging.debug("Reading command line property '%s' from file '%s'"
                          % (name, prop_file))
            with codecs.open(prop_file, encoding='utf-8', mode='r') as pf:
                cli_properties[name] = pf.read()

    try:
        # Set properties from files and command line properties
        properties = PropConfig(options.prop_file_path_list)
        for name, value in cli_properties.items():
            logging.info("Provided command line property %s" % (name))
            properties.set_property(name, value)

        # Set passphrases
        passphrase_in = ""
        if options.passphrase_in:
            passphrase_in = options.passphrase_in

        passphrase_out = ""
        if options.passphrase_out:
            passphrase_out = options.passphrase_out

        # Set secrets
        secrets = None
        if options.secrets_file:
            secrets = Secrets(options.secrets_key_file, options.secrets_file)

        # Setup configuration
        fed_config = FedConfigurator(options.pol_file_path,
                                     options.env_file_path,
                                     options.config_file_path,
                                     options.cert_file_path, properties,
                                     passphrase_in, secrets)
        if options.base_dir:
            fed_config.set_base_dir(options.base_dir)
        if options.simulate:
            fed_config.enable_simulation_mode()
        if options.cert_config_update:
            fed_config.enable_cert_config_update()
        if options.cert_expiration_days:
            fed_config.set_cert_expiration_days(int(options.cert_expiration_days))

        # Execute configuration
        succeeded = fed_config.configure(passphrase_out)
        if succeeded:
            if options.simulate:
                logging.info("[SIMULATION_MODE] No output files written!")
            else:
                if options.out_fed_file_path:
                    fed_config.write_fed(options.out_fed_file_path)
                if options.out_env_file_path:
                    fed_config.write_env(options.out_env_file_path)
        else:
            sys.exit(1)
    except Exception as e:
        if options.verbose:
            logging.error("Error occurred, check details:", exc_info=True)
        else:
            logging.error("%r" % (e))
        sys.exit(1)

    sys.exit(0)
await channel.send("Bot starting up.") last_team_summary = -1 update_data = None while True: new_embeds = tba_watcher.getUpdates() for channel in channels: for embed in new_embeds: await channel.send(embed=embed) await asyncio.sleep(0.5) # wait 1 second between every message if len(tba_watcher.events) > 0 and (time() - last_team_summary > TEAM_SUMMARY_REFRESH_RATE or client.printStatus): last_team_summary = time() summary, update_data = tba_watcher.getTeamUpdates(last_update_data=(None if client.printStatus else update_data)) client.printStatus = False if summary != None: for channel in channels: await channel.send(embed=summary) else: print("suppressed status print - no changes") elif len(tba_watcher.events) == 0 and client.printStatus: client.printStatus = False await channel.send("No teams currently in events.") await asyncio.sleep(20) # task runs every 60 seconds if __name__ == '__main__': Secrets.set_secrets(*get_secrets()) tbar = TBA_Request(Secrets.tba_auth_key) client = FRCBot() client.loop.create_task(my_background_task(client)) client.run(Secrets.bot_secret)
from bot import RsynClient
from secrets import Secrets
from redditBadMovies import RedditBadMovieClient

if __name__ == "__main__":
    secrets = Secrets('secrets.json')
    movie_client = RedditBadMovieClient(secrets)
    discord_bot = RsynClient(secrets, movie_client)
    discord_bot.run()

# TODO: Host this in AWS
def determine_which_class_to_book(current_day):
    """
    Booking begins 24 hours in advance. Ex. For Wednesday classes, we book
    Tuesday at 12am.

    :param current_day: The current day of the week as an int, per the
        datetime convention where Monday is 0.
    :return: The XPATH to the link of the class we would like to book.
    """
    # A dict whose key (day of the week) corresponds to the XPATH to the class
    # I want to take that day:
    classes_by_day = {
        0: '//*[@id="scheduleDisplay"]/table/tbody/tr[36]/td[3]/a',
        1: '//*[@id="scheduleDisplay"]/table/tbody/tr[19]/td[3]/a',
        2: '//*[@id="scheduleDisplay"]/table/tbody/tr[37]/td[3]/a[1]',
        3: "TODO",
        4: "TODO",
        5: "TODO",
        6: "TODO"
    }
    # Book tomorrow's class; wrap around so Sunday (6) maps back to Monday (0)
    # instead of falling outside the dict.
    return classes_by_day.get((current_day + 1) % 7)


if __name__ == "__main__":
    # Get the current day
    today = get_day_of_week()

    # Return tomorrow's desired class, based on today's day
    desired_class = determine_which_class_to_book(today)

    # Start up our session
    my_credentials = Secrets()
    browser = my_credentials.web_login()

    # Instantiate, and book the desired class
    bookings = DailyBookings(browser)
    bookings.book_a_class(desired_class)