def authenticate():
    """Run the Imgur PIN OAuth flow and return an authorised client.

    Reads client credentials from auth.ini, asks the user for a PIN,
    exchanges it for tokens, and persists the refresh token back to
    auth.ini for later runs.
    """
    # Credentials live in auth.ini
    # (https://github.com/Imgur/imgurpython/blob/master/examples/auth.ini)
    config = get_config()
    config.read('auth.ini')
    client = ImgurClient(config.get('credentials', 'client_id'),
                         config.get('credentials', 'client_secret'))

    # PIN-based authorisation flow (see Imgur docs for the other flows).
    print("Go to the following URL: {0}".format(client.get_auth_url('pin')))

    # get_input abstracts over Python 2 raw_input / Python 3 input.
    pin = get_input("Enter pin code: ")
    credentials = client.authorize(pin, 'pin')
    client.set_user_auth(credentials['access_token'],
                         credentials['refresh_token'])

    # Persist the refresh token so future runs can skip the PIN step.
    config.set('credentials', 'refresh_token', credentials['refresh_token'])
    with open('auth.ini', 'w') as configfile:
        config.write(configfile)

    print("Authentication successful! Here are the details:")
    print(" Access token: {0}".format(credentials['access_token']))
    print(" Refresh token: {0}".format(credentials['refresh_token']))
    return client
def plot_cape_tauc_panel(inargs, axflat, iday, rootgroup):
    """
    Plots cape and tau_c timeseries in each panel.

    Parameters
    ----------
    inargs : argparse object
      Argparse object with all input arguments
    axflat : list
      List of axis objects
    iday : int
      Index for list object
    rootgroup : netCDF object
      NetCDF object with data to plot
    """
    # CAPE is drawn on the primary axis, tau_c on a twinned right-hand axis.
    ax1 = axflat[iday]
    ax2 = ax1.twinx()
    ax2.set_ylim(0, 20)  # fixed tau_c display range

    # 'date' is stored as seconds offset from datetime(1, 1, 1).
    dateobj = (timedelta(seconds=int(rootgroup.variables['date'][iday])) +
               datetime(1, 1, 1))
    datestr = dateobj.strftime(get_config(inargs, 'plotting', 'date_fmt'))
    ax1.set_title(datestr)

    # Panels are laid out 4 per row: label only the outer columns.
    if iday % 4 == 0:   # Only left column
        ax1.set_ylabel('CAPE [J/kg]')
    if iday % 4 == 3:
        ax2.set_ylabel('tau_c [h]')
    else:
        ax2.get_yaxis().set_ticks([])

    # CAPE_ML goes on ax1 (solid), TAU_C on ax2 (dashed).
    for group in ['det', 'ens']:
        for var, ax, ls in zip(['CAPE_ML', 'TAU_C'],
                               [ax1, ax2],
                               ['-', '--']):
            # Get data do be plotted
            # prec: det, obs or ens mean
            # prec_lower/upper: ensemble minimum, maximum
            if group == 'ens':
                array = rootgroup.groups[group].variables[var][iday, :, :]
                mean = np.mean(array, axis=1)
                lower = np.amin(array, axis=1)
                upper = np.amax(array, axis=1)
            else:
                mean = rootgroup.groups[group].variables[var][iday, :, 0]

            # Plot data
            ax.plot(rootgroup.variables['time'][:], mean, label=group,
                    c=get_config(inargs, 'colors', group), ls=ls)
            if group == 'ens':
                # Shade the ensemble min-max envelope.
                ax.fill_between(rootgroup.variables['time'][:], lower, upper,
                                where=upper >= lower,
                                facecolor=get_config(inargs, 'colors',
                                                     'ens_range'))
def plot_precipitation_panel(inargs, axflat, iday, rootgroup):
    """
    Plots precipitation timeseries in each panel.

    Parameters
    ----------
    inargs : argparse object
      Argparse object with all input arguments
    axflat : list
      List of axis objects
    iday : int
      Index for list object
    rootgroup : netCDF object
      NetCDF object with data to plot
    """
    # 'date' is stored as seconds offset from datetime(1, 1, 1).
    dateobj = (timedelta(seconds=int(rootgroup.variables['date'][iday])) +
               datetime(1, 1, 1))
    datestr = dateobj.strftime(get_config(inargs, 'plotting', 'date_fmt'))
    axflat[iday].set_title(datestr)

    for group in rootgroup.groups:
        # Get data do be plotted
        # prec: det, obs or ens mean
        # prec_lower/upper: ensemble minimum, maximum
        if group == 'ens':
            prec_array = rootgroup.groups[group].variables['PREC_ACCUM'] \
                [iday, :, :]
            prec = np.mean(prec_array, axis=1)
            prec_lower = np.amin(prec_array, axis=1)
            prec_upper = np.amax(prec_array, axis=1)
        else:
            # Non-ensemble groups carry a single member at index 0.
            prec = rootgroup.groups[group].variables['PREC_ACCUM'] \
                [iday, :, 0]

        # Plot data
        axflat[iday].plot(rootgroup.variables['time'][:], prec, label=group,
                          c=get_config(inargs, 'colors', group))
        if group == 'ens':
            # Shade the ensemble min-max envelope.
            axflat[iday].fill_between(rootgroup.variables['time'][:],
                                      prec_lower, prec_upper,
                                      where=prec_upper >= prec_lower,
                                      facecolor=get_config(
                                          inargs, 'colors', 'ens_range'))
async def buy(request):
    """Buy the first listed product whose description matches the request.

    Scans every seller's product list, POSTs a /sell request to the owning
    node, and records the purchased product locally on success.
    """
    values = request.json
    description = values.get('description')
    if description is None:
        return text(body='Missing values', status=400)
    # This node's own identity acts as the buyer.
    buyer = get_config(key='node_identifier')
    for seller, product_dict in app.product_list.items():
        for product_id, product_description in product_dict.items():
            if product_description == description:
                hostname = get_hostname(seller)
                total_response = {'buyer_message': f'{buyer} wants to buy {seller}\'s product {product_description}'}
                try:
                    async with aiohttp.ClientSession() as session:
                        async with session.post(f'{hostname}/sell', json={
                            'product_id': product_id,
                            'recipient': buyer
                        }) as response:
                            if response.status == 200:
                                total_response['seller_message'] = await response.text()
                                # Mirror the purchased product locally.
                                add_product(product_id=product_id,
                                            description=description)
                            else:
                                # Seller refused (e.g. sold out): forward its message.
                                return text(body=await response.text(),
                                            status=404)
                except (aiohttp.client_exceptions.ClientConnectorError,
                        asyncio.TimeoutError):
                    return text(body='Server not found', status=404)
                return json(total_response, status=200)
    # NOTE(review): when no product matches the description, the coroutine
    # falls through and returns None — confirm whether a 404 was intended.
def post_ldap(DCOS_IP):
    """
    Get the LDAP configuration from the buffer, and post it to a DC/OS cluster
    available at the DCOS_IP argument.

    Returns False if the local LDAP buffer file cannot be read, True otherwise.
    """
    config = helpers.get_config(env.CONFIG_FILE)
    try:
        # Load the LDAP configuration (JSON) from the buffer file.
        # IMPROVED: context manager guarantees the handle is closed.
        with open(env.LDAP_FILE, 'r') as ldap_file:
            ldap_config = json.loads(ldap_file.read())
        helpers.log(log_level='INFO', operation='LOAD', objects=['LDAP'],
                    indx=0, content='** OK **')
    except IOError as error:
        # BUG FIX: the original logged `request.text` here, but `request`
        # does not exist yet on this path (NameError). Log the IOError itself.
        helpers.log(log_level='ERROR', operation='LOAD', objects=['LDAP'],
                    indx=0, content=str(error))
        return False

    # Build the request.
    api_endpoint = '/acs/api/v1/ldap/config/'
    url = 'http://' + config['DCOS_IP'] + api_endpoint
    headers = {
        'Content-type': 'application/json',
        'Authorization': 'token=' + config['TOKEN'],
    }
    data = ldap_config

    # Send the request to PUT the LDAP configuration.
    try:
        request = requests.put(url, headers=headers, data=json.dumps(data))
        request.raise_for_status()
        # Show progress after the request.
        helpers.log(log_level='INFO', operation='PUT', objects=['LDAP'],
                    indx=0, content=request.status_code)
    except requests.exceptions.HTTPError as error:
        helpers.log(log_level='ERROR', operation='PUT', objects=['LDAP'],
                    indx=0, content=request.status_code)

    helpers.log(log_level='INFO', operation='PUT', objects=['Users'],
                indx=0, content=env.MSG_DONE)
    helpers.get_input(message=env.MSG_PRESS_ENTER)
    return True
def authenticate():
    """Run the Imgur PIN OAuth flow and return an authorised client.

    Reads client credentials from auth.ini, asks the user for a PIN and
    exchanges it for access/refresh tokens.
    """
    # Get client ID and secret from auth.ini
    config = get_config()
    config.read('auth.ini')
    client_id = config.get('credentials', 'client_id')
    client_secret = config.get('credentials', 'client_secret')
    client = ImgurClient(client_id, client_secret)

    # Authorization flow, pin example (see docs for other auth types)
    authorization_url = client.get_auth_url('pin')
    print("Go to the following URL: {0}".format(authorization_url))

    # Read in the pin, handle Python 2 or 3 here.
    pin = get_input("Enter pin code: ")
    # BUG FIX: `print type(pin)` was Python 2 print-statement syntax — a
    # SyntaxError under Python 3, which the rest of this function targets
    # (it already calls print() as a function).
    print(type(pin))

    # ... redirect user to `authorization_url`, obtain pin (or code or token) ...
    credentials = client.authorize(pin, 'pin')
    client.set_user_auth(credentials['access_token'], credentials['refresh_token'])

    print("Authentication successful! Here are the details:")
    print(" Access token: {0}".format(credentials['access_token']))
    print(" Refresh token: {0}".format(credentials['refresh_token']))
    return client
def check_enabled_by_table_name(table_name, file_name="tables"):
    """Return True when every entity a table depends on is enabled.

    Required entities come from `<file_name>.json`; the enabled set comes
    from the environment-specific configuration.
    """
    config = get_config(env=os.environ.get("ENVIRONMENT"))
    enabled = {name for name, flag in config.ENABLED_ENTITIES.items()
               if flag is True}

    table_file = os.path.join(get_current_directory(), f"{file_name}.json")
    with open(table_file) as handle:
        tables = json.load(handle, object_pairs_hook=OrderedDict)

    entities = tables[table_name]["entities"]
    # 'relies_on' is optional; treat a missing key as no extra dependencies.
    required_entities = entities["base_entities"] + entities.get("relies_on", [])

    if all(entity in enabled for entity in required_entities):
        log.info(
            f"All required entities: {', '.join(required_entities)} for table {table_name} enabled, continue"
        )
        return True
    log.info(
        f"Not all required entities: {', '.join(required_entities)} for table {table_name} enabled, moving on"
    )
    return False
def read_auth(self):
    """Build an Imgur client from auth.ini and run the gallery search."""
    config = get_config()
    config.read('auth.ini')
    imgur = ImgurClient(config.get('credentials', 'client_id'),
                        config.get('credentials', 'client_secret'))
    # Store the search results for self.query on the instance.
    self.searchs = imgur.gallery_search(self.query)
def authenticate():
    """Interactive Imgur PIN authentication.

    Credentials are read from auth.ini (format documented at
    https://github.com/Imgur/imgurpython/blob/master/examples/auth.ini) and
    the refresh token obtained is written back to the same file.
    """
    config = get_config()
    config.read('auth.ini')  # file holding the user credentials
    client_id = config.get('credentials', 'client_id')
    client_secret = config.get('credentials', 'client_secret')
    client = ImgurClient(client_id, client_secret)

    # PIN authorisation flow (other auth types exist; see the Imgur docs).
    authorization_url = client.get_auth_url('pin')
    print("Go to the following URL: {0}".format(authorization_url))

    # get_input papers over the Python 2 / Python 3 input difference.
    pin = get_input("Enter pin code: ")
    credentials = client.authorize(pin, 'pin')
    client.set_user_auth(credentials['access_token'],
                         credentials['refresh_token'])

    # Remember the refresh token for the next run.
    config.set('credentials', 'refresh_token', credentials['refresh_token'])
    with open('auth.ini', 'w') as configfile:
        config.write(configfile)

    print("Authentication successful! Here are the details:")
    print(" Access token: {0}".format(credentials['access_token']))
    print(" Refresh token: {0}".format(credentials['refresh_token']))
    return client
def authenticate():
    """Build an Imgur client from tokens previously stored in auth.ini.

    Unlike the interactive PIN flow, this assumes access_token and
    refresh_token were already obtained and saved in the config file, so
    no user interaction is required.
    """
    # Get client ID, secret and both tokens from auth.ini.
    config = get_config()
    config.read('auth.ini')
    client_id = config.get('credentials', 'client_id')
    client_secret = config.get('credentials', 'client_secret')
    access_token = config.get('credentials', 'access_token')
    refresh_token = config.get('credentials', 'refresh_token')

    # Passing the stored tokens authenticates the client immediately.
    # IMPROVED: the large block of commented-out interactive PIN-flow code
    # was dead and has been removed; see the PIN-flow variant of this
    # function if interactive authorisation is ever needed again.
    return ImgurClient(client_id, client_secret, access_token, refresh_token)
def authenticate():
    """
    Authenticate against the Imgur API, caching tokens in auth.ini next to
    this source file.

    :rtype: client object
    """
    src_path = os.path.dirname(os.path.abspath(__file__))
    auth_path = "{0}/auth.ini".format(src_path)
    config = get_config()
    config.read(auth_path)
    client_id = config.get('credentials', 'client_id')
    client_secret = config.get('credentials', 'client_secret')
    refresh_token = config.get('credentials', 'refresh_token')
    access_token = config.get('credentials', 'access_token')

    # BUG FIX: the client must exist on every path. The original only created
    # it inside the "no refresh token" branch, so a stored refresh token with
    # no access token crashed with NameError at set_user_auth().
    client = ImgurClient(client_id, client_secret)

    if not refresh_token:
        # First run: walk the user through the PIN authorisation flow.
        authorization_url = client.get_auth_url('pin')
        import webbrowser as wb
        print("First we need to have your authentication access...")
        print("Go to browser and open the following URL: {0}".format(authorization_url))
        wb.open_new_tab(authorization_url)
        pin = get_input("Enter the pin code: ")
        credentials = client.authorize(pin, 'pin')
        # Store the refresh_token.
        refresh_token = credentials['refresh_token']
        config.set('credentials', 'refresh_token', value=refresh_token)
        # BUG FIX: ConfigParser.write() requires a text-mode file object in
        # Python 3; opening with 'wb' raised TypeError.
        with open(auth_path, 'w') as configfile:
            config.write(configfile)

    if refresh_token and not access_token:
        # Use the refresh token to mint a fresh access token.
        client.set_user_auth(access_token, refresh_token)
        if not client.auth:
            print("Auth failed... Please try again")
            import sys
            sys.exit()
        else:
            print("Auth success! Getting accessing...")
            client.auth.refresh()
            access_token = client.auth.current_access_token
            print("New access token generated.")
            config.set('credentials', 'access_token', value=access_token)
            with open(auth_path, 'w') as configfile:
                config.write(configfile)
            print("Access information saved!")

    if refresh_token and access_token:
        # Fully authenticated client with both tokens.
        client = ImgurClient(client_id, client_secret, access_token,
                             refresh_token)
    return client
def __init__(self):
    """Read Imgur API credentials from auth.ini and create the client."""
    config = get_config()
    config.read('auth.ini')
    client_id = config.get('imgur', 'client_id')
    client_secret = config.get('imgur', 'client_secret')
    self.client_id = client_id
    self.client_secret = client_secret
    self.imgur = ImgurClient(client_id, client_secret)
async def mining_controller(app):
    """Supervise a background miner process and commit blocks it finds.

    Runs forever: every 10 s it gates the miner on app.mining, collects any
    block the miner produced, saves it if the chain tip has not moved, and
    hands the miner a fresh block template.
    """
    app.blockchain.resolve_conflicts()
    # Talk to the miner over a pipe; the event tells it whether to mine.
    pipe, remote_pipe = multiprocessing.Pipe()
    event = multiprocessing.Event()
    process = multiprocessing.Process(target=miner, args=(remote_pipe, event))
    process.start()
    # Seed the miner with the first block template.
    pipe.send({
        'block': app.blockchain.build_block(),
        'difficulty': default_difficulty
    })
    while True:
        event.set()  # allow the miner to work
        await asyncio.sleep(10)
        if not app.mining:
            event.clear()  # pause mining while disabled
        if pipe.poll():
            result = pipe.recv()
            found_block = result['found_block']
            # Accept the block only if the chain tip is unchanged since the
            # template was produced (otherwise the block is stale).
            if app.blockchain.build_block(
            )['previous_hash'] == found_block['previous_hash']:
                app.blockchain.save_block(found_block)
                # Reward transaction: sender "0" pays this node.
                app.blockchain.new_transaction(
                    sender="0",
                    recipient=get_config(key='node_identifier'),
                    product_id=1,
                )
                logger.info(
                    f"Mined Block {found_block['height']} containing {len(found_block['transactions'])} transactions"
                )
                await as_publish_consensus()
            # Hand the miner a fresh template either way.
            pipe.send({
                'block': app.blockchain.build_block(),
                'difficulty': default_difficulty
            })
def authenticate() -> ImgurClient:
    """Build an anonymous Imgur client from credentials stored in .env."""
    parser = get_config()
    parser.read('.env')
    return ImgurClient(parser.get('imgur', 'client_id'),
                       parser.get('imgur', 'client_secret'))
def load_config(self):
    """Load the Imgur credentials from auth.ini onto this instance."""
    config = get_config()
    config.read('auth.ini')
    # Each option in the [credentials] section maps to a same-named attribute.
    for key in ('client_id', 'client_secret', 'access_token', 'refresh_token'):
        setattr(self, key, config.get('credentials', key))
def main():
    """Entry point of the SysProbe application

    :return: None
    """
    flags = helpers.parser_create()
    config_data = helpers.get_config(flags.config_file)
    influxdb_config = helpers.get_influxdb_config(config_data)
    jobs_config = helpers.get_jobs_config(config_data)
    loggers_config = helpers.get_loggers_config(config_data)
    helpers.loggers_configure(loggers_config)
    logger = logging.getLogger()
    available_jobs = helpers.get_available_jobs()
    # Schedule every configured job at its interval; unknown names are
    # logged and skipped.
    for job, interval in jobs_config.items():
        if job in available_jobs:
            job = available_jobs[job]  # resolve job name -> callable
            schedule.every(interval).seconds.do(helpers.run_threaded, job,
                                                influxdb_config)
        else:
            logger.warning('Unknown job name: {}'.format(job))
    # Run until Ctrl-C.
    # NOTE(review): this loop never sleeps between run_pending() calls, so it
    # busy-spins at full CPU — consider a short time.sleep(); confirm intent.
    while True:
        try:
            schedule.run_pending()
        except KeyboardInterrupt:
            break
def plot_correlation(inargs):
    """
    Plots correlation between N and m in a scatter plot

    Parameters
    ----------
    inargs : argparse object
        Argparse object with all input arguments
    """
    # Read pre-processed data
    rootgroup = read_netcdf_dataset(inargs)
    pw = get_config(inargs, 'plotting', 'page_width')
    fig, axarr = plt.subplots(1, 2, figsize=(pw, pw / 2.5))
    mean_m = rootgroup.variables['mean_m'][:, :, 0, 0, 0]
    mean_N = rootgroup.variables['mean_N'][:, :, 0, 0, 0]
    axarr[0].scatter(mean_m, mean_N)
    # BUG FIX: `print np.corrcoef(...)` was Python 2 print-statement syntax —
    # a SyntaxError under Python 3, which the rest of the module targets.
    print(np.corrcoef(mean_m, mean_N)[0, 1])
    axarr[0].set_xlabel('m')
    axarr[0].set_ylabel('N')
    axarr[0].legend()

    # Save figure and log
    save_fig_and_log(fig, rootgroup, inargs, 'correlation', tight=True)
def getMovieDBKey():
    """Return the MovieDB API key stored in auth.ini."""
    config = get_config()
    config.read('auth.ini')
    return config.get('api_keys', 'moviedb_key')
def plot_precipitation_panel(inargs, ax, iday, rootgroup):
    """
    Plots precipitation timeseries in each panel.

    Parameters
    ----------
    inargs : argparse object
      Argparse object with all input arguments
    ax : axis object
      Axis objects
    iday : int
      Index for list object
    rootgroup : netCDF object
      NetCDF object with data to plot
    """
    # 'date' is stored as seconds offset from datetime(1, 1, 1).
    # NOTE(review): dateobj is computed but never used in this variant —
    # the title that other panel functions derive from it is not set here.
    dateobj = (timedelta(seconds=int(rootgroup.variables['date'][iday])) +
               datetime(1, 1, 1))

    # Panels are laid out 4 per row: label only the left column.
    if iday % 4 == 0:   # Only left column
        ax.set_ylabel(r'Precip [mm h$^{-1}$]')

    for group in rootgroup.groups:
        # Get data do be plotted
        # prec: det, obs or ens mean
        # prec_lower/upper: ensemble minimum, maximum
        if group == 'ens':
            prec_array = rootgroup.groups[group].variables['PREC_ACCUM'] \
                [iday, :, :]
            prec = np.mean(prec_array, axis=1)
            prec_lower = np.amin(prec_array, axis=1)
            prec_upper = np.amax(prec_array, axis=1)
        else:
            # Non-ensemble groups carry a single member at index 0.
            prec = rootgroup.groups[group].variables['PREC_ACCUM'] \
                [iday, :, 0]

        # Plot data
        ax.plot(rootgroup.variables['time'][:], prec, label=group,
                c=get_config(inargs, 'colors', group), linewidth=2)
        if group == 'ens':
            # Shade the ensemble min-max envelope.
            ax.fill_between(rootgroup.variables['time'][:],
                            prec_lower, prec_upper,
                            where=prec_upper >= prec_lower,
                            facecolor=get_config(inargs, 'colors',
                                                 'ens_range'))
def __init__(self, commands_config, template_values):
    """Load the commands configuration and register every command."""
    # set xform for config otherwise text will be normalized to lowercase
    self.config = helpers.get_config(commands_config)
    self.template_values = template_values
    self.load_commands()
def authenticate():
    """Create an anonymous Imgur client from ~/.imgur.ini credentials."""
    config = get_config()
    config.read(HOME + '/.imgur.ini')
    return ImgurClient(config.get('credentials', 'client_id'),
                       config.get('credentials', 'client_secret'))
def authed():
    """Return an Imgur client authorised with the tokens stored in auth.ini."""
    config = get_config()
    config.read('auth.ini')
    client_id = config.get('credentials', 'client_id')
    access_token = config.get('credentials', 'access_token')
    client_secret = config.get('credentials', 'client_secret')
    refresh_token = config.get('credentials', 'refresh_token')
    client = ImgurClient(client_id, client_secret)
    # BUG FIX: the original referenced an undefined `credentials` dict here
    # (NameError at runtime); the tokens clearly come from the config file,
    # which already stores both access_token and refresh_token.
    client.set_user_auth(access_token, refresh_token)
    # IMPROVED: return the client so callers can actually use it (the
    # original implicitly returned None).
    return client
def get_default_channel(self, guild):
    """Return the guild's configured default YoutubeChannel, or None."""
    data = get_config(guild.id, "youtube_default_channel")
    if not data:
        return None
    # TODO: get first upload date automagically, oh this seems not possible through youtube api
    year, month, day = (int(part)
                        for part in data['first_upload_date'].split('-'))
    first_upload_date = datetime.datetime(year, month, day)
    return YoutubeChannel(data['id'], first_upload_date)
async def first_run():
    """Create the Telethon session interactively, then disconnect.

    Meant to be run once so the session file exists for later sends.
    """
    config = get_config()
    client = TelegramClient('telethon_send_message',
                            config['api_id'], config['api_hash'])
    await client.start()
    await client.disconnect()
def StartClient():
    """Build an Imgur client that authenticates via a stored refresh token."""
    # Credentials come from auth.ini.
    config = get_config()
    config.read('auth.ini')
    values = [config.get('credentials', key)
              for key in ('client_id', 'client_secret', 'refresh_token')]
    client_id, client_secret, refresh_token = values
    # No access token yet (None); the client can refresh one on demand.
    return ImgurClient(client_id, client_secret, None, refresh_token)
def authenticate():
    """Log in to TheTVDB with the credentials stored in auth.ini."""
    config = get_config()
    config.read("auth.ini")
    username = config.get("credentials", "username")
    password = config.get("credentials", "password")
    api_key = config.get("credentials", "api_key")
    print("Username: " + username)
    return TVDBClient(api_key, username, password)
def get_agents(DCOS_IP):
    """
    Get the agent status configuration from a DC/OS cluster as a JSON blob.
    Save it to the text file in the save_path provided.
    Return the cluster's agent state as a dictionary.
    """
    api_endpoint = '/mesos/slaves'
    config = helpers.get_config(env.CONFIG_FILE)
    url = 'http://' + config['DCOS_IP'] + api_endpoint
    headers = {
        'Content-type': 'application/json',
        'Authorization': 'token=' + config['TOKEN'],
    }
    try:
        request = requests.get(url, headers=headers)
        request.raise_for_status()
        helpers.log(log_level='INFO', operation='GET', objects=['AGENTS'],
                    indx=0, content=request.status_code)
    except requests.exceptions.HTTPError as error:
        # Log and fall through: the response body is still written below,
        # matching the original best-effort behaviour.
        helpers.log(log_level='ERROR', operation='GET', objects=['AGENTS'],
                    indx=0, content=request.text)

    # Save to the AGENTS file in the same raw JSON as obtained from DC/OS.
    # IMPROVED: context manager guarantees the file handle is closed.
    with open(config['AGENTS_FILE'], 'w') as agents_file:
        agents_file.write(request.text)

    # Create a dictionary of agents.
    agents_dict = dict(json.loads(request.text))
    helpers.log(log_level='INFO', operation='GET', objects=['AGENTS'],
                indx=0, content='* DONE. *\n')
    return agents_dict
def plot_domain_mean_timeseries_individual(inargs, plot_type):
    """
    Function to plot time series of domain mean precipitation for each day

    Parameters
    ----------
    inargs : argparse object
      Argparse object with all input arguments
    plot_type : str
      Type of plot. Must be 'precipitation' or 'cape_tauc'
    """
    assert plot_type in ['precipitation', 'cape_tauc'], \
        'Type must be precipitation or cape_tauc'

    # Read pre-processed data
    rootgroup = read_netcdf_dataset(inargs)
    n_days = rootgroup.dimensions['date'].size

    # Set up figure: one panel per day, 4 panels per row.
    n_cols = 4
    n_rows = int(np.ceil(float(n_days) / n_cols))
    fig, axmat = plt.subplots(n_rows, n_cols, sharex=True, sharey=True,
                              figsize=(10, 3 * n_rows))
    axflat = np.ravel(axmat)

    # Loop over axes / days
    for iday in range(n_days):
        # 'date' is stored as seconds offset from datetime(1, 1, 1).
        dateobj = (timedelta(seconds=int(rootgroup.variables['date'][iday])) +
                   datetime(1, 1, 1))
        datestr = dateobj.strftime(get_config(inargs, 'plotting', 'date_fmt'))
        axflat[iday].set_title(datestr)
        if iday >= ((n_cols * n_rows) - n_cols):   # Only bottom row
            axflat[iday].set_xlabel('Time [UTC]')
        # BUG FIX: the original compared against the misspelled
        # 'precipitaiton', which can never match — the assert above only
        # admits 'precipitation', so precipitation panels were never drawn.
        if plot_type == 'precipitation':
            plot_precipitation_panel(inargs, axflat, iday, rootgroup)
        elif plot_type == 'cape_tauc':
            plot_cape_tauc_panel(inargs, axflat, iday, rootgroup)

    # Finish figure
    axflat[0].legend(loc=0)
    plt.tight_layout()

    # Save figure
    save_fig_and_log(fig, rootgroup, inargs, plot_type + '_ts_individual')
def ShortenUrl(url):
    """Shorten *url* via the Google URL-shortener API key from auth.ini."""
    config = get_config()
    config.read('auth.ini')  # file holding the user credentials
    api_key = config.get('credentials', 'api_key')
    post_url = 'https://www.googleapis.com/urlshortener/v1/url?key=' + api_key
    payload = json.dumps({'longUrl': url})
    request = urllib2.Request(post_url, payload,
                              {'Content-Type': 'application/json'})
    response = urllib2.urlopen(request).read()
    return json.loads(response)['id']
def posted(username, limit):
    """Yield the user's submissions and comments in the configured subreddit.

    Yields a "/u/<name>" header first, then one formatted string per
    matching submission and per matching comment.
    """
    config = get_config()
    reddit = get_reddit(config)
    subreddit = config["REDDIT"]["subreddit"].lower()
    user = reddit.redditor(username)
    yield "/u/%s" % username
    # BUG FIX: the submissions listing hardcoded limit=10 and ignored the
    # `limit` parameter, unlike the comments loop below.
    for subm in user.submissions.new(limit=limit):
        if subm.subreddit.display_name.lower() == subreddit:
            yield "\n\n".join(
                (subm.url, quoted(subm.title), quoted(subm.selftext)))
    for comm in user.comments.new(limit=limit):
        if comm.subreddit.display_name.lower() == subreddit:
            yield ("\n\n").join((comm.link_permalink, quoted(comm.body)))
def main():
    """Load configuration and secrets, then run the trader in its mode."""
    arguments = helpers.get_arguments()
    config = helpers.get_config(config_name=arguments['c'])
    secrets = helpers.get_secrets()
    helpers.validate_config(config)
    # Convert a human-readable period into epoch units when present.
    if 'period' in config:
        config['period'] = helpers.period_to_epoch(config['period'])
    trader = Trader(config, secrets)
    mode = config['type']
    if mode == 'backtest':
        trader.backtest(config['period'])
    if mode == 'live':
        trader.live_trade()
async def sell(request):
    """Sell a product to the requesting node and mine the transaction.

    Expects JSON with 'product_id' and 'recipient'; responds 200 with a
    confirmation message, 400 on missing fields, 404 when sold out.
    """
    values = request.json
    required = ['product_id', 'recipient']
    if not all(k in values for k in required):
        return text(body='Missing values', status=400)
    # This node's identity acts as the seller.
    seller = get_config(key='node_identifier')
    product_id = values.get('product_id')
    buyer = values.get('recipient')
    if sell_product(product_id):
        app.blockchain.new_transaction(seller, buyer, product_id)
        # FIX: the result was bound to an unused, misspelled variable
        # (`transcation`); the call is kept purely for its side effect of
        # mining the transaction into a block.
        await mining_once(app)
        product = get_products(product_id=product_id)
        return text(f'Sold {product.description} to {buyer}', 200)
    return text('Sold out', 404)
def main(): config = get_config() # Wraps arg parse functionallity around train function so that it can be provided as arguments parser = argparse.ArgumentParser( description='Trains a language model from a wiki dataset') parser.add_argument( 'lm_fit', help='The wiki dump name to train a language model for') parser.add_argument('lm_flow', help='Name of the model, used in exported files etc') parser.add_argument( 'fit_dump', help='The wiki dump name to train a language model for') parser.add_argument('flow_dump', help='Name of the model, used in exported files etc') parser.add_argument( '--test-mode', help="makes dataset smaller to see if the script actually runs", action='store_true') parser.add_argument('--epochs', type=int, default=5, help="Number of epochs to run for") parser.add_argument('--batch_size', type=int, default=64, help="Batch size") parser.add_argument('--gpu', type=int, default=-1, help="Gpu to use") parser.add_argument('--out', default='result', help="Folder to put results") parser.add_argument('--grad-clip', default=True, help="Clip gradients") parser.add_argument('--brpoplen', type=int, default=35) parser.add_argument('--resume', default='') parser.add_argument('--max-seq-size', default=250000, type=int) args = parser.parse_args() com = Communication(args.out) com.add_text("Type", "Translation via matrix") # keep time com.add_text("Start date", time.strftime("%c")) start = time.time() train(com, args.lm_fit, args.lm_flow, args.fit_dump, args.flow_dump, args.test_mode, args.epochs, args.batch_size, args.gpu, args.out, args.grad_clip, args.brpoplen, args.resume, args.max_seq_size) diff = time.time() - start com.add_text('time', seconds_to_str(diff)) com.send_slack(config.get('slack', 'channel'), config.get('slack', 'api_token'))
async def start(params: dict):
    """Send one command message to the configured bot via Telethon."""
    config = get_config()
    api_id = config['api_id']
    api_hash = config['api_hash']
    bot_name = config['bot_name']
    client = TelegramClient('telethon_send_message', api_id, api_hash)
    await client.start()
    # Message format: "<cmd> <name> <episode> <dub_or_sub> <page>".
    message = f"{params['cmd']} {params['name']} {params['episode']} {params['dub_or_sub']} {params['page']}"
    await client.send_message(bot_name, message)
    await client.disconnect()
    return "OK"
def fit(self, X, y):
    """Rank features by tree-ensemble importance.

    Fits the configured tree estimator, stores the importance-sorted
    feature indices on `self.idx_sorted`, and — for DataFrame input only —
    the top-`n_features` column names on `self.selected_features`.

    Returns self so calls can be chained.
    """
    config = get_config()
    estimator = TREE_ESTIMATORS[self.tree_estimator](
        n_estimators=self.n_estimators,
        random_state=config.RANDOM_STATE,
    )
    estimator.fit(X, y)
    # Indices of features sorted by decreasing importance.
    self.idx_sorted = np.argsort(-estimator.feature_importances_)
    # Feature *names* only exist for DataFrame input; for plain arrays only
    # idx_sorted is populated. IMPROVED: the dead `else: pass` was removed.
    if isinstance(X, DataFrame):
        self.selected_features = X.columns[
            self.idx_sorted[:self.n_features]]
    return self
def ShortenUrl(url):
    """Return the goo.gl short link for *url*, using the key in auth.ini."""
    config = get_config()
    config.read('auth.ini')  # holds the user credentials
    api_key = config.get('credentials', 'api_key')
    target = 'https://www.googleapis.com/urlshortener/v1/url?key=' + api_key
    body = {'longUrl': url}
    headers = {'Content-Type': 'application/json'}
    response = urllib2.urlopen(
        urllib2.Request(target, json.dumps(body), headers))
    return json.loads(response.read())['id']
async def mining_once(app):
    """Mine exactly one block in-process and return its transactions."""
    app.blockchain.resolve_conflicts()
    found_block = proof_of_work(app.blockchain.build_block(),
                                default_difficulty, None)
    # Commit only if the chain tip is unchanged since the template was built.
    if app.blockchain.build_block(
    )['previous_hash'] == found_block['previous_hash']:
        app.blockchain.save_block(found_block)
        # Reward transaction: sender "0" pays this node.
        app.blockchain.new_transaction(
            sender="0",
            recipient=get_config(key='node_identifier'),
            product_id=1,
        )
        logger.info(
            f"Mined Block {found_block['height']} containing {len(found_block['transactions'])} transactions"
        )
        await as_publish_consensus()
    # NOTE(review): if the tip moved, found_block was NOT saved but its
    # transactions are still returned — confirm this is intended.
    return found_block['transactions']
def render_sitemap_xml(request):
    """Render sitemap.xml for the active theme with all projects and posts."""
    from helpers import get_config
    from ..project.models import Project
    from ..post.models import Post

    config = get_config(request)
    projects = Project.objects.all().order_by('-created').all()
    posts = Post.objects.all().order_by('-created').all()
    context = {
        'config': config,
        'project_list': projects,
        'post_list': posts,
        'current_lang': get_language(),
        'lang': settings.LANGUAGE_CODE,
        'lang_list': config['lang_list'],
    }
    return render(request,
                  'home/templates/%s/sitemap.xml' % settings.THEME,
                  context)
def wd_instance(driver_name, time_to_wait=30):
    """:param time_to_wait: Sets a sticky timeout to implicitly wait for an
    element to be found

    Returns a configured Selenium WebDriver for 'firefox', 'chrome', or any
    key of `user_agents` (e.g. 'mobile_chrome'); raises for unknown names.
    """
    display = None
    # Remote selenium endpoint, used unless a local virtual display is set up.
    remurl = 'http://%s:%s' \
        % (get_config('CHROMEDRIVER_HOST'), get_config('CHROMEDRIVER_PORT'))
    if driver_name == 'firefox':
        driver = webdriver.Firefox()
    elif driver_name in user_agents.keys():
        capabilities = {}
        if driver_name == 'mobile_chrome':
            # NOTE(review): this aliases (does not copy) the shared
            # DesiredCapabilities.CHROME dict, so the mutations below leak
            # into later uses of that class attribute — confirm intended.
            capabilities = selenium.webdriver.DesiredCapabilities.CHROME
            capabilities["chromeOptions"] = {
                'args': ['user-agent=%s' % user_agents[driver_name]],
                'extensions': []
            }
        elif driver_name != 'chrome':
            # Spoof the user agent for every non-chrome profile.
            capabilities["chromeOptions"] = {
                'args': ["user-agent=%s" % user_agents[driver_name]],
                'extensions': []
            }
        if 'chromeOptions' not in capabilities:
            capabilities['chromeOptions']={'args':[]}
        capabilities['chromeOptions']['args'].append('--test-type')
        if get_config('CHROMEDRIVER_VIRTUAL_DISPLAY'):
            # Local chromedriver on a virtual display.
            chromedriver = get_config('CHROMEDRIVER_BINARY')
            os.environ['webdriver.chrome.driver'] = chromedriver
            display = Display(visible=0, size=DEFAULT_DISPLAY_SIZE)
            display.start()
            options = selenium.webdriver.ChromeOptions()
            if driver_name in user_agents:
                options.add_argument('--user-agent="%s"'%user_agents[driver_name])
            try:
                driver = selenium.webdriver.Chrome(chromedriver,chrome_options=options,desired_capabilities=capabilities)
                driver.set_window_size(*DEFAULT_DISPLAY_SIZE)
                driver.set_window_position(0, 0)
            except Exception as expt:
                print(expt)
                print('-' * 80)
                raise Exception(
                    'failed to instantiate webdriver '
                    'with binary path %s' % chromedriver)
        else:
            # Remote chromedriver / selenium hub.
            try:
                if get_config('CHROMEDRIVER_HOST') not in ['localhost', '127.0.0.1']:
                    remurl += '/wd/hub'
                capabilities = selenium.webdriver.DesiredCapabilities.CHROME
                driver = selenium.webdriver.Remote(
                    remurl, desired_capabilities=capabilities)
                driver.set_window_size(*DEFAULT_DISPLAY_SIZE)
                driver.set_window_position(0, 0)
            except Exception as expt:
                print('-' * 80)
                raise Exception('could not connect to selenium at %s; '
                                'CHECK THAT YOU HAVE \n'
                                'CHROMEDRIVER RUNNING - '
                                'http://code.google.com/p/chromedriver/'
                                'downloads/list'
                                'Exception: %s' % (remurl, str(expt)))
    else:
        raise Exception('Driver not defined!')
    if not display is None:
        # Let callers stop the virtual display after quitting the driver.
        driver.display_stop = lambda: display.stop()
    driver.implicitly_wait(time_to_wait)
    return driver
'new_id':35,
        'split_date':FUTURE_DATE.strftime('%Y-%m-%d'),
        'bool_mult_div':'True',
        'split_rate': 10
}
# Same item as the fixture above, but dated in the past (TEST_DATE) and
# marked divide ('False') instead of multiply.
DEMO_NOSPLIT = {
    'type_id':35,
    'type_name':'Tritanium',
    'original_id':35,
    'new_id':35,
    'split_date':TEST_DATE.strftime('%Y-%m-%d'),
    'bool_mult_div':'False',
    'split_rate': 10
}
# App-level and test-local configuration files.
ROOT_CONFIG = helpers.get_config(
    path.join(ROOT, 'scripts', 'app.cfg')
)
TEST_CONFIG = helpers.get_config(
    path.join(HERE, 'test_config.cfg')
)

def test_splitinfo_happypath():
    """test SplitInfo behavior"""
    split_obj = split_utils.SplitInfo(DEMO_SPLIT)

    ## Validate data inside obj ##
    assert split_obj.type_id == DEMO_SPLIT['type_id']
    assert split_obj.type_name == DEMO_SPLIT['type_name']
    assert split_obj.original_id == DEMO_SPLIT['original_id']
    assert split_obj.new_id == DEMO_SPLIT['new_id']
    # split_date string is parsed into a datetime; date_str keeps the raw form.
    assert split_obj.split_date == datetime.strptime(DEMO_SPLIT['split_date'], '%Y-%m-%d')
    assert split_obj.date_str == DEMO_SPLIT['split_date']
from flask import Flask, request, jsonify, redirect, url_for, render_template, send_from_directory
from flask.ext.cors import CORS
from werkzeug import secure_filename

import helpers
import html
import os
import sys
import time

# Load settings from config.py (path taken from the command line).
CONFIGURATION = helpers.get_config(sys.argv)
CONTENT_FOLDER = CONFIGURATION['app']['content_folder']  # The folder where UL/DL happen
LEN_CONTENT_FOLDER = len(CONTENT_FOLDER) - 1

app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = CONTENT_FOLDER  # Tell flask that this is where all uploads are supposed to go
# Allow any origin on the /api/* endpoints.
cors = CORS(app, resources={r"/api/*": {"origins": "*"}})


@app.route('/dl/<path:filename>')  # Legacy support, remove this in the future
@app.route('/download_file/<path:filename>')  # Legacy support, remove this in the future
@app.route('/api/download_file/<path:filename>')
def download_file(filename):
    """ Used for downloading files to client """
    filename = html.unescape(filename)  # Convert HTML sequences to their characters &amp; -> &
    return send_from_directory(CONTENT_FOLDER, filename, as_attachment=True)  # Send file to client


@app.route('/download_directory/<path:dirname>')  # Legacy support, remove this in the future
@app.route('/api/download_directory/<path:dirname>')
# (the decorated handler continues beyond this chunk)
from simplejson import loads as parse_json
from helpers import do_curl, get_config

# Drupal endpoint URLs and credentials.
config = get_config('drupal')
# Module-level session cookie cache; empty until the first login succeeds.
cookie = ''


def login():
    """Log in to the Drupal service (once) and return the session cookie."""
    global cookie
    if not cookie:
        response = do_curl(config['login_url'],
                           username=config['user'],
                           password=config['password'])
        session = parse_json(str(response))
        cookie = session['session_name'] + "=" + session['sessid']
    return cookie


def get_site(site_id):
    """Fetch a single site record by id and return it as parsed JSON."""
    session_cookie = login()
    raw = do_curl(config['site_url'] % site_id, cookie=session_cookie)
    return parse_json(str(raw))


def get_sites():
    """Fetch the full site index and return it as parsed JSON."""
    session_cookie = login()
    raw = do_curl(config['site_index_url'], cookie=session_cookie)
    return parse_json(str(raw))
from flask import Flask, render_template, request import helpers # Init # ---------------------------------- app = Flask(__name__) app.config.update(blog=helpers.get_config()) app.config.update(SERVER_NAME=app.config["blog"]["host"]) app.debug = app.config["blog"]["debug"] # ---------------------------------- # Routes # ---------------------------------- @app.route("/") def index(): posts = helpers.get_posts() return render_template("index.html", posts=posts) @app.route("/<post_name>") def show_post(post_name): post = helpers.get_post(post_name) return render_template("post.html", **post) @app.route("/contact")
import csv, requests, time, re, sys, random, smtplib, jinja2, envelopes

# Imports from files that I've defined
import helpers

if __name__ == "__main__":
    # are we in test mode or do we want to send the emails for real?
    test_mode = True  # default to testing. No accidents here
    if test_mode:
        # setup the necessary test information
        test_send_to_addrs = ['']  # array of email addresses that would get the test email

    # grab the configuration from the config file
    config = helpers.get_config('config/config')
    userpass = helpers.get_config('config/userpass')

    # options for the smtp server
    smtp_options = {}
    smtp_options['server'] = config['server']
    smtp_options['port'] = config['port']
    smtp_options['username'] = userpass['smtp_username']
    smtp_options['password'] = userpass['smtp_password']

    # paths to the data
    date_string = time.strftime("%Y-%m-%d")
    data_directory = 'winners_thank_you'
    # FIX: data_filename was only defined in a commented-out line but was
    # still referenced below, raising NameError at runtime. Build it from
    # today's date, matching the old hard-coded 'Winners_2014-07-21.csv'
    # pattern (resolves the "needs to change per day / be dynamic" TODO).
    data_filename = 'Winners_' + date_string + '.csv'
    data_full_path = 'data/' + data_directory + '/' + data_filename
    winner_list = helpers.read_thank_you_winner_data(data_full_path)
import requests
from tinydb import Query
import pytest

import publicAPI.config as api_config
import publicAPI.crest_utils as crest_utils
import publicAPI.exceptions as exceptions
import helpers

# NOTE(review): `path` is used below but not imported in this chunk --
# presumably `from os import path` appears earlier in the file; verify.
HERE = path.abspath(path.dirname(__file__))
ROOT = path.dirname(HERE)

# Test config lives next to this module; the real app config is in scripts/.
CONFIG_FILENAME = path.join(HERE, 'test_config.cfg')
CONFIG = helpers.get_config(CONFIG_FILENAME)
ROOT_CONFIG = helpers.get_config(
    path.join(ROOT, 'scripts', 'app.cfg'))


def test_validate_esi_fetcher(config=CONFIG):
    """exercise fetch_crest_endpoint: fetch a region by id using the ids
    stored in the test config and the production endpoint config"""
    region_data = crest_utils.fetch_esi_endpoint(
        'map_regions',
        region_id=config.get('TEST', 'region_id'),
        config=ROOT_CONFIG
    )
    region_keys = [  # not all keys, just important ones
        'name',
        'region_id',
        'description',
#!/usr/bin/env python import yaml import simplejson from helpers import do_curl, get_config from sh import sendemail, ssh, curl import notifications from html import HTML config = get_config('notifications') def notify(event, message, **kw): if not event in config['events']: return if not 'app' in kw: kw['app'] = config['default_app'] for user, methods in config['events'][event].items(): if not hasattr(methods, '__iter__'): methods = (methods,) for method in methods: getattr(notifications, "send_" + method)(user, event, message, **kw) def send_pushover(user, event, message, **kw): if not user in config['pushover']['user_keys']: return if not 'app' in kw: kw['app'] = config['default_app'] token = config['pushover']['tokens'][kw['app']]
def render_index(request, strings, template='home/templates/%s/index.htm', noindex=False):
    """Render the themed home page with SEO/meta strings filled in.

    Parameters
    ----------
    request : HttpRequest
        Incoming Django request.
    strings : dict
        Page strings; missing keys are null-filled and then defaulted from
        the site-wide properties.
    template : str
        Template path pattern; '%s' is replaced with settings.THEME.
    noindex : bool
        When True, emit a robots noindex/nofollow meta tag.
    """
    config = get_config(request)
    config['properties'] = properties_resource.get_list_of_names(
        ['SITE_TITLE', 'SITE_DESCRIPTION', 'SITE_NAME', 'SITE_LOGO'])

    strings = helpers.set_null_values_if_not_exist(
        strings, ['site_title', 'site_description', 'site_name',
                  'site_image', 'site_type', 'site_url'])

    if strings['site_title'] is None:
        strings['site_title'] = config['properties']['SITE_TITLE']
        strings['short_site_title'] = config['properties']['SITE_TITLE']
    else:
        # A caller-supplied site_title arrives as a list of parts; the
        # site-wide title is appended but only the first element is shown.
        strings['site_title'].append(config['properties']['SITE_TITLE'])
        strings['short_site_title'] = strings['site_title'][0]
        strings['site_title'] = strings['site_title'][0]

    if strings['site_description'] is None:
        strings['site_description'] = config['properties']['SITE_DESCRIPTION']
    if strings['site_name'] is None:
        strings['site_name'] = config['properties']['SITE_NAME']
    if strings['site_image'] is None:
        strings['site_image'] = config['properties']['SITE_LOGO']
    if strings['site_type'] is None:
        strings['site_type'] = 'website'

    if strings['site_url'] is None:
        strings['site_url'] = config['host_name']
        strings['short_site_url'] = ''
    else:
        # Build the absolute URL from host name + relative path.
        temp_list = [config['host_name'], strings['site_url']]
        strings['short_site_url'] = '/%s' % strings['site_url']
        strings['site_url'] = '/'.join(temp_list)

    meta_tag_list = meta_tag_resource.get_list_as_objects()
    properties_list = properties_resource.get_list_of_names(
        ['SITE_TITLE', 'SITE_DESCRIPTION', 'SITE_NAME', 'SITE_LOGO',
         'HOME_HEADER_BOTTOM_HTML', 'HOME_BODY_TOP_HTML',
         'HOME_BODY_BOTTOM_HTML'])

    # FIX: was try/bare-except around the header lookup, which swallowed
    # every exception; a dict .get() with a default expresses the intent.
    HTTP_USER_AGENT = request.META.get('HTTP_USER_AGENT', 'EMPTY')

    # Emit the fragment meta tag unless this request is already the
    # escaped-fragment / prerender pass.
    if "_escaped_fragment_" not in request.GET and 'Prerender' not in HTTP_USER_AGENT:
        escaped_fragment_tag = '<meta name="fragment" content="!">'
    else:
        escaped_fragment_tag = ''

    if noindex:
        noindex_tag = '<meta name="robots" content="noindex,nofollow" />'
    else:
        noindex_tag = ''

    return render(request, template % settings.THEME, {
        'host_name': config['host_name'],
        'host_url': '//' + request.get_host(),
        'config': json.dumps(config, sort_keys=True, indent=4),
        'current_lang': get_language(),
        'lang': settings.LANGUAGE_CODE,
        'lang_list': config['lang_list'],
        'settings': settings,
        'meta_tag_list': meta_tag_list,
        'properties_list': properties_list,
        'escaped_fragment_tag': escaped_fragment_tag,
        'noindex_tag': noindex_tag,
        'strings': strings
    })
#!/usr/bin/env python import yaml import simplejson from helpers import do_curl, get_config config = get_config("civicrm") def civicrm_api(entity, action, **kw): data = civicrm_api_raw(entity, action, **kw) return simplejson.loads(str(data)) def civicrm_api_raw(entity, action, **kw): params = { "entity": entity, "action": action, "version": 3, "key": config["key"], "api_key": config["api_key"], "json": 1, } args = dict(kw, **params) if "_return" in args: args["return"] = args.pop("_return") data = do_curl(config["path"], **args) return data def main(args):
from random import randint # Email dependancies from email.mime.text import MIMEText from email.mime.image import MIMEImage from email.mime.multipart import MIMEMultipart # Requires Imgur's Python api to be installed. >> https://github.com/Imgur/imgurpython # Documentation >> https://api.imgur.com/ from imgur_client import StartClient from helpers import get_config from search_sources import GoogleLookup, APILookup, LookupUrl, ShortenUrl # Get email username and password from auth.ini config = get_config() config.read('auth.ini') # this is the file that holds user credentials username = config.get('credentials', 'username') password = config.get('credentials', 'password') # Routing addresses fromaddr = username toaddrs = '*****@*****.**' # Empty directory array files = None # Path to target source directory folder = '/home/cody/repost_9000/images' # Path to currently selected image
from flask import Flask, request, jsonify, redirect, url_for, render_template, send_from_directory
# FIX: `secure_filename` was removed from the `werkzeug` root namespace;
# it lives in `werkzeug.utils`.
from werkzeug.utils import secure_filename
import html
import helpers
import os
import sys

mynode = ""

# Load settings (the config location comes from the command line).
config = helpers.get_config(sys.argv)
CONTENT_FOLDER = config['app']['content_folder']  # The folder where UL/DL happen
LEN_CONTENT_FOLDER = len(CONTENT_FOLDER) - 1

app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = CONTENT_FOLDER  # Tell flask that this is where all uploads are supposed to go


@app.route('/dl/<path:filename>')  # Legacy support, remove this in the future
@app.route('/download_file/<path:filename>')
def download_file(filename):
    """Used for downloading files to client."""
    # Convert HTML escape sequences back to their characters (&amp; -> &)
    filename = html.unescape(filename)
    return send_from_directory(CONTENT_FOLDER, filename, as_attachment=True)  # Send file to client


@app.route('/download_directory/<path:dirname>')
def download_directory(dirname):
    """Used for downloading entire directories."""
    dirname = html.unescape(dirname)
import pytest
from flaky import flaky
import requests

from Robinhood import Robinhood

import helpers

# NOTE(review): `six` and `path` are referenced below but not imported in
# this chunk -- presumably `import six` / `from os import path` appear
# earlier in the file; verify.
if six.PY2:
    from Robinhood import RH_exception

HERE = path.abspath(path.dirname(__file__))
ROOT = path.dirname(HERE)

# Test config lives next to this test module.
CONFIG_FILENAME = path.join(HERE, 'test_config.cfg')
CONFIG = helpers.get_config(CONFIG_FILENAME)

# Shared quote fixture -- presumably populated by tests further down the
# file (not visible in this chunk).
TEST_QUOTE = {}

# Quote fields worth asserting on.
TESTABLE_KEYS = [
    'previous_close_date',
    'symbol',
    'trading_halted',
    'previous_close',
    'last_trade_price_source',
    'instrument'
]


@pytest.mark.incremental
class TestQuoteHelpers:
    """wrapper to test quote architecture in order"""
    # Ticker symbol under test, from the [FETCH] section of the test config.
    test_ticker = CONFIG.get('FETCH', 'test_ticker')