class EnvConfig(object):
    """Parent configuration class shared by all environment configs.

    Values marked with get_env() are pulled from the process environment.
    """

    DEBUG = False
    CSRF_ENABLED = True
    SECRET = get_env("SECRET")
    SQLALCHEMY_DATABASE_URI = get_env("DATABASE_URL")
def open_sheet(self):
    """Open the worksheet for this instance's bootcamp.

    Returns:
        The gspread worksheet: 'sheet1' for the PYTHON bootcamp,
        'sheet2' for the ML bootcamp (both inside the spreadsheet
        named by the GAPPS_SHEET_NAME environment variable).

    Raises:
        ValueError: if ``self.bootcamp`` is neither "PYTHON" nor "ML".
            Previously this case surfaced as an UnboundLocalError
            because ``sheet`` was never assigned.
    """
    worksheets = {"PYTHON": "sheet1", "ML": "sheet2"}
    if self.bootcamp not in worksheets:
        raise ValueError("Unknown bootcamp: %r" % (self.bootcamp,))
    return self.client.open(
        get_env('GAPPS_SHEET_NAME')).worksheet(worksheets[self.bootcamp])
def __init__(self, adapter='redis', redis_connection=None):
    """Set up the cache backend.

    Args:
        adapter: backend name; only 'redis' is currently supported.
        redis_connection: optional dict of redis connection kwargs;
            when omitted, values are read from the environment.
    """
    self.cache_obj = None
    if redis_connection is None:
        # Fall back to environment-provided connection settings.
        redis_connection = {
            key: get_env('REDIS_' + key.upper())
            for key in ('host', 'port', 'db', 'password')
        }
    if adapter == 'redis':
        connection_pool = redis.ConnectionPool(**redis_connection)
        self.cache_obj = redis.StrictRedis(connection_pool=connection_pool)
def generate_password_reset_token(user):
    """Create a signed JWT authorising *user* to reset their password.

    The token is valid for 90 minutes from issuance and carries the
    user's id and hash_id as claims.
    """
    issued_at = datetime.utcnow()
    claims = {
        'iat': issued_at,
        'exp': issued_at + timedelta(hours=1, minutes=30),
        'iss': 'Heroshe LLC',
        'uid': user.id,
        'hash': user.hash_id,
    }
    # NOTE(review): .decode() assumes PyJWT 1.x, where encode() returns
    # bytes; PyJWT 2.x returns str — confirm pinned version.
    return jwt.encode(payload=claims, key=get_env('SECRET_KEY')).decode()
def verify_password_reset_token(token):
    """Decode a password-reset JWT.

    Returns the token payload on success, or None when the token is
    invalid/expired (the failure is printed, best-effort).
    """
    try:
        return jwt.decode(jwt=token, key=get_env('SECRET_KEY'), verify=True)
    except Exception as err:  # any decode failure means "invalid token"
        print(err)
        return None
def __init__(self, name="redis", namespace="pm", url=None):
    """Connect to Redis and build the namespaced queue key.

    The default connection parameters are:
    host = 'localhost', port = 6379, db = 0
    """
    # Empty/None url falls back to the environment-provided URL.
    connection_url = url or get_env("REDIS_URL")
    self.__db = Redis.from_url(
        connection_url, charset="utf-8", decode_responses=True)
    self.key = f"{namespace}:{name}"
def connect_db():
    """Open a psycopg2 connection using environment-driven settings.

    In production the connection authenticates with user/password;
    otherwise it relies on local auth (dbname + host only).
    """
    env = config.get_env()
    if env['ENV_MODE'] == 'production':
        # Keyword arguments instead of a %-interpolated DSN string: the
        # old form broke when the password contained spaces or quotes.
        return psycopg2.connect(dbname=env['DBNAME'], user=env['USER'],
                                password=env['PASSWORD'], host=env['HOST'])
    return psycopg2.connect(dbname=env['DBNAME'], host=env['HOST'])
def __init__(self):
    '''Instantiate all required variables.

    Builds the result table and a Vultr API client from the
    VULTR_TOKEN environment variable (best-effort: a failure is
    reported but does not abort construction).
    '''
    self.dataTable = PrettyTable()
    self.dataTable.field_names = ['Instance Name', 'Instance IP', 'Status']
    try:
        self.vultr = Vultr(get_env('VULTR_TOKEN'))
    except Exception:
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the best-effort behaviour.
        print("Problem during vultr object initialization")
def connect_db():
    """Return a psycopg2 connection configured from the environment.

    Production mode supplies user/password credentials; any other
    mode connects with dbname and host alone.
    """
    env = config.get_env()
    if env['ENV_MODE'] == 'production':
        # Pass connection parameters as keywords rather than building a
        # DSN with %-formatting, which broke on passwords containing
        # spaces or quote characters.
        return psycopg2.connect(dbname=env['DBNAME'], user=env['USER'],
                                password=env['PASSWORD'], host=env['HOST'])
    return psycopg2.connect(dbname=env['DBNAME'], host=env['HOST'])
def __init__(self):
    '''Instantiate prettytable for the result data, and the manager
    used to call digitalocean apis.

    Best-effort: a failure creating the manager is reported but does
    not abort construction.
    '''
    self.dataTable = PrettyTable()
    self.dataTable.field_names = ['Droplet Name', 'Droplet IP', 'Status']
    try:
        self.manager = digitalocean.Manager(
            token=get_env('DIGITAL_OCEAN_TOKEN'))
    except Exception:
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the best-effort behaviour.
        print("Problem occurred here")
def decode_token(token):
    """Decode a JWT signed with SECRET_KEY.

    Returns the decoded payload; on expiry, returns a (response, 400)
    pair; on any other decode failure, raises.
    """
    try:
        return jwt.decode(token, get_env('SECRET_KEY'))
    except jwt.ExpiredSignature:
        return make_response(jsonify({'msg': 'Token is expired'})), 400
    except jwt.DecodeError:
        raise Exception('Error decoding')
def __init__(self):
    """Authorize a gspread client against the Google Drive/Sheets APIs."""
    # setup for google sheet - Google Drive API Instance
    self.scope = ['https://spreadsheets.google.com/feeds',
                  'https://www.googleapis.com/auth/drive']
    # Service-account JSON lives two directories up from this module.
    secret_file = path.join(
        path.dirname(__file__), '../../', get_env('CLIENT_SECRET_FILE'))
    self.credentials = ServiceAccountCredentials.from_json_keyfile_name(
        secret_file, self.scope)
    self.client = gspread.authorize(self.credentials)
def get(self):
    # Scratchpad/test endpoint: always returns the fixed string below.
    # Outside the dev environment it returns immediately; the remainder
    # of the body is commented-out manual-test code kept for reference.
    result = "this is test"
    if get_env() != 'dev':
        return result
    """ self.return_error(10000) """
    # from services.account_service import AccountService
    # account_service = AccountService()
    # with MysqlTools().session_scope() as session:
    # print('***')
    # Perform a real recharge
    # result = account_service.do_recharge(session, '2018111309375761185989916621239257168264426978617039743301831998', 1, 1, 123, 123)
    # if result == 0:
    # session.commit()
    # else:
    # self.return_error(result)
    # Interface for fetching account info
    # result = account_service.get_inner_user_account_info(session, 'b7996c045505422c8a44155a19f49e12')
    # Get the user's balance for the given currency
    # result = account_service.get_inner_user_account_by_token(session, 'b7996c045505422c8a44155a19f49e12', 1)
    # User places a bet
    # result = account_service.do_bet(session, 'b7996c045505422c8a44155a19f49e12', 1, get_decimal('0.1'), '123')
    # session.commit()
    # User wins money
    # result = account_service.do_win(session, 'b7996c045505422c8a44155a19f49e12', 1, get_decimal('0.1'), '123')
    # session.commit()
    # Interface for fetching a list of user account info
    # result = account_service.get_user_account_info_list(session, ['7507a61d22f64ae29b9ce36585bcc289', '7507a61d22f64ae29b9ce36585bcc287'])
    # Verify the payment password; in the test environment 111111 always passes!!!!
    # result = account_service.check_pay_password('b7996c045505422c8a44155a19f49e12', '$2a$12$wYNR0BQN5EXqNZy.57OLMuR/rSe75.XlZsOupEPG14HRgbNzs1s4W')
    # Withdrawal request
    # result = account_service.apply_withdraw('7507a61d22f64ae29b9ce36585bcc289', '1', get_decimal('0.1'), get_decimal('0.01'), '123456')
    # Apply the withdrawal result
    # result = account_service.do_withdraw('20181113153610545304913763198044', '456789', '6')
    # Account transaction history
    # result = account_service.get_account_water('7507a61d22f64ae29b9ce36585bcc289', change_type='999', page_limit='3', page_num='2')
    # from services.wallet_withdraw_service import WalletWithdrawService
    # wallet_withdraw_service = WalletWithdrawService()
    # result = wallet_withdraw_service.get_account_water('2018111309375761185989916621239257168264426978617039743301831998')
    # from services.vcode_service import VcodeService
    # vcode_service = VcodeService()
    # result = vcode_service.send_vcode_by_email('gather')
    # result = vcode_service.check_sms_email_vcode('gather', 'AHGmiMWo')
    return result
def decode_token(token):
    """Decode and verify a JWT signed with SECRET_KEY.

    Returns the decoded payload; raises with a descriptive message on
    expiry or signature failure.
    """
    secret = get_env('SECRET_KEY')
    try:
        return jwt.decode(token, secret, verify=True)
    except jwt.ExpiredSignature:
        raise Exception('Token is Expired')
    except jwt.DecodeError:
        raise Exception('Invalid Token - Could Not Verify Signature')
def Post():
    """Accept a JSON body with a 'url' key and enqueue it for crawling.

    Publishes the URL to the 'crawl-url' RabbitMQ queue and returns a
    201 Created JSON response.
    """
    content = request.get_json(silent=True)
    parameters = pika.URLParameters(get_env().RABBITMQCONNECTSTRING)
    connection = pika.BlockingConnection(parameters)
    try:
        channel = connection.channel()
        channel.queue_declare(queue='crawl-url')
        channel.basic_publish(exchange='',
                              routing_key='crawl-url',
                              body=content['url'])
    finally:
        # Previously the AMQP connection leaked whenever declaring the
        # queue or publishing raised; always close it.
        connection.close()
    data = {'message': 'Created', 'code': 'SUCCESS'}
    return make_response(jsonify(data), 201)
def get_argument_dict(self, must_keys=None, format_str=False, format_keys=True, format_eval=True, check_form_token=False, time_key_list=None): """ :param must_keys: must_keys=["aa", "bb"] 判断出入列表里的值,是否在请求参数里,没有报错 :param format_str: 是否需要把所有int类型,强转成字符串 :param format_eval: 是否开启 把字符串 '["a","b"]' '{"a":1,"b":"1"}' 强转回list dict :param format_keys: 是否开启 把key的值 转为全小写 :param check_form_token: 是否校验表单中的随机字符串,所有会修改数据的请求,都应该校验!! :param time_key_list: 转换时区的校验时间key补充字段列表 :return: """ # 获取参数字典 request_args = self.get_request_content() request_args = formate_args(request_args, format_str, format_keys, format_eval) if get_env() != 'dev' and check_form_token: if 'form_token' not in request_args: self.return_error(10018) check_url = get_user_center_conf()[get_env( )]['base_url'] + '/transfer/' + str(request_args['form_token']) check_result = transfer_to_platform(check_url) if not check_result: self.return_error(10018) request_args.pop('form_token') # 判断必填字段 if must_keys: for key in must_keys: if key not in request_args: raise_logger("请求缺少 [%s] 参数" % key, lv="error") self.return_error(20003) return self.timezone_transform(request_args, time_key_list)
def get_pull_request_report_of_org(self):
    """Report open-PR counts per repository of the configured organisation.

    Returns a list of OrderedDicts (name, pending_pull_request) sorted
    by repository name.
    """
    org = self.git_instance.get_organization(get_env('ORGANISATION'))
    report = [
        OrderedDict([
            ("name", repo.name),
            ("pending_pull_request",
             self.get_pull_request_count(repo.full_name)),
        ])
        for repo in org.get_repos()
    ]
    return sorted(report, key=itemgetter('name'))
def __init__(self, bootcamp):
    """Load the settings file for *bootcamp* and parse its schedule dates."""
    # SETTING_FILE is "<dir>/<basename>"; the bootcamp name is spliced
    # in between to locate that bootcamp's copy of the file.
    directory, basename = get_env('SETTING_FILE').rsplit('/', 1)
    settings_path = os.path.join(directory, bootcamp, basename)
    with open(settings_path) as handle:
        schedule = json.load(handle)["schedule"]
    self.registration = {
        key: ScheduleHelper.to_time(value)
        for key, value in schedule["registration_date"].items()
    }
    self.days = {
        key: ScheduleHelper.to_time(value)
        for key, value in schedule["days_starting_date"].items()
    }
    self.start = ScheduleHelper.to_time(schedule["start_date"])
    # "now" is UTC shifted by +2h — presumably the bootcamp's local
    # timezone; confirm against ScheduleHelper.to_time's convention.
    self.now = datetime.now(timezone.utc) + timedelta(hours=2)
def create_ticket_block(self, ticket_data):
    """Build the Slack Block Kit payload announcing a newly created ticket.

    ticket_data must provide 'username', 'title', 'description',
    'priority' and 'category' keys.
    """
    view_url = get_env('EMPLOYEE_VIEW_URL')
    return [
        {
            "type": "header",
            "text": {
                "type": "plain_text",
                "text": ":memo: \t" + ticket_data["username"] + " Created a Ticket",
                "emoji": True,
            },
        },
        {"type": "divider"},
        {
            "type": "section",
            "text": {
                "type": "mrkdwn",
                "text": ":rolled_up_newspaper: *Title*: " + ticket_data["title"] + "\n\n",
            },
        },
        {
            "type": "section",
            "text": {
                "type": "mrkdwn",
                "text": ":crystal_ball: *Description*: " + ticket_data["description"] + "\n\n",
            },
        },
        {
            "type": "section",
            "fields": [
                {
                    "type": "mrkdwn",
                    "text": ":signal_strength: *Priority*: " + ticket_data["priority"],
                },
                {
                    "type": "mrkdwn",
                    "text": ":label: *Category*: " + ticket_data["category"],
                },
            ],
        },
        {
            "type": "section",
            "text": {
                "type": "mrkdwn",
                "text": "*<" + view_url + "|View>* \n\n",
            },
        },
    ]
def get_users_without_profile_name(self):
    """List organisation members whose GitHub profile has no display name.

    Returns a list of OrderedDicts (No, Login User, Profile Link), or
    False when every member has a name set.
    """
    org = self.git_instance.get_organization(get_env('ORGANISATION'))
    users = []
    for member in org.get_members():
        if member.name:
            continue
        users.append(OrderedDict([
            ("No", len(users) + 1),   # running row number, 1-based
            ("Login User", member.login),
            ("Profile Link", member.html_url),
        ]))
    return users if users else False
class EnvConfig(object):
    """Base application configuration assembled from environment variables."""

    DEBUG = False
    SECRET = get_env("SECRET_KEY")
    SQLALCHEMY_DATABASE_URI = get_env('DATABASE_URL')

    # Celery broker/backend: honour REDIS_URL when set, otherwise point
    # at a locally reachable redis instance.
    REDIS_HOST = "0.0.0.0"
    REDIS_PORT = 6379
    BROKER_URL = environ.get('REDIS_URL',
                             f"redis://{REDIS_HOST}:{REDIS_PORT}/0")
    CELERY_RESULT_BACKEND = BROKER_URL

    # Outgoing mail settings.
    MAIL_SERVER = get_env('MAIL_SERVER')
    MAIL_PORT = get_env('MAIL_PORT')
    MAIL_USE_TLS = get_env('MAIL_USE_TLS')
    MAIL_USERNAME = get_env('MAIL_USERNAME')
    MAIL_PASSWORD = get_env('MAIL_PASSWORD')
def handle_exception(error):
    """Error handler called when a ValidationError is raised"""
    # Known HTTP errors pass straight through with their own
    # description and status code.
    if isinstance(error, HTTPException):
        return make_response(jsonify({'msg': error.description})), error.code
    # Anything else: log locally, report to bugsnag (and rollbar in
    # staging/production), then return a generic 500.
    traceback.print_exc()
    error_logger.exception(str(error))
    bugsnag.notify(error)
    if get_env('APP_ENV') in ['staging', 'production']:
        rollbar.report_exc_info()
    fallback = {
        'msg': 'An error occurred while processing your request. Please contact Admin.'
    }
    return make_response(jsonify(fallback)), 500
def create_token(user_id):
    """Generate the Auth Token for *user_id*.

    :return: the encoded JWT on success, or — preserving the original
        contract — the raised exception object on failure.
    """
    try:
        # utcnow(): JWT 'exp'/'iat' must be UTC. The previous
        # datetime.now() produced local time, skewing token expiry by
        # the server's UTC offset.
        now = datetime.utcnow()
        payload = {
            'exp': now + timedelta(days=1, seconds=120),
            'iat': now,
            'sub': user_id
        }
        return jwt.encode(payload, get_env('SECRET_KEY'), algorithm='HS256')
    except Exception as e:
        # NOTE(review): returning the exception object (instead of
        # raising) matches the original behaviour callers rely on.
        return e
def github_issue_report(args):
    """Send the organisation's issue report to Slack.

    The report is split into chunks of 25 rows so each Slack message
    stays readable; failures send an apology message instead.
    """
    try:
        result = __git_helper.get_issue_request_report_of_org()
        chunk_size = 25
        # Chunk the big list as it is neater to send several slack messages
        header = "Hello <@" + args['user_id'] + "> \n"
        header += "*Issues report for " + get_env('ORGANISATION') + "* \n"
        __slack_helper.send_message(header, args['response_url'])
        for chunk in chunk_list_group(result, chunk_size):
            table = tabulate(chunk, headers="keys", tablefmt="psql")
            __slack_helper.send_message("```\n" + table + "\n```\n",
                                        args['response_url'])
    except Exception:
        exception = "Hello <@" + args['user_id'] + \
            "> there is a problem with your request. Please contact the administrator"
        __slack_helper.send_message(exception, args['response_url'])
def send_as_attachment(self, content, channel):
    """Upload the CSV file at path *content* to a Slack channel.

    Uses Slack's files.upload endpoint; when Slack accepts the upload,
    the local file is deleted.
    """
    url = "https://slack.com/api/files.upload"
    payload = {
        "token": get_env('BOT_TOKEN'),
        "channels": channel,
        "filename": "git_users.csv",
        "title": "List of users in emisgroup organisation"
    }
    # 'with' guarantees the handle is closed: the original leaked it,
    # which can make the os.remove below fail on Windows.
    with open(content, 'rb') as attachment:
        files = {'file': (content, attachment, 'csv')}
        response = requests.request("POST", url, data=payload, files=files)
    result = json.loads(response.text)
    if result["ok"]:
        os.remove(content)
def pickup_ticket_block(self, ticket_data):
    """Build the Slack Block Kit message confirming a ticket was picked up.

    ticket_data must provide 'title' and 'username' keys.
    """
    uhda_url = get_env('UHDA_URL')
    return [
        {
            "type": "header",
            "text": {
                "type": "plain_text",
                "text": ":admission_tickets: Your ticket has been picked up:\n",
                "emoji": True,
            },
        },
        {"type": "divider"},
        {
            "type": "context",
            "elements": [
                {
                    "type": "mrkdwn",
                    "text": ":rolled_up_newspaper: *Ticket*: " + ticket_data["title"],
                },
                {
                    "type": "mrkdwn",
                    "text": ":heavy_check_mark: *Picked Up By:* " + ticket_data["username"],
                },
            ],
        },
        {
            "type": "section",
            "text": {
                "type": "mrkdwn",
                "text": "Thanks for your patience! We're on it!",
            },
        },
        {
            "type": "section",
            "text": {
                "type": "mrkdwn",
                "text": "*<" + uhda_url + "|Open UHDA>* \n\n",
            },
        },
    ]
def create_app(config_name):
    """Application factory: build and wire up the Flask API app.

    config_name selects the environment config object; the pyfile then
    layers instance-specific overrides on top.
    """
    app = FlaskAPI(__name__, instance_relative_config=False)

    app.config.from_object(env.app_env[config_name])
    app.config.from_pyfile('../config/env.py')
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    app.config['SECRET_KEY'] = get_env('SECRET_KEY')

    CORS(app)        # CORS
    JWTManager(app)  # JWT

    # Blueprints
    blueprint = BaseBlueprint(app)
    blueprint.register()

    from . import models
    db.init_app(app)
    return app
def assigned_ticket_block(self, ticket_data):
    """Build the Slack Block Kit message announcing a ticket assignment.

    ticket_data must provide 'title' and 'username' keys.
    """
    uhda_url = get_env('UHDA_URL')
    return [
        {
            "type": "header",
            "text": {
                "type": "plain_text",
                "text": ":admission_tickets: Ticket assignment updated:\n",
                "emoji": True,
            },
        },
        {"type": "divider"},
        {
            "type": "context",
            "elements": [
                {
                    "type": "mrkdwn",
                    "text": ":rolled_up_newspaper: *Ticket*: " + ticket_data["title"],
                },
            ],
        },
        {
            "type": "context",
            "elements": [
                {
                    "type": "mrkdwn",
                    "text": ":heavy_check_mark: *" + ticket_data["username"] + "* has been assigned to your ticket!",
                },
            ],
        },
        {
            "type": "section",
            "text": {
                "type": "mrkdwn",
                "text": "*<" + uhda_url + "|Open UHDA>* \n\n",
            },
        },
    ]
def field_update_block(self, ticket_data, field):
    """Build the Slack Block Kit message for a ticket *field* change.

    ticket_data must provide 'title', 'username' and the updated
    *field* key itself.
    """
    uhda_url = get_env('UHDA_URL')
    change_line = (":heavy_check_mark: *" + ticket_data["username"]
                   + "* set the ticket " + field
                   + " to *" + ticket_data[field] + "*")
    return [
        {
            "type": "header",
            "text": {
                "type": "plain_text",
                "text": ":admission_tickets: Ticket " + field + " updated:\n",
                "emoji": True,
            },
        },
        {"type": "divider"},
        {
            "type": "context",
            "elements": [
                {
                    "type": "mrkdwn",
                    "text": ":rolled_up_newspaper: *Ticket*: " + ticket_data["title"],
                },
            ],
        },
        {
            "type": "context",
            "elements": [
                {"type": "mrkdwn", "text": change_line},
            ],
        },
        {
            "type": "section",
            "text": {
                "type": "mrkdwn",
                "text": "*<" + uhda_url + "|Open UHDA>* \n\n",
            },
        },
    ]
def get_users_in_organisation(self):
    """Dump the organisation's members to a timestamped CSV file.

    Members with a profile name are sorted alphabetically and listed
    first (under a header row), followed by members without a name.
    Returns the CSV file path, or False when no member has a name.
    """
    organisation = get_env('ORGANISATION')
    members = self.git_instance.get_organization(organisation).get_members()
    dir_path = os.path.dirname(os.path.realpath(__file__)) + "/../storage"
    users = []
    none = []
    for member in members:
        print(member.name)
        if member.name is not None:
            users.append([member.name, member.login, member.html_url])
        else:
            none.append([member.name, member.login, member.html_url])
    # The empty check must run before the header row is inserted — the
    # original checked afterwards, so it could never return False.
    if not users:
        return False
    # list.sort() sorts in place and returns None; the original bound
    # that None to a name shadowing the builtin ``sorted``.
    users.sort(key=lambda row: row[0])
    users.insert(0, ["User Name", "Login User", "Profile Link"])
    file_name = dir_path + "/git_users_" + \
        str(datetime.datetime.now()) + ".csv"
    self.write_data_to_file(file_name, users + none)
    return file_name
def get_pull_request(self, repo):
    """Report the open pull requests of *repo* in the configured org.

    Returns False when there are no open PRs; otherwise a list of
    OrderedDicts with the PR url, author, required review count and
    approved review count.
    """
    repo_name = get_env('ORGANISATION') + "/" + repo
    pull_requests = self.git_instance.get_repo(repo_name).get_pulls()
    if pull_requests.totalCount == 0:
        return False
    data = []
    for pr in pull_requests:
        approved = [review for review in pr.get_reviews()
                    if review.state == "APPROVED"]
        required = self.get_required_pull_request_reviews_count(
            repo_name, pr.base.ref)
        data.append(OrderedDict([
            ("branch_name", pr.html_url),
            ("created_by", pr.user.name),
            ("required_reviews", required),
            ("approved_reviews", len(approved)),
        ]))
    return data
# Python 2 script (uses the ``print`` statement): tweets pending rows
# from the ``tweets`` table during a weekday/business-hours window.
import soroban as s
import os
import config
import random
import pprint
from twitter import *
from datetime import datetime, timedelta
from contextlib import closing
from connect import DBConnection
from config import get_env

if __name__ == '__main__':
    env = get_env()
    # only execute Mon-Fri 8am-4pm EST
    # EST approximated as a fixed UTC-4 offset (no DST handling).
    est = (datetime.utcnow() + timedelta(hours=-4)).timetuple()
    # est[6] = day (Mon = 0, Fri = 4)
    # est[3] = hour (8am = 8, 3pm = 16)
    # NOTE(review): the guard below is commented out (and contains a
    # stray second "if"), so the script currently always proceeds —
    # confirm whether the time window should be enforced.
    #if est[6] in range(5) and if est[3] in range(8,18):
    print "eligible date/time"
    conn = DBConnection()
    cur = conn.connection.cursor()
    # Rows not yet tweeted are the work queue.
    cur.execute("""SELECT * FROM tweets WHERE tweeted = False""")
    rows = cur.fetchall()
    num_rows = len(rows)
    # tweet times left in week formula
    # daysofweek = 4
    # dayofweek = 0 on Monday, 4 on Friday
    # endofdayhour = 16
    # currenthour = 8 at 8am, 16 at 4pm
    # (daysinweek - dayofweek) * 9 - (endofdayhour - currenthour) + 1
def test_correct_vars_in_prod(self):
    """ENV_MODE=production must be reflected by config.get_env()."""
    os.environ['ENV_MODE'] = 'production'
    env_vars = config.get_env()
    assert os.environ.get('ENV_MODE') == 'production'
    assert env_vars['ENV_MODE'] == 'production'
def test_correct_vars_in_dev(self):
    """Dev defaults: localhost host and the 'soroban' database, both in
    the returned dict and exported into os.environ."""
    env_vars = config.get_env()
    assert env_vars['HOST'] == 'localhost'
    assert env_vars['DBNAME'] == 'soroban'
    assert os.environ.get('HOST') == 'localhost'
    assert os.environ.get('DBNAME') == 'soroban'
def test_envmode_is_dev_when_not_set(self):
    """Calling get_env() without ENV_MODE set defaults it to 'development'."""
    config.get_env()
    assert os.environ.get('ENV_MODE') == 'development'