def draft_release_with_name(self, name: str) -> Release:
    """Return this repository's draft release whose name equals *name*, or None."""
    # First draft release with a matching name wins; None when nothing matches.
    for candidate in self.repository.releases():
        if candidate.draft and candidate.name == name:
            return candidate
    return None
def release_tags(github_helper: GitHubRepositoryHelper, repo: git.Repo) -> 'dict[str, str]':
    """Map commit SHAs to their release tag names.

    Only repository tags that (a) have a matching GitHub release tag and
    (b) parse as valid SemVer are included.

    NOTE(review): the previous annotation `[str]` was a list literal, not a
    type expression, and the function actually returns a dict keyed by
    commit hexsha — the annotation now reflects that.

    :param github_helper: provides the names of released tags
    :param repo: local git repository whose tags are examined
    :return: dict mapping commit hexsha -> tag name
    """
    def is_valid_semver(tag_name):
        # Warn about (and exclude) anything parse_version_info rejects.
        try:
            parse_version_info(tag_name)
            return True
        except ValueError:
            warning('{tag} is not a valid SemVer string'.format(tag=tag_name))
            return False

    released = github_helper.release_tags()
    # A plain dict comprehension replaces the former pydash chain; this also
    # removes the need for the pylint undefined-variable workaround comments.
    # Later tags pointing at the same commit overwrite earlier ones, matching
    # the previous key_by('commit') behaviour.
    return {
        tag.commit.hexsha: tag.name
        for tag in repo.tags
        if tag.name in released and is_valid_semver(tag.name)
    }
def reducer(acc, value):
    """Fold *value* into *acc*, merging records that share the join key `on`.

    `on` is a free variable (key name) resolved from the enclosing scope.
    If an element of *acc* already has the same `on`-value as *value*,
    the non-None entries of *value* are copied onto it; otherwise *value*
    is appended. Mutates and returns *acc*.
    """
    # Builtin next() replaces the former pydash _.find: first match or None.
    existing = next((entry for entry in acc if entry.get(on) == value.get(on)), None)
    if existing is None:
        acc.append(value)
    else:
        for key, val in value.items():
            if val is not None:  # None means "no update for this field"
                existing[key] = val
    return acc
def _find_group_by_name(all_group_details, group_name):
    # First group dict whose 'name' matches, or None.
    # (Stdlib replacement for the repeated pydash `_.find(..., {'name': ...})`.)
    return next(
        (group for group in all_group_details['groups']
         if group.get('name') == group_name),
        None,
    )


def execute_actions(all_group_details, sc_client):
    """
    Syncs the user base as per the user provided input file.

    Reads the module-level `actions` mapping; each entry carries an 'action'
    of 'add', 'add to group', 'deactivate' or 'remove from group'.

    :param all_group_details: All the group details in the organisation of
        requesting user
    :param sc_client: Client to access the SafetyCulture methods
    :return: None
    """
    for action_entry in actions.values():
        group_list = action_entry['groups']
        action = action_entry['action']
        if action == 'add':
            response = json.loads(sc_client.add_user_to_org(action_entry['user_data']))
            if response:
                user_id = response['user']['user_id']
                # Iterating an empty group_list is a no-op, so no guard needed.
                for group_name in group_list:
                    target_group = _find_group_by_name(all_group_details, group_name)
                    if target_group:
                        sc_client.add_user_to_group(target_group['id'],
                                                    {'user_id': user_id})
        elif action == 'add to group':
            user_id = action_entry['user_id']
            for group_name in group_list:
                target_group = _find_group_by_name(all_group_details, group_name)
                if target_group:
                    sc_client.add_user_to_group(target_group['id'],
                                                {'user_id': user_id})
        elif action == 'deactivate':
            sc_client.update_user(action_entry['user_id'], {'status': 'inactive'})
        elif action == 'remove from group':
            user_id = action_entry['user_id']
            for group_name in group_list:
                target_group = _find_group_by_name(all_group_details, group_name)
                if target_group:
                    sc_client.remove_user(target_group['id'], user_id)
def get_all_users_and_groups(api_token):
    """
    Exports a dictionary of all active users from iAuditor organisation and
    their associated groups.

    :param api_token: API token used to construct the SafetyCulture client
    :return: A sorted dictionary (email -> user info) of all active users and
        their associated groups
    """
    sc_client = sp.SafetyCulture(api_token)
    org_id = sc_client.get_my_org()
    user_map = {}
    users_of_org = json.loads(sc_client.get_users_of_group(org_id))
    for user in users_of_org['users']:
        if user['status'] != 'active':
            continue
        user_map[user['email']] = {
            'groups': [],
            'firstname': user['firstname'],
            'lastname': user['lastname'],
            'user_id': user['user_id'],
        }
    all_group_details = json.loads(sc_client.get_all_groups_in_org().content)
    # Iterate the group records directly; this removes the former pydash
    # `_.find(groups, {'id': group_id})` round-trip per group.
    for group in all_group_details['groups']:
        group_id = group['id']
        group_name = group['name']
        users_in_group = json.loads(sc_client.get_users_of_group(group_id))
        for user in users_in_group['users']:
            if user['status'] != 'active':
                continue
            email = user['email']
            if email not in user_map:
                # BUG FIX: the previous code indexed user_map[email] before
                # checking membership, raising KeyError for a user present in
                # a group but absent from the org's active-user list (it also
                # had an unreachable else branch and a duplicated assignment).
                # Such users are now skipped.
                continue
            user_map[email]['user_id'] = user['user_id']
            # Preserves original behaviour: both the group name and the group
            # id are appended to the user's 'groups' list.
            if group_name not in user_map[email]['groups']:
                user_map[email]['groups'].append(str(group_name))
                user_map[email]['groups'].append(str(group_id))
    sorted_user_map = OrderedDict(sorted(user_map.items(), key=lambda t: t[0]))
    return sorted_user_map
def release_tags(self, ) -> typing.List[str]:
    """Return a mapping of commit hexsha to release tag name.

    A tag is included only when it corresponds to a GitHub release and is a
    valid SemVer string.
    NOTE(review): despite the List[str] annotation, the value produced is a
    dict (commit -> tag), exactly as in the original implementation.
    """
    def is_valid_semver(tag_name):
        # Reject (with a warning) tags that do not parse as SemVer.
        try:
            version.parse_to_semver(tag_name)
        except ValueError:
            logger.warning(
                '{tag} is not a valid SemVer string'.format(tag=tag_name))
            return False
        return True

    released = self.github_helper.release_tags()
    # commit hexsha -> tag name; later tags on the same commit overwrite
    # earlier ones (same as the previous key_by('commit') chain).
    return {
        repo_tag.commit.hexsha: repo_tag.name
        for repo_tag in self.git_helper.repo.tags
        if repo_tag.name in released and is_valid_semver(repo_tag.name)
    }
async def refresh_auth(payload: Token, request: Request):
    """Issue a fresh access token in exchange for a valid refresh token.

    Rejects blacklisted tokens, tokens whose user_id does not match the
    requesting user, and non-refresh tokens.

    :param payload: body carrying the refresh token in `access_token`
    :param request: request whose state holds the authenticated user_id
    :raises HTTPException: 401 on any validation failure, 400 on wrong token type
    """
    credentials_exception = HTTPException(
        status_code=401,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        # TODO: get access_token from middleware, check that user ids match so you can refresh
        # BUG FIX: membership test replaces pydash's truthy _.find, which
        # silently missed a falsy (empty-string) blacklisted token.
        if payload.access_token in refresh_token_blacklist_cache:
            raise credentials_exception
        # BUG FIX: PyJWT's decode takes `algorithms` (a list); the previous
        # `algorithm=` kwarg is not part of the decode API.
        decoded_token = jwt.decode(
            payload.access_token,
            config.env.token_secret_key,
            algorithms=[config.env.token_algorithm],
        )
        user_id = decoded_token.get("user_id")
        if user_id is None or user_id != request.state.user_id:
            raise credentials_exception
        is_refresh_token = decoded_token.get("refresh_token")
        # Loose equality kept on purpose: JSON may round-trip true as 1/True.
        if is_refresh_token != True:
            raise HTTPException(400, "Invalid token type")
        jwt_token = __create_access_token({"user_id": user_id})
        return {
            "bearer": {
                "access_token": jwt_token,
                "token_type": "bearer"
            },
            "refresh": {
                "access_token": payload.access_token,
                "token_type": "refresh"
            }
        }
    except PyJWTError:
        raise credentials_exception
async def login_user(credentials: LoginRequest):
    """Authenticate a user and return bearer + refresh JWTs.

    :param credentials: login request carrying email and password
    :raises HTTPException: 400 when no matching user exists
    """
    email: str = credentials.email
    password: str = credentials.password
    # SECURITY NOTE(review): this compares against a plaintext password store;
    # passwords should be hashed (e.g. bcrypt) and verified with a
    # constant-time check. Flagged here, not silently changed.
    # Builtin next() replaces the pydash matcher-shorthand lookup.
    user = next(
        (u for u in user_repository
         if u.get("email") == email and u.get("password") == password),
        None,
    )
    if user is None:
        raise HTTPException(status_code=400,
                            detail="Invalid credentials or user not found")
    # Note that you can pass any data to encode, for example the user IP if you want tokens to be assigned to a single IP
    jwt_token = __create_access_token({"user_id": user["id"]})
    refresh_token = __create_refresh_token({"user_id": user["id"]})
    return {
        "bearer": {
            "access_token": jwt_token,
            "token_type": "bearer"
        },
        "refresh": {
            "access_token": refresh_token,
            "token_type": "refresh"
        }
    }
def get_key(self):
    """Return the gpg key to use: the only one when a single key exists,
    otherwise prompt the user to pick one.

    Raises Exception when no key can be determined.
    """
    c = self.app.conf  # kept to mirror the original (value unused here)
    spinner = self.app.spinner
    gpg_keys = self.app.gpg_keys
    gpg_key = None
    if len(gpg_keys) == 1:
        gpg_key = gpg_keys[0]
    elif len(gpg_keys) > 1:
        # Pause the spinner while the interactive prompt owns the terminal.
        spinner.stop()
        choices = [
            key.pub.key.short + ': ' + key.name + ' <' + key.email + '>'
            for key in gpg_keys
        ]
        answer = munchify(
            inquirer.prompt([
                inquirer.List('gpg_key',
                              message='choose a gpg key',
                              choices=choices)
            ])).gpg_key
        spinner.start()
        # The short key id is everything before the first ':' of the answer.
        short_id = answer[:answer.index(':')]
        gpg_key = next(
            (key for key in gpg_keys if key.pub.key.short == short_id), None)
    if not gpg_key:
        raise Exception('failed to find gpg key')
    return gpg_key
index += 1 if index > 20: return # load up the taxonomy categories = [] with open("taxonomy.csv", "r") as csvfile: cats = csv.DictReader(csvfile) for cat in cats: categories.append(cat) directories = _.keys(categories[0]) print("Supported directories: {}".format(json.dumps(directories))) find = _.find(categories, {'gcid': "gcid:{}".format(args.gcid)}) if find is None: find = {'gcid': 'gcid:{}'.format(args.gcid)} print("") print("Current config: {}".format(json.dumps(find))) for directory in directories: # handle gcid condition if directory == 'google': find['google'] = args.name continue # handle gcid condition if directory == 'googleEs': find['googleEs'] = args.name
def reference_type_for_type_identifier(reference_type_identifier: str):
    """Return the entry in REFERENCE_TYPES whose identifier matches, or None.

    :param reference_type_identifier: identifier string to look up
    """
    # Builtin next() replaces pydash _.find: first match or None.
    return next(
        (ref_type for ref_type in REFERENCE_TYPES
         if ref_type.identifier == reference_type_identifier),
        None,
    )
def is_compound_document_included(self, included, item):
    """Find *item* (matched by id and type) in the payload's 'included' list.

    Returns the matching resource object, or a falsy value when absent.
    NOTE(review): the `included` parameter is not used; the lookup reads
    self.payload instead — confirm this is intended.
    """
    query = {
        'id': item.get('id'),
        'type': item.get('type'),
    }
    return _.find(self.payload.get('included', {}), query)
def auto_compund(wallet_address, private_key, min_amount_to_harvest):
    """Auto-compound AUTO farm rewards on Binance Smart Chain.

    Harvests pending AUTO from every Autofarm pool whose pending value exceeds
    ``min_amount_to_harvest`` (USD), swaps half of the harvest to WBNB on
    PancakeSwap, adds AUTO-WBNB liquidity, and deposits the resulting LP
    tokens back into the AUTO-WBNB vault.

    NOTE(review): name keeps the original spelling ("compund") to preserve
    the public interface; several *_gwei variables appear to actually hold
    wei-denominated ints (they are passed to w3.fromWei(..., 'ether')) —
    confirm before renaming.

    :param wallet_address: wallet address used as sender and recipient
    :param private_key: key used to sign every transaction
    :param min_amount_to_harvest: USD threshold below which a pool is skipped
    """
    w3 = Web3(Web3.HTTPProvider('https://bsc-dataseed1.binance.org:443'))
    auto_contract = w3.eth.contract(address=AUTO_CONTRACT, abi=AUTO_ABI)
    pancake_swap_contract = w3.eth.contract(address=PANCAKE_SWAP_CONTRACT,
                                            abi=PANCAKE_SWAP_ABI)
    # Pool ids are the keys of the 'pools' object in Autofarm's farm-info feed.
    auto_farms_data = requests.get(AUTO_FARM_INFO_URL).json()
    auto_pool_ids = _.get(auto_farms_data, 'pools', {}).keys()

    def get_auto_wbnb_price():
        # Spot USD prices for AUTO and WBNB from PancakeSwap's price API;
        # defaults to 0 when a price is missing (checked below).
        pancake_swap_price = requests.get(PANCAKE_PRICE_URL).json()
        auto_price_usd = Decimal(_.get(pancake_swap_price, 'prices.AUTO', 0))
        wbnb_price_usd = Decimal(_.get(pancake_swap_price, 'prices.WBNB', 0))
        return auto_price_usd, wbnb_price_usd

    auto_price_usd, wbnb_price_usd = get_auto_wbnb_price()
    if not auto_pool_ids or not auto_price_usd or not wbnb_price_usd:
        print("Missing pool and/or price data. Exiting...")
        exit()

    def withdraw_auto_token_if_necessary(acc_auto_withdraw_gwei, pool_id):
        # Reducer over pool ids: harvests one pool when its pending AUTO value
        # (in USD) exceeds the threshold; returns the running harvested total.
        pool_id = int(pool_id)
        current_pending_auto_gwei = auto_contract.functions.pendingAUTO(
            pool_id, wallet_address).call()
        current_pending_auto_eth = w3.fromWei(current_pending_auto_gwei,
                                              'ether')
        if current_pending_auto_eth * auto_price_usd > min_amount_to_harvest:
            print(
                f"- Current pending auto for pool id {pool_id}: {current_pending_auto_eth} = ${current_pending_auto_eth * auto_price_usd:.2f}. Withdrawing...",
                end='')
            # withdrawing a pool with 0 only withdraw the reward = Harvest
            tx = auto_contract.functions.withdraw(pool_id, 0).buildTransaction(
                {
                    'from': wallet_address,
                    'nonce': w3.eth.getTransactionCount(wallet_address),
                    'gas': 500000
                })
            signed_tx = w3.eth.account.signTransaction(tx, private_key)
            tx_hash = w3.eth.sendRawTransaction(signed_tx.rawTransaction)
            # Blocks until the harvest transaction is mined.
            tx_receipt = w3.eth.waitForTransactionReceipt(tx_hash)
            print(
                f"...done.\n    tx hash = {tx_receipt['transactionHash'].hex()}")
            return acc_auto_withdraw_gwei + current_pending_auto_gwei
        return acc_auto_withdraw_gwei

    connected = w3.isConnected()
    if connected:
        print(
            f"Checking each pool to see if auto rewards meet the ${min_amount_to_harvest} threshold"
        )
        harvested_auto_amt_gwei = _.reduce(auto_pool_ids,
                                           withdraw_auto_token_if_necessary, 0)
        harvested_auto_amt_eth = w3.fromWei(harvested_auto_amt_gwei, 'ether')
        print(
            f"- Total harvested: Auto {harvested_auto_amt_eth} = ${harvested_auto_amt_eth * auto_price_usd:.2f}"
        )
        if harvested_auto_amt_gwei > 0:
            ## SELL HALF AUTO FOR WBNB
            half_harvested_auto_amt_gwei = int(harvested_auto_amt_gwei / 2)
            # get expected WBNB amount from pancake
            amounts_out = pancake_swap_contract.functions.getAmountsOut(
                amountIn=half_harvested_auto_amt_gwei,
                path=[AUTO_TOKEN_CONTRACT, WRAP_BNB_TOKEN_CONTRACT],
            ).call()
            # Apply the assumed slippage to get a minimum acceptable output.
            wbnb_amt_gwei = int(amounts_out[1] * (1 - PANCAKE_ASSUMED_SLIPPAGE))
            print(
                f"- Swapping AUTO {half_harvested_auto_amt_gwei} for WBNB {wbnb_amt_gwei} on pancakeswap...",
                end='')
            tx = pancake_swap_contract.functions.swapExactTokensForTokens(
                amountIn=half_harvested_auto_amt_gwei,
                amountOutMin=wbnb_amt_gwei,
                path=[AUTO_TOKEN_CONTRACT, WRAP_BNB_TOKEN_CONTRACT],
                to=wallet_address,
                deadline=int(time()) + 60 * 5  # give 5 minutes deadline
            ).buildTransaction({
                'from': wallet_address,
                'nonce': w3.eth.getTransactionCount(wallet_address),
                'gas': 500000
            })
            signed_tx = w3.eth.account.signTransaction(tx, private_key)
            tx_hash = w3.eth.sendRawTransaction(signed_tx.rawTransaction)
            tx_receipt = w3.eth.waitForTransactionReceipt(tx_hash)
            print(
                f"...done.\n    tx hash = {tx_receipt['transactionHash'].hex()}")
            # The WBNB amount actually received is read from the transfer log
            # emitted by the WBNB token contract.
            transfer_log = _.find(
                tx_receipt['logs'],
                lambda x: x['address'] == WRAP_BNB_TOKEN_CONTRACT)
            if not transfer_log or not transfer_log.get('data'):
                print(
                    f"Error: Could not find log for the WBNB transfer. Transaction must have failed."
                )
                exit(-1)
            ## ADD LIQUIDITY TO PANCAKE SWAP. SWAP AUTO & WBNB FOR WBNB-AUTO LP TOKEN
            # assumes the log 'data' field is the hex-encoded amount — TODO confirm
            received_wbnb_amt_gwei = int(transfer_log['data'], 16)
            wbnb_amt_gwei = int(received_wbnb_amt_gwei *
                                (1 - PANCAKE_ASSUMED_SLIPPAGE))
            print(
                f"- Add lidquidity AUTO-WBNB: AUTO {half_harvested_auto_amt_gwei} for WBNB {wbnb_amt_gwei} on pancakeswap...",
                end='')
            tx = pancake_swap_contract.functions.addLiquidity(
                tokenA=AUTO_TOKEN_CONTRACT,
                tokenB=WRAP_BNB_TOKEN_CONTRACT,
                amountADesired=half_harvested_auto_amt_gwei,
                amountBDesired=wbnb_amt_gwei,
                amountAMin=0,
                amountBMin=0,
                to=wallet_address,
                deadline=int(time()) + 60 * 5  # give 5 minutes deadline
            ).buildTransaction({
                'from': wallet_address,
                'nonce': w3.eth.getTransactionCount(wallet_address),
                'gas': 500000
            })
            signed_tx = w3.eth.account.signTransaction(tx, private_key)
            tx_hash = w3.eth.sendRawTransaction(signed_tx.rawTransaction)
            tx_receipt = w3.eth.waitForTransactionReceipt(tx_hash)
            print(
                f"...done.\n    tx hash = {tx_receipt['transactionHash'].hex()}")

            def find_transfer_lp_logs(log):
                # Search for the log transfering money to your wallet
                # Transfer log have 3 topics: function hash, from, to.
                # Looking for `from = 0x0` & `to = wallet_address`
                topics = log['topics']
                if len(topics) == 3 and topics[1].hex() == ROOT_ADDR:
                    # extract the leading 0 from the address in the log
                    addr = '0x' + topics[2].hex()[ADDR_SIZE -
                                                  len(wallet_address) + 2:]
                    return addr == wallet_address.lower()

            transfer_lp_log = _.find(tx_receipt['logs'], find_transfer_lp_logs)
            if not transfer_lp_log or not transfer_lp_log.get('data'):
                print(
                    f"Error: Could not find log for the LP transfer. Transaction must have failed."
                )
                exit(-1)
            liquidity_created = int(transfer_lp_log['data'], 16)
            print(
                f"- Add {w3.fromWei(liquidity_created, 'ether')} token back into AUTO-WBNB LP vault...",
                end='')
            # Deposit the freshly minted LP tokens back into the vault.
            tx = auto_contract.functions.deposit(
                AUTO_WBNB_POOL_ID, liquidity_created).buildTransaction({
                    'from': wallet_address,
                    'nonce': w3.eth.getTransactionCount(wallet_address),
                    'gas': 500000
                })
            signed_tx = w3.eth.account.signTransaction(tx, private_key)
            tx_hash = w3.eth.sendRawTransaction(signed_tx.rawTransaction)
            tx_receipt = w3.eth.waitForTransactionReceipt(tx_hash)
            print(f"...end.\n    tx hash = {tx_receipt['transactionHash'].hex()}")
    else:
        print("Connection Error!")
for config in args.config: picked_role_config_group = None configuration = _.replace(config[0], '.', '_') value = config[1] specified_role_config_group = _.get(config, '[2]', None) specified_role_config_group_display_name = _.get(config, '[2]', None) if (specified_role_config_group_display_name and _.get(role_config_group_name_displayname_mapping, "" + specified_role_config_group_display_name, None)): specified_role_config_group = role_config_group_name_displayname_mapping[ specified_role_config_group_display_name] if configuration in config_by_group: structures_for_config = config_by_group[configuration] picked_role_config_group = specified_role_config_group or _.find( structures_for_config, lambda x: _.ends_with(x, '-BASE')) if _.index_of(structures_for_config, picked_role_config_group) != -1: # updating configuration print("Updating: '" + configuration + "' for '" + picked_role_config_group + "' config group") updating_counter += 1 try: new_config = cm_client.ApiConfig(name=configuration, value=value) new_config_list = cm_client.ApiConfigList([new_config]) res = role_config_group_resource_api.update_config( cluster_name=cluster, role_config_group_name=picked_role_config_group, service_name=service, message="",