def send_api_request(test_case_name, api, interface=2, request_data='request.json', credential=None):
    """Send an HTTP request for *test_case_name* against the configured CM server.

    The test data file is sent as query parameters for GET/DELETE and as a
    JSON body for every other method.  When ``interface`` is 2 a JWT token is
    computed over the method, URL and body and attached to the request.
    """
    config = ConfigHelper()
    payload = parse_test_data_json(test_case_name, request_data)
    query_style = api['method'] in ('GET', 'DELETE')
    params = payload if query_style else {}
    body = {} if query_style else payload

    query_url = api['path']
    if params:
        query_url += '?' + urllib.parse.urlencode(params)

    token = None
    if int(interface) == 2:
        token = http_helper.get_jwt(api['method'], query_url, '',
                                    json.dumps(body) if body else '')

    base_url = (config.get_data_from_config('CM', 'protocol')
                + config.get_data_from_config('CM', 'ip')
                + ':' + str(config.get_data_from_config('CM', 'port')))
    return http_helper.send_request(
        base_url, api, interface, credential,
        params=params if params else None,
        body=json.dumps(body) if body else None,
        jwt_token=token)
def get_jwt(http_method, raw_url, header, request_body, private_key=None, app_id=None, algorithm='HS256', version='V1'):
    """Build the JWT the CM API expects: a signed payload whose ``checksum``
    is the base64 SHA-256 of ``METHOD|url|header|body``.

    :param private_key: signing key; defaults to the configured ``api_key``.
    :param app_id: JWT ``appid`` claim; defaults to configured ``application_id``.
    :return: encoded token string.
    """
    # Local import so the file-level import block stays untouched.
    import base64
    string_for_hash = '|'.join((http_method.upper(), raw_url.lower(), header, request_body))
    digest = hashlib.sha256(string_for_hash.encode('utf-8')).digest()
    # base64.b64encode emits no trailing newline, so the original
    # codecs.encode(..., 'base64') + hash_string[:-1] trimming is unnecessary.
    checksum = base64.b64encode(digest).decode('utf-8')

    config = ConfigHelper()
    if private_key is None:
        private_key = config.get_data_from_config('CM', 'api_key')
    if app_id is None:
        app_id = config.get_data_from_config('CM', 'application_id')

    payload = {
        'appid': app_id,
        'iat': time.time(),
        'version': version,
        'checksum': checksum
    }
    # NOTE(review): .decode() assumes PyJWT < 2.0, where encode() returns
    # bytes; PyJWT >= 2.0 already returns str.
    return jwt.encode(payload, private_key, algorithm=algorithm).decode('utf-8')
def get_cm_login_session(credential):
    """Return a ``requests.Session`` pre-loaded with CM login cookies.

    A browser login is performed against the configured CM address and the
    resulting cookies are copied into the session.
    """
    address = ConfigHelper().get_data_from_config('CM', 'address')
    browser_cookies = ExtendSeleniumLibrary().get_cm_cookies(address, credential)
    session = requests.Session()
    for c in browser_cookies:
        session.cookies.set(c['name'], c['value'])
    return session
def remove_hiding_pages_from_system_configuration():
    """Remove the configured page ids from CM's ``m_MenuIdHideList`` value.

    The hide list in SystemConfiguration.xml is a ';'-separated string; each
    id listed under the ``remove_hiding`` config key is dropped (first
    occurrence only) and the attribute is written back.
    """
    xpath = './/P[@Name="m_MenuIdHideList"]'
    config = ConfigHelper()
    sys_conf_path = os.path.join(config.get_data_from_config('CM', 'folder'),
                                 'SystemConfiguration.xml')
    page_numbers = config.get_data_from_config('CM', 'remove_hiding')

    hide_pages = get_attribute_value(sys_conf_path, xpath, 'Value').split(';')
    for page_id in page_numbers:
        if page_id in hide_pages:
            hide_pages.remove(page_id)
    update_attribute(sys_conf_path, xpath, 'Value', ';'.join(hide_pages))
def audit_should_be(event_type_id, expected_count, expected_description, expected_result=1):
    """Assert the audit-log rows recorded for *event_type_id*.

    Checks the row count, and that every row was written by the configured
    application name with the expected result code and a description
    matching *expected_description* (regex, anchored at the start).
    """
    app_name = ConfigHelper().get_data_from_config('CM', 'app_name')
    records = TbUserAccessLog.find_records_by_event_type(event_type_id)
    assert_equal(len(records), int(expected_count))
    for record in records:
        assert_equal(record.UserID, app_name)
        assert_equal(record.Result, int(expected_result))
        assert_not_equal(re.match(expected_description, record.Description), None)
def uploaded_ioc_tables_should_be(request, table):
    """Assert DB rows in *table* for every successfully uploaded IOC file.

    Entries in the response's ``UploadedResultInfoList`` whose
    ``UploadedStatus`` is not 1 (success) are skipped.
    """
    app_name = ConfigHelper().get_data_from_config('CM', 'app_name')
    upload_results = request.json()['Data']['UploadedResultInfoList']
    for info in upload_results:
        if info['UploadedStatus'] != 1:
            continue
        record = table.find_by_file_hash_id(info['FileHashID'])
        assert_equal(record.FileName, info['FileName'])
        assert_equal(record.UploadedFrom, 1)
        assert_equal(record.UploadedBy, app_name)
def ioc_membership_validation(res, expected):
    """Validate the ``UploadedFrom``/``UploadedBy`` columns of the IOC file
    record created by the upload response *res*.

    :param expected: dict with an ``UploadedFrom`` key (1 = application,
        2 = admin account).
    :raises ValueError: when ``UploadedFrom`` is neither 1 nor 2.
    :raises AssertionError: when a DB column does not match.
    """
    config = ConfigHelper()
    from db.models.tb_IOCFileList import TbIOCFileList

    uploaded_from = expected['UploadedFrom']
    if uploaded_from == 1:
        uploadedby = config.get_data_from_config('CM', 'app_name')
    elif uploaded_from == 2:
        uploadedby = config.get_data_from_config('CM', 'admin', 'account')
    else:
        # The original left ``uploadedby`` unbound here and crashed later
        # with UnboundLocalError; fail fast with an explicit error instead.
        raise ValueError('Unsupported UploadedFrom value: %r' % uploaded_from)

    hashid = res.json()['Data']['UploadedResultInfoList'][0]['FileHashID']
    record = TbIOCFileList.find_by_file_hash_id(hashid)
    if record.UploadedFrom != uploaded_from:
        raise AssertionError('UploadedFrom value is not correct')
    if record.UploadedBy != uploadedby:
        raise AssertionError('UploadedBy value is not correct')
def initialize_api_key(app_name=None, is_enabled=1, latency_second=120):
    """Provision the external-web-service API key row for the configured
    application id, unless one already exists.

    :param app_name: application name; defaults to the configured one.
    :param is_enabled: IsEnabled column value.
    :param latency_second: AllowedLatencyInSeconds column value.
    """
    config = ConfigHelper()
    app_id = config.get_data_from_config('CM', 'application_id')
    # Guard clause: nothing to do when the key is already provisioned.
    if TbExternalWebServiceConsumers.check_if_key_is_duplicated(app_id):
        return
    api_key = config.get_data_from_config('CM', 'api_key_crypted')
    if not app_name:
        app_name = config.get_data_from_config('CM', 'app_name')
    # The factory call inserts the row; the original bound its result to an
    # unused local ``res``, which has been dropped.
    TbExternalWebServiceConsumersFactory(
        ApplicationID=app_id,
        APIKey=api_key,
        IsEnabled=is_enabled,
        AllowedLatencyInSeconds=latency_second,
        ApplicationName=app_name)
def uploaded_so_tables_should_be(test_case_name, expected_data='request.json'):
    """Assert the blacklist tables match the expected upload for a test case.

    *expected_data* is parsed from the test-case data folder; the blacklist
    record is located by its note and its scan action is mapped from the
    symbolic name (LOG/BLOCK/QUARANTINE) to the stored integer.
    """
    scan_action_codes = {'LOG': 1, 'BLOCK': 2, 'QUARANTINE': 3}
    expected = parse_test_data_json(test_case_name, expected_data)
    app_name = ConfigHelper().get_data_from_config('CM', 'app_name')

    record = TbBlacklistInfo.find_by_note(expected.get('note'))
    assert_equal(record.SLF_Type, 2)
    assert_equal(record.SourceType, 1)
    assert_equal(record.ScanAction,
                 scan_action_codes.get(expected.get('file_scan_action')))

    source_record = TbBlacklistSourceInfo.find_by_key(record.SLF_Key)
    assert_equal(source_record.Source, 1)
    assert_equal(source_record.UploadedBy, app_name)
class ExtendDateTime(object):
    """Date/time helpers rendered with the configured 'Time Format'."""

    # Read once at class-creation time; date_format keeps the date part of
    # the configured format and zeroes the time component.
    time_format = ConfigHelper().get_data_from_config('Time Format')
    date_format = time_format.split(' ')[0] + ' 00:00:00'

    @classmethod
    def current_time(cls):
        """Return 'now' formatted with the configured time format."""
        return datetime.datetime.now().strftime(cls.time_format)

    @classmethod
    def today(cls):
        """Return today's date with the time portion zeroed."""
        return datetime.datetime.now().strftime(cls.date_format)

    @classmethod
    def get_date_by_offset(cls, offset, base_time=None):
        """Return *base_time* shifted by *offset*.

        :param offset: a ``datetime.timedelta``, or a string such as
            ``'+3'`` / ``'-7'`` meaning a day offset.
        :param base_time: a datetime, a string in ``time_format``, or None
            for the current time.  (The original default,
            ``base_time=datetime.datetime.now()``, was evaluated once at
            import time and therefore stale on every later call — fixed.)
        :raises ValueError: when a string offset does not start with +/-.
        """
        if base_time is None:
            base_time = datetime.datetime.now()
        if isinstance(base_time, str):
            base_time = datetime.datetime.strptime(base_time, cls.time_format)
        if isinstance(offset, datetime.timedelta):
            return base_time + offset
        delta = datetime.timedelta(days=int(offset[1:]))
        sign = offset[0]
        # Explicit dispatch replaces the original
        # eval("base_time %s delta" % offset[0]).
        if sign == '+':
            return base_time + delta
        if sign == '-':
            return base_time - delta
        raise ValueError('offset must start with "+" or "-": %r' % offset)

    @classmethod
    def get_time_stamp_by_offset(cls, offset):
        """Return epoch seconds (string, fraction dropped) for now+offset."""
        target_date = cls.get_date_by_offset(offset)
        return str(time.mktime(target_date.timetuple())).split('.')[0]
def __init__(self):
    """Load CM/DB settings from config and initialize working state.

    Reads the whole 'CM' and 'DB' config sections, derives the server URL
    from the SERVER_URL template, and resets all transient buffers.
    """
    self._cm_setting = ConfigHelper().get_data_from_config('CM')
    self._db_setting = ConfigHelper().get_data_from_config('DB')
    # Each buffer gets its OWN list: the original chained assignment
    # (self._column_list = self._data_list = self._traffic_list = list())
    # aliased all three names to one shared list object, so appending to
    # any of them mutated all three.
    self._column_list = []
    self._data_list = []
    self._traffic_list = []
    self._registered_products = []
    self._server_ip = self._cm_setting['ip']
    self._server_port = self._cm_setting['port']
    self._server_protocol = self._cm_setting['protocol']
    self._server_url = self.SERVER_URL % (self._server_protocol,
                                          self._server_ip,
                                          str(self._server_port))
    self._db_server = self._db_setting['address']
    self._db_name = self._db_setting['dbname']
    self._db_user = self._db_setting['account']
    self._db_password = self._db_setting['password']
    self._log_tmp_path = None
    self._reg_tmp_path = None
    self._unreg_tmp_path = None
    self.reset()
def get_cm_cookies(self, url, credential=None):
    """Log into the CM web console with Chrome and return its cookies.

    :param url: CM login page URL.
    :param credential: optional ``{'user': ..., 'pwd': ...}``; when None the
        configured admin account is used.
    :return: list of cookie dicts from the logged-in browser session.
    """
    config = ConfigHelper()
    if credential is None:
        user = config.get_data_from_config('CM', 'admin', 'account')
        pwd = config.get_data_from_config('CM', 'admin', 'password')
    else:
        user = credential['user']
        pwd = credential['pwd']
    driver = webdriver.Chrome()
    driver.get(url)
    WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.ID, "txtUserName")))
    # find_element(By.ID, ...) works on Selenium 3 and 4; the
    # find_element_by_id shortcuts were removed in Selenium 4.
    driver.find_element(By.ID, "txtUserName").send_keys(user)
    driver.find_element(By.ID, "txtPassword").send_keys(pwd)
    driver.find_element(By.ID, "loginLink").click()
    cookies = driver.get_cookies()
    # NOTE(review): the driver is never quit, leaking a Chrome process per
    # call — possibly deliberate (reusing the browser); confirm with callers
    # before adding driver.quit().
    return cookies
def __init__(self):
    """Open the MSSQL connection described by the [DB] config section.

    Connects via a named instance when 'instance' is configured, otherwise
    via host:port.
    """
    config = ConfigHelper()
    account = config.get_data_from_config('DB', 'account')
    password = config.get_data_from_config('DB', 'password')
    db_server = config.get_data_from_config('DB', 'address')
    db_name = config.get_data_from_config('DB', 'dbname')
    db_instance = config.get_data_from_config('DB', 'instance')
    db_port = config.get_data_from_config('DB', 'port')
    if db_instance:
        # '\\' separates host and named instance.  The original wrote the
        # single-character escape '\{', an invalid escape sequence that only
        # worked because Python passes unknown escapes through unchanged
        # (DeprecationWarning since 3.6, SyntaxWarning/error in newer
        # versions); the runtime string is identical.
        connection_str = "mssql+pyodbc://{0}:{1}@{2}\\{3}/{4}?driver=SQL+Server".format(
            account, password, db_server, db_instance, db_name)
    else:
        connection_str = "mssql+pyodbc://{0}:{1}@{2}:{3}/{4}?driver=SQL+Server".format(
            account, password, db_server, db_port, db_name)
    self.connect(connection_str)
def make_connection():
    """Open a raw pyodbc connection using the [DB] config section.

    :return: an open ``pyodbc`` connection object.
    """
    config = ConfigHelper()
    conn_string = ';'.join((
        'DRIVER={SQL Server}',
        'SERVER=' + config.get_data_from_config('DB', 'address'),
        'DATABASE=' + config.get_data_from_config('DB', 'dbname'),
        'UID=' + config.get_data_from_config('DB', 'account'),
        'PWD=' + config.get_data_from_config('DB', 'password')))
    return pyodbc.connect(conn_string)
def change_testing_data(case_id):
    """Point the IES-backend mock server at the canned response for *case_id*.

    :raises EnvironmentError: when the mock server does not acknowledge
        with 'OK'.
    """
    mock_server = ConfigHelper().get_data_from_config('Mock Server', 'address')
    uri = '/testing/ies_backend/response/%s' % case_id
    # NOTE(review): verify=False disables TLS certificate checking —
    # acceptable only because this targets a local mock server.
    r = requests.put(mock_server + uri, verify=False)
    if r.json() != 'OK':
        # Same exception type as before, but now with context instead of a
        # bare `raise EnvironmentError`.
        raise EnvironmentError(
            'Mock server rejected testing data for case %s: %r'
            % (case_id, r.text))
import requests
import random
import threading
from queue import Queue
import time
from lib.log import log_result
from lib.replace import URLS
from lib.cookie import get_cookie
from lib.payload import ready_payload
from lib.headers import prepare_headers
from lib.ConfigHelper import ConfigHelper
from lib.ProxyHelper import ProxyHelper

# Module-level shared state used by the worker function below.
ch = ConfigHelper()
ph = ProxyHelper()
print_lock = threading.Lock()  # serializes console output across threads
words = ch.getWords()
cookie = get_cookie()
header = prepare_headers(cookie)
link = URLS[ch.getSite()]  # URL template for the configured site id


def postJob(item):
    # Worker: checks availability of words[item] via an HTTP POST.
    # NOTE(review): this function is truncated in the visible chunk — the
    # body after the proxy check is missing, so its full behavior cannot be
    # documented from here.
    word = words[item]
    payload = ready_payload(word)
    s = requests.Session()
    if ch.enableProxy():
import requests
import random
import threading
from queue import Queue
import time
from lib.log import log_result
from lib.replace import replace
from lib.ConfigHelper import ConfigHelper
from lib.ProxyHelper import ProxyHelper

# Module-level shared state used by the worker function below.
ch = ConfigHelper()
ph = ProxyHelper()
print_lock = threading.Lock()  # serializes console output across threads
words = ch.getWords()


def requestJob(item):
    # Worker: checks availability of words[item], with per-site username
    # length pre-filters (site 3 = twitter, 10 = github, 13 = pastebin).
    # NOTE(review): truncated in the visible chunk — the body continues past
    # the Session creation; do not assume the rest of its behavior.
    word = words[item]
    if ch.getSite() == 3 and not 4 < len(word) < 16:
        with print_lock:
            print("[" + threading.current_thread().name + "] " + word + " is UNAVAILABLE on twitter because it has illegal length.")
    elif ch.getSite() == 10 and not len(word) < 40:
        with print_lock:
            print("[" + threading.current_thread().name + "] " + word + " is UNAVAILABLE on github because it has illegal length.")
    elif ch.getSite() == 13 and not 2 < len(word) < 21:
        with print_lock:
            print("[" + threading.current_thread().name + "] " + word + " is UNAVAILABLE on pastebin because it has illegal length.")
    else:
        link = replace(word)
        s = requests.Session()
def update_system_configuration(key, value):
    """Set the ``Value`` attribute of ``<P Name="key">`` in CM's
    SystemConfiguration.xml.
    """
    cm_root_path = ConfigHelper().get_data_from_config('CM', 'folder')
    target = os.path.join(cm_root_path, 'SystemConfiguration.xml')
    update_attribute(target, './/P[@Name="%s"]' % key, 'Value', value)
def update_schedule_job_time(job_id, time):
    """Set the Schedule ``interval`` for *job_id* in CM's
    ScheduleJobSetting.xml.

    NOTE(review): the parameter name ``time`` shadows the stdlib ``time``
    module inside this function; it is kept unchanged for keyword-argument
    compatibility with existing callers.
    """
    cm_root_path = ConfigHelper().get_data_from_config('CM', 'folder')
    target = os.path.join(cm_root_path, 'ScheduleJobSetting.xml')
    update_attribute(target, './/Job[@id="%s"]/Schedule' % job_id, 'interval', time)
import os
import threading
from queue import Queue
import time
import requests
from lib.ConfigHelper import ConfigHelper

ch = ConfigHelper()


class ProxyHelper():
    # Maintains a proxy pool and sorts proxies into good/bad by probing.

    def __init__(self):
        self.session = requests.Session()
        self.proxies = ch.getProxies()
        self.numProxies = len(ch.getProxies())
        self.print_lock = threading.Lock()  # serializes console output
        self.queue = Queue()
        self.good = []  # proxies that answered the probe
        self.bad = []   # proxies that failed (handling not visible here)

    def checkJob(self, proxy):
        # Probe a single proxy against google.com with a 4-second timeout.
        # NOTE(review): the 'http:'/'https:' keys carry a stray trailing
        # colon and 'socks' is not a scheme requests recognizes, so the
        # probe likely runs WITHOUT any proxy at all — confirm intent.
        # NOTE(review): `status_code is 200` relies on CPython small-int
        # caching; it should be `== 200`.
        # NOTE(review): truncated in the visible chunk — the `try` has no
        # visible `except` clause.
        #sess = self.setProxy(self.session, proxy)
        proxyDict = {'http:': proxy, 'https:': proxy, 'socks': proxy}
        try:
            r = self.session.get('https://google.com', timeout=4, proxies=proxyDict)
            if r.status_code is 200:
                self.good.append(proxy)
                with self.print_lock:
                    print("%s is working..." % proxy)
import re
import sys
from lib.ConfigHelper import ConfigHelper, PLACEHOLDER

# Shared config accessor used by replace() below.
ch = ConfigHelper()

# Site URLs
# Map of site id -> availability-check URL; most templates take the
# username via '%s', site 1 is a user-supplied custom URL.
URLS = {
    1: ch.getCustomUrl(),
    2: "https://api.mojang.com/users/profiles/minecraft/%s",
    3: "https://api.twitter.com/i/users/username_available.json?username=%s",
    4: "https://instagram.com/accounts/web_create_ajax/attempt/",
    5: "https://steamcommunity.com/id/%s",
    6: "https://steamcommunity.com/groups/%s",
    7: "https://soundcloud.com/%s",
    8: "https://passport.twitch.tv/usernames/%s",
    9: "https://mixer.com/api/v1/channels/%s",
    10: "https://github.com/%s",
    11: "https://about.me/%s",
    12: "https://checkerapi.com/check/youtube/%s",
    13: "http://pastebin.com/u/%s",
    14: "https://giphy.com/channel/%s",
    15: "https://www.reddit.com/api/username_available.json?user=%s"
}


def replace(word):
    # Finds and replaces matches of the name variable with the actual word to insert in URL
    # NOTE(review): only site id 1 is handled in this visible chunk; the
    # function presumably continues for the other site ids beyond this view,
    # so do not assume it returns None for them.
    if ch.getSite() == 1:
        x = re.sub(PLACEHOLDER, word, URLS[1])
        return x