def file_select(query):
    # Resolve the target file relative to the location's base directory.
    file = pathlib.Path(query.location.address) / query.endpoint
    if file.suffix == '.csv':
        df = pandas.read_csv(file)
        return flatten_json(df.to_dict('records'))
    elif file.suffix == '.json':
        # Close the file handle once the JSON has been parsed.
        with file.open() as f:
            json_obj = json.load(f)
        return flatten_json(json_obj)
def selenium_select(query):
    location = query.location
    # Unique, timestamped download directory under the configured storage location.
    download_location = datetime.datetime.now().strftime(
        "%Y%m%d%H%M%S") + "_" + uuid.uuid4().hex
    path = os.path.join(config.storage_location, download_location)
    os.mkdir(path)

    options = Options()
    options.headless = True
    options.add_experimental_option(
        "prefs", {"download.default_directory": path})
    driver = webdriver.Chrome('/usr/lib/chromium-browser/chromedriver',
                              options=options)

    # Namespace handed to the user-supplied scripts via exec(); the script is
    # expected to leave its result in loc['output'].
    loc = {"driver": driver, "options": options, "download_directory": path}
    try:
        if location.database:
            # Optional setup script stored on the location (e.g. login steps).
            exec(location.database, globals(), loc)
        exec(query.body, globals(), loc)
    finally:
        # Always release the browser, even if a user script raises.
        driver.quit()

    if isinstance(loc['output'], dict):
        loc['output'] = pandas.DataFrame(loc['output'])
    if isinstance(loc['output'], pandas.DataFrame):
        loc['output'] = clean_dataframe(loc['output'])
        loc['output'] = flatten_json(loc['output'].to_dict('records'))
    return loc['output']
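# Example (illustrative, not from the source): selenium_select above executes
# the stored query.body with exec(), injecting `driver`, `options` and
# `download_directory`, and expects the script to bind `output` to a dict or
# DataFrame. A stored body might therefore look like the sketch below; the URL
# and CSS selector are invented for the example.
#
#     from selenium.webdriver.common.by import By
#     import pandas
#
#     driver.get("https://example.com/report")          # illustrative URL
#     rows = driver.find_elements(By.CSS_SELECTOR, "table#report tr")
#     output = pandas.DataFrame({"text": [row.text for row in rows]})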
def ldap_select(query):
    location = query.location
    # Build the base DN from the dotted domain, e.g. "example.com" -> "dc=example,dc=com".
    dc = 'dc=' + ',dc='.join(split_strip(location.database, "."))
    # Active Directory expects NTLM; other directories use a simple bind.
    auth = NTLM if location.subtype.name == 'Active Directory' else SIMPLE
    server = Server(location.address, get_info=ALL)
    conn = Connection(server,
                      'cn=' + location.username + ',' + dc,
                      location.password,
                      auto_bind=True,
                      authentication=auth)

    formatted_queries = []
    if query.notasi_query:
        # Expand the LDAP filter once per row of the notasi query,
        # substituting {column} placeholders with the row's values.
        notasi_query = pandas.read_sql(
            query.notasi_query, db_session.notasi_engine()).to_dict('records')
        for row in notasi_query:
            formatted_query = query.body
            for key, value in row.items():
                formatted_query = formatted_query.replace(
                    '{' + str(key) + '}', str(value))
            formatted_queries.append(formatted_query)
    if not formatted_queries:
        formatted_queries.append(query.body)

    data = []
    for formatted_query in formatted_queries:
        conn.search(dc, formatted_query, attributes=[ALL_ATTRIBUTES])
        for entry in conn.entries:
            entry_json = json.loads(entry.entry_to_json())
            record = entry_json["attributes"]
            record['dn'] = entry_json["dn"]
            data.append(record)
    return flatten_json(data)
def sql_select(query):
    location = query.location
    # Assemble the SQLAlchemy URL from the location's parts; password and
    # database are optional, and mssql+pyodbc needs an explicit ODBC driver.
    connection_string = location.subtype.dialect + \
        "://" + location.username + \
        (":" + location.password if location.password else "") + \
        "@" + location.address + \
        ":" + location.port + \
        ("/" + location.database if location.database else "") + \
        ("?driver=ODBC+DRIVER+17+for+SQL+Server"
         if location.subtype.dialect == 'mssql+pyodbc' else "")
    location_engine = sa.create_engine(connection_string)

    formatted_queries = []
    if query.notasi_query:
        # Expand the SQL body once per row of the notasi query,
        # substituting {column} placeholders with the row's values.
        notasi_query = pandas.read_sql(
            query.notasi_query, db_session.notasi_engine()).to_dict('records')
        for row in notasi_query:
            formatted_query = query.body
            for key, value in row.items():
                formatted_query = formatted_query.replace(
                    '{' + str(key) + '}', str(value))
            formatted_queries.append(formatted_query)
    if not formatted_queries:
        formatted_queries.append(query.body)

    data = []
    for formatted_query in formatted_queries:
        data.append(pandas.read_sql_query(formatted_query, location_engine))
    # Round-trip through JSON so values pandas cannot serialise directly
    # (dates, decimals, ...) are handled by the `default` serialiser.
    data = json.loads(
        json.dumps(pandas.concat(data).to_dict('records'), default=default))
    return flatten_json(data)
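# The {column} placeholder substitution above is repeated almost verbatim in
# ldap_select and http_select. A shared helper along these lines could replace
# the inline loops; the name format_queries is hypothetical, not from the source.
def format_queries(body, rows):
    """Expand `body` once per row, replacing {column} placeholders.

    Hypothetical consolidation of the inline loops in ldap_select,
    sql_select and http_select; falls back to the raw body when no
    rows are supplied.
    """
    formatted = []
    for row in rows or []:
        text = body
        for key, value in row.items():
            text = text.replace('{' + str(key) + '}', str(value))
        formatted.append(text)
    return formatted or [body]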
def update_users(query, session):
    if query.notasi_query:
        notasi_query = pandas.read_sql(
            query.notasi_query, db_session.notasi_engine()).to_dict('records')
        for row in notasi_query:
            user = session.query(User).filter_by(username=row["username"]).first()
            if user:
                # Existing user: refresh the display name if it changed.
                if user.name != row["name"]:
                    user.name = row["name"]
            else:
                # New user: create and attach it to the session
                # (the caller is assumed to commit).
                user = User()
                session.add(user)
                user.username = row["username"]
                user.name = row["name"]
    response = [{"job": "users added"}]
    return flatten_json(response)
def http_select(query):
    url_string = query.location.address + query.endpoint
    headers_string = query.head if query.head else ''
    data_string = query.body if query.body else ''
    request_vars = []
    responses = []

    if query.notasi_query:
        # Build one request per row of the notasi query, substituting
        # {column} placeholders in the URL, headers and body.
        notasi_query = pandas.read_sql(
            query.notasi_query, db_session.notasi_engine()).to_dict('records')
        for row in notasi_query:
            url = url_string
            headers = headers_string
            data = data_string
            for key, value in row.items():
                if url:
                    url = url.replace('{' + str(key) + '}', str(value))
                if headers:
                    headers = headers.replace('{' + str(key) + '}', str(value))
                if data:
                    data = data.replace('{' + str(key) + '}', str(value))
            if headers:
                headers = json.loads(headers)
            if data:
                data = json.loads(data)
            request_vars.append({'url': url, 'headers': headers, 'data': data})
    else:
        # Single request built straight from the stored strings.
        headers = json.loads(headers_string) if headers_string else []
        data = json.loads(data_string) if data_string else []
        request_vars.append({
            'url': url_string,
            'headers': headers,
            'data': data
        })

    if query.request_method.name == "GET":
        for request_var in request_vars:
            response = requests.get(url=request_var['url'],
                                    headers=request_var['headers'],
                                    data=request_var['data']).json()
            if isinstance(response, dict):
                responses.append(
                    flatten_json(pandas.DataFrame(response, index=[0])))
            else:
                responses.append(flatten_json(pandas.DataFrame(response)))
    elif query.request_method.name == "POST":
        for request_var in request_vars:
            response = requests.post(url=request_var['url'],
                                     headers=request_var['headers'],
                                     data=request_var['data']).json()
            responses.append(pandas.DataFrame(response, index=[0]))

    # Flatten the list of per-request results into a single list of records.
    result = [item for sublist in responses for item in sublist]
    return result
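# Example (illustrative, not from the source): the URL, headers and body of an
# HTTP query may carry {column} placeholders that http_select fills from each
# row of the notasi query before sending the request. With field values like
# the ones below (the api_targets table and Bearer-token header are invented),
#
#     query.location.address    = "https://api.example.com"
#     query.endpoint            = "/users/{user_id}/orders"
#     query.head                = '{"Authorization": "Bearer {token}"}'
#     query.notasi_query        = "SELECT user_id, token FROM api_targets"
#     query.request_method.name = "GET"
#
# http_select(query) issues one GET per row, e.g.
#     GET https://api.example.com/users/42/orders
# with the Authorization header filled from that row's token, and returns the
# flattened records of every response.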