def use_lambda_file(self, lambda_file):
    """Copy *lambda_file* (relative to the root folder) into the tmp folder as <lambda_name>.py.

    Returns a status dict: 'ok' with the source/target paths, or 'error'
    when the source file does not exist in the root folder.
    """
    source_path = Files.path_combine(self.get_root_folder(), lambda_file)
    if not Files.exists(source_path):                       # guard: source must exist
        error_message = 'could not find lambda file `{0}` in root folder `{1}`'.format(lambda_file, self.get_root_folder())
        return {'status': 'error', 'data': error_message}
    packaged_file = Files.path_combine(self.tmp_folder, '{0}.py'.format(self.lambda_name))
    Files.copy(source_path, packaged_file)
    return {'status': 'ok', 'file_path': source_path, 'target_file': packaged_file}
def pip_install_dependency(target):
    """Install *target* with pip3 into the shared _lambda_dependencies folder.

    Returns the Process.run result when an install was needed, otherwise
    the folder_exists() result for the already-populated install folder.
    """
    dependencies_root = Files.path_combine('.', '../../../_lambda_dependencies/')
    folder_create(dependencies_root)                 # ensure the shared cache folder exists
    install_folder = Files.path_combine(dependencies_root, target)
    if folder_not_exists(install_folder):            # only install once per target
        return Process.run('pip3', ['install', '-t', install_folder, target])
    return folder_exists(install_folder)
def web_root(self):
    """Resolve the osbot_browser web_root folder for the current execution context.

    Checks, in order: running inside AWS, running from unit-test folders,
    and running from the serverless-render checkout. Returns None when no
    known context matches.
    """
    if os.getenv('AWS_REGION') is not None:              # running inside an AWS Lambda
        return Files.path_combine('.', './osbot_browser/web_root')
    current_folder = Files.current_folder()              # hoisted: used by both test checks
    if 'test/browser' in current_folder:                 # running from a unit test
        return Files.path_combine('.', '../../osbot_browser/web_root')
    if 'test_QA/browser' in current_folder:              # todo: find a better way to handle the path when executing from UnitTests
        return Files.path_combine('.', '../../osbot_browser/web_root')
    parent_folder = Files.folder_name(__file__)
    if 'serverless-render/osbot_browser/browser' in parent_folder:
        return Files.path_combine(parent_folder, '../web_root')
    return None
def create__by_key(self):
    """Build (and persist) an index of all issue json files keyed by their 'Key' field.

    Each entry holds the data file's relative path, its raw data, and a
    'links' map of link_type -> [to_key] populated from self.links.all().
    The result is written with Json.save_file_pretty and also returned.
    """
    by_key = {}
    search_pattern = Files.path_combine(self.file_system.folder_data, '**/*.json')
    for json_path in Files.find(search_pattern):
        if self.filename_metadata in json_path:          # don't load metadata file
            continue
        issue_data = Json.load_file(json_path)
        by_key[issue_data.get('Key')] = {
            'path' : json_path.replace(self.file_system.folder_data, '')[1:],   # path relative to folder_data
            'links': {},
            'data' : issue_data}
    for link in self.links.all():
        from_key, link_type, to_key = link[0], link[1], link[2]
        entry = by_key.get(from_key)
        if entry:                                        # skip links whose source issue wasn't loaded
            entry.get('links').setdefault(link_type, []).append(to_key)
    Json.save_file_pretty(self.path__by_key(), by_key)
    return by_key
def get_files(self):
    """Return every file under self.tmp_folder, as paths relative to it."""
    collected = []
    for current_root, _dirs, file_names in os.walk(self.tmp_folder):
        for file_name in file_names:
            absolute_path = Files.path_combine(current_root, file_name)
            collected.append(absolute_path.replace(self.tmp_folder, ''))   # strip the tmp_folder prefix
    return collected
def issues(self):
    """Load and return every issue json file in this folder, excluding the metadata file."""
    metadata_path = self.path_metadata()
    search_pattern = Files.path_combine(self.path(), '*.json')
    return [Json.load_file(json_path)
            for json_path in Files.find(search_pattern)
            if json_path != metadata_path]
def create_temp_file(self, new_code=None):
    """Write the lambda code into a fresh temp folder as <file_name>.py.

    When *new_code* is provided it replaces self.lambda_code first.
    Returns self (fluent interface).

    Raises:
        FileNotFoundError: if the file was not created on disk.
          (The original used `assert`, which is silently stripped when
          Python runs with -O, so the check would vanish in optimized runs.)
    """
    self.folder = Files.temp_folder('tmp_lambda_')
    if new_code:
        self.lambda_code = new_code
    self.tmp_file = Files.path_combine(self.folder, '{0}.py'.format(self.file_name))
    Files.write(self.tmp_file, self.lambda_code)
    if not Files.exists(self.tmp_file):                 # explicit check instead of assert
        raise FileNotFoundError('failed to create temp lambda file: {0}'.format(self.tmp_file))
    return self
def __init__(self, web_root=None):
    """Set up the web server config; web_root defaults to <src_tmp>/html when not given."""
    self.src_tmp     = '/tmp/temp_web_server'
    self.python_path = 'python3'
    self.web_root    = web_root if web_root else self.src_tmp + '/html'   # caller override wins
    self.html_file   = Files.path_combine(self.web_root, 'index.html')
    self.port        = Misc.random_number(10000, 60000)                   # random high port to avoid clashes
    self.server_proc = None
def __init__(self, web_page, headless=True):
    """Wire up browser, web server and page renderer for *web_page*."""
    web_root_path     = Files.path_combine(Files.parent_folder(__file__), '../web_root')
    self.web_page     = web_page
    self.title        = 'browser view'
    self.web_root     = web_root_path
    self.api_browser  = API_Browser(headless=headless).sync__setup_browser()
    self.web_server   = Web_Server(self.web_root)
    self.render_page  = Render_Page(api_browser=self.api_browser, web_server=self.web_server)
def upload_dependency(target):
    """Zip-upload a locally installed dependency folder to the lambdas S3 bucket.

    Returns True when the uploaded file exists in S3.
    Raises Exception when the local dependency folder is missing.
    """
    s3        = S3()
    s3_bucket = AWS_Config().lambda_s3_bucket()
    s3_file   = 'lambdas-dependencies/{0}.zip'.format(target)
    path_libs = Files.path_combine('../../../_lambda_dependencies/', target)
    if Files.not_exists(path_libs):                     # fail loudly: nothing to upload
        raise Exception(f"In Lambda upload_dependency, could not find dependency for: {target} , which resolved to {path_libs}")
    s3.folder_upload(path_libs, s3_bucket, s3_file)
    return s3.file_exists(s3_bucket, s3_file)
def run(event, context):
    """Lambda handler: write a small file named by event['file_name'] into /tmp and list /tmp."""
    tmp_path = '/tmp'                                        # lambda's writable temp folder
    tmp_file = Files.path_combine(tmp_path, event.get('file_name'))
    Files.write(tmp_file, 'some text')                       # create the file with some text
    return Files.find(tmp_path + '/*.*')                     # list of files now in the temp folder
def test_add_file(self):
    """End-to-end: add hello_world.py to the package, deploy it and invoke it."""
    hello_world_file = Files.path_combine(__file__, '../../../../osbot_aws/lambdas/dev/hello_world.py')
    assert Files.exists(hello_world_file)
    # add the source file and confirm it landed at the package root
    self.package.add_file(hello_world_file)
    assert self.package.get_files() == ['/hello_world.py']
    # point the lambda at the handler, push the package, and invoke it
    self.package.aws_lambda.handler = 'hello_world.run'
    self.package.update()
    assert self.package.invoke() == 'From lambda code, hello None'
def __init__(self):
    """Set up the datatables sample page renderer with default table options."""
    web_root_path     = Files.path_combine(Files.parent_folder(__file__), '../web_root')
    self.web_page     = '/datatables/simple.html'
    self.web_root     = web_root_path
    self.api_browser  = API_Browser().sync__setup_browser()
    self.render_page  = Render_Page(api_browser=self.api_browser, web_root=self.web_root)
    self.table_width  = '100%'
    self.columns_defs = None
    self.table_title  = None
def __init__(self, headless=True):
    """Set up the vis-js sample page renderer backed by a (optionally headless) Chrome."""
    chrome            = Chrome().headless(headless)
    self.web_page     = '/vis-js/simple.html'
    self.web_root     = Files.path_combine(Files.parent_folder(__file__), '../web_root')
    self.api_browser  = API_Browser(chrome.sync().browser())
    self.render_page  = Render_Page(api_browser=self.api_browser, web_root=self.web_root)
    self.bot_name     = 'GS_Bot'
    self.options      = None
def load_dependency(target):
    """Download (once) a dependency zip from S3 into /tmp and add it to sys.path.

    No-op outside AWS (AWS_REGION unset). Raises when the dependency zip is
    not present in the lambdas S3 bucket. Returns Files.exists(tmp_dir).
    """
    if os.getenv('AWS_REGION') is None:                  # only needed when running inside AWS
        return
    from osbot_aws.apis.S3 import S3                     # deferred imports: lambda cold-start cost
    import shutil
    import sys
    s3        = S3()
    s3_bucket = AWS_Config().lambda_s3_bucket()
    s3_key    = 'lambdas-dependencies/{0}.zip'.format(target)
    tmp_dir   = Files.path_combine('/tmp/lambdas-dependencies', target)
    if not s3.file_exists(s3_bucket, s3_key):
        raise Exception("In Lambda load_dependency, could not find dependency for: {0}".format(target))
    if file_not_exists(tmp_dir):                         # download and unpack only once per container
        zip_file = s3.file_download(s3_bucket, s3_key, False)
        shutil.unpack_archive(zip_file, extract_dir=tmp_dir)
    if tmp_dir not in sys.path:                          # make the unpacked code importable
        sys.path.append(tmp_dir)
    return Files.exists(tmp_dir)
def path_issue(self, issue_key):
    """Return the json file path for *issue_key* inside this folder."""
    return Files.path_combine(self.path(), "{0}.json".format(issue_key))
def path(self):
    """Return this object's data folder: folder_data joined with the sanitized name."""
    safe_name = self.file_system.safe_string(self.name)
    return Files.path_combine(self.file_system.folder_data, safe_name)
def path_metadata(self):
    """Return the full path of the metadata file inside this folder."""
    return Files.path_combine(self.path(), self.filename_metadata)
def __init__(self, contents='...', extension='tmp'):
    """Prepare a temp-file location (folder + random name); nothing is written yet."""
    self.contents   = contents
    self.tmp_folder = Files.temp_folder()
    self.tmp_file   = random_filename(extension)
    self.file_path  = Files.path_combine(self.tmp_folder, self.tmp_file)
def __init__(self, folder_root):
    """Record the root folder and derive the 'data' and 'indexes' subfolder paths."""
    self.folder_root    = folder_root
    self.folder_data    = Files.path_combine(folder_root, 'data')
    self.folder_indexes = Files.path_combine(folder_root, 'indexes')
def __init__(self):
    """Point at the GraphSV demo-data checkout and create a Graph_SV over it."""
    self.root_folder = Files.path_combine(__file__, '../../../GraphSV-demo-data')   # sibling repo checkout
    self.data_folder = Files.path_combine(self.root_folder, 'csv-demo-data')
    self.graph_sv    = Graph_SV(self.root_folder)
def file_path(self, title):
    """Return the path of the data file named *title* inside the data folder."""
    return Files.path_combine(self.data_folder, title)
def __init__(self, tmp_img=None, clip=None, headless=False):
    """Set up a Render_Page over the osbot_browser web_root views folder."""
    self.tmp_img     = tmp_img
    self.clip        = clip
    self.headless    = headless
    self.path_views  = Files.path_combine(Files.parent_folder(__file__), '../../osbot_browser/web_root')
    self.render_page = Render_Page(headless=self.headless, web_root=self.path_views)
def remove_files(self, pattern):
    """Delete every packaged file whose (relative) path contains *pattern*."""
    matching = [entry for entry in self.get_files() if pattern in entry]
    for entry in matching:
        # entry starts with '/', so drop it before joining onto tmp_folder
        Files.delete(Files.path_combine(self.tmp_folder, entry[1:]))
def add_folder(self, source, ignore=None):
    """Copy *source* into the package tmp folder (skipped if already there) and prune __pycache__.

    Returns self (fluent interface).
    """
    target_folder = Files.path_combine(self.tmp_folder, Files.file_name(source))
    if folder_not_exists(target_folder):                 # don't copy twice
        folder_copy(source=source, destination=target_folder, ignore_pattern=ignore)
    self.remove_files('__pycache__')                     # never package bytecode caches
    return self
def get_root_folder():
    """Return the project root folder, two levels above this file."""
    return Files.path_combine(__file__, '../..')
def path_for(self, file_type):
    """Return the index json path for *file_type* inside the indexes folder."""
    return Files.path_combine(self.folder_indexes, file_type + '.json')
def path_links(self):
    """Return the path of the links.json file inside the data folder."""
    return Files.path_combine(self.file_system.folder_data, 'links.json')
def path_to_file(self, file_path):
    """Map a request path onto a file under self.web_root.

    Fixes the path-traversal vulnerability the original flagged in a comment:
    any '..' path component would let a request escape web_root, so such
    requests now return None instead of a resolved path. Legitimate paths
    behave exactly as before. (Also drops the redundant len() check —
    a non-empty string is already truthy.)
    """
    if file_path and file_path[0] == '/':
        file_path = file_path[1:]                        # serve paths relative to web_root
    if file_path and '..' in file_path.split('/'):       # reject traversal attempts outright
        return None
    return Files.path_combine(self.web_root, file_path)