def create__by_key(self):
    all_data    = {}
    file_filter = Files.path_combine(self.file_system.folder_data, '**/*.json')
    for path in Files.find(file_filter):
        if self.filename_metadata not in path:          # don't load metadata file
            data = Json.load_file(path)
            key  = data.get('Key')
            all_data[key] = {
                'path' : path.replace(self.file_system.folder_data, '')[1:],
                'links': {},
                'data' : data,
            }
    for link in self.links.all():
        from_key  = link[0]
        link_type = link[1]
        to_key    = link[2]
        issue     = all_data.get(from_key)
        if issue:
            links = issue.get('links')
            if links.get(link_type) is None:
                links[link_type] = []
            links[link_type].append(to_key)
    Json.save_file_pretty(self.path__by_key(), all_data)
    return all_data
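# Illustrative only: the shape of the index that create__by_key builds and saves
# via path__by_key(). The issue keys, paths, and link types are hypothetical
# examples, not taken from the source:
#
#   {
#       'PROJ-1': {
#           'path' : 'project/PROJ-1.json',               # relative to folder_data
#           'links': {'is blocked by': ['PROJ-2']},       # link_type -> target keys
#           'data' : {'Key': 'PROJ-1', 'Issue Type': 'project', ...}
#       }
#   }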
def save_active_server_details(self, file):
    build_id      = self.get_active_build_id()
    server, token = CodeBuild_Jupyter(build_id).get_server_details_from_logs()
    config        = {'build_id': build_id, 'server': server, 'token': token}
    Json.save_file(file, config)
    return config
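# The config written above carries the same 'server' and 'token' fields that the
# test setUp methods further down read from '/tmp/active_jupyter_server.yml';
# whether this method is what produces that particular file is not shown in the
# source, so treat that connection as an assumption.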
def create_metadata(self):
    default_metadata = {
        'Issue Type': self.name,
        'Key Id'    : self.file_system.safe_string(self.name),
        'Next Key'  : 1
    }
    path_metadata = self.path_metadata()
    Json.save_file_pretty(path_metadata, default_metadata)
    return Files.exists(path_metadata)
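# The metadata file created above is what drives issue key generation: issue_add
# (further down) formats new keys as "<Key Id>-<Next Key>" and increments
# 'Next Key' after each issue is saved.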
def create__by_link_type(self):
    data = {}
    for link in self.links.all():
        (from_key, link_type, to_key) = link
        if data.get(link_type) is None:
            data[link_type] = {}
        if data[link_type].get(from_key) is None:
            data[link_type][from_key] = []
        data[link_type][from_key].append(to_key)
    Json.save_file_pretty(self.path__by_link_type(), data)
    return data
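# Illustrative only: create__by_link_type inverts the links list into a
# link_type -> from_key -> [to_keys] mapping; the keys and link type here are
# hypothetical examples:
#
#   {
#       'is blocked by': {'PROJ-1': ['PROJ-2', 'PROJ-3']}
#   }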
def create__by_values(self):
    issues = self.get__by_key()
    data   = {}
    for key, issue in issues.items():
        for field, value in issue.get('data').items():
            if field in ['Issue Type', 'Key']:
                continue
            if data.get(value) is None:
                data[value] = []
            data[value].append((key, field))
    Json.save_file_pretty(self.path__by_values(), data)
    return data
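# Illustrative only: create__by_values builds a reverse index from field values to
# the (issue key, field name) pairs that hold them, with 'Issue Type' and 'Key'
# excluded. The values and keys below are hypothetical examples:
#
#   {
#       'open': [('PROJ-1', 'Status'), ('PROJ-2', 'Status')]
#   }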
def issues(self):
    data          = []
    path_metadata = self.path_metadata()
    for path in Files.find(Files.path_combine(self.path(), '*.json')):
        if path != path_metadata:                       # don't load metadata file
            data.append(Json.load_file(path))
    return data
def issue_add(self, data):
    metadata   = self.metadata()
    issue_key  = "{0}-{1}".format(metadata.get('Key Id'), metadata.get('Next Key'))
    issue_path = self.path_issue(issue_key)
    metadata['Next Key'] += 1
    data['Key']        = issue_key
    data['Issue Type'] = self.name
    Json.save_file_pretty(issue_path, data)
    Json.save_file_pretty(self.path_metadata(), metadata)
    if Files.exists(issue_path):
        return {'status': 'ok', 'data': 'Issue added ok', 'issue_key': issue_key}
    return {'status': 'error', 'data': 'Issue not saved ok'}
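# Minimal usage sketch for issue_add, assuming an issue-type object whose metadata
# currently holds {'Key Id': 'PROJ', 'Next Key': 1}; the variable name and field
# values are hypothetical:
#
#   result = issue_type.issue_add({'Summary': 'an example issue'})
#   # result -> {'status': 'ok', 'data': 'Issue added ok', 'issue_key': 'PROJ-1'}
#
# 'Next Key' is incremented and persisted on every call, so keys are sequential
# per issue type.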
def setUp(self):
    #self.server     = 'http://localhost:8888'
    #self.image_name = 'jupyter/datascience-notebook:9b06df75e445'
    #self.docker_jp  = Docker_Jupyter(self.image_name)
    #self.token      = self.docker_jp.token()
    data        = Json.load_file('/tmp/active_jupyter_server.yml')
    self.token  = data.get('token')
    self.server = data.get('server')
    self.api    = Jupyter_API_Actions(self.server, self.token)
    self.result = None
def get_graph_data(self, graph_name):
    params = {'params': ['raw_data', graph_name, 'details'], 'data': {}}
    data   = Lambda('osbot_jira.lambdas.graph').invoke(params)
    if type(data) is str:                               # a string response is an S3 key for the actual payload
        s3_key    = data
        s3_bucket = Globals.lambda_s3_bucket
        tmp_file  = S3().file_download_and_delete(s3_bucket, s3_key)
        data      = Json.load_file_and_delete(tmp_file)
        return data
    return data
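# Note (inferred from the code above rather than stated in the source): a plain
# string response from the lambda is treated as an S3 key, the real payload is
# fetched from Globals.lambda_s3_bucket, and, judging by the helper names,
# file_download_and_delete / load_file_and_delete clean up the S3 object and the
# temporary local file once the JSON has been loaded.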
def by_issue_type(self, issue_type_name, indexed_by=None):
    data        = []
    file_filter = "{0}/{1}/{2}".format(self.file_system.folder_data, issue_type_name, '*.json')
    for path in Files.find(file_filter):
        if self.filename_metadata not in path:          # don't load metadata file
            data.append(Json.load_file(path))
    if indexed_by is None:
        return data
    indexed_data = {}
    for item in data:
        key   = item.get('Key')
        index = item.get(indexed_by)
        if index:
            if indexed_data.get(index) is None:
                indexed_data[index] = {}
            indexed_data[index][key] = item
    return indexed_data
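# Illustrative only: with indexed_by set, by_issue_type groups the issues by the
# value of that field, keyed by issue 'Key'. The issue type, field, and keys here
# are hypothetical examples:
#
#   by_issue_type('project', indexed_by='Status')
#   # -> {'open': {'PROJ-1': {...}, 'PROJ-2': {...}},
#   #     'done': {'PROJ-3': {...}}}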
def setUp(self):
    self.headless = False
    #self.server     = 'http://localhost:8888'
    #self.image_name = 'jupyter/datascience-notebook:9b06df75e445'
    self.notebook_name = 'work/test-1.ipynb'
    # self.docker_jp = Docker_Jupyter(self.image_name)
    # self.token     = self.docker_jp.token()
    data         = Json.load_file('/tmp/active_jupyter_server.yml')
    self.token   = data.get('token')
    self.server  = data.get('server')
    self.jp_api  = Jupyter_API_Actions(server=self.server, token=self.token)
    self.jp_web  = Jupyter_Web_Cell(token=self.token, headless=self.headless)
    self.jp_cell = Jupyter_Web_Cell(token=self.token, headless=self.headless)
    self.notebook_name = 'dev_coding'                   # overrides the value set above
    self.notebook_path = '{0}.ipynb'.format(self.notebook_name)
    self.result = None
def get_data_for(self, file_type):
    return Json.load_file(self.path_for(file_type))
def load(self, path):
    data = Json.load_file(path)
    if data:
        self.nodes = data['nodes']
        self.edges = data['edges']
    return self
def metadata(self):
    if self.exists():
        return Json.load_file(self.path_metadata())
def all(self):
    path = self.path_links()
    if Files.exists(path):
        return Json.load_file(path)
    return []
def save(self, links):
    path = self.path_links()
    Json.save_file_pretty(path, links)
    return self
def issue_get(self, issue_key):
    issue_path = self.path_issue(issue_key)
    if Files.exists(issue_path):
        return Json.load_file(issue_path)
def save(self, path=None):
    if path is None:
        path = Files.temp_file('graph.json')
    data = {'nodes': self.nodes, 'edges': self.edges}
    return Json.save_file_pretty(path, data)
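# Minimal round-trip sketch for the save/load pair above, assuming both methods
# live on a graph class with 'nodes' and 'edges' attributes and that
# Json.save_file_pretty returns the path it wrote to (the class name 'Graph' is
# hypothetical):
#
#   path  = Graph().save()          # defaults to a temp 'graph.json'
#   graph = Graph().load(path)      # restores nodes and edges from that file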