def test_read_file(self):
    """read_file exits with code 1 for a missing path and returns the full contents otherwise."""
    with self.assertRaises(SystemExit) as ctx:
        utils.read_file("file-do-not-exist")
    self.assertEqual(ctx.exception.code, 1)
    expected = "col1,col2,col3\nval11,val12,val13\nval21,val22,val23"
    self.assertEqual(utils.read_file(self.tmp_file.name), expected)
def pull(self, targetFolder):
    """
    Run the configured SQL query and save the result set as a
    timestamped CSV under *targetFolder*.

    :param targetFolder: directory the CSV is written into (created if absent)
    """
    create_dirs(targetFolder)
    file_name = create_timestamp_csv(targetFolder)
    self._logger.info('create file:' + file_name)
    # SQL file lives next to this module.
    query = read_file(get_current_file_path(__file__) + "/" + self.sql_file_name)
    self._logger.info('the query is: ' + query)
    # NOTE: removed a stray `print(query)` that duplicated the log line above
    # (debug leftover).
    [rows, _] = self._db_connector.execute_query(query)
    save_as_csv(file_name, self.headers, rows)
def download_dashboards(self):
    """
    Save a timestamped JSON copy of every dashboard that was
    uploaded/updated in the current session.
    :return:
    """
    stamp = datetime.now().strftime('%Y%m%d%H%M')
    for dash in self._ui_mgr.get_dashboards(self.environment):
        source_path = self.output + "/" + dash.display_name + "_dashboard.json"
        target_path = (self.download + "/" + dash.display_name + "_" +
                       dash.env + "_" + stamp + "_dashboard.json")
        save_file(target_path, read_file(source_path))
        logger.info("Dashboard %s (%s) saved successfully" %
                    (dash.display_name, dash.wave_id))
def data_replacement_json(file_name, dataset):
    """
    Render the JSON template in *file_name* with Jinja, using *dataset*
    for the replacement values, and return the rendered JSON string.

    Literal backslashes and literal '{{'/'}}' sequences in the template
    are protected with placeholder tokens before rendering — only the
    escaped '{_{' / '}_}' markers are treated as real Jinja expressions —
    and the protected literals are restored afterwards.

    :param file_name: path of the JSON template file
    :param dataset: mapping of template variable names to values
    :return: rendered JSON string
    """
    template_text = read_file(file_name)
    # Protect characters that would otherwise be interpreted by Jinja.
    template_text = template_text.replace('\\', 'BACKSLASH').replace(
        '{{', 'OPEN_BRACKET').replace('}}', 'CLOSE_BRACKET')
    # '{_{' / '}_}' are the explicit markers for genuine Jinja expressions.
    template_text = template_text.replace('{_{', '{{').replace('}_}', '}}')
    template = Template(template_text)
    # Renamed local from `json` to `rendered`: the old name shadowed the
    # stdlib `json` module used elsewhere in this file.
    rendered = template.render(dataset)
    rendered = rendered.replace('BACKSLASH', '\\').replace(
        'OPEN_BRACKET', '{{').replace('CLOSE_BRACKET', '}}')
    return rendered
def generate_json(self, ds_name, dashboard_env):
    """
    Generate the JSON of the named dashboard by substituting the datasets
    belonging to *dashboard_env* into its template via string replacement.
    :param ds_name:
    :param dashboard_env:
    :return:
    """
    template_name = self._wave_ui_service.execute(ds_name)
    dashboard_name = self._entity_service.get_dashboard_by_name(
        ds_name).display_name
    # Map dataset type -> dataset name for the target environment.
    replacements = {
        ds.type: ds.name
        for ds in self._entity_service.get_datasets_by_env(dashboard_env)
    }
    dashboard = read_file(template_name)
    for ds_type, ds_value in replacements.items():
        dashboard = dashboard.replace("%(" + ds_type + ")s", ds_value)
    out_path = self.output + '/' + dashboard_name + '_dashboard.json'
    with open(out_path, 'w') as fout:
        fout.write(str(dashboard))
    return dashboard
def test_save_file(self):
    """save_file round-trips its content through read_file."""
    payload = "some content"
    utils.save_file(self.tmp_file.name, payload)
    self.assertEqual(utils.read_file(self.tmp_file.name), payload)
def generate_dashboard_json(self, display_name, dashboard_name):
    """
    Method returns generated layouts, widgets, and steps JSON from metadata.

    Reads ``<self.output>/<display_name>_dashboard_metadata.json`` and walks
    its page -> container -> widget -> step hierarchy, rendering each element
    through the ``generate_*_json`` template helpers. Widgets and steps are
    accumulated across all pages; layouts are collected per page.

    :param display_name: dashboard display name; selects the metadata file
    :param dashboard_name: folder name under ``self.saql_path`` holding the
        SAQL query files referenced by chart steps
    :return layouts, widgets, steps: tuple ``(pages, widgets, steps)`` of
        comma-joined JSON fragment strings; implicitly ``None`` when an
        exception was routed to ``exception_handler``
    """
    try:
        widgets = []
        steps = []
        pages = []
        file_content = read_file(self.output + "/" + display_name +
                                 "_dashboard_metadata.json")
        metadata = json.loads(file_content)
        # Deterministic page order: case-insensitive sort by page name.
        metadata.sort(key=lambda x: x[NAME].lower())
        for p_index, page in enumerate(metadata):
            layouts = []  # reset per page; joined into the page JSON below
            p_name = page[NAME]
            p_display = page[DISP_NAME]
            p_template = page[TEMPLATE]
            p_containers = page[CONTAINERS]
            for c_index, container in enumerate(p_containers):
                c_name = container[NAME]
                c_template = container[TEMPLATE]
                c_widgets = container[WIDGETS]
                # The container itself gets a layout entry (no style override).
                layouts.append(
                    self.generate_layouts_json(c_name, container[COLSPAN],
                                               container[COL], container[ROW],
                                               container[ROWSPAN]))
                # add widget_properties
                for w_index, widget in enumerate(c_widgets):
                    w_name = widget[NAME]
                    w_display = widget[DISP_NAME]
                    w_steps = widget[STEPS]
                    w_properties = widget[PROPERTIES]
                    w_type = widget[TYPE]
                    w_template = widget[TEMPLATE]
                    w_font = widget[FONT_SIZE]
                    # Flatten the widget's key/value property list into a dict.
                    w_prop_dict = {}
                    for wp_index, w_property in enumerate(w_properties):
                        w_prop_dict[w_property[KEY]] = w_property[VAL]
                    # generating static text widgets
                    if w_type == STATIC_TEXT:
                        text_widget_dataset = {
                            NAME: w_name,
                            TEXT: w_prop_dict[TEXT],
                            FONT_SIZE: w_font,
                            TEXT_CLR: w_prop_dict[TEXT_CLR],
                            ALIGN: w_prop_dict[ALIGN]
                        }
                        widgets.append(
                            self.generate_widget_step_json(
                                w_template, text_widget_dataset))
                    # generating link widgets; the dashboard-link property is
                    # looked up per environment ("<env>_<DASH_LINK>")
                    if w_type == LINK:
                        link_widget_dataset = {
                            NAME: w_name,
                            DISP_NAME: w_display,
                            URL: w_prop_dict[URL],
                            DEST_TYPE: w_prop_dict[DEST_TYPE],
                            FONT_SIZE: w_font,
                            TEXT_CLR: w_prop_dict[TEXT_CLR],
                            ALIGN: w_prop_dict[ALIGN],
                            DASH_LINK: w_prop_dict[self.environment + "_" +
                                                   DASH_LINK]
                        }
                        widgets.append(
                            self.generate_widget_step_json(
                                w_template, link_widget_dataset))
                    # generating navigation widgets
                    if w_type == NAVIGATION:
                        nav_widget_dataset = {
                            NAME: w_name,
                            DISP_NAME: w_display,
                            FONT_SIZE: w_font
                        }
                        widgets.append(
                            self.generate_widget_step_json(
                                w_template, nav_widget_dataset))
                    for s_index, step in enumerate(w_steps):
                        s_name = step[NAME]
                        s_prop = step[PROPERTIES]
                        s_type = step[TYPE]
                        s_template = step[TEMPLATE]
                        # generating listselector widgets
                        if w_type == LISTSELECTOR:
                            listselector_widget_dataset = {
                                NAME: w_name,
                                STEP_NAME: s_name,
                                DISP_NAME: w_display
                            }
                            widgets.append(
                                self.generate_widget_step_json(
                                    w_template, listselector_widget_dataset))
                        if w_type == BAR_CHART:
                            # generating chart widgets
                            bar_chart_dataset = {
                                NAME: w_name,
                                STEP_NAME: s_name,
                                BINS: w_prop_dict[BINS],
                                AXIS_MODE: w_prop_dict[AXIS_MODE],
                                VIS_TYPE: w_prop_dict[VIS_TYPE],
                                CHART_TITLE: w_prop_dict[CHART_TITLE],
                                TITLE_1: w_prop_dict[TITLE_1],
                                SUM: w_prop_dict[SUM],
                                SHOW_TITLE: w_prop_dict[SHOW_TITLE],
                                SHOW_AXIS: w_prop_dict[SHOW_AXIS],
                                SHOW_ACT: w_prop_dict[SHOW_ACT],
                                FONT_SIZE: w_font,
                                COL_MAP: w_prop_dict[COL_MAP],
                                SHOW_LGND: w_prop_dict[SHOW_LGND]
                            }
                            widgets.append(
                                self.generate_widget_step_json(
                                    w_template, bar_chart_dataset))
                        if w_type == LINE_CHART:
                            # generating line chart widgets
                            line_chart_dataset = {
                                NAME: w_name,
                                STEP_NAME: s_name,
                                AXIS_MODE: w_prop_dict[AXIS_MODE],
                                VIS_TYPE: w_prop_dict[VIS_TYPE],
                                MEASURE: w_prop_dict[MEASURE],
                                SHOW_DASH: w_prop_dict[SHOW_DASH],
                                FILL_AREA: w_prop_dict[FILL_AREA],
                                CHART_TITLE: w_prop_dict[CHART_TITLE],
                                TITLE_1: w_prop_dict[TITLE_1],
                                SHOW_TITLE: w_prop_dict[SHOW_TITLE],
                                SHOW_AXIS: w_prop_dict[SHOW_AXIS],
                                SHOW_ACT: w_prop_dict[SHOW_ACT],
                                SHOW_LGND: w_prop_dict[SHOW_LGND],
                                SHOW_ZERO: w_prop_dict[SHOW_ZERO],
                                FONT_SIZE: w_font
                            }
                            widgets.append(
                                self.generate_widget_step_json(
                                    w_template, line_chart_dataset))
                        if s_type == CHART:
                            # Flatten the step's key/value property list.
                            ch_prop_dict = {}
                            for ch_index, ch_property in enumerate(s_prop):
                                ch_prop_dict[
                                    ch_property[KEY]] = ch_property[VAL]
                            # Load the step's SAQL query body and JSON-escape
                            # it (dumps + strip outer quotes) so it can be
                            # spliced into a JSON template as a string value.
                            saql_json = self.saql_path + "/" + dashboard_name + "/" + ch_prop_dict[
                                SAQL_NAME]
                            saql_json = read_file(saql_json)
                            saql_json = json.dumps(saql_json).strip('"')
                            # generation of chart steps
                            if w_type == BAR_CHART or w_type == LINE_CHART:
                                chart_step_dataset = {
                                    STEP_NAME: s_name,
                                    SAQL_QUERY: saql_json,
                                    AXIS_MODE: w_prop_dict[AXIS_MODE],
                                    VIS_TYPE: w_prop_dict[VIS_TYPE],
                                    CHART_TITLE: w_prop_dict[CHART_TITLE],
                                    TITLE_1: w_prop_dict[TITLE_1],
                                    SHOW_TITLE: w_prop_dict[SHOW_TITLE],
                                    SHOW_AXIS: w_prop_dict[SHOW_AXIS],
                                    SHOW_ACT: w_prop_dict[SHOW_ACT],
                                    FONT_SIZE: w_font,
                                    SHOW_LGND: w_prop_dict[SHOW_LGND]
                                }
                            else:
                                chart_step_dataset = {
                                    STEP_NAME: s_name,
                                    SAQL_QUERY: saql_json
                                }
                            steps.append(
                                self.generate_widget_step_json(
                                    s_template, chart_step_dataset))
                            # generation of cost bucket/MoM calculations
                            if w_type == DYN_TEXT:
                                dyn_text_dataset = {
                                    NAME: w_name,
                                    STEP_NAME: s_name,
                                    VAR: ch_prop_dict[VAR],
                                    FONT_SIZE: w_font
                                }
                                widgets.append(
                                    self.generate_widget_step_json(
                                        w_template, dyn_text_dataset))
                            if w_type == NUM:
                                num_dataset = {
                                    NAME: w_name,
                                    STEP_NAME: s_name,
                                    VAR: ch_prop_dict[VAR],
                                    COMPACT: ch_prop_dict[COMPACT],
                                    FONT_SIZE: w_font
                                }
                                widgets.append(
                                    self.generate_widget_step_json(
                                        w_template, num_dataset))
                        # for dropdown/filtering steps
                        else:
                            # NOTE(review): this loop reuses `p_index`
                            # (shadowing the page index) and `property`
                            # (shadowing the builtin). Harmless as neither is
                            # read afterwards, but worth renaming.
                            s_prop_dict = {}
                            for p_index, property in enumerate(s_prop):
                                s_prop_dict[property[KEY]] = property[VAL]
                            saql_json = self.generate_saql_step_json(
                                dashboard_name, s_prop_dict[SAQL_NAME],
                                s_prop_dict[GROUP])
                            saql_json = json.dumps(saql_json).strip('"')
                            listselector_step_dataset = {
                                STEP_NAME: s_name,
                                GROUP: s_prop_dict[GROUP],
                                SAQL_QUERY: saql_json,
                                SELECT_MODE: s_prop_dict[SELECT_MODE]
                            }
                            steps.append(
                                self.generate_widget_step_json(
                                    s_template, listselector_step_dataset))
                    # Every widget also contributes a layout entry, with an
                    # optional STYLE override taken from its properties.
                    layouts.append(
                        self.generate_layouts_json(
                            w_name, widget[COLSPAN], widget[COL], widget[ROW],
                            widget[ROWSPAN], w_prop_dict.get(STYLE, "")))
                widgets.append(
                    self.generate_container_json(c_template, c_name))
            layouts = ",".join(layouts)
            pages.append(
                self.generate_page_json(p_name, p_display, p_template,
                                        layouts))
        widgets = ",".join(widgets)
        steps = ",".join(steps)
        pages = ",".join(pages)
        return pages, widgets, steps
    except Exception as e:
        exception_handler("", e)
def uploadToWave(self, toBeProcessedFile, access_token):
    """
    Upload a CSV file to Wave as an InsightsExternalData object.

    Creates the parent InsightsExternalData record, uploads the file as one
    or more InsightsExternalDataPart records (splitting when the file is
    larger than 8 MB), then triggers processing via ``send_data_request``.

    throws HttpError to be caught by the caller
    :param toBeProcessedFile: path of the CSV file to upload
    :param access_token: OAuth bearer token for the Wave REST API
    :return:
    """
    print('uploading .....%s' % toBeProcessedFile)
    # Create an InsightsExternalData object; metadata JSON is optional.
    base64_meta_json = None
    metaJSONdata = self._metadata_for_dataset(toBeProcessedFile)
    if metaJSONdata is not None:
        base64_meta_json = base64.b64encode(bytearray(metaJSONdata, 'UTF-8'))
    insight_object_data = {
        "Format": "Csv",
        "EdgemartAlias": self._dataset,
        "Operation": self._mode,
        "Action": "none",
        "MetadataJson": base64_meta_json.decode() if base64_meta_json is not None else ''
    }
    headers = {'Authorization': 'Bearer ' + access_token,
               'Content-Type': 'application/json'}
    insight_object_parent_id = self._wave_connector.postInsightsExternalData(
        insight_object_data)
    file_size_in_mb = float(os.path.getsize(toBeProcessedFile)) / float(MB_CONVERSION)
    print(str(file_size_in_mb) + ' MB')
    # If the file is > 8 MB, split it and upload each part separately.
    if file_size_in_mb > 8:
        fileparts = self.splitFile(toBeProcessedFile)
        for i in range(len(fileparts)):
            base64data = fileparts[i]
            json_content = {
                "DataFile": base64data.decode('ascii'),
                "InsightsExternalDataId": insight_object_parent_id,
                "PartNumber": i + 1
            }
            req = request.Request(self._request_external_data_part_url(),
                                  headers=headers,
                                  data=json.dumps(json_content).encode('ascii'))
            with request.urlopen(req) as response:
                insight_part_response = response.read()
            insight_object_response = json.loads(insight_part_response)
            if 'id' not in insight_object_response:
                raise Exception('Something went wrong with creating the InsightsExternalData object -- see error: ' + str(insight_part_response))
    else:
        data = utils.read_file(toBeProcessedFile)
        base64data = base64.b64encode(bytearray(data, 'UTF-8'))
        json_content = {
            "DataFile": base64data.decode('ascii'),
            "InsightsExternalDataId": insight_object_parent_id,
            "PartNumber": 1
        }
        req = request.Request(self._request_external_data_part_url(),
                              headers=headers,
                              data=json.dumps(json_content).encode('ascii'))
        with request.urlopen(req) as response:
            insight_part_response = response.read()
        print('POST done')
        insight_object_response = json.loads(insight_part_response)
        if 'id' not in insight_object_response:
            # BUGFIX: the original built the message from response.read()
            # after the response was already consumed and closed; report the
            # captured body instead.
            raise Exception('Something went wrong with creating the InsightsExternalData object -- see error: ' + str(insight_part_response))
    # Flag the parent object as ready for processing.
    self.send_data_request(headers, insight_object_parent_id)