def __str__(self):
    """Render ``self.body`` as a JSON string with top-level keys ordered
    per the AWS CloudFormation template anatomy documentation.

    Fixes vs. original: ``sorted()`` has no ``cmp=`` parameter in Python 3
    (and passing both ``key=`` and ``cmp=`` was never valid); the old
    comparator also returned an index instead of -1/0/1, and the method
    returned a dict rather than a str.
    """
    import json

    # Canonical key order taken from
    # AWSCloudFormation/latest/UserGuide/template-anatomy.html
    sorted_keys = [
        'AWSTemplateFormatVersion',
        'Description',
        'Parameters',
        'Mappings',
        'Conditions',
        'Resources',
        'Properties',
        'Function',
        'Outputs',
    ]

    def sort_index(key):
        # Keys not in the canonical list sort after all known ones,
        # keeping the output deterministic instead of raising ValueError.
        try:
            return sorted_keys.index(key)
        except ValueError:
            return len(sorted_keys)

    ordered = OrderedDict(
        sorted(self.body.items(), key=lambda item: sort_index(item[0])))
    # __str__ must return a string: serialize the ordered template.
    return json.dumps(ordered, indent=2)
def dump(self, jsonpath: Union[str, pathlib.Path]) -> None:
    """Save the evaluation results as a JSON file.

    Args:
        jsonpath (Union[str, pathlib.Path]): output file path

    Fixes vs. original: the inner loop variable ``data`` shadowed (and
    destroyed) the outer result dict; ``attr_to_errata`` stored only the
    last errata *key* instead of the per-attribute dict; the output file
    handle was never closed.
    """
    data = OrderedDict()
    data['pred_file'] = str(self.pred_file)
    data['label_file'] = str(self.label_file)
    data['quantitative_evaluation'] = {
        attr: qe._asdict()
        for attr, qe in self.quantitative_evaluation.items()
    }
    data['mean_quantitative_evaluation'] = \
        self.mean_quantitative_evaluation._asdict()

    attr_to_errata = OrderedDict()
    for attr_key, errata in self.errata_dict.items():
        errata_dict = OrderedDict()
        # Each errata field is a frame-like object; serialize it to a dict.
        # (Loop variable renamed so it no longer clobbers ``data``.)
        for errata_key, frame in errata._asdict().items():
            errata_dict[errata_key] = frame.to_dict()
        # Store the whole per-attribute dict, not just the last key.
        attr_to_errata[attr_key] = errata_dict
    data['errata'] = attr_to_errata

    jsonpath = pathlib.Path(jsonpath)
    with jsonpath.open('w', encoding='utf-8') as fp:
        json.dump(data, fp, ensure_ascii=False, indent=4)
def test_set_elements_from_serialized(self):
    """Elements set from their serialized form round-trip unchanged."""
    meta_map = OrderedDict()
    meta_map["col1"] = StringArrayMeta()
    meta_map["col2"] = StringArrayMeta()
    element_map = TableElementMap(meta_map)
    serialized = element_map.to_dict()
    self.tm.set_elements(serialized)
    self.assertEqual(serialized, self.tm.elements.to_dict())
def test_set_elements_from_serialized(self):
    """set_elements accepts a serialized element map and stores an
    equivalent one."""
    columns = OrderedDict([("col1", StringArrayMeta()),
                           ("col2", StringArrayMeta())])
    serialized = TableElementMap(columns).to_dict()
    self.tm.set_elements(serialized)
    self.assertEqual(serialized, self.tm.elements.to_dict())
def test_set_elements(self):
    """set_elements stores the map and reports the change once."""
    meta_map = OrderedDict()
    meta_map["col1"] = StringArrayMeta()
    meta_map["col2"] = StringArrayMeta()
    element_map = TableElementMap(meta_map)
    self.tm.set_elements(element_map)
    self.assertEqual(element_map, self.tm.elements)
    self.tm.process.report_changes.assert_called_once_with(
        [["elements"], element_map.to_dict()])
def test_set_elements(self):
    """Setting an element map updates tm.elements and emits exactly one
    change report."""
    columns = OrderedDict([("col1", StringArrayMeta()),
                           ("col2", StringArrayMeta())])
    element_map = TableElementMap(columns)
    self.tm.set_elements(element_map)
    self.assertEqual(element_map, self.tm.elements)
    expected_report = [["elements"], element_map.to_dict()]
    self.tm.process.report_changes.assert_called_once_with(expected_report)
def generate_transaction():
    """Flask view: build and sign a transaction from the posted form.

    Reads sender/recipient details from ``request.form``, tries to sign
    with charm's ShortSig group-signature scheme first, and falls back to
    the plain ``Transaction`` signing path if any step of the charm
    pipeline fails.

    Returns:
        (flask.Response, int): JSON body with the transaction payload and
        its signature, and HTTP status 200.
    """
    sender_address = request.form['sender_address']
    sender_public_key = request.form['sender_public_key']
    sender_private_key = request.form['sender_private_key']
    recipient_address = request.form['recipient_address']
    value = request.form['amount']

    # BUG fix: bind this BEFORE the try-block. The original assigned it
    # inside the try, so an early failure (e.g. constructing PairingGroup)
    # made the except branch crash with NameError.
    sender_global_public_key = sender_public_key

    try:
        group = PairingGroup('MNT224')
        short_sig = ShortSig(group)
        # Deserialize the textual keys into charm group objects.
        public_key_obj = bytesToObject(sender_public_key.encode(), group)
        secret_key_obj = bytesToObject(sender_private_key.encode(), group)

        payload = OrderedDict({
            'sender_address': sender_address,
            'sender_public_key': sender_global_public_key,
            'recipient_address': recipient_address,
            'value': value
        })
        # Sign the SHA-256 digest of the canonical JSON payload.
        digest = SHA256.new(
            data=json.dumps(payload).encode('utf-8')).hexdigest()
        signature = short_sig.sign(public_key_obj, secret_key_obj, digest)
        signature_str = objectToBytes(signature, group).decode()
    except Exception:
        # Narrowed from a bare ``except:``; any charm failure falls back
        # to the plain Transaction signing path.
        transaction = Transaction(sender_address, sender_private_key,
                                  sender_global_public_key,
                                  recipient_address, value)
        signature_str = transaction.sign_transaction()

    # Response payload (debug prints removed — the originals also leaked
    # the sender's private key to the server log).
    transaction = OrderedDict({
        'sender_address': sender_address,
        'sender_global_public_key': sender_global_public_key,
        'recipient_address': recipient_address,
        'value': value
    })
    response = {'transaction': transaction, 'signature': signature_str}
    return jsonify(response), 200
class GPE:
    """Bokeh-based Graph Projection Explorer.

    Reads per-projection CSV files from a data directory (or generates
    synthetic blob data in test mode) and serves linked scatter plots,
    histograms and a data table through a Bokeh server document.

    NOTE(review): this class uses Python-2-only constructs (a ``print``
    statement, indexing ``dict.keys()``) — presumably targets Python 2;
    confirm before porting.
    """

    def __init__(self,offline=False,test_n_plots=6,test_n_samples=1000,max_row_width=4):
        """ Initialize Graph Projection Explorer """
        print("\n\n" + '#' * 75)
        print("Bokeh Graph Projection Explorer V.0.0.1")
        print('Class Format')
        print('#' * 75 + '\n\n')
        self.test_n_plots = test_n_plots
        self.test_n_samples = test_n_samples
        self.max_row_width = max_row_width
        self.testmode = False
        self.n_newplots = 0
        if offline:
            # Operate in non command line argument mode
            self.testmode = True
            self.verbose = True
        else:
            parser = argparse.ArgumentParser()
            parser.add_argument("--dir", help="directory housing data")
            parser.add_argument("--mode", help="Options: Test, Presentation, Default")
            parser.add_argument("--downsample", help="If provided, randomly samples the data the provided number of times")
            parser.add_argument("--verbose", help="If True, Prints messages to console where server is running")
            self.args = parser.parse_args()
            try:
                self.args.downsample = int(self.args.downsample)
            except:
                # NOTE(review): bare except; ``TypeError()`` constructs an
                # exception instance but never raises it, so a bad
                # --downsample value is silently ignored.
                TypeError()
            if self.args.verbose:
                self.verbose = True
                # NOTE(review): self.verbose is never set when --verbose is
                # absent — later ``if self.verbose`` reads would raise
                # AttributeError; confirm intended CLI usage.
                print "Verbose Turned On"
            if isinstance(self.args.mode, str):
                if self.args.mode.lower() == 'test':
                    if self.verbose: print('Test Mode Selected')
                    self.testmode = True
                else:
                    raise ValueError("Mode argument: " + self.args.mode + "Not among valid modes")
            else:
                raise ValueError("Mode Argument must be of type str. Instead recieved: " + str(type(self.args.mode)))
        # Build Test Data if it exists
        if self.testmode:
            self.data_dir = self.gen_test_data()
        else:
            self.data_dir = self.args.dir
        # Initialize
        self.init_data()
        self.init_color()
        self.init_controls()
        if self.verbose:
            print("Initialization Timings")
            print("\tData Init: "+str(self.init_data_time))
            print("\tColor Init: " + str(self.init_color_time))
            print("\tControl Init: " + str(self.init_control_time))

    def read_data(self):
        """Read every vectors/*.csv under the data dir into the plot/data
        frames, pairing D1/D2 columns into scatter-plot x/y mappings.

        :return: None (populates self.plot_df, self.data_df, self.maps_dict,
            self.true_cols and self.initial_plot_2_data_mapper)
        """
        if self.verbose: print('\nReading Data\n')
        # Check Data
        assert os.path.isdir(self.data_dir)
        vecdir = os.path.join(self.data_dir,'vectors')
        graphdir = os.path.join(self.data_dir,'graphs')
        assert os.path.isdir(vecdir)
        # graphs/ is optional: swallow the failed assertion if it is absent
        try:
            assert os.path.isdir(graphdir)
        except AssertionError:
            pass
        vec_files = [os.path.join(vecdir,file) for file in os.listdir(vecdir) if file.endswith(".csv")]
        vec_files.sort()
        assert len(vec_files) > 0
        self.plot_df = OrderedDict()
        self.data_df = OrderedDict()
        self.maps_dict = OrderedDict()
        self.true_cols = []
        self.initial_plot_2_data_mapper = {}
        n_plot = 1
        self.n_plots = 0
        for i,f in enumerate(vec_files):
            if self.verbose: print("Reading File: %s"%f)
            file_prefix = f.split('/')[-1].split('.')[0] + "_"
            df = pd.read_csv(f)
            # Sometimes Unnamed: 0 creeps in by mistake of the user
            if "Unnamed: 0" in df.columns:
                df.drop(["Unnamed: 0"],axis=1,inplace=True)
            # Confirm Data Dim
            if i == 0:
                self.n,p = df.shape
            else:
                n,p = df.shape
                #assert n == self.n
            if isinstance(self.args.downsample, int):
                if self.verbose: print("Downsampling: %d"%self.args.downsample)
                df = df.sample(n=int(self.args.downsample),replace=False,random_state=1,axis=0)
                print(df.shape)
                self.n, p = df.shape
            # Test if D1 and D2 columns are found
            has_d1 = 0
            has_d2 = 0
            for col in df.columns:
                if 'D1' in col:
                    has_d1 += 1
                elif 'D2' in col:
                    has_d2 += 1
            if has_d1 == 1 and has_d2 == 1:
                has_both = True
            else:
                has_both = False
            if has_d1 > 1:
                warnings.warn("Too many column headers contain D1, cannot disambiguate")
            if has_d2 > 1:
                warnings.warn("Too many column headers contain D2, cannot disambiguate")
            if has_d1 != has_d2:
                warnings.warn("The number of D1 and D2 variable do not match")
            # Now that data validation is done, actually add data to self.df
            for col in df.columns:
                if 'D1' in col and has_both:
                    self.data_df[file_prefix + col] = df[col].values.tolist()
                    self.plot_df['Plot_%d_x' % n_plot] = df[col].values.tolist()
                    self.plot_df[file_prefix + col] = df[col].values.tolist()
                    self.true_cols.append(file_prefix + col)
                    self.initial_plot_2_data_mapper['Plot_%d_x' % n_plot] = file_prefix + col
                elif 'D2' in col and has_both:
                    self.data_df[file_prefix + col] = df[col].values.tolist()
                    self.plot_df['Plot_%d_y' % n_plot] = df[col].values.tolist()
                    self.plot_df[file_prefix + col] = df[col].values.tolist()
                    self.true_cols.append(file_prefix + col)
                    self.initial_plot_2_data_mapper['Plot_%d_y' % n_plot] = file_prefix + col
                else:
                    self.data_df[file_prefix + col] = df[col].values.tolist()
                    self.plot_df[file_prefix + col] = df[col].values.tolist()
                    self.true_cols.append(file_prefix + col)
            if has_both:
                self.maps_dict["Plot_%d" % n_plot] = ('Plot_%d_x' % n_plot,'Plot_%d_y' % n_plot)
                n_plot += 1

    def init_data(self):
        """ Load and Validate Data

        :return: elapsed initialization time in seconds
        """
        t0 = time()
        if self.verbose: print("Initializing Data Resources")
        self.read_data()
        self.n_plots = len(self.maps_dict.keys())
        self.color = "__COLOR__"
        self.plot_df["__COLOR__"] = ["#80bfff"] * self.n
        # NOTE(review): np.bool is deprecated/removed in modern NumPy
        self.plot_df["__selected__"] = np.ones_like(self.n, dtype=np.bool).tolist()
        self.data_df = pd.DataFrame(self.data_df)
        self.data_dict = self.data_df.to_dict(orient='list')
        assert isinstance(self.data_dict, dict)
        self.plot_df = pd.DataFrame(self.plot_df)
        self.plot_dict = self.plot_df.to_dict(orient='list')
        assert isinstance(self.data_dict, dict)
        # Used for indexing Selected Data
        self.inds_bool = np.ones_like(np.arange(self.n), dtype=np.bool)
        self.source = ColumnDataSource(data=self.plot_dict)
        self.table_source = ColumnDataSource(data=self.plot_df[self.true_cols].to_dict(orient='list'))
        self.init_data_time = time() - t0
        return self.init_data_time

    def gen_test_data(self):
        """ Generate Test Data, Store in temp dir and return dir path

        :return: dir path
        """
        if self.verbose: print('Generating Test Data')
        # Initialize Temp Testing Dir Structure
        tmpdir = tempfile.mkdtemp()
        tmpdir_p = os.path.join(tmpdir,'vectors')
        tmpdir_g = os.path.join(tmpdir,'graphs')
        os.mkdir(tmpdir_p)
        os.mkdir(tmpdir_g)
        assert isinstance(self.test_n_plots, int)
        assert isinstance(self.test_n_samples, int)
        # Make Blob data
        X, y = make_blobs(n_samples=self.test_n_samples, n_features=self.test_n_plots * 2, centers=6, cluster_std=0.75, random_state=1)
        # Store blob data in test dir: two columns (D1, D2) per plot file
        for i in range(self.test_n_plots):
            cols = X[:, (i * 2):((i * 2) + 2)]
            #cols[np.random.choice(range(self.test_n_samples),1),:] = [np.null,np.null]
            df = pd.DataFrame(data=cols, columns=('D1','D2'))
            df.to_csv(os.path.join(tmpdir_p,'P%d.csv'%i))
        meta_df = pd.DataFrame({'Meta':['Class: ' + str(label) for label in y]})
        meta_df.to_csv(os.path.join(tmpdir_p, 'Meta.csv'))
        # Generate Graph Data
        # TODO
        return tmpdir

    def init_color(self):
        """Precompute a patch-ready color map for every data column.

        :return: elapsed initialization time in seconds
        """
        t0 = time()
        if self.verbose: print("Initializing Color Resources")
        self.color_map_dict = {col: list(enumerate(get_color_map(self.data_dict[col]))) for col in self.data_dict.keys()}
        self.init_color_time = time() - t0
        return self.init_color_time

    def init_controls(self):
        """Build the shared Bokeh widgets (color picker, selection label,
        download button, tooltip selector).

        :return: elapsed initialization time in seconds
        """
        t0 = time()
        if self.verbose: print("Initializing Controls")
        # Initialize Controls
        self.color_selection = Select(title="Color By", options=self.data_dict.keys(), value=self.data_dict.keys()[0])
        self.selection_label = TextInput(value="MyGroup#1", title="Selection Label:")
        self.add_selection_label = Button(label="Add Selection Label")
        self.write_mod_file = Button(label="Download", button_type="primary")
        # Client-side download handled by the bundled download.js snippet
        self.write_mod_file.callback = CustomJS(args=dict(source=self.source), code=open(os.path.join(os.path.dirname(__file__), "download.js")).read())
        self.tooltip_select = MultiSelect(title='Tooltip',value = [self.data_dict.keys()[0]], options=[(key,key.upper()) for key in self.data_dict.keys()])
        # Declare Tooltip Contents
        self.tooltip_list = [(col, "@" + col) for col in self.tooltip_select.value]
        self.init_control_time = time() - t0
        return self.init_control_time

    def add_selection(self):
        """ Add new column to source containing copy of selection

        :return: None
        """
        self.source.add(self.source.data['__selected__'],name=self.selection_label.value)

    def make_plot(self,title, x, y):
        """Create one linked scatter plot (with its option tab) for the
        source columns x/y.

        :param title: plot key in self.maps_dict
        :param x: source column name for the x axis
        :param y: source column name for the y axis
        :return: (tabs, circle renderer)
        """
        print(title,x,y)
        t0 = time()
        pt = PanTool()
        lst = LassoSelectTool()
        pst = PolySelectTool()
        bst = BoxSelectTool()
        wzt = WheelZoomTool()
        tt = TapTool()
        st = SaveTool()
        ut = UndoTool()
        rt = RedoTool()
        p = figure(tools=[pt,lst,pst,bst,wzt,tt,st,ut,rt], plot_width=400, plot_height=400, title=self.initial_plot_2_data_mapper[x]+" vs. "+self.initial_plot_2_data_mapper[y], webgl=accelerator)
        # configure so that no drag tools are active
        p.toolbar.active_drag = pt
        # configure so that Bokeh chooses what (if any) scroll tool is active
        p.toolbar.active_scroll = wzt
        # configure so that a specific PolySelect tap tool is active
        p.toolbar.active_tap = tt
        p.xaxis.axis_label = self.initial_plot_2_data_mapper[x]
        p.yaxis.axis_label = self.initial_plot_2_data_mapper[y]
        c = p.circle(x=x, y=y, size=5, color="__COLOR__", alpha=.75, source=self.source, hover_color='white', hover_alpha=1, hover_line_color='grey')
        # Selection changes on any plot fan out through self.update
        c.data_source.on_change('selected', self.update)
        # Edge generator
        '''
        self.graph_set = [{i: [[1,0.15],[2,0.5],[3,0.99]] for i in range(self.n)}]
        self.edge_colors = qual_2_color(['g'+str(i) for i,_ in enumerate(self.graph_set)])
        self.edge_sources = [ColumnDataSource({'x0': [], 'y0': [], 'x1': [], 'y1': [], 'alpha': []}) for i in self.graph_set]
        self.edge_segments = [p.segment(x0='x0', y0='y0', x1='x1', y1='y1',
                                        color=self.edge_colors[i],
                                        alpha='alpha', line_width=3,
                                        #line_dash=[1,1],
                                        source=self.edge_sources[i]) for i, _ in enumerate(self.graph_set)]
        for i, _ in enumerate(self.graph_set):
            code1 = """
            var links = %s;
            var data = {'x0': [], 'y0': [], 'x1': [], 'y1': [], 'alpha': []};
            var cdata = circle.get('data');
            var indices = cb_data.index['1d'].indices;
            for (i=0; i < indices.length; i++) {
                ind0 = indices[i]
                for (j=0; j < links[ind0].length; j++) {
                    ind1 = links[ind0][j][0];
                    w = links[ind0][j][1];
            """ % self.graph_set[i]
            code2 = "data['x0'].push(cdata['" + x + "'][ind0]);\n" + \
                    "data['y0'].push(cdata['" + y + "'][ind0]);\n" + \
                    "data['x1'].push(cdata['" + x + "'][ind1]);\n" + \
                    "data['y1'].push(cdata['" + y + "'][ind1]);\n" + \
                    "data['alpha'].push([w]);\n"
            code3 = "}}segment.set('data', data);"
            code = code1 + code2 + code3
            callback = CustomJS(args={'circle': c.data_source, 'segment': self.edge_segments[i].data_source}, code=code)
            p.add_tools(HoverTool(tooltips=None, callback=callback, renderers=[c]))
        '''
        p.select(BoxSelectTool).select_every_mousemove = False
        p.select(LassoSelectTool).select_every_mousemove = False
        # Plot Controls
        xdim_select = Select(title="X Dim", options=self.data_dict.keys(), value=self.initial_plot_2_data_mapper[x],width=400)
        ydim_select = Select(title="Y Dim", options=self.data_dict.keys(), value=self.initial_plot_2_data_mapper[y],width=400)
        xdim_select.on_change('value', self.plot_update)
        ydim_select.on_change('value', self.plot_update)
        remove = Button(label="Remove", button_type="danger",width=400)
        remove.on_click(partial(self.remove_plot,title,x,y))
        self.plot_control_dict[title] = {'x':xdim_select, 'y':ydim_select, 'xprev':xdim_select.value, 'yprev':ydim_select.value, 'figure':p, 'tooltip':HoverTool(tooltips=self.tooltip_list,point_policy='snap_to_data',show_arrow=False)}
        # Give the hover tool a tool tip
        self.plot_control_dict[title]['figure'].add_tools(self.plot_control_dict[title]['tooltip'])
        # Form Tab
        plot_options = WidgetBox(xdim_select,ydim_select,remove)
        tab1 = Panel(child=self.plot_control_dict[title]['figure'], title=title,width=400,height=400)
        tab2 = Panel(child=plot_options, title="options",width=400,height=400)
        tabs = Tabs(tabs=[tab1, tab2],width=400,height=400)
        self.tab_list.append(tabs)
        self.circle_list.append(c)
        print('Plot Time: ' + str(time() - t0))
        return tabs, c

    def change_color(self,attrname, old, new):
        """ Change Color: patch the shared __COLOR__ column from the
        precomputed map for the selected column.

        :return: None
        """
        if self.verbose: print("---self.change_color---")
        self.source.patch({"__COLOR__": self.color_map_dict[self.color_selection.value]})
        if self.verbose: print('New Color: '+self.color_selection.value)

    def refresh_plots(self):
        """ Refresh plots by rebuilding the plot grid in the layout.

        :return: None
        """
        if self.verbose: print('---self.refresh_plots---')
        print(self.layout)
        print(self.layout.children)
        self.layout.children[0] = self.make_all_plots()
        print(self.layout)
        print(self.layout.children)
        #controls = self.make_all_controls()
        #dt = self.make_data_table()
        #self.layout.children[1] = row(controls, dt)

    def add_plot(self):
        """ Add new plot to the grid, seeded with two random data columns.

        :return: None
        """
        if self.verbose: print('---self.add_plot---')
        self.n_newplots += 1
        d1 = self.true_cols[np.random.randint(len(self.true_cols))]
        d2 = self.true_cols[np.random.randint(len(self.true_cols))]
        self.source.add(self.source.data[d1],name='NewPlot%d_x'%self.n_newplots)
        self.source.add(self.source.data[d2], name='NewPlot%d_y'%self.n_newplots)
        self.maps_dict['NewPlot%d'%self.n_newplots] = ('NewPlot%d_x'%self.n_newplots, 'NewPlot%d_y'%self.n_newplots)
        self.initial_plot_2_data_mapper['NewPlot%d_x'%self.n_newplots] = d1
        self.initial_plot_2_data_mapper['NewPlot%d_y'%self.n_newplots] = d2
        if self.verbose: print('---self.add_plot---Done')
        self.refresh_plots()

    def remove_plot(self,plot_title,x,y):
        """ Removes a plot from maps_dict and refreshes the grid.

        :return: None
        """
        if self.verbose: print('\n---self.remove_plot---')
        print('Getting Rid of')
        print(self.maps_dict[plot_title])
        self.maps_dict.pop(plot_title)
        print(x,y)
        #self.source.data.pop(x)
        #self.source.data.pop(y)
        print('After poping')
        print(self.maps_dict)
        print(self.source.data.keys())
        pprint(self.source.data)
        self.refresh_plots()

    def dist_plot(self):
        """Build small histogram figures for (up to) the first 8 data
        columns; the second quad layer is updated on selection.

        :return: bokeh grid of histogram figures
        """
        dist_plots = []
        self.dist_dict = {}
        for col in self.true_cols[0:8]:
            x = self.data_dict[col]
            # create the horizontal histogram
            hhist, hedges = np.histogram(x, bins=20)
            hzeros = np.zeros(len(hedges) - 1)
            hmax = max(hhist) * 1.1
            LINE_ARGS = dict(color="#3A5785", line_color=None)
            ph = figure(toolbar_location=None, plot_width=200, plot_height=100,title=col, y_range=(0, hmax), y_axis_location="left")
            ph.xgrid.grid_line_color = None
            ph.yaxis.major_label_orientation = np.pi / 4
            ph.background_fill_color = "#fafafa"
            ph.quad(bottom=0, left=hedges[:-1], right=hedges[1:], top=hhist, color="white", line_color="#3A5785")
            hh1 = ph.quad(bottom=0, left=hedges[:-1], right=hedges[1:], top=hzeros, alpha=0.5, **LINE_ARGS)
            # Add to dist dict
            self.dist_dict[col] = {'plot':ph, 'hh1':hh1, 'hedges':hedges}
            # Add to grid plot
            dist_plots.append(ph)
        gp = gridplot(dist_plots, plot_width=300, plot_height=200, ncols=4, height=400,toolbar_location=None,)
        return gp

    def make_all_plots(self):
        """Rebuild every plot tab from self.maps_dict.

        :return: bokeh gridplot of all plot tabs
        """
        self.tab_list = []
        self.plot_list = []
        self.circle_list = []
        self.plot_control_dict = {}
        self.n_plots = len(self.maps_dict.keys())
        # Make Each Plot
        for f in self.maps_dict.keys():
            xs = self.maps_dict[f][0]
            ys = self.maps_dict[f][1]
            self.make_plot(f, xs, ys)
        #if self.verbose: print("Grid of plots: "+str(self.tab_list))
        return gridplot(self.tab_list,ncols=self.max_row_width,plot_width=250, plot_height=250)

    def make_all_controls(self):
        """Wire widget callbacks and pack the shared controls into a box."""
        # Controls
        self.addplot = Button(label="Add New Plot", button_type="success")
        self.addplot.on_click(self.add_plot)
        self.add_selection_label.on_click(self.add_selection)
        self.tooltip_select.on_change('value',self.tooltip_update)
        self.color_selection.on_change('value',self.change_color)
        controls = [self.color_selection, self.tooltip_select, self.selection_label, self.add_selection_label, self.addplot, self.write_mod_file]
        return widgetbox(children=controls)

    def pseudo_update(self,n_selected):
        """Simulate a selection event with n_selected random indices
        (exercises self.update without a browser).

        :param n_selected: number of random indices to select
        :return: None
        """
        assert self.n >= n_selected
        pseudo_selected = np.random.choice(np.arange(self.n),size=n_selected)
        pseudo_new = {'1d':{'indices':pseudo_selected}}
        self.update(1,1,pseudo_new)

    def tooltip_update(self,attrname, old, new):
        """ Updates tooltip content for each plot

        :param attrname:
        :param old:
        :param new:
        :return: None
        """
        if self.verbose: print("\n------self.tooltip_update-----")
        self.tooltip_list = [(col.encode("utf-8"), "@" + col.encode("utf-8")) for col in self.tooltip_select.value]
        self.refresh_plots()
        #if self.verbose: print(contents)
        #for p in self.plot_control_dict:
        #    self.plot_control_dict[p]['tooltip'].tooltips = contents
        #    self.plot_control_dict[p]['tooltip'].plot = self.plot_control_dict[p]['figure']
        #    if self.verbose: print(self.plot_control_dict[p]['tooltip'].tooltips)

    def plot_update(self,attrname, old, new):
        """ Updates plot contents, title and axis labels

        :param attrname:
        :param old:
        :param new:
        :return: None
        """
        # modify column value of data source
        if self.verbose: print("\n------self.plot_update-----")
        for p in self.plot_control_dict:
            xy_mod = False
            # X
            if self.plot_control_dict[p]['x'].value != self.plot_control_dict[p]['xprev']:
                if self.verbose: print('X change on %s'%p)
                # patch
                self.source.patch({p+"_x": list(enumerate(self.data_dict[self.plot_control_dict[p]['x'].value]))})
                # update prev
                self.plot_control_dict[p]['xprev'] = self.plot_control_dict[p]['x'].value
                self.plot_control_dict[p]['figure'].xaxis.axis_label = self.plot_control_dict[p]['x'].value
                xy_mod = True
            # Y
            if self.plot_control_dict[p]['y'].value != self.plot_control_dict[p]['yprev']:
                if self.verbose: print('Y change on %s' % p)
                # patch
                self.source.patch({p+"_y": list(enumerate(self.data_dict[self.plot_control_dict[p]['y'].value]))})
                # update prev
                self.plot_control_dict[p]['yprev'] = self.plot_control_dict[p]['y'].value
                self.plot_control_dict[p]['figure'].yaxis.axis_label = self.plot_control_dict[p]['y'].value
                xy_mod = True
            # update title text
            if xy_mod:
                self.plot_control_dict[p]['figure'].title.text = self.plot_control_dict[p]['x'].value + " vs. " + self.plot_control_dict[p]['y'].value

    def update(self,attrname, old, new):
        """Selection callback: refresh the histogram overlays and the data
        table for the newly selected indices.

        :param attrname:
        :param old:
        :param new: dict carrying the selection at new['1d']['indices']
        :return: None
        """
        if self.verbose: print("\n------self.update-----")
        t0 = time()
        # Update Selected
        try:
            inds = np.array(new['1d']['indices'])
            if len(inds) == 0 or len(inds) == self.n:
                print("NOTHING SELECTED")
                pass
            else:
                print('Selected Set Size: ' + str(len(inds)))
                # Modify Dist Plots
                if self.verbose: print('\tUpdating DistPlots')
                for dist in self.dist_dict:
                    x = np.array(self.data_dict[dist])
                    xbins = self.dist_dict[dist]['hedges']
                    hhist1, _ = np.histogram(x[inds], bins=xbins)
                    self.dist_dict[dist]['hh1'].data_source.data["top"] = hhist1
                # Modify Data Table
                indbool = self.inds_bool
                indbool[inds] = False
                self.source.patch({"__selected__":list(enumerate(indbool))})
                full_table_dict = self.data_dict
                self.table_source.data = {col: np.array(full_table_dict[col])[inds] for col in full_table_dict.keys()}
        # Hack to stop string index type error that occurs when you change color
        except FloatingPointError:
            print("NOTHING SELECTED, error caught")
            pass
        self.update_time = time()-t0
        print(self.update_time)

    def make_data_table(self):
        """Build the DataTable widget fed by self.table_source."""
        # Add Table
        columns = [TableColumn(field=col, title=col) for col in self.data_df.keys()]
        dt = DataTable(source=self.table_source, columns=columns, width=1800, height=400, scroll_to_selection=False)
        return WidgetBox(dt)

    def go(self):
        """Assemble the full document layout and attach it to curdoc.

        :return: None
        """
        print("\n\n" + '#' * 75)
        print("Server Engaged")
        print('#' * 75 + '\n\n')
        plots = self.make_all_plots()
        controls = self.make_all_controls()
        dt = self.make_data_table()
        dp = self.dist_plot()
        dp = gridplot(dp, plot_width=300, plot_height=200, ncols=2, height=400)
        dp = Panel(child=dp, title="Distribution Plot", width=400, height=400)
        dt = Panel(child=dt, title="Data Table", width=400, height=400)
        dtdp = Tabs(tabs=[dp,dt],width=400,height=400)
        self.layout = column(children=[plots,row(controls,dtdp)])
        curdoc().add_root(self.layout)
        curdoc().title = 'Graph Projection Explorer'
class lx_data(object):
    """
    Serialization interface between python dicts and LX1 XML.

    Designed to be inherited so you can implement your own data input and
    output functions that build the self.data AutoVivification object
    (See lx_sales_order class for an example).

    Don't forget to set the object_type variable to define the xml data
    type. When we execute an update, we choose which lx_data subclass to
    hand the data to based on the object_type of the class and the file.

    After building the self.data dict, this object is passed to the
    upload_data function of the lx_connection object. It will call
    generate_xml to convert the self.data dict into an xml file, then
    upload it to the server.

    Alternatively, this class can be used to convert an LX1 xml file into
    an lx_data dict by passing the XML into the constructor.
    """

    def __init__(self, data=None):
        """ Either parse XML from LX1, or call self.extract on a browse_record """
        super(lx_data, self).__init__()

        # clear instance properties
        self._attachments = []
        self.upload_file_name = ''
        self.browse_record = None

        # handle data param
        if data and isinstance(data, browse_record):
            self.browse_record = data
            self._validate_required_fields()
            self.extract(data)
        elif data and isinstance(data, (dict, OrderedDict, list)):
            self.data = data
        elif data:
            raise TypeError(
                'Data must be a browse record, dict, OrderedDict or list')

    # list of file name prefix's that this class should handle when receiving them from LX1
    object_type = []

    # List of fields that should be truthy on the browse record. See _validate_required_fields
    required_fields = []

    # Used in the xml template if _use_xml_template is True. Should be set in child classes that are extracted
    message_identifier = None

    # file name generated and set by the upload function
    upload_file_name = ''

    # When instantialising from a browse_record, save a reference to it
    browse_record = None

    # extract function adds tuples containing attachment (contents, name, extension, type) to be uploaded along with main file
    _attachments = None

    # Use the generic xml template defined in generate_xml
    _use_xml_template = True

    def _validate_required_fields(self):
        """
        Check that all required_fields are satisfied, otherwise raise an
        osv exception with a description of the browse record and fields
        affected.

        Works on many2one relational fields using dot notation, and
        one2many relational fields if the o2m field is the first field in
        the chain. o2m fields should be written like [field_name].

        Examples:
            date
            product_id.name
            [move_lines].name
        """
        if not self.browse_record:
            raise ValueError('Missing self.browse_record')

        invalid_fields = []

        # Iterate over required_fields checking if they have been satisfied
        for required_field in self.required_fields:
            if '.' not in required_field:
                # simple field check
                if not self.browse_record[required_field]:
                    invalid_fields.append(required_field)
            else:
                # relational field check
                def check(self, target, fields):
                    """ Convert fields into a dot notation query and execute it on target """
                    query = '%s.%s' % (target, '.'.join(fields))
                    try:
                        # NOTE(review): eval on a constructed query string —
                        # inputs come from class-level required_fields, not
                        # user data, but keep it that way.
                        res = eval(query)
                        if isinstance(res, (unicode, str)):
                            res = res.strip()
                        if not res:
                            invalid_fields.append(required_field)
                    except Exception:
                        # any lookup failure marks the field invalid
                        invalid_fields.append(required_field)

                fields = required_field.split('.')

                # check for one2many type field
                if fields[0][0:1] == '[' and fields[0][-1:] == ']':
                    one2many_field = fields[0][1:-1]
                    if not self.browse_record[one2many_field]:
                        invalid_fields.append(one2many_field)
                    else:
                        # validate the remaining chain on every record of the o2m
                        for record_index in xrange(
                                0, len(self.browse_record[one2many_field])):
                            target = 'self.browse_record.%s[%d]' % (
                                one2many_field, record_index)
                            check(self, target, fields[1:])
                else:
                    check(self, 'self.browse_record', fields)

        # raise exception if necessary
        if invalid_fields:
            invalid_fields_str = ''
            for field in invalid_fields:
                if '.' in field:
                    # prettify "product_id.name" style chains for the message
                    field_parts = map(
                        lambda p: '_' in p and p.split('_')[0] or p,
                        field.split('.'))
                    invalid_fields_str += '\n%s' % ' -> '.join(
                        map(lambda p: p.title().replace('[', ''), field_parts))
                else:
                    invalid_fields_str += '\n%s' % field
            except_args = (self.browse_record._description,
                           self.browse_record[self.browse_record._rec_name],
                           invalid_fields_str)
            raise except_osv(
                _("Required Fields Invalid"),
                _('The following required fields were invalid for %s "%s": \n\n %s'
                  ) % except_args)
        else:
            return None

    def safe_get(self, dictionary, key):
        """ Returns dictionary[key] or None if it does not exist """
        if key in dictionary:
            return dictionary[key]
        else:
            return None

    def insert_data(self, insert_target, params):
        """
        Insert keys and values from params into self.data at insert_target.
        Calling this method twice on the same key will convert the key from
        a dict to a list of dicts. In this way it can handle multiple xml
        nodes with the same name.

        @param dict params: keys and values to insert into self.data
        @param str insert_target: dot separated values for insert target.
               For example 'order.customer' inserts to
               self.data['order']['customer']
        """
        # save reference to the target key inside the nested dictionary self.data
        target = self.data
        for target_key in insert_target.split('.'):
            parent = target
            target = target[target_key]

        # have we already saved data to this key? If yes, convert it to a list of dicts
        if isinstance(target, (AutoVivification, OrderedDict)) and len(target) != 0:
            autoviv = False
            parent[target_key] = [target]
            target = parent[target_key]
        elif isinstance(target, list):
            autoviv = False
        else:
            autoviv = True

        if autoviv:
            # add data to the empty dict like normal
            for param_name in params:
                param_value = params[param_name]
                if not param_name == 'self':
                    target[param_name] = param_value
        else:
            # create new dict to be added to the list of dicts
            val = AutoVivification()
            for param_name in params:
                param_value = params[param_name]
                if not param_name == 'self':
                    val[param_name] = param_value
            target.append(val)

    def add_attachments(self, pool, cr, uid, model, ids, report_name,
                        file_name_prefix, report_type, data_type='pdf'):
        """
        Generate attachments for ids and insert the data into self._attachments

        @param dict pool: openerp object pool
        @param string model: The model for which to create the report
        @param list ids: The ids of the records for which to create the report
        @param string report_name: The internal technical name of the report to be used
        @param string file_name_prefix: The prefix for the file name to be added to _ObjectId
        @param string report_type: the type string to be entered into the tuple.
               This will be used when uploading data to LX1
        @param string data_type: the type of data returned by this call.
               Used internally by the report mechanism
        """
        report_obj = pool.get('ir.actions.report.xml')

        # allow a single id to be passed instead of a list
        if not hasattr(ids, '__iter__'):
            ids = [ids]

        report_data = {'report_type': data_type, 'model': model}

        for obj_id in ids:
            file_name = string_to_file_name('%s_%d' % (file_name_prefix, obj_id))
            report_contents, report_extension = report_obj.render_report(
                cr, uid, [obj_id], report_name, report_data)
            self._attachments.append(
                (report_contents, file_name, report_extension, report_type))

    def generate_xml(self):
        """
        If _use_xml_template is true, puts self.data inside the appropriate
        node in the XML header and then returns a StringIO containing an XML
        representation of the self.data nested dict
        """
        # add xml template if self._use_xml_template is truthy
        if self._use_xml_template:
            assert self.message_identifier, "message_identifier variable not set!"
            content = self.data
            self.data = OrderedDict([
                ('__attrs__', OrderedDict([
                    # add xmlns etc to root element (ServiceRequest)
                    ('xmlns', 'http://www.aqcon.com/lxone/inboundService'),
                    ('xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance'),
                    ('xsi:schemaLocation',
                     'http://www.aqcon.com/lxone/inboundService /home/openerp/openerp/lx-one/schemas/InboundService.xsd'
                     ),
                ])),
                ('ServiceRequestHeader', OrderedDict([
                    ('ServiceRequestor', 'LX One'),
                    ('ServiceProvider', 'LX One'),
                    ('ServiceIdentifier', 'OpenERP'),
                    ('MessageIdentifier', self.message_identifier),
                    ('RequestDateTime', datetime.now().isoformat()),
                    ('ResponseRequest', 'Never'),
                ])),
                ('ServiceDefinition', content),
            ])

        # validate self.data and convert autoviv to ordered dict if needed
        self._check_ordered_dicts_only(self.data)
        if type(self.data) == AutoVivification:
            self.data = self.data.to_dict()

        # convert to pretty XML
        output = StringIO.StringIO()
        xd = XMLDumper(output, XML_DUMP_PRETTY | XML_STRICT_HDR)
        xd.XMLDumpKeyValue('ServiceRequest', self.data)
        output.seek(0)
        return output

    def _check_ordered_dicts_only(self, struct):
        """
        Looks at every element recursively in struct and raises a TypeError
        if it finds a regular dict (ordering would be lost in the XML)
        """
        for key in struct:
            val = struct[key]
            if type(val) == dict:
                raise TypeError(
                    'Regular dict found! Should only use ordered dicts')
            elif type(val) == OrderedDict:
                self._check_ordered_dicts_only(val)

    @staticmethod
    def reorganise_data(data, header, namespace):
        """
        This method is called by the poll function to give each object type
        the opportunity to reorganise the data received from LX1, after it
        is parsed from the XML file and before it is used to generate updates.

        @param AutoVivification data: The parsed XML from LX1
        @return data
        """
        return data, header, namespace

    def extract(self, record):
        """
        Called by the constructor when given a browse_record. This method
        should extract the browse_record's data into the self.data object.
        This method is a stub that you have to implement in an inheriting
        model.

        @param browse_record record: browse_record from which to extract data
        @return self: allow for chaining
        """
        # BUG fix: the original raised ``NotImplemented`` (a sentinel, not
        # an exception class) which itself raises TypeError at runtime.
        raise NotImplementedError(
            'Please implement this method in your inherited model')

    def process(self, pool, cr):
        """
        Called by process_all which is triggered by the OpenERP poll
        function. Override this method to do something with self.data in
        OpenERP. Any exceptions should be raised.

        @param pool: OpenERP object pool
        @param cr: OpenERP database cursor
        """
        # BUG fix: same NotImplemented -> NotImplementedError correction
        raise NotImplementedError(
            'Please implement this method in your inherited model')