def __init__(self, parent):
    """Build the import dialog: three list views plus a CSV preview table.

    :param parent: owning widget; must expose ``model`` and ``partslist``
    """
    super().__init__(parent)
    self.__parent__ = parent
    self.parent_model = parent.model
    self.partslist = parent.partslist
    # Identifiers used elsewhere in the dialog — names suggest they pick out
    # the rank / quantity / key columns of the imported data (TODO confirm).
    self.rank_title = 'rank'
    self.quantities_key = 'quantity'
    self.key_column = 'key'
    self.ui = Ui_Dialog()
    self.ui.setupUi(self)
    # list view: rank model
    self.model_rank = Model(self, Item(), Column())
    self.ui.listView.setModel(self.model_rank)
    self.ui.listView.setItemDelegate(Delegate())
    # list view 2: quantity model
    self.model_quantity = Model(self, Item(), Column())
    self.ui.listView_2.setModel(self.model_quantity)
    self.ui.listView_2.setItemDelegate(Delegate())
    # list view 3: key model
    self.model_key = Model(self, Item(), Column())
    self.ui.listView_3.setModel(self.model_key)
    self.ui.listView_3.setItemDelegate(Delegate())
    # table view: the parsed CSV contents
    self.model_csv = Model(self, Item(), Column())
    self.ui.tableView.setModel(self.model_csv)
    self.ui.tableView.setItemDelegate(Delegate())
    self.ui.pushButton.clicked.connect(self.open_file)
def reader(filename, ignoring, scheme=None):
    """Read tabular data stored under directory *filename*.

    ``<filename>.zis`` is preferred when it exists, otherwise
    ``<filename>.csv`` is read (paths are relative to the directory the
    function chdirs into). Row 0 supplies the column titles; data rows with
    index < *ignoring* are skipped.

    :param filename: directory name that also prefixes the data file
    :param ignoring: first data-row index to keep
    :param scheme: optional scheme with ``type_by_field``; when None it is
        loaded via ``read_scheme`` and falls back to ``{}`` on any failure
    :return: a ``Table`` when a scheme is available, otherwise the raw list
        of per-column value lists
    """
    os.chdir(filename)
    try:
        if scheme is None:
            try:
                scheme = read_scheme(filename)
            except Exception:  # was a bare except:; keep best-effort fallback
                scheme = {}
        # Prefer the .zis file when present, otherwise fall back to .csv.
        suffix = '.zis' if os.path.exists(filename + '.zis') else '.csv'
        columns = []
        # Fix: the file object used to be leaked; close it deterministically.
        with open(filename + suffix) as handle:
            for i, line in enumerate(csv.reader(handle)):
                if i == 0:
                    columns = [[title] for title in line]
                elif ignoring <= i:
                    for j, item in enumerate(line):
                        columns[j].append(item)
        if scheme != {}:
            # item[1:] is [] for a title-only column, so one branch suffices.
            columnlist = [
                Column(item[0], scheme.type_by_field(item[0]), item[1:])
                for item in columns
            ]
            return Table(columnlist)
        # Fix: the scheme-less path previously returned the (always empty)
        # columnlist accumulator, silently discarding everything just read.
        return columns
    finally:
        # Fix: restore the working directory even when an exception escapes.
        os.chdir("..")
def _get_rel_data_restricted(self, sql):
    """Fetch (and cache) rows of a related table for a restricted call.

    Reads table_name/relation/pk/pk_value stashed in
    ``self._restricted_table_call`` by an earlier call. On a cache miss the
    relation rows are fetched either as a UNION of per-key limited selects
    (when the builder carries a LIMIT) or as a single ``IN (...)`` query,
    then stored in the cache.

    :param sql: SQL builder targeting the relation table
    :return: ResultSet over the cached relation rows matching ``pk_value``
    """
    saved = self._restricted_table_call
    table_name, relation, pk, pk_value = saved['table_name'], saved['relation'], saved['pk'], saved['pk_value']
    # FK column on the relation table that points back at table_name.
    relation_fk = Structure.get_foreign_key_for_table(relation, table_name)
    if not self._cache.relation_exists(table_name, relation):
        if sql._select_args:
            # Ensure the FK is selected so rows can be grouped per parent key.
            sql.add_select_arg(Column(relation, relation_fk))
        if sql._limit:
            # A LIMIT must apply per parent key, so build one limited SELECT
            # per key and stitch them together with UNION.
            union_sql = []
            union_parameters = []
            union_dict = {}
            for id in self._cache.get_all_keys(table_name, pk):
                limiting_sql = copy.deepcopy(sql)
                limiting_sql.add_where_literal(Column(relation, relation_fk) == id)
                union_dict = limiting_sql.build_select()
                union_sql.append('(%s)' % union_dict['sql'])
                union_parameters.extend(union_dict['parameters'])
            # Reuse the last build_select() dict as the combined query.
            union_dict['sql'], union_dict['parameters'] = ' UNION '.join(union_sql), union_parameters
            data = Query().execute_and_fetch(**union_dict)
            self._cache.save_relation(table_name, relation, data)
        else:
            # No LIMIT: one query restricted to all known parent keys.
            sql.add_where_literal(Column(relation, relation_fk).in_(self._cache.get_all_keys(table_name, pk)))
            data = Query().execute_and_fetch(**sql.build_select())
            self._cache.save_relation(table_name, relation, data)
    return ResultSet(self._cache.get_relation_set(relation, relation_fk, pk_value), relation, self._cache)
class WebRating(Table):
    """The table specified in Task 3.

    Declarative mapping named ``test`` with four columns: a numeric id, the
    rated URL, the rating timestamp, and the rating value (kept as a string).
    """
    table_name = 'test'
    # Surrogate key.
    id = Column('id', Integer())
    # Address of the rated page.
    url = Column('url', String())
    # When the rating was recorded.
    date = Column('date', DateTime())
    # Rating value; stored as text rather than a number.
    rating = Column('rating', String())
def __init__(self, database_file):
    """Load *database_file* and index every store into four search columns."""
    # Column(field, weight_a, weight_b) — weights as in the original design.
    self.mallCol = Column(Store.MALL_NAME, 2.0, 8.0)
    self.zipcodeCol = Column(Store.ZIPCODE, 2.0, 1.0)
    self.gstCol = Column(Store.GST_NO, 2.0, 1.0)
    self.storeCol = Column(Store.STORE_NAME, 2.0, 8.0)
    self.readCSV(database_file)
    # Feed every loaded store into each of the four indexes.
    index_columns = (self.mallCol, self.zipcodeCol, self.gstCol, self.storeCol)
    for record in self.data:
        for index_column in index_columns:
            index_column.initAddRow(record)
def test_get_column_value(self):
    """get_column_value returns the row entry matching the requested column."""
    columns = [
        Column("rrr", 0, "str", "sample1"),
        Column("abc", 1, "int", "sample1"),
        Column("xyz", 2, "str", "sample1"),
    ]
    row = ["text for col1", 99, "text for col3"]
    expected = {"rrr": "text for col1", "abc": 99, "xyz": "text for col3"}
    for name, value in expected.items():
        self.assertEqual(main.get_column_value(columns, row, name), value)
def createColumns(infopath):
    """Build store-name and mall-name search columns from a JSON info file."""
    with open(infopath, 'r') as handle:
        infos = json.load(handle)
    data = [Store(info) for info in infos]
    mallCol = Column(Store.MALL_NAME, 2.0, 8.0)
    storeCol = Column(Store.STORE_NAME, 2.0, 8.0)
    for store in data:
        mallCol.initAddRow(store)
        storeCol.initAddRow(store)
    # Diagnostic: total stores vs. distinct store-name keys indexed.
    print(str(len(data)) + '----' + str(len(storeCol.values.keys())))
    return storeCol, mallCol
def setView(self, view, logging=None):
    """Classify a parsed LookML view and pull out its relevant pieces.

    Sets ``viewType`` to 'NDT' (native derived table with an explore_source),
    'PDT' (SQL-derived table), or 'VIEW' (plain sql_table_name view), and
    populates columns / SQL / persistence info accordingly.

    :param view: parsed LookML view dictionary
    :param logging: unused here; accepted for the caller's signature
    """
    self.parsedView = view
    if 'derived_table' in self.parsedView:
        if 'extends__all' in self.parsedView:
            self.extends = self.parsedView['extends__all'][0][0]
        if 'explore_source' in self.parsedView['derived_table']:
            # Native derived table: columns come from another explore.
            self.viewType = 'NDT'
            exploreSource = self.parsedView['derived_table'][
                'explore_source']
            if 'name' in exploreSource:
                self.exploreSourceName = self.parsedView['derived_table'][
                    'explore_source']['name']
            if 'columns' in exploreSource:
                columns = exploreSource['columns']
                for column_ in columns:
                    columnObj = Column()
                    columnObj.setColumn(column_, "COLUMN")
                    self.columns.append(columnObj)
            if 'derived_columns' in exploreSource:
                columns = exploreSource['derived_columns']
                for column_ in columns:
                    columnObj = Column()
                    columnObj.setColumn(column_, "DERIVED_COLUMN")
                    self.columns.append(columnObj)
        else:
            # SQL-derived (persistent) table.
            if 'sql' in self.parsedView['derived_table']:
                self.sql = self.parsedView['derived_table']['sql']
                # NOTE(review): '\"' is just '"', so this replace is a no-op;
                # it probably meant to escape quotes ('\\"') — confirm intent.
                self.sql = self.sql.replace('"', '\"')
            if 'persist_for' in self.parsedView['derived_table']:
                self.persistedSQL = self.parsedView['derived_table'][
                    'persist_for']
                # NOTE(review): no-op replace, same as above.
                self.persistedSQL = self.persistedSQL.replace('"', "\"")
                self.persistedType = 'PERSIST_FOR'
            if 'sql_trigger_value' in self.parsedView['derived_table']:
                self.persistedSQL = self.parsedView['derived_table'][
                    'sql_trigger_value']
                # NOTE(review): no-op replace, same as above.
                self.persistedSQL = self.persistedSQL.replace('"', "\"")
                self.persistedType = 'SQL_TRIGGER_VALUE'
            self.viewType = 'PDT'
    elif 'sql_table_name' in view:
        # Plain view over an existing database table.
        self.viewType = 'VIEW'
        self.sql_table_name = self.parsedView['sql_table_name']
    if 'name' in self.parsedView:
        self.name = self.parsedView['name']
    if 'extends__all' in self.parsedView:
        self.extends = self.parsedView['extends__all'][0][0]
def createColumnsCsv(infopath):
    """Build store-name and mall-name search columns from a CSV info file.

    :param infopath: path to a CSV with store records (zipcode kept as text)
    :return: (storeCol, mallCol) populated Column indexes
    """
    # Fix: np.str was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin str is the supported spelling and behaves identically here.
    temp = pd.read_csv(infopath, dtype={'zipcode': str})
    temp.fillna('', inplace=True)
    temp = temp.to_dict('records')
    data = [Store(s) for s in temp]
    mallCol = Column(Store.MALL_NAME, 2.0, 8.0)
    storeCol = Column(Store.STORE_NAME, 2.0, 8.0)
    for store in data:
        mallCol.initAddRow(store)
        storeCol.initAddRow(store)
    # Diagnostic: total stores vs. distinct store-name keys indexed.
    print(str(len(data)) + '----' + str(len(storeCol.values.keys())))
    return storeCol, mallCol
def read_data_from_xml(self, file_path):
    """Parse *file_path* as XML and accumulate one Column per attribute/tag.

    Each attribute name of a top-level child, and each tag of its
    sub-elements, becomes a key in ``self.column_map``; observed values are
    appended to the matching Column.
    """
    root = ElementTree.parse(file_path).getroot()

    def record(name, value):
        # Lazily create the column on first sight, then append the value.
        if name not in self.column_map:
            self.column_map[name] = Column(name, file_path)
        self.column_map[name].add_value(value)

    for child in root:
        for attrib_name, attrib_value in child.attrib.items():
            record(attrib_name, attrib_value)
        for sub_element in child:
            record(sub_element.tag, sub_element.text)
def __init__(self, app):
    """Assemble the main window: file table, image-view hooks and toolbar.

    :param app: application object (unused in this body; kept for callers)
    """
    super().__init__()
    # Clipboard for table data; None until the first copy operation.
    self.copy_data = None
    self.ui = Ui_MainWindow()
    self.ui.setupUi(self)
    self.resize(800, 600)
    # Table of loaded files with three fixed columns.
    self.model = Model(self, Item(),
                       Column(['File name', 'Resolution', 'Settings']))
    self.ui.tableView.setModel(self.model)
    self.ui.tableView.setItemDelegate(Delegate())
    self.ui.tableView.clicked.connect(self.tableview_clicked)
    self.ui.tableView.customContextMenuRequested.connect(
        self.context_menu_tableview)
    self.tool_bar = ToolBar(self.ui.toolBar)
    self.ui.toolBar.addWidget(self.tool_bar)
    # Toolbar wiring: open files, recognize, split cells, view refreshes, OCR.
    self.tool_bar.ui.toolButton.clicked.connect(self.open_files)
    self.tool_bar.ui.toolButton_3.clicked.connect(
        self.ui.image_view.recognize)
    self.tool_bar.ui.toolButton_4.clicked.connect(self.split_cells)
    self.tool_bar.ui.toolButton_5.clicked.connect(
        self.ui.image_view.graphics_view_update)
    self.tool_bar.ui.toolButton_6.clicked.connect(
        self.ui.image_view.graphics_view_update)
    self.tool_bar.ui.toolButton_7.clicked.connect(
        self.ui.image_view.graphics_view_update)
    self.tool_bar.ui.toolButton_8.clicked.connect(self.ocr)
def createRandomTable(cls, name, nrow, ncol, ncolstr=0,
                      low_int=0, hi_int=100, table_cls=None):
    """
    Creates a table with random integers as values
    Input: name - name of the table
           nrow - number of rows
           ncol - number of columns
           ncolstr - number of columns with strings
           low_int - smallest integer
           hi_int - largest integer
           table_cls - Table class to use; default is cls
    Returns the populated table.
    """
    if table_cls is None:
        table_cls = cls
    ncol = int(ncol)
    nrow = int(nrow)
    # Fix: the computed table_cls was previously ignored (cls was always
    # instantiated), so the table_cls parameter had no effect.
    table = table_cls(name)
    ncolstr = min(ncol, ncolstr)
    ncolint = ncol - ncolstr
    # Fix: random.shuffle needs a mutable sequence; shuffling a bare
    # range() raises TypeError on Python 3.
    c_list = list(range(ncol))
    random.shuffle(c_list)
    for n in range(ncol):
        column = Column("Col_" + str(n))
        if c_list[n] <= ncolint - 1:
            # Integer column: uniform random ints in [low_int, hi_int).
            values = np.random.randint(low_int, hi_int, nrow)
            values_ext = values.tolist()
        else:
            # String column: random words.
            values_ext = ut.randomWords(nrow)
        column.addCells(np.array(values_ext))
        table.addColumn(column)
    table.setFilepath(settings.SCISHEETS_DEFAULT_TABLEFILE)
    return table
def get_column_by_id(self, id):
    """Fetch one column by id; returns None when the request fails."""
    status, result = self._send_template_request('getColumn',
                                                 {'column_id': id})
    return Column(self, result) if status else None
def __iter__(self):
    '''Will return an iterable of all columns'''
    # One iterator per column file found directly under self.file_name.
    pattern = os.path.join(self.file_name, '*')
    self.columns = [iter(Column(path)) for path in glob.glob(pattern)]
    return self
def get_columns_from_file_data(file_lines: List[List[str]],
                               file_has_headers: bool) -> List[Column]:
    """
    Extracts the columns information from the file and automatically sets their
    name, type and nullable according to the corresponding data
    :param file_lines: All the lines from the csv already interpreted as a 2D table of strings
    :param file_has_headers: If the file has headers in the first line
    :return: The list of interpreted columns
    """
    first_line = file_lines[0]
    # Types are sniffed from the first data row only.
    first_data_line = file_lines[1] if file_has_headers else file_lines[0]
    cols = []
    for index, data in enumerate(first_line):
        col_name = first_line[index] if file_has_headers else "col{}".format(index + 1)
        col_data = first_data_line[index]
        # Fix: the previous pattern ^[1-9]\d*([,.]\d+)?$ rejected the plain
        # value "0" (and "0.5"), mistyping it as str. Values with leading
        # zeros such as "007" are still treated as strings on purpose, since
        # they usually denote codes rather than numbers.
        if re.match(r"^(0|[1-9]\d*)([,.]\d+)?$", col_data):
            data_type = "number"
        elif re.match(r"\d{2,4}[-\/]\d{2,4}[-\/]\d{2,4}", col_data):
            data_type = "date"
        else:
            data_type = "str"
        cols.append(Column(col_name, index, data_type, first_data_line[index]))
    return cols
def __init__(self):
    '''Set up an empty seven-column Connect-Four board.'''
    # Message shown when the user attempts an illegal move.
    self.error = "\nThis is an invalid move. Look at the board carefully and play again.\n"
    # True once every cell is occupied.
    self.board_full = False
    # One Column object per board position, seven in total.
    self.board_list = [Column(i) for i in range(7)]
    # True once a player has connected four.
    self.winner = False
    # Whose turn it is; red ('R') always moves first.
    self.chip_color = 'R'
    # Printable board: 7 columns x 6 cells, all empty ('.') to start with.
    self.output_list = [['.'] * 6 for _ in range(7)]
def get_table_structure(self, table_name):
    """Return column metadata for *table_name* from Oracle's data dictionary.

    :param table_name: exact (case-sensitive) table name to describe
    :return: list of Column objects, one per table column, in COLUMN_ID order
    """
    with self.db.cursor() as cursor:
        # Fix: table_name used to be interpolated into the SQL text with an
        # f-string (SQL-injection risk); bind it as an Oracle-style named
        # parameter instead.
        cursor.execute(
            """select t.TABLE_NAME AS tableName,
                      t.COLUMN_NAME AS columnName,
                      c.COMMENTS AS columnComment,
                      t.NULLABLE AS nullable,
                      t.DATA_DEFAULT,
                      t.DATA_TYPE AS dataType,
                      t.CHAR_LENGTH AS strLength,
                      t.DATA_PRECISION AS numLength,
                      t.DATA_SCALE AS numBit
               from user_tab_columns t, user_col_comments c
               where t.TABLE_NAME = c.TABLE_NAME
                 and t.COLUMN_NAME = c.COLUMN_NAME
                 and t.TABLE_NAME = :table_name
               order by t.TABLE_NAME, t.COLUMN_ID""",
            {'table_name': table_name})
        columns = []
        for row in cursor.fetchall():
            columns.append(
                Column(column_name=row[1].lower(),
                       comments=row[2],
                       nullable=row[3],
                       data_default=row[4],
                       datatype=row[5],
                       char_length=row[6],
                       data_precision=row[7],
                       data_scale=row[8]))
        return columns
def create_measure(self, composition, measure):
    '''
    Creates one printable measure and returns it as X * 16 matrix, where X
    is the number of columns, which is determined by the shortest note
    duration. e.g. if shortest note is 1/8, measure consists of 8 columns.

    :param composition: object exposing .notes and .rests, each item with
        .measure and .duration attributes
    :param measure: index of the measure to render
    :return: list of X columns, each a 16-row list
    '''
    # Find the shortest note/rest duration in this measure (fraction of a
    # whole note); it fixes the time resolution of the grid.
    shortest = 1
    for note in composition.notes:
        if measure == note.measure:
            if note.duration < shortest:
                shortest = note.duration
    for rest in composition.rests:
        if measure == rest.measure:
            if rest.duration < shortest:
                shortest = rest.duration
    # e.g. shortest 1/8 -> 8 columns for the measure.
    columns = int(1 / shortest) * [None]  # list of columns in this measure
    for i in range(len(columns)):  # for each column in measure
        # Column positioned at the end of its time slice within the measure.
        col = Column(composition, measure,
                     (shortest * i) + shortest)  # create new column
        columns[i] = col  # add to the list of columns of this measure
    measure_matrix = [[None] * 16
                      for i in range(len(columns))
                      ]  # create matrix for the whole measure
    # Copy each column's 16 rows into the matrix (one matrix row per column).
    for j in range(16):
        for i in range(len(columns)):
            measure_matrix[i][j] = columns[i].rows[
                j]  # fill matrix according to list of columns
    return measure_matrix
def __init__(self, name, path, gtfsPath, cols, exists):
    """Store file metadata and wrap every column name in a Column object."""
    self.name = name
    self.path = path
    self.gtfsPath = gtfsPath
    self.exists = exists
    # Map each column name to a Column bound to this file.
    self.columns = {col: Column(col, self) for col in cols}
def select(self, *args):
    """Register explicit select columns, always including the primary key.

    :param args: Column instances to select; validated before use
    :return: the table-selected query instance for chaining
    """
    if args:
        # Fix: the previous map(lambda ...) was never consumed, and map() is
        # lazy on Python 3, so the per-argument validation never executed.
        for arg in args:
            self._check_is_instance(arg, 'Column')
        args = list(args)
        # The primary key is always selected alongside the requested columns.
        args.append(
            Column(self._table_name,
                   Structure.get_primary_key(self._table_name)))
        self._sql.add_select_args(args)
    return self._table_selected_instance()
def set_header_list(self, header_list):
    """Normalize *header_list* entries into Columns on self.header_list.

    Accepts ready-made Column objects, dicts (converted via
    ``Column.from_dict``), or plain values (wrapped in ``Column``).
    """
    for column in header_list:
        if isinstance(column, Column):
            self.header_list.append(column)
        elif isinstance(column, dict):
            self.header_list.append(Column.from_dict(column))
        else:
            self.header_list.append(Column(column))
    # Fix: a trailing "self.header_list = header_list" previously overwrote
    # the normalized Columns built above with the raw, unconverted input.
def _get_fk_data(self, table_name, fk, fk_value):
    """Resolve a foreign-key reference to a Row of the referenced table.

    On the first access for this (table, referenced-table) pair, every
    referenced row for all cached FK values is fetched in one ``IN (...)``
    query and cached; subsequent calls are served from the cache.

    :param table_name: table owning the foreign key
    :param fk: foreign-key column name on table_name
    :param fk_value: concrete key value to resolve
    :return: Row wrapping the referenced record
    """
    reltable = Structure.get_fk_referenced_table(table_name, fk)
    reltable_pk = Structure.get_primary_key(reltable)
    if not self._cache.relation_exists(table_name, reltable):
        # Cache miss: bulk-fetch all referenced rows in a single query.
        sql = SQLBuilder(reltable)
        sql.add_where_literal(Column(reltable, reltable_pk).in_(self._cache.get_all_keys(table_name, fk)))
        data = Query().execute_and_fetch(**sql.build_select())
        self._cache.save_relation(table_name, reltable, data)
    return Row(self._cache.get_relation_row(reltable, reltable_pk, fk_value), reltable, self)
def create_table(self):
    """Create the on-disk table directory and write the (empty) table.

    If the directory already exists: silently return when ``self.ine``
    ("if not exists") is set, otherwise raise TableExistsError.

    NOTE(review): this chdirs into the new directory and does not chdir
    back, so the process working directory is left changed — callers appear
    to depend on that (see ``reader``'s matching chdir("..") convention);
    confirm before altering.
    """
    if os.path.exists(self.name):
        if self.ine:
            return
        else:
            raise TableExistsError(self.name)
    # One empty Column per declared field (header, type).
    self.table = Table([Column(header,typ,[]) for (header,typ) in list(self.fields.items())])
    os.mkdir(self.name)
    os.chdir(self.name)
    reader.write(self.name,self.table)
def get_columns(self):
    """Request every column of this project; returns [] when the call fails."""
    status, result = self._send_template_request('getColumns',
                                                 {'project_id': self.id})
    if not status:
        return []
    return [Column(self, column_info) for column_info in result]
def test_simple_generate_sql(self):
    """generate_sql emits a single INSERT with numbers and ISO dates."""
    header = ["description", "id", "birthdate"]
    rows = [
        ["some text", "10", "10-01-1990"],
        ["some other text", "50", "15-05-2000"],
    ]
    file_data = [header] + rows
    cols = [
        Column("desc", 0, "str", rows[0][0]),
        Column("id", 1, "number", rows[0][1]),
        Column("birthdate", 2, "date", rows[0][2]),
    ]
    settings = ImportSettings(columns=cols, date_format="%d-%m-%Y")
    sql, mismatches = main.generate_sql(file_data[1:], settings,
                                        "test_table", False)
    expected = ("INSERT INTO test_table (desc,id,birthdate) VALUES "
                "\n\t('some text',10,'1990-01-10'),"
                "\n\t('some other text',50,'2000-05-15');")
    self.assertEqual(sql, expected)
def _load_columns(self):
    """Populate self._columns from this table's descriptor.

    Video-typed columns get their sidecar VideoDescriptor loaded from the
    per-table metadata path; all other columns carry None.
    """
    self._need_descriptor()
    for c in self._descriptor.columns:
        video_descriptor = None
        if c.type == self._db.protobufs.Video:
            # Video columns store extra metadata in a per-column binary file.
            video_descriptor = self._db._load_descriptor(
                self._db.protobufs.VideoDescriptor,
                'tables/{:d}/{:d}_0_video_metadata.bin'.format(
                    self._id, c.id))
        self._columns.append(Column(self, c, video_descriptor))
def read_data_from_json(self, file_path):
    """Load a JSON array of objects and accumulate values per field name.

    List values contribute one entry per element; dict values are flattened
    one level (each inner key becomes its own column, the outer field gets
    a column but no value); everything else is stored stringified.
    """
    with open(file_path, 'r') as f:
        records = json.load(f)

    def ensure_column(name):
        # Create the column on first sight of the field name.
        if name not in self.column_map:
            self.column_map[name] = Column(name, file_path)

    for record in records:
        for field, value in record.items():
            ensure_column(field)
            if isinstance(value, list):
                for element in value:
                    self.column_map[field].add_value(str(element))
            elif isinstance(value, dict):
                for inner_field, inner_value in value.items():
                    ensure_column(inner_field)
                    self.column_map[inner_field].add_value(str(inner_value))
            else:
                self.column_map[field].add_value(str(value))
def join(self, table, on=None):
    """Add a JOIN to *table*, inferring the ON clause when not given.

    When *on* is omitted, the two tables must be related; the foreign key is
    looked up in either direction and the ON literal equates it with the
    corresponding primary key.

    :param table: Table instance to join with
    :param on: optional pre-built Literal for the join condition
    :return: the table-select query instance for chaining
    """
    self._check_is_instance(table, 'Table')
    if on:
        self._check_is_instance(on, 'Literal')
        self._validate_on(table, on)
    else:
        # Raises if the tables share no relationship at all.
        Structure.tables_related(self._table_name, table._table_name)
        try:
            # First try: this table holds the FK pointing at the other.
            fk = Structure.get_foreign_key_for_table(
                self._table_name, table._table_name)
        except PyPgException:
            # Fallback: the other table holds the FK pointing at this one.
            fk = Structure.get_foreign_key_for_table(
                table._table_name, self._table_name)
            on = Column(table._table_name, fk) == \
                Column(self._table_name,
                       Structure.get_primary_key(self._table_name))
        else:
            on = Column(self._table_name, fk) == \
                Column(table._table_name,
                       Structure.get_primary_key(table._table_name))
    self._sql.add_join(table._table_name, on)
    return self._table_select_instance()
def spawnColumns(self):
    """Scroll, recycle and collision-test the obstacle columns.

    Moves every column left by the player's velocity, recycles columns that
    scrolled off-screen (incrementing the score and appending a fresh one),
    and returns True as soon as the player collides with any column,
    otherwise False.
    """
    # Fix: the original deleted from self.lColumns while iterating it, which
    # makes the for-loop skip the element following the removed one.
    # Iterate a snapshot instead and mutate the real list.
    for col in list(self.lColumns):
        col.x -= self.player.vel.x
        if col.x <= -col.size:
            # Column left the screen: score it and spawn a replacement.
            self.score += 1
            self.lColumns.remove(col)
            self.lColumns.append(
                Column(self.lColumns[-1].x + self.COLUMN_SPACING))
        if (pygame.Rect.colliderect(self.player.rect, col.rectTop) or
                pygame.Rect.colliderect(self.player.rect, col.rectBottom)):
            return True
    return False
def __init__(self, rows, cols, inputVector, coverage, desiredLocalActivity):
    """Build a rows x cols region of columns wired to *inputVector*."""
    self.rows = rows
    self.cols = cols
    self.desiredLocalActivity = desiredLocalActivity
    # Flat list holding all rows*cols columns.
    self.columns = [Column() for _ in range(rows * cols)]
    self.mapRegionToInputVector(inputVector, coverage)
    self.mapRegionToOutputVector()
    # Initial radius is capped by both region dimensions.
    self.inhibitionRadius = min(INITIAL_INHIBITION_RADIUS, rows, cols)
    self.updateColumnNeighbors()