def db_to_csv():
    """Export every STDM database table to a CSV file under ~/.stdm/db_backup/.

    For each table returned by ``pg_tables()``, runs ``SELECT *`` and writes
    ``<table>.csv`` containing a header row (column names taken from
    ``cursor.description``) followed by all data rows.
    """
    file_path = '{}/.stdm/db_backup/'.format(home)
    if not os.path.isdir(file_path):
        # Create the backup directory directly; the original round-tripped
        # through os.path.dirname() for no benefit.
        os.makedirs(file_path)
    # Make the directory traversable and world-writable for the backup run.
    statistics = os.stat(file_path)
    os.chmod(file_path, statistics.st_mode | stat.S_IEXEC)
    os.chmod(file_path, 0o777)  # was 0777: Python-2-only literal; 0o777 works on 2.6+ and 3
    for table in pg_tables():
        backup_path = '{}/{}.csv'.format(file_path, table)
        result = _execute('SELECT * FROM {}'.format(table))
        cursor = result.cursor
        # Single context-managed open. The original opened the same path
        # twice in 'wb' (the second open truncated it while the csv writer's
        # buffer was pending) and never closed out_file — only the unused f.
        # 'wb' is kept: the csv module on Python 2 requires binary mode.
        with open(backup_path, 'wb') as out_file:
            out_csv = csv.writer(out_file)
            out_csv.writerow([col[0] for col in cursor.description])
            out_csv.writerows(cursor.fetchall())
def csv_to_db():
    """Restore every STDM table from its CSV backup in ~/.stdm/db_backup/.

    Uses PostgreSQL's server-side COPY, so each CSV file must be readable
    by the database server process.
    """
    backup_dir = '{}/.stdm/db_backup/'.format(home)
    for table in pg_tables():
        csv_path = '{}/{}.csv'.format(backup_dir, table)
        copy_stmt = "COPY {0} FROM '{1}' DELIMITER ',' CSV HEADER".format(
            table, csv_path
        )
        _execute(copy_stmt)
def db_user_tables(profile):
    """
    Returns user accessible entity tables that exist in the database.
    :param profile: Current Profile
    :type profile: Profile
    :return: Names of the profile's entity tables that are present in
    the database.
    :rtype: list
    """
    # Original docstring documented a nonexistent `include_views` parameter
    # and claimed a Dictionary return; the function returns a list of names.
    from stdm.data.pg_utils import (
        pg_tables
    )
    table_types = [
        'ENTITY',
        'ENTITY_SUPPORTING_DOCUMENT',
        'SOCIAL_TENURE',
        'SUPPORTING_DOCUMENT',
        'VALUE_LIST'
    ]
    tables = [
        e.name
        for e in profile.entities.values()
        if e.TYPE_INFO in table_types
    ]
    # Hoist the pg_tables() database query out of the loop: the original
    # re-queried the database once per candidate table.
    existing_tables = set(pg_tables())
    return [t for t in tables if t in existing_tables]
def load_link_tables(self, reg_exp=None, source=TABLES | VIEWS):
    """Populate the reference-table combo box with candidate tables/views.

    Tables are restricted to the current profile, filtered against the
    omitted-table list and the optional ``reg_exp``; views from the
    current profile and user are appended when requested by ``source``.
    """
    self.cbo_ref_table.clear()
    self.cbo_ref_table.addItem("")
    source_tables = []
    # Table source
    if (TABLES & source) == TABLES:
        for tbl in pg_tables(exclude_lookups=True):
            # Only consider tables belonging to the current profile.
            if tbl not in self._current_profile_tables:
                continue
            # Skip tables explicitly omitted from referencing.
            if tbl in self._omit_ref_tables:
                continue
            # No filter, or the filter matches, keeps the table.
            if reg_exp is None or reg_exp.indexIn(tbl) >= 0:
                source_tables.append(tbl)
    # View source
    if (VIEWS & source) == VIEWS:
        source_tables = source_tables + profile_and_user_views(
            self._current_profile
        )
    self.cbo_ref_table.addItems(source_tables)
def db_to_csv():
    """Back up all STDM database tables as CSV files in ~/.stdm/db_backup/.

    Each table returned by ``pg_tables()`` is dumped via ``SELECT *`` into
    ``<table>.csv``: a header row built from ``cursor.description``, then
    every data row.
    """
    file_path = '{}/.stdm/db_backup/'.format(home)
    if not os.path.isdir(file_path):
        # Create the backup directory directly instead of via dirname().
        os.makedirs(file_path)
    # Make the directory traversable and world-writable for the backup run.
    statistics = os.stat(file_path)
    os.chmod(file_path, statistics.st_mode | stat.S_IEXEC)
    os.chmod(file_path, 0o777)  # was 0777: Python-2-only literal; 0o777 works on 2.6+ and 3
    for table in pg_tables():
        backup_path = '{}/{}.csv'.format(file_path, table)
        result = _execute('SELECT * FROM {}'.format(table))
        cursor = result.cursor
        # One context-managed open fixes two defects in the original:
        # the same path was opened twice in 'wb' (second open truncated it
        # under the live csv writer) and out_file was never closed — only
        # the unused handle f was. 'wb' is kept for the Python 2 csv module.
        with open(backup_path, 'wb') as out_file:
            out_csv = csv.writer(out_file)
            out_csv.writerow([col[0] for col in cursor.description])
            out_csv.writerows(cursor.fetchall())
def load_link_tables(self, reg_exp=None, source=TABLES|VIEWS):
    """Fill the reference-table combo box with eligible tables and views.

    Candidate tables must belong to the current profile, must not be in
    the omitted list, and must match ``reg_exp`` when one is given.
    Profile/user views are appended when ``source`` includes VIEWS.
    """
    self.cbo_ref_table.clear()
    self.cbo_ref_table.addItem("")
    source_tables = []
    # Table source
    if (TABLES & source) == TABLES:
        for candidate in pg_tables(exclude_lookups=True):
            # Restrict to the current profile's tables.
            if candidate not in self._current_profile_tables:
                continue
            # Honour the omitted-tables list.
            if candidate in self._omit_ref_tables:
                continue
            # Accept when there is no filter or the filter matches.
            if reg_exp is None or reg_exp.indexIn(candidate) >= 0:
                source_tables.append(candidate)
    # View source
    if (VIEWS & source) == VIEWS:
        source_tables = source_tables + profile_and_user_views(
            self._current_profile
        )
    self.cbo_ref_table.addItems(source_tables)
def csv_to_db():
    """Load every STDM table back from its CSV backup in ~/.stdm/db_backup/.

    Relies on PostgreSQL's server-side COPY; the backup files must be
    accessible to the database server process.
    """
    backup_dir = '{}/.stdm/db_backup/'.format(home)
    for table in pg_tables():
        csv_path = '{}/{}.csv'.format(backup_dir, table)
        _execute(
            "COPY {0} FROM '{1}' DELIMITER ',' CSV HEADER".format(
                table, csv_path)
        )