def load_objects(self):
    """Load data with included `data_filter` behaviour.

    Parses `self.data_string` as XML, iterates every element matching the
    configured `target_node` tag, and appends one dict per node to
    `self.objects`. Attribute values are read either from XML attributes
    or (when `childnodes` mode is on) from child elements.
    """
    from xml.dom.minidom import parseString
    try:
        data = parseString(self.data_string)
    except UnicodeEncodeError:
        # minidom can fail on unicode input; retry with explicit UTF-8 bytes
        data = parseString(self.data_string.encode('utf-8'))
    target_node = self.config.get('iterator', 'target_node')
    nodes = data.getElementsByTagName(target_node)
    childnodes_mode = False
    if self.config.has_option('iterator', 'childnodes'):
        # when true, values come from child elements instead of attributes
        childnodes_mode = self.config.getboolean('iterator', 'childnodes')
    print('Loading nodes...')
    for node in progressbar(nodes):
        apt_dict = dict()
        if self.data_map:
            for attr, aliases in self.data_map:
                # one attribute may map onto several aliases ("a, b, c")
                aliases = aliases.split(', ')
                attr_value = dict()
                for alias in aliases:
                    if not childnodes_mode:
                        attr_value[alias] = read_attr(node, alias, 'unicode')
                    else:
                        attr_value[alias] = read_child(node, alias, 'unicode')
                apt_dict[attr] = attr_value
                # BUG FIX: original compared with `is 1` (identity test on an
                # int, which only works by CPython small-int caching accident);
                # use equality instead.
                if len(attr_value) == 1:
                    # single alias: store the bare value, not a one-item dict
                    key, value = attr_value.popitem()
                    apt_dict[attr] = value
        else:
            # no explicit mapping: copy every XML attribute verbatim
            node_attrs = node.attributes.keys()
            for attr_name in node_attrs:
                apt_dict[attr_name] = read_attr(node, attr_name, 'unicode')
        self.objects.append(apt_dict)
def render_xls(self):
    """Render data as Excel file"""
    from xlwt import Workbook, easyxf
    project = self.entity
    book = Workbook()
    header_style = easyxf('font: bold on; align: wrap on, '
                          'vert centre, horiz center')
    sheet = book.add_sheet('apartment')
    # keep the heading row visible while scrolling
    sheet.set_panes_frozen(True)
    sheet.set_horz_split_pos(1)
    sheet.set_remove_splits(True)
    column_names = self.config.get('project:data_renderer',
                                   'apartment').split(', ')
    # heading row: one column per configured attribute, sized to the title
    for col, title in enumerate(column_names):
        sheet.col(col).width = 256 * (len(title) + 3)
        sheet.write(0, col, title, header_style)
    # trailing extra column for the `pl` attribute
    sheet.write(0, len(column_names), 'pl', header_style)
    apartment_cls = import_from('art3dutils.models', 'Apartment')
    rows = apartment_cls.fetch_all(project.title)
    for row_idx, apartment in enumerate(utils.progressbar(rows)):
        for col, title in enumerate(column_names):
            sheet.write(row_idx + 1, col, getattr(apartment, title))
        sheet.write(row_idx + 1, len(column_names), getattr(apartment, 'pl'))
    book.save(self.output_path)
def load_objects(self):
    """Load rows from an Excel workbook into `self.objects`.

    Opens `self.data_string` with xlrd, iterates either the configured
    sheets or all of them, and builds one dict per data row starting at
    the configured `start_row`. Column letters are resolved through the
    module-level `ALPHA` index.
    """
    import xlrd
    wb = xlrd.open_workbook(file_contents=self.data_string)
    start_row = self.config.getint('iterator', 'start_row')
    sheets = []
    if self.config.has_option('iterator', 'sheets'):
        # explicit comma-separated sheet list in the config
        sheet_names = self.config.get('iterator', 'sheets').split(', ')
        for sheet_name in sheet_names:
            sheet = wb.sheet_by_name(sheet_name=sheet_name.decode('utf-8'))
            sheets.append(sheet)
    else:
        sheets = wb.sheets()
    for sheet in sheets:
        row_range = range(start_row, sheet.nrows)
        if row_range:
            # BUG FIX: the original chained `.encode('utf-8')` onto the
            # return value of print() (which is None), raising
            # AttributeError under a print-function runtime.
            print(u'Processing sheet `{0}`...'.format(sheet.name))
            for row_num in progressbar(row_range):
                apt_dict = dict()
                for attr, letters in self.data_map:
                    letters = letters.split(', ')
                    if len(letters) > 1:
                        # several columns map onto one attribute -> list
                        apt_dict[attr] = list()
                        for letter in letters:
                            apt_dict[attr].append(
                                read_cell(sheet, row_num,
                                          ALPHA.index(letter), 'unicode')
                            )
                    else:
                        try:
                            apt_dict[attr] = read_cell(
                                sheet, row_num,
                                ALPHA.index(letters[0]), 'unicode')
                        except ValueError:
                            # column letter not found in ALPHA; skip attr
                            pass
                if self.config.has_option('iterator', 'row_number'):
                    # store the source row number under a configured key
                    substitute_attr = self.config.get('iterator',
                                                      'row_number')
                    apt_dict[substitute_attr] = row_num
                if self.config.has_option('iterator', 'sheet_name'):
                    # store the source sheet name under a configured key
                    substitute_attr = self.config.get('iterator',
                                                      'sheet_name')
                    apt_dict[substitute_attr] = process_value(sheet.name)
                self.objects.append(apt_dict)
def load_objects(self):
    """Load objects from a JSON payload into `self.objects`."""
    data = json.loads(self.data_string)
    if data:
        if self.config.has_option('iterator', 'root_container'):
            # descend into the configured wrapper key first
            container_key = self.config.get('iterator', 'root_container')
            data = data[container_key]
        print('Loading JSON objects...')
        for obj in progressbar(data):
            apt_dict = dict()
            if self.data_map:
                # map configured aliases onto attribute names
                for attr, alias in self.data_map:
                    try:
                        apt_dict[attr] = process_value(obj[alias], 'unicode')
                    except KeyError:
                        # alias missing from this object; skip it
                        pass
            else:
                # no mapping configured: copy the object verbatim
                apt_dict.update(obj)
            self.objects.append(apt_dict)
def get_progress(iterator, prefix='', *args, **kwargs):
    """Wrap *iterator* in a progress bar, warning and returning [] if empty."""
    if iterator:
        return core_utils.progressbar(iterator, prefix, *args, **kwargs)
    label = prefix or 'container'
    print(yellow('Empty {title}.'.format(title=label)))
    return []
def render_json(self):
    """Render every configured entity of the project as one JSON document.

    Reads entity/attribute lists from the `project:data_renderer` config
    section, fetches model instances, applies optional per-project filter
    callables, and writes the resulting OrderedDict to `self.output_path`
    (pretty-printed unless `self.minified`).
    """
    project = self.entity
    import codecs
    import art3dutils.models as models
    output_file_path = self.output_path
    config = self.config
    dict_ = OrderedDict()
    # add additional fields if any: extend the model attribute-type registry
    # with (short_name, type) pairs declared under `project:extras`
    if config.has_section('project:extras'):
        extra_attribs = dict()
        for attrib, short_type in config.items('project:extras'):
            extra_attribs[attrib] = tuple(short_type.split(', '))
        models.ATTR_TYPES.update(extra_attribs)
    filters = []
    if config.has_option('project:data_renderer', 'filters'):
        filter_names = config.get('project:data_renderer',
                                  'filters').split(', ')
        # removed so `filters` is not iterated as an entity below
        config.remove_option('project:data_renderer', 'filters')
        for filter_name in filter_names:
            # project-specific filter callables live in
            # art3d_hydra.filters.<project_title>
            filter_ = import_from('art3d_hydra.'
                                  'filters.{0}'.format(project.title),
                                  filter_name)
            filters.append(filter_)
    # NOTE(review): the [2:] slice skips the first two items of the section —
    # presumably defaults/non-entity options; confirm against the config layout
    for entity, attribs in config.items('project:data_renderer')[2:]:
        id_pattern = config.get(entity, 'id_pattern')
        dict_['{0}s'.format(entity)] = OrderedDict()
        print('Rendering {0}s...'.format(entity))
        entity_class = import_from('art3dutils.models', entity.title())
        instances = entity_class.fetch_all(project.title)
        for instance in utils.progressbar(instances):
            instance_dict = OrderedDict()
            for attrib in attribs.split(', '):
                # ATTR_TYPES maps attribute name -> (short JSON key, type)
                short, typ = models.ATTR_TYPES[attrib]
                value = getattr(instance, attrib)
                instance_dict[short] = utils.process_value(value, typ)
                # insert fixed room counts: rebuild available_detail so every
                # configured room count (plus the 't' total key) is present,
                # defaulting missing ones to 0
                if attrib == 'available_detail' \
                        and config.has_option('project', 'room_counts'):
                    room_counts = config.get('project',
                                             'room_counts').split(', ')
                    room_counts = [int(rc) for rc in room_counts]
                    room_counts.append('t')
                    instance_dict[short] = dict()
                    for rc in room_counts:
                        if rc in instance.available_detail:
                            instance_dict[short][rc] = \
                                instance.available_detail[rc]
                        else:
                            instance_dict[short][rc] = 0
                # calc total cost lazily when the stored value is falsy
                if attrib == 'total_cost' and not value:
                    instance_dict[short] = instance.calc_total_cost()
                # check note for json: notes may hold embedded JSON; keep the
                # raw value if parsing fails
                if attrib == 'note':
                    try:
                        instance_dict[short] = json.loads(value)
                    except (TypeError, ValueError):
                        pass
            # apply project filters; they mutate instance_dict in place
            for filter_ in filters:
                filter_(instance_dict, instance)
            try:
                # id_pattern formats from the instance as a mapping;
                # fall back to explicit fields when ** unpacking fails
                key = id_pattern.format(**instance)
            except TypeError:
                key = id_pattern.format(
                    building_number=instance.building_number,
                    number=instance.number)
            dict_['{0}s'.format(entity)][key] = instance_dict
    utils.create_dirs_in_path(output_file_path)
    with codecs.open(output_file_path, 'w', 'utf-8') as f:
        if not self.minified:
            data_string = json.dumps(dict_, ensure_ascii=False, indent=2,
                                     separators=(',', ':'))
        else:
            data_string = json.dumps(dict_, ensure_ascii=False)
        f.write(data_string)