def merge_json_capabilities(self, merge):
    """Merge a JSON-encoded capability set into this device's capabilities.

    Properties in ``merge`` override existing properties in the same group;
    groups not yet present are added whole.
    """
    d = JSONDecoder()
    e = JSONEncoder()
    capabilities = d.decode(self.json_capabilities)
    capabilities_merge = d.decode(merge)
    for group, props in capabilities_merge.items():
        if group in capabilities:
            capabilities[group].update(props)
        else:
            capabilities[group] = props
    self.json_capabilities = e.encode(capabilities)
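# Usage sketch (hypothetical device instance; values invented for
# illustration). Overlapping properties in the incoming JSON win, and
# unknown groups are added whole:
#
#   device.json_capabilities = '{"display": {"columns": 11, "rows": 6}}'
#   device.merge_json_capabilities('{"display": {"rows": 40}}')
#   # device.json_capabilities -> '{"display": {"columns": 11, "rows": 40}}'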
def set_metadata(id_item, param):
    """
    Allows setting the metadata of an item.
    parameters:
        id_item: id of the item
        param: dict with key 'metadata'
    return: empty string
    """
    json_enc = JSONEncoder()
    param['metadata'] = json_enc.encode(param['metadata'])
    # set_metadata via the module-level Importer instance ``i``
    i._item_set_metadata(id_item, param)
    return ''
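# Usage sketch (hypothetical id and metadata values, assuming the
# module-level Importer instance ``i`` is already logged in):
#
#   set_metadata(42, {'metadata': {'title': 'Sunset', 'author': 'jdoe'}})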
def add_keywords(i, data, ws_origTows_new, id_orig_itemToid_new_item,
                 keyColl_origTokeyColl_new, returnkeywords=None):
    """
    Allows creating keywords in a workspace.
    parameters:
        i: Importer instance logged in to the DAM
        data: data of the new keyword to insert
        ws_origTows_new: dict mapping each workspace 'id' read from
            workspace.json to the 'id' of the newly created workspace
        id_orig_itemToid_new_item: dict mapping each item 'id' read from
            item.json to the 'id' of the newly created item
        keyColl_origTokeyColl_new: dict mapping each 'id' read from
            collections.json to the 'id' of the equivalent new collection
        returnkeywords: None if the keyword has no parent, else the data
            of the parent keyword
    return:
    """
    logger.debug("-----ADD KEYWORD ID OLD WS %s" % data['workspace'])
    data['workspace_id'] = ws_origTows_new[str(data['workspace'])]
    logger.debug("-----new ID WS %s" % data['workspace_id'])
    items_flag = True
    data_app = dict(data)  # keep an untouched copy of the original payload
    try:
        if data["parent_id"] is None:
            # Top-level keyword: attach it to the workspace
            del data["parent_id"]
        else:
            # Child keyword: attach it to its parent instead
            del data["workspace_id"]
            data["parent_id"] = returnkeywords['id']
        if data["associate_ancestors"] == False:
            del data["associate_ancestors"]
        # 'items' is always stripped from the payload; the flag records
        # whether there were any items to associate afterwards
        if len(data["items"]) == 0:
            del data["items"]
            items_flag = False
        else:
            del data["items"]
        if len(data["metadata_schema"]) == 0:
            del data["metadata_schema"]
        else:
            json_enc = JSONEncoder()
            data["metadata_schema"] = json_enc.encode(data["metadata_schema"])
    except Exception as ex:
        logger.exception(ex)
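# Usage sketch (hypothetical id maps; keyword_data would normally come
# from the exported keywords JSON file):
#
#   keyword_data = {'workspace': 3, 'parent_id': None,
#                   'associate_ancestors': False, 'items': [],
#                   'metadata_schema': []}
#   add_keywords(i, keyword_data, {'3': 7}, {}, {})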
def add_smartfolders(i, data, ws_origTows_new, keyColl_origTokeyColl_new):
    """
    Allows creating smartfolders in a workspace.
    parameters:
        i: Importer instance logged in to the DAM
        data: data of the smartfolder to insert
        ws_origTows_new: dict mapping each workspace 'id' read from
            workspace.json to the 'id' of the newly created workspace
        keyColl_origTokeyColl_new: dict mapping each 'id' read from
            collections.json to the 'id' of the equivalent new collection
    returns: empty string
    """
    json_enc = JSONEncoder()
    # Remap every saved query to the id of the newly created collection
    for q in data['queries']:
        q['id'] = keyColl_origTokeyColl_new[q['id']]
    data['queries'] = json_enc.encode(data['queries'])
    i._smartfolders_add(data)
    return ''
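# Usage sketch (hypothetical ids and 'label' key; the smartfolder's saved
# queries are remapped to the new collection ids before the add call):
#
#   folder = {'label': 'recent', 'queries': [{'id': 12}]}
#   add_smartfolders(i, folder, {}, {12: 34})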
if __name__ == '__main__':
    options = main()
    archive_name = options.filename + '.tar'
    backup_file = os.path.join(options.path, archive_name)
    basedir = tempfile.mkdtemp()
    try:
        e = Exporter(options.host, options.port, options.api_key,
                     options.user, options.password)
        e.login()
        json_enc = JSONEncoder()
        # Backup of the users
        f = open(os.path.join(basedir, 'users.json'), 'w')
        f.write(json_enc.encode(e._api_get_users()))
        f.close()
        for w in e._workspace_get_list():
            # Backup workspace
            logger.info("Export workspace %s" % w['id'])
            workspacedir = os.path.join(basedir, 'w_' + str(w['id']))
            os.mkdir(workspacedir)
            items = e._workspace_get_items(w['id'])
            logger.info("workspace.json")
            f = open(os.path.join(workspacedir, 'workspace.json'), 'w')
            f.write(json_enc.encode(e._workspace_get(w['id'])))
            f.close()
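# Layout produced under the temporary basedir by the calls above (the
# packing of basedir into backup_file lies outside this excerpt):
#
#   users.json
#   w_<workspace id>/
#       workspace.json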
class _Handler(sax.ContentHandler):
    def __init__(self, device_class=None, merge=False):
        # Initialized flag
        self.initialized = False
        # Parsing version flag
        self.parse_version = False
        # JSON encoder
        self.e = JSONEncoder()
        # Device class
        self.device_class = device_class
        # Are we merging?
        self.merge = merge

    def startElement(self, name, attrs):
        if name in ('wurfl', 'wurfl_patch'):
            self.initialized = True
            self.start_time = time()
            self.stats = {'nb_devices': 0, 'errors': [], 'nb_merges': 0}
        else:
            if not self.initialized:
                raise ParseError("Invalid XML format")
            if name == 'ver':
                self.stats['version'] = ''
                self.parse_version = True
            elif name == 'device':
                self.device = {}
                self.device['id'] = attrs.get('id', '')
                self.device['user_agent'] = attrs.get('user_agent', '')
                self.device['fall_back'] = attrs.get('fall_back', '')
                # Note: any non-empty attribute value (even "false") counts
                # as True here; a missing attribute yields False
                self.device['actual_device_root'] = attrs.get('actual_device_root', False) and True
                # Prepare the capabilities
                self.capabilities = {}
            elif name == 'group':
                self.current_group = attrs.get('id', '')
                self.capabilities[self.current_group] = {}
            elif name == 'capability':
                # Coerce "true"/"false" and digit strings to bool/int
                value = attrs.get('value', '')
                if value == 'true' or value == 'false':
                    value = (value == 'true')
                elif value.isdigit():
                    value = int(value)
                self.capabilities[self.current_group][attrs.get('name', '')] = value

    def endElement(self, name):
        if name == 'device':
            # Process the capabilities
            self.device['json_capabilities'] = self.e.encode(self.capabilities)
            # Save the device model
            if self.device_class:
                try:
                    try:
                        self.device_class.objects.create(**self.device)
                        self.stats['nb_devices'] += 1
                    except IntegrityError:
                        if self.merge:
                            # Device already exists: merge its capabilities
                            device = self.device_class.objects.get(id=self.device['id'])
                            device.merge_json_capabilities(self.device['json_capabilities'])
                            device.save()
                            self.stats['nb_merges'] += 1
                        else:
                            raise
                except Exception as err:
                    self.stats['errors'].append(str(err))
        elif name in ('wurfl', 'wurfl_patch'):
            # End of the update
            self.stats['time_for_update'] = time() - self.start_time
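# Usage sketch (assumptions: "wurfl.xml" is a local WURFL snapshot and
# Device is a Django model with id/user_agent/fall_back/actual_device_root/
# json_capabilities fields; neither name comes from this excerpt):
#
#   handler = _Handler(device_class=Device, merge=True)
#   sax.parse('wurfl.xml', handler)
#   print handler.stats['nb_devices'], handler.stats['nb_merges']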