def update_1(self): """ change exec_value of fields that are not list as a list """ db = WFItem.get_db() docs = list(db.view('couchflow/items_cloned', include_docs=True).all()) print 'Updating %s items' % len(docs), invalid = 0 update_docs = [] for rdoc in docs: doc = rdoc['value'] if 'fields_properties' not in doc: print 'have not fields_properties', doc['_id'] continue for _id, prop in doc['fields_properties'].iteritems(): for field in prop['list']: if not 'exec_value' in field or not field['exec_value']: field['exec_value'] = [] invalid += 1 if type(field['exec_value']) is not list: field['exec_value'] = [field['exec_value']] invalid += 1 update_docs.append(doc) db.bulk_save(update_docs) print ' [DONE]' print 'updated %s invalid fields' % invalid print 'Successfully Updated db'
def gen_authorize_csv(request):
    """Export all items awaiting authorization as a CSV download.

    Access is limited to superusers and members of AUTHORIZE_GROUP;
    everyone else receives a 403.  Returns an HttpResponse with a
    ``Content-Disposition`` attachment header.
    """
    if not (request.user.is_superuser or
            AUTHORIZE_GROUP in request.user.group_names):
        return HttpResponseForbidden('403 Forbidden')
    filename = time.strftime(CSV_FILENAME)
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename=%s' % filename
    csvfile = csv.writer(response)
    # Header row: the third element of each CSV_COLUMNS triple is the label.
    csvfile.writerow([z for (x, y, z) in CSV_COLUMNS])
    db = WFItem.get_db()
    items = db.view('couchflow/filter_items', include_docs=True)[AUTHORIZE_STATE]

    def get_field_utf8(*args):
        """Return the first field value, UTF-8 encoded (py2 csv wants bytes)."""
        retval = (get_field(*args) or [''])[0]
        # isinstance instead of type() ==: also accepts unicode subclasses.
        if isinstance(retval, unicode):
            return retval.encode("utf-8", "replace")
        return retval

    for item in items:
        csvfile.writerow([get_field_utf8(item['doc'], x, y)
                          for (x, y, z) in CSV_COLUMNS])
    return response
def index(request):
    """Render the acquisitions dashboard (tasks, status, authorize badge)."""
    user = request.user
    if not user.is_staff:
        return HttpResponseRedirect("/circulation/user/me")
    filter_wflows(user)
    # Admins get an unfiltered view: a None user means "no per-user filter".
    if user.is_superuser:
        user = None
    context = {
        'tasks': get_tasks(user=user),
        'status_tasks': get_status(user=user),
        'modules': ['estado', 'pendientes'],
        'module_name': 'Inicio',
        'can_authorize': False,
    }
    # "not user" means "is admin" here
    is_authorizer = not user or AUTHORIZE_GROUP in request.user.group_names
    if is_authorizer:
        db = WFItem.get_db()
        pending = len(db.view('couchflow/filter_items',
                              include_docs=True)[AUTHORIZE_STATE])
        if pending:
            context['can_authorize'] = True
            context['items_to_authorize'] = pending
    return render_to_response('staticviews/adquisition.html', context,
                              context_instance=RequestContext(request))
def handle(self, *args, **options): # filename = args['filename'] subfields = True filename = options["from"] items = [] db = WFItem.get_db() orig_item = WFItem.view("couchflow/item_names", include_docs=True)["libro"].first() org_json_item = orig_item.to_json() counter = 0 i = 0 for record in iter_iso_records(filename, subfields): i += 1 json_item = clone_json(org_json_item) json_item["fields_properties"]["99989"]["list"][0]["exec_value"] = ["catalogado"] copies = [] has_isbn = False for key, fields in record.iteritems(): key = "%03d" % int(key) if key == "020": has_isbn = True if key == "852": copies = fields continue add_field(key, fields, json_item) if not has_isbn and options["fake_isbn"]: json_item["fields_properties"]["1020"]["list"][0]["subfields"]["a"]["exec_value"] = [uuid.uuid4().hex] items.append(json_item) # add existences for field in copies: json_item = clone_json(json_item) add_field("852", [field], json_item) add_field("1111", [{"_": "existencia"}], json_item) items.append(json_item) if len(items) >= options["per_iter"]: if not options["dry_run"]: db.bulk_save(items, use_uuids=False) counter += len(items) items = [] print "%s %s items (total %s)" % ( "Parsed" if options["dry_run"] else "Inserted", options["per_iter"], counter, ) if items and not options["dry_run"]: db.bulk_save(items, use_uuids=False) counter += len(items) print print "Done, %s items" % counter print ""
def authorize_clear(request): if request.method != 'POST' or not (request.user.is_superuser or AUTHORIZE_GROUP in request.user.group_names): return HttpResponseForbidden('403 Forbidden') db = WFItem.get_db() items = db.view('couchflow/filter_items', include_docs=True)[AUTHORIZE_STATE] docs = [] for item in items: doc = item['doc'] try: doc["fields_properties"]["99989"]["list"][0]["exec_value"][0] = \ AUTHORIZE_STATE_AFTER except (KeyError, IndexError), e: print "Error changing state of document: %s %s" % (type(e), e) else: docs.append(doc)
def update_0(self): """ add support to repeteable fields with subfields """ db = WFItem.get_db() docs = list(db.view('couchflow/item_names', include_docs=True).all()) docs += list(db.view('couchflow/items_cloned', include_docs=True).all()) doc_key_values = {'indicator1': '', 'indicator2':'', 'subfields':{}, 'exec_value':[]} print 'Updating %s items' % len(docs), update_docs = [] for rdoc in docs: doc = rdoc['value'] if 'fields_properties' not in doc: print 'have not fields_properties', doc['_id'] continue for _id, prop in doc['fields_properties'].iteritems(): prop['doc_type'] = 'Fields' new_field = {'doc_type': 'Field'} for key, value in doc_key_values.iteritems(): new_field[key] = prop.get(key, value) if key in prop: del(prop[key]) prop['list'] = [new_field] update_docs.append(doc) db.bulk_save(update_docs) print ' [DONE]' print 'Successfully Updated db'
def handle(self, *args, **options): if len(args) != 2: raise CommandError("Usage: mass_field_type_change " + self.args) field_type, field_id = args subfield_id = None if '_' in field_id: field_id, subfield_id = field_id.split('_', 1) db = WFItem.get_db() docs = list(db.view('couchflow/item_names', include_docs=True).all()) docs += list( db.view('couchflow/items_cloned', include_docs=True).all()) print 'Updating %s items' % len(docs) error_count = 0 unchanged_count = 0 update_docs = [] old_types = {} def update(target): if target['type'] == field_type: return False else: old_types.setdefault(target['type'], 0) old_types[target['type']] += 1 target['type'] = field_type return True for rdoc in docs: doc = rdoc['value'] try: field = doc['fields_properties'][field_id] changed = False if subfield_id: for field_i in field['list']: changed = changed or \ update(field_i['subfields'][subfield_id]) else: changed = changed or update(field) if changed: update_docs.append(doc) else: unchanged_count += 1 except KeyError: error_count += 1 print "KeyError count: %s" % error_count print "Unchanged docs: %s" % unchanged_count if len(update_docs): print "Old types:" print " %s" % '\n '.join( ['%s: %s' % x for x in old_types.items()]) print "Number of documents to update: %s" % len(update_docs) print "Doing bulk save..." db.bulk_save(update_docs) print "Done" else: raise CommandError("No documents to update")
def handle(self, *args, **options): if len(args) != 2: raise CommandError("Usage: mass_field_type_change " + self.args) field_type, field_id = args subfield_id = None if '_' in field_id: field_id, subfield_id = field_id.split('_', 1) db = WFItem.get_db() docs = list(db.view('couchflow/item_names', include_docs=True).all()) docs += list(db.view('couchflow/items_cloned', include_docs=True).all()) print 'Updating %s items' % len(docs) error_count = 0 unchanged_count = 0 update_docs = [] old_types = {} def update(target): if target['type'] == field_type: return False else: old_types.setdefault(target['type'], 0) old_types[target['type']] += 1 target['type'] = field_type return True for rdoc in docs: doc = rdoc['value'] try: field = doc['fields_properties'][field_id] changed = False if subfield_id: for field_i in field['list']: changed = changed or \ update(field_i['subfields'][subfield_id]) else: changed = changed or update(field) if changed: update_docs.append(doc) else: unchanged_count += 1 except KeyError: error_count += 1 print "KeyError count: %s" % error_count print "Unchanged docs: %s" % unchanged_count if len(update_docs): print "Old types:" print " %s" % '\n '.join(['%s: %s' % x for x in old_types.items()]) print "Number of documents to update: %s" % len(update_docs) print "Doing bulk save..." db.bulk_save(update_docs) print "Done" else: raise CommandError("No documents to update")
def handle(self, *args, **options): #filename = args['filename'] subfields = True filename = options['from'] items = [] db = WFItem.get_db() orig_item = WFItem.view('couchflow/item_names', include_docs=True)['libro'].first() org_json_item = orig_item.to_json() counter = 0 i = 0 for record in iter_iso_records(filename, subfields): i += 1 json_item = clone_json(org_json_item) json_item["fields_properties"]['99989']['list'][0][ 'exec_value'] = ['catalogado'] copies = [] has_isbn = False for key, fields in record.iteritems(): key = "%03d" % int(key) if key == '020': has_isbn = True if key == "852": copies = fields continue add_field(key, fields, json_item) if not has_isbn and options['fake_isbn']: json_item["fields_properties"]['1020']['list'][0] \ ['subfields']['a']['exec_value'] = [uuid.uuid4().hex] items.append(json_item) # add existences for field in copies: json_item = clone_json(json_item) add_field("852", [field], json_item) add_field("1111", [{"_": "existencia"}], json_item) items.append(json_item) if len(items) >= options['per_iter']: if not options['dry_run']: db.bulk_save(items, use_uuids=False) counter += len(items) items = [] print '%s %s items (total %s)' % ( 'Parsed' if options['dry_run'] else 'Inserted', options['per_iter'], counter) if items and not options['dry_run']: db.bulk_save(items, use_uuids=False) counter += len(items) print print "Done, %s items" % counter print ""