def get_args(self):
    """Build the command-line parser and return parsed (options, args)."""
    cli = utils.get_command_line_parser(
        description='Fix documents in CouchDB.')
    cli.add_option('-d', '--dry-run',
                   action='store_true', dest='dry_run', default=False,
                   help='do not perform save; for debug')
    return cli.parse_args()
def get_args():
    """Return parsed (options, args) for the message-sending script."""
    cli = utils.get_command_line_parser(
        description='Send unsent messages.')
    cli.add_option('-d', '--dry-run',
                   action='store_true', dest='dry_run', default=False,
                   help='do not send messages; for debug')
    return cli.parse_args()
def get_command_line_parser():
    """Return the common parser extended with the --dumpfile option."""
    cli = utils.get_command_line_parser(
        description='Dump all data into a tar file.')
    cli.add_option('-d', '--dumpfile',
                   action='store', dest='dumpfile', metavar='DUMPFILE',
                   help='name of dump file')
    return cli
def get_args():
    """Return parsed (options, args) for the document-export script."""
    cli = utils.get_command_line_parser(
        description='Get document(s) from CouchDB and write to JSON file.')
    cli.add_option("-w", "--write",
                   action='store', dest='FILE', default=None,
                   metavar="FILE", help="filepath of file to write")
    return cli.parse_args()
def get_args():
    """Parse command-line options, load settings and send the info email.

    NOTE(review): despite its name this function does not just parse
    arguments — it also loads settings and triggers the email send as a
    side effect; consider renaming after confirming callers.
    """
    parser = utils.get_command_line_parser(
        description='Send info message to all users.')
    parser.add_option('-d', '--dry-run',
                      action='store_true', dest='dry_run', default=False,
                      help='do not send messages; for debug')
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    # Recipients are read from the CSV file; sender address is hard-coded.
    send_info_email('accounts.csv',
                    sender='Per Kraulis <*****@*****.**>',
                    options=options)
def __init__(self):
    """Parse command-line options, load settings, connect to the
    database and invoke the subclass hook prepare()."""
    cli = utils.get_command_line_parser(
        description='Fix documents in CouchDB.')
    cli.add_option('-d', '--dry-run',
                   action='store_true', dest='dry_run', default=False,
                   help='do not perform save; for debug')
    options, args = cli.parse_args()
    utils.load_settings(filepath=options.settings)
    self.dry_run = options.dry_run
    self.db = utils.get_db()
    self.args = args
    self.prepare()
def get_args():
    """Return parsed (options, args) for the database-init script."""
    cli = utils.get_command_line_parser(
        description='Initialize the database, deleting all old data,'
        ' optionally load from dump file.')
    cli.add_option("-L", "--load",
                   action='store', dest='FILE', default=None,
                   metavar="FILE", help="filepath of dump file to load")
    return cli.parse_args()
"Load the order messages document into the database."

import yaml

from orderportal import constants
from orderportal import settings
from orderportal import utils


def load_order_messages(db):
    "Load the order messages document."
    # First settings key that yields a truthy filepath wins.
    filepath = next(
        (fp for fp in (settings.get(k)
                       for k in ('ORDER_MESSAGES_FILEPATH',
                                 'INITIAL_ORDER_MESSAGES_FILEPATH'))
         if fp),
        None)
    if not filepath:
        raise KeyError('no order messages file specified')
    print('Order messages from', filepath)
    with open(filepath) as infile:
        doc = yaml.safe_load(infile)
    doc['_id'] = 'order_messages'
    doc[constants.DOCTYPE] = constants.META
    print('saving order messages in database')
    db.save(doc)


if __name__ == '__main__':
    parser = utils.get_command_line_parser(
        description='Load the order messages document.')
    options, args = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    load_order_messages(utils.get_db())
" Get the previous version of the document and save it as new. " from __future__ import print_function, absolute_import import sys from orderportal import utils def revert(db, docid): revisions = list(db.revisions(docid)) if len(revisions) < 2: sys.exit('no previous version to revert to') latest = revisions[0] previous = revisions[1] new = previous.copy() new['_rev'] = latest['_rev'] db.save(new) if __name__ == '__main__': parser = utils.get_command_line_parser(description= 'Get the previous version of the document and save it as new.') (options, args) = parser.parse_args() utils.load_settings(filepath=options.settings) db = utils.get_db() for docid in args: revert(db, docid)
# NOTE(review): fragment — this code belongs to a regenerate_views()-style
# function whose header is not visible here; `path`, `design` and
# `viewnames` come from the missing enclosing scope.
for filename in os.listdir(path):
    name, ext = os.path.splitext(filename)
    if ext != '.js':
        continue
    # Strip the map_/reduce_ prefix to obtain the bare view name.
    if name.startswith('map_'):
        name = name[len('map_'):]
    elif name.startswith('reduce_'):
        name = name[len('reduce_'):]
    viewname = design + '/' + name
    if viewname not in viewnames:
        viewnames.append(viewname)
for field in settings['ORDERS_SEARCH_FIELDS']:
    if constants.ID_RX.match(field):
        viewnames.append("fields/%s" % field)
for viewname in viewnames:
    print('regenerating view', viewname)
    view = db.view(viewname)
    # Touch a few rows so the CouchDB view index gets (re)built.
    count = 0
    for row in view:
        count += 1
        if count > 4:
            break

if __name__ == '__main__':
    parser = utils.get_command_line_parser(
        description='Reload all CouchDB design documents.')
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    db = utils.get_db()
    load_designs(db)
    regenerate_views(db)
def set_password(email, password, verbose=False):
    """Set the password of the account identified by email, and enable
    the account if it is not already enabled.

    Raises ValueError unless exactly one account matches the email.
    """
    db = utils.get_db()
    view = db.view('account/email', include_docs=True)
    rows = list(view[email])
    if len(rows) != 1:
        raise ValueError("no such account %s" % email)
    doc = rows[0].doc
    with AccountSaver(doc=doc, db=db) as saver:
        saver.set_password(password)
        if verbose:
            print('Password was set for', email)
        if saver['status'] != constants.ENABLED:
            saver['status'] = constants.ENABLED
            if verbose:
                print('Account was enabled.')

if __name__ == '__main__':
    # NOTE(review): this chunk appears truncated — the options are
    # collected and validated but set_password() is never called here;
    # confirm against the original file's tail.
    parser = utils.get_command_line_parser(
        description='Set the password for an account.')
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    email = input('Email address (=account name) > ')
    if not email:
        sys.exit('no email address provided')
    password = getpass.getpass('Password > ')
    if not password:
        sys.exit('no password provided')
    try:
        utils.check_password(password)
    except ValueError as msg:
        sys.exit(str(msg))
    again_password = getpass.getpass('Password again > ')
    if password != again_password:
        sys.exit('passwords do not match')
def set_role(email, role):
    """Assign the given role to the account identified by email."""
    assert role in constants.ACCOUNT_ROLES
    db = utils.get_db()
    matches = list(db.view('account/email', include_docs=True)[email])
    if len(matches) != 1:
        raise ValueError("no such account %s" % email)
    with AccountSaver(doc=matches[0].doc, db=db) as saver:
        saver['role'] = role


if __name__ == '__main__':
    parser = utils.get_command_line_parser(
        description='Set the role for an account.')
    options, args = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    email = input('Email address (=account name) > ')
    if not email:
        sys.exit('no email address provided')
    role = input("role [%s] > " % '|'.join(constants.ACCOUNT_ROLES))
    if not role:
        sys.exit('no role provided')
    if role not in constants.ACCOUNT_ROLES:
        sys.exit("invalid role; must be one of %s"
                 % ', '.join(constants.ACCOUNT_ROLES))
    set_role(email, role)
    print('Set role for', email)
# NOTE(review): fragment — the try statement and the loop over accounts
# that this except/else pair belongs to are not visible here.
except KeyError:
    pass
else:
    with MessageSaver(db=db) as saver:
        saver.set_params(
            account=account['email'],
            password_url=absolute_reverse_url('password'),
            password_code_url=absolute_reverse_url(
                'password', email=account['email'], code=account['code']),
            code=account['code'])
        saver.set_template(template)
        saver['recipients'] = [account['email']]
    print(account['email'])
    # Throttle between messages.
    time.sleep(PAUSE)


def absolute_reverse_url(path, **kwargs):
    """Return the absolute URL for path on the hard-coded site root,
    with kwargs URL-encoded as query parameters."""
    # NOTE(review): urllib.urlencode is Python 2 only; Python 3 uses
    # urllib.parse.urlencode — confirm target interpreter.
    url = "https://ngisweden.scilifelab.se/{0}".format(path)
    if kwargs:
        url += '?' + urllib.urlencode(kwargs)
    return url


if __name__ == '__main__':
    parser = utils.get_command_line_parser(description=
        "Enable all pending accounts.")
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    enable_accounts(utils.get_db())
def main():
    """Parse options, build the Tornado URL map and start the web server."""
    parser = utils.get_command_line_parser(description='OrderPortal server.')
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    url = tornado.web.url
    handlers = [url(r'/', Home, name='home')]
    # Identifier-based order routes exist only when a regexp is configured.
    try:
        regexp = settings['ORDER_IDENTIFIER_REGEXP']
    except KeyError:
        pass
    else:
        handlers.append(url(r"/order/({0})".format(regexp),
                            Order, name='order_id'))
        handlers.append(url(r"/api/v1/order/({0})".format(regexp),
                            OrderApiV1, name='order_id_api'))
    handlers.extend([
        url(r'/order/([0-9a-f]{32})', Order, name='order'),
        url(r'/api/v1/order/([0-9a-f]{32})', OrderApiV1, name='order_api'),
        url(r'/order/([^/]+).csv', OrderCsv, name='order_csv'),
        url(r'/order/([^/]+).zip', OrderZip, name='order_zip'),
        url(r'/order/([0-9a-f]{32})/logs', OrderLogs, name='order_logs'),
        url(r'/order', OrderCreate, name='order_create'),
        url(r'/api/v1/order', OrderCreateApiV1, name='order_create_api'),
        url(r'/order/([0-9a-f]{32})/edit', OrderEdit, name='order_edit'),
        url(r'/order/([0-9a-f]{32})/transition/(\w+)', OrderTransition,
            name='order_transition'),
        url(r'/api/v1/order/([0-9a-f]{32})/transition/(\w+)',
            OrderTransitionApiV1, name='order_transition_api'),
        url(r'/order/([0-9a-f]{32})/clone', OrderClone, name='order_clone'),
        url(r'/order/([0-9a-f]{32})/file', OrderFile, name='order_file_add'),
        url(r'/order/([0-9a-f]{32})/file/([^/]+)', OrderFile,
            name='order_file'),
        url(r'/order/([0-9a-f]{32})/report', OrderReport,
            name='order_report'),
        url(r'/api/v1/order/([0-9a-f]{32})/report', OrderReportApiV1,
            name='order_report_api'),
        url(r'/order/([0-9a-f]{32})/report/edit', OrderReportEdit,
            name='order_report_edit'),
        url(r'/orders', Orders, name='orders'),
        url(r'/api/v1/orders', OrdersApiV1, name='orders_api'),
        url(r'/orders.csv', OrdersCsv, name='orders_csv'),
        url(r'/accounts', Accounts, name='accounts'),
        url(r'/api/v1/accounts', AccountsApiV1, name='accounts_api'),
        url(r'/accounts.csv', AccountsCsv, name='accounts_csv'),
        url(r'/account/([^/]+)', Account, name='account'),
        url(r'/api/v1/account/([^/]+)', AccountApiV1, name='account_api'),
        url(r'/account/([^/]+)/orders', AccountOrders,
            name='account_orders'),
        url(r'/api/v1/account/([^/]+)/orders', AccountOrdersApiV1,
            name='account_orders_api'),
        url(r'/account/([^/]+)/groups/orders', AccountGroupsOrders,
            name='account_groups_orders'),
        url(r'/api/v1/account/([^/]+)/groups/orders',
            AccountGroupsOrdersApiV1, name='account_groups_orders_api'),
        url(r'/account/([^/]+)/logs', AccountLogs, name='account_logs'),
        url(r'/account/([^/]+)/messages', AccountMessages,
            name='account_messages'),
        url(r'/account/([^/]+)/edit', AccountEdit, name='account_edit'),
        url(r'/group', GroupCreate, name='group_create'),
        url(r'/group/([0-9a-f]{32})', Group, name='group'),
        url(r'/group/([0-9a-f]{32})/edit', GroupEdit, name='group_edit'),
        url(r'/group/([0-9a-f]{32})/accept', GroupAccept,
            name='group_accept'),
        url(r'/group/([0-9a-f]{32})/decline', GroupDecline,
            name='group_decline'),
        url(r'/group/([0-9a-f]{32})/logs', GroupLogs, name='group_logs'),
        url(r'/groups', Groups, name='groups'),
        url(r'/search', Search, name='search'),
        url(r'/login', Login, name='login'),
        url(r'/logout', Logout, name='logout'),
        url(r'/reset', Reset, name='reset'),
        url(r'/password', Password, name='password'),
        url(r'/register', Register, name='register'),
        url(r'/registered', Registered, name='registered'),
        url(r'/account/([^/]+)/enable', AccountEnable,
            name='account_enable'),
        url(r'/account/([^/]+)/disable', AccountDisable,
            name='account_disable'),
        url(r'/account/([^/]+)/updateinfo', AccountUpdateInfo,
            name='account_update_info'),
        url(r'/forms', Forms, name='forms'),
        url(r'/form/([0-9a-f]{32})', Form, name='form'),
        url(r'/api/v1/form/([0-9a-f]{32})', FormApiV1, name='form_api'),
        url(r'/form/([0-9a-f]{32})/logs', FormLogs, name='form_logs'),
        url(r'/form', FormCreate, name='form_create'),
        url(r'/form/([0-9a-f]{32})/edit', FormEdit, name='form_edit'),
        url(r'/form/([0-9a-f]{32})/clone', FormClone, name='form_clone'),
        url(r'/form/([0-9a-f]{32})/pending', FormPending,
            name='form_pending'),
        url(r'/form/([0-9a-f]{32})/testing', FormTesting,
            name='form_testing'),
        url(r'/form/([0-9a-f]{32})/enable', FormEnable, name='form_enable'),
        url(r'/form/([0-9a-f]{32})/disable', FormDisable,
            name='form_disable'),
        url(r'/form/([0-9a-f]{32})/field', FormFieldCreate,
            name='field_create'),
        url(r'/form/([0-9a-f]{32})/field/([a-zA-Z][_a-zA-Z0-9]*)',
            FormFieldEdit, name='field_edit'),
        url(r'/form/([0-9a-f]{32})/field/([a-zA-Z][_a-zA-Z0-9]*)/descr',
            FormFieldEditDescr, name='field_edit_descr'),
        url(r'/form/([0-9a-f]{32})/orders', FormOrders, name='form_orders'),
        url(r'/form/([0-9a-f]{32})/orders.csv', FormOrdersCsv,
            name='form_orders_csv'),
        url(r'/news', News, name='news'),
        url(r'/new/([0-9a-f]{32})', NewsEdit, name='news_edit'),
        url(r'/new', NewsCreate, name='news_create'),
        url(r'/events', Events, name='events'),
        url(r'/event/([0-9a-f]{32})', Event, name='event'),
        url(r'/event', EventCreate, name='event_create'),
        url(r'/contact', Contact, name='contact'),
        url(r'/about', About, name='about'),
        url(r'/software', Software, name='software'),
        url(r'/infos', Infos, name='infos'),
        url(r'/info', InfoCreate, name='info_create'),
        url(r'/info/([^/]+)', Info, name='info'),
        url(r'/info/([^/]+)/edit', InfoEdit, name='info_edit'),
        url(r'/info/([^/]+)/logs', InfoLogs, name='info_logs'),
        url(r'/files', Files, name='files'),
        url(r'/file', FileCreate, name='file_create'),
        url(r'/file/([^/]+)', File, name='file'),
        url(r'/file/([^/]+)/meta', FileMeta, name='file_meta'),
        url(r'/file/([^/]+)/download', FileDownload, name='file_download'),
        url(r'/file/([^/]+)/edit', FileEdit, name='file_edit'),
        url(r'/api/v1/file/([^/]+)/edit', FileEditApiV1,
            name='file_edit_api'),
        url(r'/file/([0-9a-f]{32})/logs', FileLogs, name='file_logs'),
        url(r'/log/([0-9a-f]{32})', Log, name='log'),
        url(r'/([0-9a-f]{32})', Entity, name='entity'),
        url(r'/nsc/package/([0-9a-f]{32})', NscOrderPkgV1,
            name='nsc_order_package'),
        url(r'/admin/global_modes', GlobalModes, name='global_modes'),
        url(r'/admin/settings', Settings, name='settings'),
        url(r'/admin/text/([^/]+)', Text, name='text'),
        url(r'/admin/texts', Texts, name='texts'),
        url(r'/admin/order_statuses', OrderStatuses,
            name='order_statuses'),
        url(r'/admin/order_messages', AdminOrderMessages,
            name='admin_order_messages'),
        url(r'/admin/account_messages', AdminAccountMessages,
            name='admin_account_messages'),
        url(r'/site/([^/]+)', tornado.web.StaticFileHandler,
            {'path': utils.expand_filepath(settings['SITE_DIR'])},
            name='site'),
        ])
    # Catch-all handlers must come last.
    handlers.append(url(r'/api/v1/(.*)', NoSuchEntityApiV1))
    handlers.append(url(r'/(.*)', NoSuchEntity))
    application = tornado.web.Application(
        handlers=handlers,
        debug=settings.get('TORNADO_DEBUG', False),
        cookie_secret=settings['COOKIE_SECRET'],
        xsrf_cookies=True,
        ui_modules=uimodules,
        template_path=os.path.join(settings['ROOT_DIR'], 'html'),
        static_path=os.path.join(settings['ROOT_DIR'], 'static'),
        login_url=r'/login')
    # Add href URLs for the status icons.
    # This depends on order status setup.
    # NOTE(review): dict.iteritems() is Python 2 only — confirm the target
    # interpreter for this module.
    for key, value in settings['ORDER_STATUSES_LOOKUP'].iteritems():
        value['href'] = application.reverse_url('site', key + '.png')
    application.listen(settings['PORT'], xheaders=True)
    pid = os.getpid()
    logging.info("web server PID %s on port %s", pid, settings['PORT'])
    if options.pidfile:
        with open(options.pidfile, 'w') as pf:
            pf.write(str(pid))
    tornado.ioloop.IOLoop.instance().start()
# NOTE(review): fragment — the function header and the `for docid in
# docids:` loop enclosing this try statement are not visible here.
try:
    doc = db[docid]
except couchdb.ResourceNotFound:
    print('no such document', docid, file=sys.stderr)
else:
    docs.append(doc)
if docs:
    if filepath:
        # NOTE(review): outfile is never closed/flushed explicitly when a
        # filepath is given — confirm whether this matters for callers.
        outfile = open(filepath, 'w')
        print('writing to', filepath)
    else:
        outfile = sys.stdout
    json.dump(docs, outfile, indent=2)
else:
    print('no such document(s)', file=sys.stderr)

if __name__ == '__main__':
    parser = utils.get_command_line_parser(
        description='Get document(s) from CouchDB and write to JSON file.')
    parser.add_option("-w", "--write",
                      action='store', dest='FILE', default=None,
                      metavar="FILE", help="filepath of file to write")
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    get_documents(utils.get_db(), docids=args, filepath=options.FILE)
def get_args():
    """Return parsed (options, args) for the revert script."""
    cli = utils.get_command_line_parser(description=
        'Get the previous version of the document and save it as new.')
    return cli.parse_args()
def get_args():
    """Return parsed (options, args) for the design-reload script."""
    cli = utils.get_command_line_parser(
        description='Reload all CouchDB design documents.')
    return cli.parse_args()
def get_args():
    """Return parsed (options, args) for the password script."""
    cli = utils.get_command_line_parser(
        description='Set the password for an account.')
    return cli.parse_args()
# NOTE(review): fragment — the try statement and the loop over accounts
# that this except/else pair belongs to are not visible here.
except KeyError:
    pass
else:
    with MessageSaver(db=db) as saver:
        saver.set_params(account=account['email'],
                         password_url=absolute_reverse_url('password'),
                         password_code_url=absolute_reverse_url(
                             'password', email=account['email'],
                             code=account['code']),
                         code=account['code'])
        saver.set_template(template)
        saver['recipients'] = [account['email']]
    print(account['email'])
    # Throttle between messages.
    time.sleep(PAUSE)


def absolute_reverse_url(path, **kwargs):
    """Return the absolute URL for path on the hard-coded site root,
    with kwargs URL-encoded as query parameters."""
    # NOTE(review): urllib.urlencode is Python 2 only; Python 3 uses
    # urllib.parse.urlencode — confirm target interpreter.
    url = "https://ngisweden.scilifelab.se/{0}".format(path)
    if kwargs:
        url += '?' + urllib.urlencode(kwargs)
    return url


if __name__ == '__main__':
    parser = utils.get_command_line_parser(
        description="Enable all pending accounts.")
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    enable_accounts(utils.get_db())
def add_text(db, name, textfilepath, force=False):
    "Load the text from file, overwriting the current."
    with open(utils.expand_filepath(textfilepath)) as infile:
        text = infile.read()
    existing = [row.doc for row in
                db.view('text/name', include_docs=True, key=name)]
    if existing:
        # Refuse to clobber an existing text unless forced.
        if not force:
            sys.exit('text exists; not overwritten')
        doc = existing[0]
    else:
        doc = None
    with admin.TextSaver(doc=doc, db=db) as saver:
        saver['name'] = name
        saver['text'] = text
    print("Text '{0}' loaded".format(name))


if __name__ == '__main__':
    parser = utils.get_command_line_parser(
        description='Load a named text from file into the database.')
    options, args = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    if len(args) != 2:
        sys.exit('Error: give name and filepath')
    add_text(utils.get_db(),
             name=args[0], textfilepath=args[1], force=options.force)
# NOTE(review): fragment — the enclosing loop over Drupal `account`
# records and the first try-block's body are not visible here.
except KeyError:
    pass
try:
    other_data.append(u"old portal uid: {0}".format(account['uid']))
except KeyError:
    pass
doc['other_data'] = '\n'.join(other_data)
doc['update_info'] = True
doc['password'] = None
# Role '2' is Drupal admin
# NOTE(review): this grants ADMIN when roles OTHER than 2 remain after
# removing 2 — given the comment above, verify the condition is not
# inverted.
doc['role'] = set(account['roles']).difference(set([2])) \
    and constants.ADMIN or constants.USER
doc['owner'] = email
doc['created'] = utils.epoch_to_iso(account['created'])
doc['modified'] = utils.epoch_to_iso(account.get('last_access')
                                     or account['created'])
print('loaded', email)
db.save(doc)
counter += 1
print(counter, 'accounts loaded')

if __name__ == '__main__':
    parser = utils.get_command_line_parser(description=
        "Load accounts from old Drupal site JSON dump 'users.json'.")
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    db = utils.get_db()
    load_accounts(db)
    regenerate_views(db)
# NOTE(review): fragment — part of an undump() routine; `doc`, `atts`,
# `attachments`, the counters and `infile` come from the missing
# enclosing scope.
db.save(doc)
count_items += 1
for attname, attinfo in atts.items():
    # Handle problematic non-ASCII filenames
    attname2 = utils.to_ascii(attname)
    key = "{0}_att/{1}".format(doc['_id'], attname2)
    attachments[key] = dict(filename=attname2,
                            content_type=attinfo['content_type'])
infile.close()
# This will be executed on the command line, so output to console, not log.
print('undumped', count_items, 'items and', count_files, 'files from',
      filepath)

if __name__ == '__main__':
    parser = utils.get_command_line_parser(
        description='Dump all data into a tar file.')
    parser.add_option('-d', '--dumpfile',
                      action='store', dest='dumpfile', metavar='DUMPFILE',
                      help='name of dump file')
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    db = utils.get_db()
    if options.dumpfile:
        filepath = options.dumpfile
    else:
        # Default dump name carries today's date.
        filepath = "dump_{0}.tar.gz".format(time.strftime("%Y-%m-%d"))
    # A bare filename is placed in the configured backup directory.
    if os.path.basename(filepath) == filepath:
        try:
            filepath = os.path.join(settings['BACKUP_DIR'], filepath)
        except KeyError:
            pass
    # NOTE(review): fragment ends here — the call that performs the
    # actual dump/undump is not visible.
def set_role(email, role):
    """Assign the given role to the account identified by email.

    Raises ValueError unless exactly one account matches the email.
    """
    assert role in constants.ACCOUNT_ROLES
    db = utils.get_db()
    view = db.view('account/email', include_docs=True)
    rows = list(view[email])
    if len(rows) != 1:
        raise ValueError("no such account %s" % email)
    doc = rows[0].doc
    with AccountSaver(doc=doc, db=db) as saver:
        saver['role'] = role


if __name__ == '__main__':
    parser = utils.get_command_line_parser(
        description='Set the role for an account.')
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    # input() replaces the Python 2-only raw_input(), matching the
    # Python 3 version of this script elsewhere in the code base.
    email = input('Email address (=account name) > ')
    if not email:
        sys.exit('no email address provided')
    role = input("role [%s] > " % '|'.join(constants.ACCOUNT_ROLES))
    if not role:
        sys.exit('no role provided')
    if role not in constants.ACCOUNT_ROLES:
        sys.exit("invalid role; must be one of %s"
                 % ', '.join(constants.ACCOUNT_ROLES))
    set_role(email, role)
    print('Set role for', email)
# NOTE(review): fragment — part of an undump() routine; the loop header
# over (attname, attinfo) and the surrounding scope are not visible.
attname2 = utils.to_ascii(attname)
key = "{0}_att/{1}".format(doc['_id'], attname2)
attachments[key] = dict(filename=attname2,
                        content_type=attinfo['content_type'])
infile.close()
print('undumped', count_items, 'items and', count_files, 'files from',
      filepath, file=sys.stderr)

if __name__ == '__main__':
    parser = utils.get_command_line_parser(
        description='Dump all data into a tar file.')
    parser.add_option('-d', '--dumpfile',
                      action='store', dest='dumpfile', metavar='DUMPFILE',
                      help='name of dump file')
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    db = utils.get_db()
    if options.dumpfile:
        filepath = options.dumpfile
    else:
        filepath = "dump_{0}.tar.gz".format(time.strftime("%Y-%m-%d"))
    if os.path.basename(filepath) == filepath:
        # NOTE(review): fragment is truncated immediately after this
        # `try:` — the body is not visible.
        try:
# NOTE(review): fragment — the try statement and the loop over projects
# enclosing this `del` are not visible here.
del project['history']['undefined']
except KeyError:
    pass
else:
    changed = True
if changed:
    print('saving', project['identifier'], project['fields']['lims_id'],
          project['status'], project['title'])
    db.save(project)
else:
    print('no change for', project['identifier'])

if __name__ == '__main__':
    parser = utils.get_command_line_parser(
        description='Load project info from Clarity LIMS into OrderPortal.')
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    db = utils.get_db()
    projects_lookup = get_old_portal_projects(db)
    print(len(projects_lookup), 'projects from old portal requiring more info')
    projects = projects_lookup.values()
    # NOTE(review): Python 2 idioms — list.sort(cmp_func) and cmp() do not
    # exist in Python 3, and dict.values() is not sortable there. Confirm
    # target interpreter.
    projects.sort(lambda i, j: cmp(i['modified'], j['modified']))
    for project in projects:
        print(project['identifier'], project['modified'])
    clarity = Clarity()
    clarity_projects = clarity.get_all_projects()
    print(len(clarity_projects), 'projects in Clarity')
    # NOTE(review): fragment truncated — the loop body is not visible.
    for record in clarity_projects:
# NOTE(review): fragment — indentation is lost; the `else:` below most
# likely belongs to an enclosing `if dumpfilepath:` (printing 'no dump
# file loaded' when none was given) rather than to the try/except.
# Reconstructed flat; confirm against the original file.
undump(db, dumpfilepath)
designs.regenerate_views_indexes(db)
except IOError:
    print('Warning: could not load', dumpfilepath)
else:
    print('no dump file loaded')
# Load texts from the initial texts YAML file. Only if missing in db!
print('loading any missing texts from', INIT_TEXTS_FILEPATH)
try:
    with open(INIT_TEXTS_FILEPATH) as infile:
        texts = yaml.safe_load(infile)
except IOError:
    print('Warning: could not load', INIT_TEXTS_FILEPATH)
    texts = dict()
for name in constants.TEXTS:
    # Only create a text if no document with that name exists yet.
    if len(list(db.view('text/name', key=name))) == 0:
        with admin.TextSaver(db=db) as saver:
            saver['name'] = name
            saver['text'] = texts.get(name, '')

if __name__ == '__main__':
    parser = utils.get_command_line_parser(
        description='Initialize the database, optionally load from dump file.')
    parser.add_option("-L", "--load",
                      action='store', dest='FILE', default=None,
                      metavar="FILE", help="filepath of dump file to load")
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    init_database(dumpfilepath=options.FILE)
def main():
    """Parse options, build the Tornado URL map and start the web server."""
    parser = utils.get_command_line_parser(description='OrderPortal server.')
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    utils.initialize()
    url = tornado.web.url
    handlers = [
        url(r'/', Home, name='home'),
        url(r'/status', Status, name="status")
    ]
    # Identifier-based order routes exist only when a regexp is configured.
    try:
        regexp = settings['ORDER_IDENTIFIER_REGEXP']
    except KeyError:
        pass
    else:
        handlers.append(
            url(r"/order/({0})".format(regexp), Order, name='order_id'))
        handlers.append(
            url(r"/api/v1/order/({0})".format(regexp), OrderApiV1,
                name='order_id_api'))
    handlers.extend([
        url(r'/order/([0-9a-f]{32})', Order, name='order'),
        url(r'/api/v1/order/([0-9a-f]{32})', OrderApiV1, name='order_api'),
        url(r'/order/([^/]+).csv', OrderCsv, name='order_csv'),
        url(r'/order/([^/]+).xlsx', OrderXlsx, name='order_xlsx'),
        url(r'/order/([^/]+).zip', OrderZip, name='order_zip'),
        url(r'/order/([0-9a-f]{32})/logs', OrderLogs, name='order_logs'),
        url(r'/order', OrderCreate, name='order_create'),
        url(r'/api/v1/order', OrderCreateApiV1, name='order_create_api'),
        url(r'/order/([0-9a-f]{32})/edit', OrderEdit, name='order_edit'),
        url(r'/order/([0-9a-f]{32})/transition/(\w+)', OrderTransition,
            name='order_transition'),
        url(r'/api/v1/order/([0-9a-f]{32})/transition/(\w+)',
            OrderTransitionApiV1, name='order_transition_api'),
        url(r'/order/([0-9a-f]{32})/owner', OrderOwner, name='order_owner'),
        url(r'/order/([0-9a-f]{32})/clone', OrderClone, name='order_clone'),
        url(r'/order/([0-9a-f]{32})/file', OrderFile, name='order_file_add'),
        url(r'/order/([0-9a-f]{32})/file/([^/]+)', OrderFile,
            name='order_file'),
        url(r'/order/([0-9a-f]{32})/report', OrderReport,
            name='order_report'),
        url(r'/api/v1/order/([0-9a-f]{32})/report', OrderReportApiV1,
            name='order_report_api'),
        url(r'/order/([0-9a-f]{32})/report/edit', OrderReportEdit,
            name='order_report_edit'),
        url(r'/orders', Orders, name='orders'),
        url(r'/api/v1/orders', OrdersApiV1, name='orders_api'),
        url(r'/orders.csv', OrdersCsv, name='orders_csv'),
        url(r'/orders.xlsx', OrdersXlsx, name='orders_xlsx'),
        url(r'/accounts', Accounts, name='accounts'),
        url(r'/api/v1/accounts', AccountsApiV1, name='accounts_api'),
        url(r'/accounts.csv', AccountsCsv, name='accounts_csv'),
        url(r'/accounts.xlsx', AccountsXlsx, name='accounts_xlsx'),
        url(r'/account/([^/]+)', Account, name='account'),
        url(r'/api/v1/account/([^/]+)', AccountApiV1, name='account_api'),
        url(r'/account/([^/]+)/orders', AccountOrders,
            name='account_orders'),
        url(r'/api/v1/account/([^/]+)/orders', AccountOrdersApiV1,
            name='account_orders_api'),
        url(r'/account/([^/]+)/groups/orders', AccountGroupsOrders,
            name='account_groups_orders'),
        url(r'/api/v1/account/([^/]+)/groups/orders',
            AccountGroupsOrdersApiV1, name='account_groups_orders_api'),
        url(r'/account/([^/]+)/logs', AccountLogs, name='account_logs'),
        url(r'/account/([^/]+)/messages', AccountMessages,
            name='account_messages'),
        url(r'/account/([^/]+)/edit', AccountEdit, name='account_edit'),
        url(r'/group', GroupCreate, name='group_create'),
        url(r'/group/([0-9a-f]{32})', Group, name='group'),
        url(r'/group/([0-9a-f]{32})/edit', GroupEdit, name='group_edit'),
        url(r'/group/([0-9a-f]{32})/accept', GroupAccept,
            name='group_accept'),
        url(r'/group/([0-9a-f]{32})/decline', GroupDecline,
            name='group_decline'),
        url(r'/group/([0-9a-f]{32})/logs', GroupLogs, name='group_logs'),
        url(r'/groups', Groups, name='groups'),
        url(r'/search', Search, name='search'),
        url(r'/login', Login, name='login'),
        url(r'/logout', Logout, name='logout'),
        url(r'/reset', Reset, name='reset'),
        url(r'/password', Password, name='password'),
        url(r'/register', Register, name='register'),
        url(r'/registered', Registered, name='registered'),
        url(r'/account/([^/]+)/enable', AccountEnable,
            name='account_enable'),
        url(r'/account/([^/]+)/disable', AccountDisable,
            name='account_disable'),
        url(r'/account/([^/]+)/updateinfo', AccountUpdateInfo,
            name='account_update_info'),
        url(r'/forms', Forms, name='forms'),
        url(r'/form/([0-9a-f]{32})', Form, name='form'),
        url(r'/api/v1/form/([0-9a-f]{32})', FormApiV1, name='form_api'),
        url(r'/form/([0-9a-f]{32})/logs', FormLogs, name='form_logs'),
        url(r'/form', FormCreate, name='form_create'),
        url(r'/form/([0-9a-f]{32})/edit', FormEdit, name='form_edit'),
        url(r'/form/([0-9a-f]{32})/clone', FormClone, name='form_clone'),
        url(r'/form/([0-9a-f]{32})/pending', FormPending,
            name='form_pending'),
        url(r'/form/([0-9a-f]{32})/testing', FormTesting,
            name='form_testing'),
        url(r'/form/([0-9a-f]{32})/enable', FormEnable, name='form_enable'),
        url(r'/form/([0-9a-f]{32})/disable', FormDisable,
            name='form_disable'),
        url(r'/form/([0-9a-f]{32})/field', FormFieldCreate,
            name='field_create'),
        url(r'/form/([0-9a-f]{32})/field/([a-zA-Z][_a-zA-Z0-9]*)',
            FormFieldEdit, name='field_edit'),
        url(r'/form/([0-9a-f]{32})/field/([a-zA-Z][_a-zA-Z0-9]*)/descr',
            FormFieldEditDescr, name='field_edit_descr'),
        url(r'/form/([0-9a-f]{32})/orders', FormOrders, name='form_orders'),
        url(r'/form/([0-9a-f]{32})/aggregate', FormOrdersAggregate,
            name='form_orders_aggregate'),
        url(r'/news', News, name='news'),
        url(r'/new/([0-9a-f]{32})', NewsEdit, name='news_edit'),
        url(r'/new', NewsCreate, name='news_create'),
        url(r'/events', Events, name='events'),
        url(r'/event/([0-9a-f]{32})', Event, name='event'),
        url(r'/event', EventCreate, name='event_create'),
        url(r'/contact', Contact, name='contact'),
        url(r'/about', About, name='about'),
        url(r'/software', Software, name='software'),
        url(r'/infos', Infos, name='infos'),
        url(r'/info', InfoCreate, name='info_create'),
        url(r'/info/([^/]+)', Info, name='info'),
        url(r'/info/([^/]+)/edit', InfoEdit, name='info_edit'),
        url(r'/info/([^/]+)/logs', InfoLogs, name='info_logs'),
        url(r'/files', Files, name='files'),
        url(r'/file', FileCreate, name='file_create'),
        url(r'/file/([^/]+)', File, name='file'),
        url(r'/file/([^/]+)/meta', FileMeta, name='file_meta'),
        url(r'/file/([^/]+)/download', FileDownload, name='file_download'),
        url(r'/file/([^/]+)/edit', FileEdit, name='file_edit'),
        url(r'/api/v1/file/([^/]+)/edit', FileEditApiV1,
            name='file_edit_api'),
        url(r'/file/([0-9a-f]{32})/logs', FileLogs, name='file_logs'),
        url(r'/log/([0-9a-f]{32})', Log, name='log'),
        url(r'/([0-9a-f]{32})', Entity, name='entity'),
        url(r'/admin/global_modes', GlobalModes, name='global_modes'),
        url(r'/admin/settings', Settings, name='settings'),
        url(r'/admin/text/([^/]+)', Text, name='text'),
        url(r'/admin/texts', Texts, name='texts'),
        url(r'/admin/order_statuses', OrderStatuses,
            name='order_statuses'),
        url(r'/admin/order_messages', AdminOrderMessages,
            name='admin_order_messages'),
        url(r'/admin/account_messages', AdminAccountMessages,
            name='admin_account_messages'),
        url(r'/site/([^/]+)', tornado.web.StaticFileHandler,
            {'path': utils.expand_filepath(settings['SITE_DIR'])},
            name='site'),
        ])
    # Catch-all handlers must come last.
    handlers.append(url(r'/api/v1/(.*)', NoSuchEntityApiV1))
    handlers.append(url(r'/(.*)', NoSuchEntity))
    application = tornado.web.Application(
        handlers=handlers,
        debug=settings.get('TORNADO_DEBUG', False),
        cookie_secret=settings['COOKIE_SECRET'],
        xsrf_cookies=True,
        ui_modules=uimodules,
        template_path=os.path.join(settings['ROOT_DIR'], 'templates'),
        static_path=os.path.join(settings['ROOT_DIR'], 'static'),
        login_url=(settings['BASE_URL_PATH_PREFIX'] or '') + '/login')
    # Add href URLs for the status icons.
    # This depends on order status setup.
    for key, value in settings['ORDER_STATUSES_LOOKUP'].items():
        value['href'] = application.reverse_url('site', key + '.png')
    application.listen(settings['PORT'], xheaders=True)
    pid = os.getpid()
    # Note: rebinds the local `url` (previously tornado.web.url) to the
    # server's base URL string for the log message.
    url = settings['BASE_URL']
    if settings['BASE_URL_PATH_PREFIX']:
        url += settings['BASE_URL_PATH_PREFIX']
    logging.info("web server %s (PID %s)", url, pid)
    if options.pidfile:
        with open(options.pidfile, 'w') as pf:
            pf.write(str(pid))
    tornado.ioloop.IOLoop.instance().start()
# NOTE(review): fragment — the `def get_documents(db, docids, filepath)`
# header this body belongs to is not visible here.
docs = []
for docid in docids:
    try:
        doc = db[docid]
    except couchdb.ResourceNotFound:
        print('no such document', docid, file=sys.stderr)
    else:
        docs.append(doc)
if docs:
    if filepath:
        # NOTE(review): outfile is never closed/flushed explicitly when a
        # filepath is given — confirm whether this matters for callers.
        outfile = open(filepath, 'w')
        print('writing to', filepath)
    else:
        outfile = sys.stdout
    json.dump(docs, outfile, indent=2)
else:
    print('no such document(s)', file=sys.stderr)

if __name__ == '__main__':
    parser = utils.get_command_line_parser(
        description='Get document(s) from CouchDB and write to JSON file.')
    parser.add_option("-w", "--write",
                      action='store', dest='FILE', default=None,
                      metavar="FILE", help="filepath of file to write")
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    get_documents(utils.get_db(), docids=args, filepath=options.FILE)
# NOTE(review): fragment — the if statement this `else:` belongs to, and
# the loop over documents, are not visible here.
else:
    # Strip any stale revision marker so the save creates/updates cleanly.
    try:
        del doc['_rev']
    except KeyError:
        pass
    db.save(doc)
    print('saved', doc['_id'])


def add_docs(docs, data):
    """Append document dict(s) from data to docs.

    Accepts a single dict or a list of dicts; every dict must carry an
    '_id' key (KeyError otherwise); any other shape raises ValueError.
    """
    if isinstance(data, dict):
        if not '_id' in data:
            raise KeyError
        docs.append(data)
    elif isinstance(data, list):
        for item in data:
            if isinstance(item, dict):
                if not '_id' in item:
                    raise KeyError
                docs.append(item)
            else:
                raise ValueError
    else:
        raise ValueError


if __name__ == '__main__':
    parser = utils.get_command_line_parser(
        description='Put document(s) to CouchDB from JSON file.')
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    put_documents(utils.get_db(), filepaths=args)
# NOTE(review): fragment — the enclosing loop over Drupal `record`s and
# the `saver` context manager are not visible here.
for field in fields:
    if field['type'] == constants.GROUP:
        continue
    value = record.get(field['identifier'])
    # Multi-valued fields are flattened to newline-separated text.
    if isinstance(value, list):
        value = '\n'.join(value)
    values[field['identifier']] = value
saver['fields'] = values
saver['identifier'] = \
    settings['ORDER_IDENTIFIER_FORMAT'].format(int(record['nid']))
# Track the highest Drupal nid seen, to seed the identifier counter.
counter = max(counter, int(record['nid']))
saver['history'] = {}
saver.set_status('undefined')
saver.check_fields_validity(fields)
print('loaded', saver.doc['identifier'], saver.doc['title'])
total += 1
print(total, 'orders loaded, counter', counter)
meta['counter'] = counter
db.save(meta)

if __name__ == '__main__':
    parser = utils.get_command_line_parser(description=
        "Load orders from old Drupal site JSON dump 'orders.json'.")
    (options, args) = parser.parse_args()
    utils.load_settings(filepath=options.settings)
    db = utils.get_db()
    load_orders(db, authors=load_users(),
                form_iuid='ed37e428cd0e4b43894c9686555018a1')
    regenerate_views(db)
def get_args():
    """Parse this script's command line; return the (options, args) pair.

    The shared parser from utils supplies the common options (e.g.
    --settings) used by the other scripts in this package.
    """
    cli = utils.get_command_line_parser(
        description='Load the account messages document.')
    return cli.parse_args()
# NOTE(review): whitespace-collapsed chunk. Begins mid-suite (`pass else:`)
# — apparently the tail of a per-project update function that saves a project
# document only when something changed — followed by the entry point that
# matches old-portal projects against Clarity LIMS data. The chunk is also
# TRUNCATED: the trailing `for record in clarity_projects:` loop body lies
# past this view. Python 2 idioms (`dict.values()` returning a list that is
# sorted in place, `cmp`) are intentional here. Left byte-identical because
# the structure cannot be reconstructed from this fragment.
pass else: changed = True if changed: print('saving', project['identifier'], project['fields']['lims_id'], project['status'], project['title']) db.save(project) else: print('no change for', project['identifier']) if __name__ == '__main__': parser = utils.get_command_line_parser(description= 'Load project info from Clarity LIMS into OrderPortal.') (options, args) = parser.parse_args() utils.load_settings(filepath=options.settings) db = utils.get_db() projects_lookup = get_old_portal_projects(db) print(len(projects_lookup), 'projects from old portal requiring more info') projects = projects_lookup.values() projects.sort(lambda i,j: cmp(i['modified'], j['modified'])) for project in projects: print(project['identifier'], project['modified']) clarity = Clarity() clarity_projects = clarity.get_all_projects() print(len(clarity_projects), 'projects in Clarity') for record in clarity_projects:
" Get the previous version of the document and save it as new. " from __future__ import print_function, absolute_import import sys from orderportal import utils def revert(db, docid): revisions = list(db.revisions(docid)) if len(revisions) < 2: sys.exit('no previous version to revert to') latest = revisions[0] previous = revisions[1] new = previous.copy() new['_rev'] = latest['_rev'] db.save(new) if __name__ == '__main__': parser = utils.get_command_line_parser( description= 'Get the previous version of the document and save it as new.') (options, args) = parser.parse_args() utils.load_settings(filepath=options.settings) db = utils.get_db() for docid in args: revert(db, docid)
# NOTE(review): whitespace-collapsed chunk. set_password looks up exactly one
# account by email via the 'account/email' view (raising ValueError on zero
# or multiple matches) and stores the new password through AccountSaver. The
# interactive main guard (Python 2: raw_input, `except ValueError, msg`) is
# TRUNCATED here — the eventual call to set_password lies past this view.
# Left byte-identical because the original indentation is lost and the chunk
# is incomplete.
def set_password(email, password): db = utils.get_db() view = db.view('account/email', include_docs=True) rows = list(view[email]) if len(rows) != 1: raise ValueError("no such account %s" % email) doc = rows[0].doc with AccountSaver(doc=doc, db=db) as saver: saver.set_password(password) print('Set password for', email) if __name__ == '__main__': parser = utils.get_command_line_parser( description='Set the password for an account.') (options, args) = parser.parse_args() utils.load_settings(filepath=options.settings) email = raw_input('Email address (=account name) > ') if not email: sys.exit('no email address provided') password = getpass.getpass('Password > ') if not password: sys.exit('no password provided') try: utils.check_password(password) except ValueError, msg: sys.exit(str(msg)) again_password = getpass.getpass('Password again > ') if password != again_password: sys.exit('passwords do not match')
# NOTE(review): whitespace-collapsed chunk. Begins mid-suite — presumably
# inside an AccountSaver `with` block of a create-admin function: it fills in
# the account fields, sets role ADMIN and status ENABLED. The interactive
# main guard (Python 2: raw_input, `except ValueError, msg`) is TRUNCATED at
# the end — the eventual account-creation call lies past this view. Left
# byte-identical because the enclosing definition and original indentation
# are not visible.
saver.set_email(email) saver['first_name'] = first_name saver['last_name'] = last_name saver['address'] = dict() saver['invoice_address'] = dict() saver['university'] = university saver['department'] = None saver['owner'] = email saver.set_password(password) saver['role'] = constants.ADMIN saver['status'] = constants.ENABLED print('Created admin account', email) if __name__ == '__main__': parser = utils.get_command_line_parser( description='Create a new admin account.') (options, args) = parser.parse_args() utils.load_settings(filepath=options.settings) email = raw_input('Email address (=account name) > ') if not email: sys.exit('no email address provided') password = getpass.getpass('Password > ') if not password: sys.exit('no password provided') try: utils.check_password(password) except ValueError, msg: sys.exit(str(msg)) again_password = getpass.getpass('Password again > ') if password != again_password: sys.exit('passwords do not match')
def get_args():
    """Parse this script's command line; return the (options, args) pair.

    The shared parser from utils supplies the common options (e.g.
    --settings) used by the other scripts in this package.
    """
    cli = utils.get_command_line_parser(
        description='Put document(s) to CouchDB from JSON file.')
    return cli.parse_args()
def get_args():
    """Parse this script's command line; return the (options, args) pair.

    The shared parser from utils supplies the common options (e.g.
    --settings) used by the other scripts in this package.
    """
    cli = utils.get_command_line_parser(
        description='Create a new admin account.')
    return cli.parse_args()